spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From r...@apache.org
Subject [1/2] spark git commit: [SPARK-16590][SQL] Improve LogicalPlanToSQLSuite to check generated SQL directly
Date Tue, 19 Jul 2016 00:17:48 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-2.0 7889585cc -> aac860802


http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_one.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_one.sql b/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_one.sql
new file mode 100644
index 0000000..dd62289
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_one.sql
@@ -0,0 +1,6 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT TRANSFORM (key) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
+USING 'cat' AS (tKey) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
+FROM parquet_t1
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `tKey` FROM (SELECT TRANSFORM (`gen_attr`) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' USING 'cat' AS (`gen_attr` string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_serde.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_serde.sql b/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_serde.sql
new file mode 100644
index 0000000..2ad3698
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_serde.sql
@@ -0,0 +1,10 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT TRANSFORM (key, value)
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES('field.delim' = '|')
+USING 'cat' AS (tKey, tValue)
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES('field.delim' = '|')
+FROM parquet_t1
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `tKey`, `gen_attr` AS `tValue` FROM (SELECT TRANSFORM (`gen_attr`, `gen_attr`) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' WITH SERDEPROPERTIES('field.delim' = '|') USING 'cat' AS (`gen_attr` string, `gen_attr` string) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' WITH SERDEPROPERTIES('field.delim' = '|') FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_without_serde.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_without_serde.sql b/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_without_serde.sql
new file mode 100644
index 0000000..a90b42d
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/script_transformation_row_format_without_serde.sql
@@ -0,0 +1,8 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT TRANSFORM (key, value)
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+USING 'cat' AS (tKey, tValue)
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+FROM parquet_t1
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `tKey`, `gen_attr` AS `tValue` FROM (SELECT TRANSFORM (`gen_attr`, `gen_attr`) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' USING 'cat' AS (`gen_attr` string, `gen_attr` string) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/select_distinct.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/select_distinct.sql b/sql/hive/src/test/resources/sqlgen/select_distinct.sql
new file mode 100644
index 0000000..3bc8e55
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/select_distinct.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT DISTINCT id FROM parquet_t0
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM (SELECT DISTINCT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0`) AS gen_subquery_0) AS parquet_t0

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/select_orc_table.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/select_orc_table.sql b/sql/hive/src/test/resources/sqlgen/select_orc_table.sql
new file mode 100644
index 0000000..eae67f9
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/select_orc_table.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+select * from orc_t
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `c1`, `gen_attr` AS `c2` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT `c1` AS `gen_attr`, `c2` AS `gen_attr` FROM `default`.`orc_t`) AS gen_subquery_0) AS orc_t

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/select_parquet_table.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/select_parquet_table.sql b/sql/hive/src/test/resources/sqlgen/select_parquet_table.sql
new file mode 100644
index 0000000..a085bab
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/select_parquet_table.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+select * from parquet_t
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `c1`, `gen_attr` AS `c2` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT `c1` AS `gen_attr`, `c2` AS `gen_attr` FROM `default`.`parquet_t`) AS gen_subquery_0) AS parquet_t

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/self_join.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/self_join.sql b/sql/hive/src/test/resources/sqlgen/self_join.sql
new file mode 100644
index 0000000..8947ccd
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/self_join.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT x.key FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `key` FROM (SELECT `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0 INNER JOIN (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_1 ON (`gen_attr` = `gen_attr`)) AS x

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/self_join_with_group_by.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/self_join_with_group_by.sql b/sql/hive/src/test/resources/sqlgen/self_join_with_group_by.sql
new file mode 100644
index 0000000..6db053f
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/self_join_with_group_by.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT x.key, COUNT(*) FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key group by x.key
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `key`, `gen_attr` AS `count(1)` FROM (SELECT `gen_attr`, count(1) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0 INNER JOIN (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_1 ON (`gen_attr` = `gen_attr`) GROUP BY `gen_attr`) AS x

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/sort_by_after_having.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/sort_by_after_having.sql b/sql/hive/src/test/resources/sqlgen/sort_by_after_having.sql
new file mode 100644
index 0000000..e9a6afd
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/sort_by_after_having.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT COUNT(value) FROM parquet_t1 GROUP BY key HAVING MAX(key) > 0 SORT BY key
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `count(value)` FROM (SELECT `gen_attr` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT count(`gen_attr`) AS `gen_attr`, max(`gen_attr`) AS `gen_attr`, `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0 GROUP BY `gen_attr` HAVING (`gen_attr` > CAST(0 AS BIGINT))) AS gen_subquery_1 SORT BY `gen_attr` ASC) AS gen_subquery_2) AS gen_subquery_3

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/tablesample_1.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/tablesample_1.sql b/sql/hive/src/test/resources/sqlgen/tablesample_1.sql
new file mode 100644
index 0000000..54c8dee
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/tablesample_1.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT s.id FROM parquet_t0 TABLESAMPLE(100 PERCENT) s
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0` TABLESAMPLE(100.0 PERCENT)) AS gen_subquery_0) AS s

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/tablesample_2.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/tablesample_2.sql b/sql/hive/src/test/resources/sqlgen/tablesample_2.sql
new file mode 100644
index 0000000..13dcadb
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/tablesample_2.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT * FROM parquet_t0 TABLESAMPLE(100 PERCENT)
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0` TABLESAMPLE(100.0 PERCENT)) AS gen_subquery_0) AS parquet_t0

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/tablesample_3.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/tablesample_3.sql b/sql/hive/src/test/resources/sqlgen/tablesample_3.sql
new file mode 100644
index 0000000..c1ed57f
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/tablesample_3.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT s.id FROM t0 TABLESAMPLE(100 PERCENT) s
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`t0` TABLESAMPLE(100.0 PERCENT)) AS gen_subquery_0) AS s

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/tablesample_4.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/tablesample_4.sql b/sql/hive/src/test/resources/sqlgen/tablesample_4.sql
new file mode 100644
index 0000000..c22b208
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/tablesample_4.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT * FROM t0 TABLESAMPLE(100 PERCENT)
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`t0` TABLESAMPLE(100.0 PERCENT)) AS gen_subquery_0) AS t0

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/tablesample_5.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/tablesample_5.sql b/sql/hive/src/test/resources/sqlgen/tablesample_5.sql
new file mode 100644
index 0000000..fa69dd4
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/tablesample_5.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT s.id FROM parquet_t0 TABLESAMPLE(0.1 PERCENT) s WHERE 1=0
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0` TABLESAMPLE(0.1 PERCENT)) AS gen_subquery_0 WHERE (1 = 0)) AS s

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/tablesample_6.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/tablesample_6.sql b/sql/hive/src/test/resources/sqlgen/tablesample_6.sql
new file mode 100644
index 0000000..bc72560
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/tablesample_6.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT * FROM parquet_t0 TABLESAMPLE(0.1 PERCENT) WHERE 1=0
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0` TABLESAMPLE(0.1 PERCENT)) AS gen_subquery_0 WHERE (1 = 0)) AS parquet_t0

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/three_child_union.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/three_child_union.sql b/sql/hive/src/test/resources/sqlgen/three_child_union.sql
new file mode 100644
index 0000000..c378111
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/three_child_union.sql
@@ -0,0 +1,6 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT id FROM parquet_t0
+UNION ALL SELECT id FROM parquet_t0
+UNION ALL SELECT id FROM parquet_t0
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM ((SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0`) AS gen_subquery_0) UNION ALL (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0`) AS gen_subquery_1) UNION ALL (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0`) AS gen_subquery_2)) AS parquet_t0

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/type_widening.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/type_widening.sql b/sql/hive/src/test/resources/sqlgen/type_widening.sql
new file mode 100644
index 0000000..96e6cc2
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/type_widening.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT id FROM parquet_t0 UNION ALL SELECT CAST(id AS INT) AS id FROM parquet_t0
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM ((SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0`) AS gen_subquery_0) UNION ALL (SELECT CAST(CAST(`gen_attr` AS INT) AS BIGINT) AS `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`parquet_t0`) AS gen_subquery_1)) AS parquet_t0

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/union_distinct.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/union_distinct.sql b/sql/hive/src/test/resources/sqlgen/union_distinct.sql
new file mode 100644
index 0000000..c5895ae
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/union_distinct.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT * FROM t0 UNION SELECT * FROM t0
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `id` FROM ((SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`t0`) AS gen_subquery_0) UNION DISTINCT (SELECT `gen_attr` FROM (SELECT `id` AS `gen_attr` FROM `default`.`t0`) AS gen_subquery_1)) AS t0

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/window_basic_1.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/window_basic_1.sql b/sql/hive/src/test/resources/sqlgen/window_basic_1.sql
new file mode 100644
index 0000000..73f343d
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/window_basic_1.sql
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT MAX(value) OVER (PARTITION BY key % 3) FROM parquet_t1
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `max(value) OVER (PARTITION BY (key % CAST(3 AS BIGINT))  ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)` FROM (SELECT `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, max(`gen_attr`) OVER (PARTITION BY `gen_attr`  ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `gen_attr` FROM (SELECT `gen_attr`, (`gen_attr` % CAST(3 AS BIGINT)) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1) AS gen_subquery_2) AS gen_subquery_3

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/window_basic_2.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/window_basic_2.sql b/sql/hive/src/test/resources/sqlgen/window_basic_2.sql
new file mode 100644
index 0000000..3ddb87f
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/window_basic_2.sql
@@ -0,0 +1,5 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT key, value, ROUND(AVG(key) OVER (), 2)
+FROM parquet_t1 ORDER BY key
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `key`, `gen_attr` AS `value`, `gen_attr` AS `round(avg(key) OVER (  ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), 2)` FROM (SELECT `gen_attr`, `gen_attr`, round(`gen_attr`, 2) AS `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, avg(`gen_attr`) OVER (  ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS `gen_attr` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1) AS gen_subquery_2 ORDER BY `gen_attr` ASC) AS parquet_t1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/window_basic_3.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/window_basic_3.sql b/sql/hive/src/test/resources/sqlgen/window_basic_3.sql
new file mode 100644
index 0000000..43f9928
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/window_basic_3.sql
@@ -0,0 +1,5 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT value, MAX(key + 1) OVER (PARTITION BY key % 5 ORDER BY key % 7) AS max
+FROM parquet_t1
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `value`, `gen_attr` AS `max` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, max(`gen_attr`) OVER (PARTITION BY `gen_attr` ORDER BY `gen_attr` ASC RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `gen_attr` FROM (SELECT `gen_attr`, (`gen_attr` + CAST(1 AS BIGINT)) AS `gen_attr`, (`gen_attr` % CAST(5 AS BIGINT)) AS `gen_attr`, (`gen_attr` % CAST(7 AS BIGINT)) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1) AS gen_subquery_2) AS parquet_t1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/window_with_join.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/window_with_join.sql b/sql/hive/src/test/resources/sqlgen/window_with_join.sql
new file mode 100644
index 0000000..00c45c8
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/window_with_join.sql
@@ -0,0 +1,5 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT x.key, MAX(y.key) OVER (PARTITION BY x.key % 5 ORDER BY x.key)
+FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `key`, `gen_attr` AS `max(key) OVER (PARTITION BY (key % CAST(5 AS BIGINT)) ORDER BY key ASC RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT gen_subquery_2.`gen_attr`, gen_subquery_2.`gen_attr`, gen_subquery_2.`gen_attr`, max(`gen_attr`) OVER (PARTITION BY `gen_attr` ORDER BY `gen_attr` ASC RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `gen_attr` FROM (SELECT `gen_attr`, `gen_attr`, (`gen_attr` % CAST(5 AS BIGINT)) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0 INNER JOIN (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_1 ON (`gen_attr` = `gen_attr`)) AS gen_subquery_2) AS gen_subquery_3) AS x

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg.sql b/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg.sql
new file mode 100644
index 0000000..65d3d3a
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg.sql
@@ -0,0 +1,7 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT key, value,
+DENSE_RANK() OVER (DISTRIBUTE BY key SORT BY key, value) AS dr,
+COUNT(key)
+FROM parquet_t1 GROUP BY key, value
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `key`, `gen_attr` AS `value`, `gen_attr` AS `dr`, `gen_attr` AS `count(key)` FROM (SELECT `gen_attr`, `gen_attr`, `gen_attr`, `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, DENSE_RANK() OVER (PARTITION BY `gen_attr` ORDER BY `gen_attr` ASC, `gen_attr` ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `gen_attr` FROM (SELECT `gen_attr`, `gen_attr`, count(`gen_attr`) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0 GROUP BY `gen_attr`, `gen_attr`) AS gen_subquery_1) AS gen_subquery_2) AS parquet_t1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_filter.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_filter.sql b/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_filter.sql
new file mode 100644
index 0000000..03e0962
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_filter.sql
@@ -0,0 +1,7 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT key, value,
+DENSE_RANK() OVER (DISTRIBUTE BY key SORT BY key, value) AS dr,
+COUNT(key) OVER(DISTRIBUTE BY key SORT BY key, value) AS ca
+FROM parquet_t1
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `key`, `gen_attr` AS `value`, `gen_attr` AS `dr`, `gen_attr` AS `ca` FROM (SELECT `gen_attr`, `gen_attr`, `gen_attr`, `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, DENSE_RANK() OVER (PARTITION BY `gen_attr` ORDER BY `gen_attr` ASC, `gen_attr` ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `gen_attr`, count(`gen_attr`) OVER (PARTITION BY `gen_attr` ORDER BY `gen_attr` ASC, `gen_attr` ASC RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `gen_attr` FROM (SELECT `gen_attr`, `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0) AS gen_subquery_1) AS gen_subquery_2) AS parquet_t1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_functions.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_functions.sql b/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_functions.sql
new file mode 100644
index 0000000..090207f
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_functions.sql
@@ -0,0 +1,6 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT key, value,
+MAX(value) OVER (PARTITION BY key % 5 ORDER BY key) AS max
+FROM parquet_t1 GROUP BY key, value
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `key`, `gen_attr` AS `value`, `gen_attr` AS `max` FROM (SELECT `gen_attr`, `gen_attr`, `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, max(`gen_attr`) OVER (PARTITION BY `gen_attr` ORDER BY `gen_attr` ASC RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `gen_attr` FROM (SELECT `gen_attr`, `gen_attr`, (`gen_attr` % CAST(5 AS BIGINT)) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0 GROUP BY `gen_attr`, `gen_attr`) AS gen_subquery_1) AS gen_subquery_2) AS parquet_t1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_having.sql
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_having.sql b/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_having.sql
new file mode 100644
index 0000000..fcc2cf7
--- /dev/null
+++ b/sql/hive/src/test/resources/sqlgen/window_with_the_same_window_with_agg_having.sql
@@ -0,0 +1,6 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+SELECT key, value,
+MAX(value) OVER (PARTITION BY key % 5 ORDER BY key DESC) AS max
+FROM parquet_t1 GROUP BY key, value HAVING key > 5
+--------------------------------------------------------------------------------
+SELECT `gen_attr` AS `key`, `gen_attr` AS `value`, `gen_attr` AS `max` FROM (SELECT `gen_attr`, `gen_attr`, `gen_attr` FROM (SELECT gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, gen_subquery_1.`gen_attr`, max(`gen_attr`) OVER (PARTITION BY `gen_attr` ORDER BY `gen_attr` DESC RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `gen_attr` FROM (SELECT `gen_attr`, `gen_attr`, (`gen_attr` % CAST(5 AS BIGINT)) AS `gen_attr` FROM (SELECT `key` AS `gen_attr`, `value` AS `gen_attr` FROM `default`.`parquet_t1`) AS gen_subquery_0 GROUP BY `gen_attr`, `gen_attr` HAVING (`gen_attr` > CAST(5 AS BIGINT))) AS gen_subquery_1) AS gen_subquery_2) AS parquet_t1

http://git-wip-us.apache.org/repos/asf/spark/blob/aac86080/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
index 0827b04..698c7c3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala
@@ -17,15 +17,33 @@
 
 package org.apache.spark.sql.catalyst
 
+import java.nio.charset.StandardCharsets
+import java.nio.file.{Files, NoSuchFileException, Paths}
+
 import scala.util.control.NonFatal
 
 import org.apache.spark.sql.Column
+import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.test.SQLTestUtils
 
+/**
+ * A test suite for LogicalPlan-to-SQL conversion.
+ *
+ * Each query has a golden generated SQL file in test/resources/sqlgen. The test suite also has
+ * built-in functionality to automatically generate these golden files.
+ *
+ * To re-generate golden files, run:
+ *    SPARK_GENERATE_GOLDEN_FILES=1 build/sbt "hive/test-only *LogicalPlanToSQLSuite"
+ */
 class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   import testImplicits._
 
+  // Used for generating new query answer files by saving
+  private val regenerateGoldenFiles =
+    Option(System.getenv("SPARK_GENERATE_GOLDEN_FILES")).contains("1")
+  private val goldenSQLPath = "src/test/resources/sqlgen/"
+
   protected override def beforeAll(): Unit = {
     super.beforeAll()
     sql("DROP TABLE IF EXISTS parquet_t0")
@@ -76,22 +94,60 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
     }
   }
 
-  private def checkHiveQl(hiveQl: String): Unit = {
-    val df = sql(hiveQl)
+  /**
+   * Compare the generated SQL with the expected answer string.
+   * Note that both arguments are normalized for convenience.
+   * - Remove the id from the generated attributes, e.g., `gen_attr_1` -> `gen_attr`.
+   */
+  private def checkSQLStructure(originalSQL: String, convertedSQL: String, answerFile: String) = {
+    val normalizedGenSQL = convertedSQL.replaceAll("`gen_attr_\\d+`", "`gen_attr`")
+    if (answerFile != null) {
+      val separator = "-" * 80
+      if (regenerateGoldenFiles) {
+        val path = Paths.get(s"$goldenSQLPath/$answerFile.sql")
+        val header = "-- This file is automatically generated by LogicalPlanToSQLSuite."
+        val answerText = s"$header\n${originalSQL.trim()}\n${separator}\n$normalizedGenSQL\n"
+        Files.write(path, answerText.getBytes(StandardCharsets.UTF_8))
+      } else {
+        val goldenFileName = s"sqlgen/$answerFile.sql"
+        val resourceFile = getClass.getClassLoader.getResource(goldenFileName)
+        if (resourceFile == null) {
+          throw new NoSuchFileException(goldenFileName)
+        }
+        val path = resourceFile.getPath
+        val answerText = new String(Files.readAllBytes(Paths.get(path)), StandardCharsets.UTF_8)
+        val sqls = answerText.split(separator)
+        assert(sqls.length == 2, "Golden sql files should have a separator.")
+        val normalizedExpectSQL = sqls(1).trim()
+        assert(normalizedGenSQL == normalizedExpectSQL)
+      }
+    }
+  }
+
+  /**
+   * 1. Checks if SQL parsing succeeds.
+   * 2. Checks if SQL generation succeeds.
+   * 3. Checks the generated SQL against golden files.
+   * 4. Verifies the execution result stays the same.
+   */
+  private def checkSQL(sqlString: String, answerFile: String = null): Unit = {
+    val df = sql(sqlString)
 
     val convertedSQL = try new SQLBuilder(df).toSQL catch {
       case NonFatal(e) =>
         fail(
-          s"""Cannot convert the following HiveQL query plan back to SQL query string:
+          s"""Cannot convert the following SQL query plan back to SQL query string:
              |
-             |# Original HiveQL query string:
-             |$hiveQl
+             |# Original SQL query string:
+             |$sqlString
              |
              |# Resolved query plan:
              |${df.queryExecution.analyzed.treeString}
            """.stripMargin, e)
     }
 
+    checkSQLStructure(sqlString, convertedSQL, answerFile)
+
     try {
       checkAnswer(sql(convertedSQL), df)
     } catch { case cause: Throwable =>
@@ -101,8 +157,8 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
            |# Converted SQL query string:
            |$convertedSQL
            |
-           |# Original HiveQL query string:
-           |$hiveQl
+           |# Original SQL query string:
+           |$sqlString
            |
            |# Resolved query plan:
            |${df.queryExecution.analyzed.treeString}
@@ -110,24 +166,57 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
     }
   }
 
+  // When regenerating golden files, these tests should be skipped to avoid creating spurious files.
+  if (!regenerateGoldenFiles) {
+    test("Test should fail if the SQL query cannot be parsed") {
+      val m = intercept[ParseException] {
+        checkSQL("SELE", "NOT_A_FILE")
+      }.getMessage
+      assert(m.contains("mismatched input"))
+    }
+
+    test("Test should fail if the golden file cannot be found") {
+      val m2 = intercept[NoSuchFileException] {
+        checkSQL("SELECT 1", "NOT_A_FILE")
+      }.getMessage
+      assert(m2.contains("NOT_A_FILE"))
+    }
+
+    test("Test should fail if the SQL query cannot be regenerated") {
+      spark.range(10).createOrReplaceTempView("not_sql_gen_supported_table_so_far")
+      sql("select * from not_sql_gen_supported_table_so_far")
+      val m3 = intercept[org.scalatest.exceptions.TestFailedException] {
+        checkSQL("select * from not_sql_gen_supported_table_so_far", "in")
+      }.getMessage
+      assert(m3.contains("Cannot convert the following SQL query plan back to SQL query string"))
+    }
+
+    test("Test should fail if the SQL query did not equal to the golden SQL") {
+      val m4 = intercept[org.scalatest.exceptions.TestFailedException] {
+        checkSQL("SELECT 1", "in")
+      }.getMessage
+      assert(m4.contains("did not equal"))
+    }
+  }
+
   test("in") {
-    checkHiveQl("SELECT id FROM parquet_t0 WHERE id IN (1, 2, 3)")
+    checkSQL("SELECT id FROM parquet_t0 WHERE id IN (1, 2, 3)", "in")
   }
 
   test("not in") {
-    checkHiveQl("SELECT id FROM t0 WHERE id NOT IN (1, 2, 3)")
+    checkSQL("SELECT id FROM t0 WHERE id NOT IN (1, 2, 3)", "not_in")
   }
 
   test("not like") {
-    checkHiveQl("SELECT id FROM t0 WHERE id + 5 NOT LIKE '1%'")
+    checkSQL("SELECT id FROM t0 WHERE id + 5 NOT LIKE '1%'", "not_like")
   }
 
   test("aggregate function in having clause") {
-    checkHiveQl("SELECT COUNT(value) FROM parquet_t1 GROUP BY key HAVING MAX(key) > 0")
+    checkSQL("SELECT COUNT(value) FROM parquet_t1 GROUP BY key HAVING MAX(key) > 0", "agg1")
   }
 
   test("aggregate function in order by clause") {
-    checkHiveQl("SELECT COUNT(value) FROM parquet_t1 GROUP BY key ORDER BY MAX(key)")
+    checkSQL("SELECT COUNT(value) FROM parquet_t1 GROUP BY key ORDER BY MAX(key)", "agg2")
   }
 
   // When there are multiple aggregate functions in ORDER BY clause, all of them are extracted into
@@ -135,61 +224,67 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   // execution since these aliases have different expression ID.  But this introduces name collision
   // when converting resolved plans back to SQL query strings as expression IDs are stripped.
   test("aggregate function in order by clause with multiple order keys") {
-    checkHiveQl("SELECT COUNT(value) FROM parquet_t1 GROUP BY key ORDER BY key, MAX(key)")
+    checkSQL("SELECT COUNT(value) FROM parquet_t1 GROUP BY key ORDER BY key, MAX(key)", "agg3")
   }
 
   test("type widening in union") {
-    checkHiveQl("SELECT id FROM parquet_t0 UNION ALL SELECT CAST(id AS INT) AS id FROM parquet_t0")
+    checkSQL("SELECT id FROM parquet_t0 UNION ALL SELECT CAST(id AS INT) AS id FROM parquet_t0",
+      "type_widening")
   }
 
   test("union distinct") {
-    checkHiveQl("SELECT * FROM t0 UNION SELECT * FROM t0")
+    checkSQL("SELECT * FROM t0 UNION SELECT * FROM t0", "union_distinct")
   }
 
   test("three-child union") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT id FROM parquet_t0
         |UNION ALL SELECT id FROM parquet_t0
         |UNION ALL SELECT id FROM parquet_t0
-      """.stripMargin)
+      """.stripMargin,
+      "three_child_union")
   }
 
   test("intersect") {
-    checkHiveQl("SELECT * FROM t0 INTERSECT SELECT * FROM t0")
+    checkSQL("SELECT * FROM t0 INTERSECT SELECT * FROM t0", "intersect")
   }
 
   test("except") {
-    checkHiveQl("SELECT * FROM t0 EXCEPT SELECT * FROM t0")
+    checkSQL("SELECT * FROM t0 EXCEPT SELECT * FROM t0", "except")
   }
 
   test("self join") {
-    checkHiveQl("SELECT x.key FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key")
+    checkSQL("SELECT x.key FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key", "self_join")
   }
 
   test("self join with group by") {
-    checkHiveQl(
-      "SELECT x.key, COUNT(*) FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key group by x.key")
+    checkSQL(
+      "SELECT x.key, COUNT(*) FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key group by x.key",
+      "self_join_with_group_by")
   }
 
   test("case") {
-    checkHiveQl("SELECT CASE WHEN id % 2 > 0 THEN 0 WHEN id % 2 = 0 THEN 1 END FROM parquet_t0")
+    checkSQL("SELECT CASE WHEN id % 2 > 0 THEN 0 WHEN id % 2 = 0 THEN 1 END FROM parquet_t0",
+      "case")
   }
 
   test("case with else") {
-    checkHiveQl("SELECT CASE WHEN id % 2 > 0 THEN 0 ELSE 1 END FROM parquet_t0")
+    checkSQL("SELECT CASE WHEN id % 2 > 0 THEN 0 ELSE 1 END FROM parquet_t0", "case_with_else")
   }
 
   test("case with key") {
-    checkHiveQl("SELECT CASE id WHEN 0 THEN 'foo' WHEN 1 THEN 'bar' END FROM parquet_t0")
+    checkSQL("SELECT CASE id WHEN 0 THEN 'foo' WHEN 1 THEN 'bar' END FROM parquet_t0",
+      "case_with_key")
   }
 
   test("case with key and else") {
-    checkHiveQl("SELECT CASE id WHEN 0 THEN 'foo' WHEN 1 THEN 'bar' ELSE 'baz' END FROM parquet_t0")
+    checkSQL("SELECT CASE id WHEN 0 THEN 'foo' WHEN 1 THEN 'bar' ELSE 'baz' END FROM parquet_t0",
+      "case_with_key_and_else")
   }
 
   test("select distinct without aggregate functions") {
-    checkHiveQl("SELECT DISTINCT id FROM parquet_t0")
+    checkSQL("SELECT DISTINCT id FROM parquet_t0", "select_distinct")
   }
 
   test("rollup/cube #1") {
@@ -213,146 +308,195 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
     //   FROM `default`.`t1`
     //   GROUP BY (`t1`.`key` % CAST(5 AS BIGINT))
     //   GROUPING SETS (((`t1`.`key` % CAST(5 AS BIGINT))), ())
-    checkHiveQl(
-      "SELECT count(*) as cnt, key%5, grouping_id() FROM parquet_t1 GROUP BY key % 5 WITH ROLLUP")
-    checkHiveQl(
-      "SELECT count(*) as cnt, key%5, grouping_id() FROM parquet_t1 GROUP BY key % 5 WITH CUBE")
+    checkSQL(
+      "SELECT count(*) as cnt, key%5, grouping_id() FROM parquet_t1 GROUP BY key % 5 WITH ROLLUP",
+      "rollup_cube_1_1")
+
+    checkSQL(
+      "SELECT count(*) as cnt, key%5, grouping_id() FROM parquet_t1 GROUP BY key % 5 WITH CUBE",
+      "rollup_cube_1_2")
   }
 
   test("rollup/cube #2") {
-    checkHiveQl("SELECT key, value, count(value) FROM parquet_t1 GROUP BY key, value WITH ROLLUP")
-    checkHiveQl("SELECT key, value, count(value) FROM parquet_t1 GROUP BY key, value WITH CUBE")
+    checkSQL("SELECT key, value, count(value) FROM parquet_t1 GROUP BY key, value WITH ROLLUP",
+      "rollup_cube_2_1")
+
+    checkSQL("SELECT key, value, count(value) FROM parquet_t1 GROUP BY key, value WITH CUBE",
+      "rollup_cube_2_2")
   }
 
   test("rollup/cube #3") {
-    checkHiveQl(
-      "SELECT key, count(value), grouping_id() FROM parquet_t1 GROUP BY key, value WITH ROLLUP")
-    checkHiveQl(
-      "SELECT key, count(value), grouping_id() FROM parquet_t1 GROUP BY key, value WITH CUBE")
+    checkSQL(
+      "SELECT key, count(value), grouping_id() FROM parquet_t1 GROUP BY key, value WITH ROLLUP",
+      "rollup_cube_3_1")
+
+    checkSQL(
+      "SELECT key, count(value), grouping_id() FROM parquet_t1 GROUP BY key, value WITH CUBE",
+      "rollup_cube_3_2")
   }
 
   test("rollup/cube #4") {
-    checkHiveQl(
+    checkSQL(
       s"""
         |SELECT count(*) as cnt, key % 5 as k1, key - 5 as k2, grouping_id() FROM parquet_t1
         |GROUP BY key % 5, key - 5 WITH ROLLUP
-      """.stripMargin)
-    checkHiveQl(
+      """.stripMargin,
+      "rollup_cube_4_1")
+
+    checkSQL(
       s"""
         |SELECT count(*) as cnt, key % 5 as k1, key - 5 as k2, grouping_id() FROM parquet_t1
         |GROUP BY key % 5, key - 5 WITH CUBE
-      """.stripMargin)
+      """.stripMargin,
+      "rollup_cube_4_2")
   }
 
   test("rollup/cube #5") {
-    checkHiveQl(
+    checkSQL(
       s"""
         |SELECT count(*) AS cnt, key % 5 AS k1, key - 5 AS k2, grouping_id(key % 5, key - 5) AS k3
         |FROM (SELECT key, key%2, key - 5 FROM parquet_t1) t GROUP BY key%5, key-5
         |WITH ROLLUP
-      """.stripMargin)
-    checkHiveQl(
+      """.stripMargin,
+      "rollup_cube_5_1")
+
+    checkSQL(
       s"""
         |SELECT count(*) AS cnt, key % 5 AS k1, key - 5 AS k2, grouping_id(key % 5, key - 5) AS k3
         |FROM (SELECT key, key % 2, key - 5 FROM parquet_t1) t GROUP BY key % 5, key - 5
         |WITH CUBE
-      """.stripMargin)
+      """.stripMargin,
+      "rollup_cube_5_2")
   }
 
   test("rollup/cube #6") {
-    checkHiveQl("SELECT a, b, sum(c) FROM parquet_t2 GROUP BY ROLLUP(a, b) ORDER BY a, b")
-    checkHiveQl("SELECT a, b, sum(c) FROM parquet_t2 GROUP BY CUBE(a, b) ORDER BY a, b")
-    checkHiveQl("SELECT a, b, sum(a) FROM parquet_t2 GROUP BY ROLLUP(a, b) ORDER BY a, b")
-    checkHiveQl("SELECT a, b, sum(a) FROM parquet_t2 GROUP BY CUBE(a, b) ORDER BY a, b")
-    checkHiveQl("SELECT a + b, b, sum(a - b) FROM parquet_t2 GROUP BY a + b, b WITH ROLLUP")
-    checkHiveQl("SELECT a + b, b, sum(a - b) FROM parquet_t2 GROUP BY a + b, b WITH CUBE")
+    checkSQL("SELECT a, b, sum(c) FROM parquet_t2 GROUP BY ROLLUP(a, b) ORDER BY a, b",
+      "rollup_cube_6_1")
+
+    checkSQL("SELECT a, b, sum(c) FROM parquet_t2 GROUP BY CUBE(a, b) ORDER BY a, b",
+      "rollup_cube_6_2")
+
+    checkSQL("SELECT a, b, sum(a) FROM parquet_t2 GROUP BY ROLLUP(a, b) ORDER BY a, b",
+      "rollup_cube_6_3")
+
+    checkSQL("SELECT a, b, sum(a) FROM parquet_t2 GROUP BY CUBE(a, b) ORDER BY a, b",
+      "rollup_cube_6_4")
+
+    checkSQL("SELECT a + b, b, sum(a - b) FROM parquet_t2 GROUP BY a + b, b WITH ROLLUP",
+      "rollup_cube_6_5")
+
+    checkSQL("SELECT a + b, b, sum(a - b) FROM parquet_t2 GROUP BY a + b, b WITH CUBE",
+      "rollup_cube_6_6")
   }
 
   test("rollup/cube #7") {
-    checkHiveQl("SELECT a, b, grouping_id(a, b) FROM parquet_t2 GROUP BY cube(a, b)")
-    checkHiveQl("SELECT a, b, grouping(b) FROM parquet_t2 GROUP BY cube(a, b)")
-    checkHiveQl("SELECT a, b, grouping(a) FROM parquet_t2 GROUP BY cube(a, b)")
+    checkSQL("SELECT a, b, grouping_id(a, b) FROM parquet_t2 GROUP BY cube(a, b)",
+      "rollup_cube_7_1")
+
+    checkSQL("SELECT a, b, grouping(b) FROM parquet_t2 GROUP BY cube(a, b)",
+      "rollup_cube_7_2")
+
+    checkSQL("SELECT a, b, grouping(a) FROM parquet_t2 GROUP BY cube(a, b)",
+      "rollup_cube_7_3")
   }
 
   test("rollup/cube #8") {
     // grouping_id() is part of another expression
-    checkHiveQl(
+    checkSQL(
       s"""
          |SELECT hkey AS k1, value - 5 AS k2, hash(grouping_id()) AS hgid
          |FROM (SELECT hash(key) as hkey, key as value FROM parquet_t1) t GROUP BY hkey, value-5
          |WITH ROLLUP
-      """.stripMargin)
-    checkHiveQl(
+      """.stripMargin,
+      "rollup_cube_8_1")
+
+    checkSQL(
       s"""
          |SELECT hkey AS k1, value - 5 AS k2, hash(grouping_id()) AS hgid
          |FROM (SELECT hash(key) as hkey, key as value FROM parquet_t1) t GROUP BY hkey, value-5
          |WITH CUBE
-      """.stripMargin)
+      """.stripMargin,
+      "rollup_cube_8_2")
   }
 
   test("rollup/cube #9") {
     // self join is used as the child node of ROLLUP/CUBE with replaced quantifiers
-    checkHiveQl(
+    checkSQL(
       s"""
          |SELECT t.key - 5, cnt, SUM(cnt)
          |FROM (SELECT x.key, COUNT(*) as cnt
          |FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key GROUP BY x.key) t
          |GROUP BY cnt, t.key - 5
          |WITH ROLLUP
-      """.stripMargin)
-    checkHiveQl(
+      """.stripMargin,
+      "rollup_cube_9_1")
+
+    checkSQL(
       s"""
          |SELECT t.key - 5, cnt, SUM(cnt)
          |FROM (SELECT x.key, COUNT(*) as cnt
          |FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key GROUP BY x.key) t
          |GROUP BY cnt, t.key - 5
          |WITH CUBE
-      """.stripMargin)
+      """.stripMargin,
+      "rollup_cube_9_2")
   }
 
   test("grouping sets #1") {
-    checkHiveQl(
+    checkSQL(
       s"""
          |SELECT count(*) AS cnt, key % 5 AS k1, key - 5 AS k2, grouping_id() AS k3
          |FROM (SELECT key, key % 2, key - 5 FROM parquet_t1) t GROUP BY key % 5, key - 5
          |GROUPING SETS (key % 5, key - 5)
-      """.stripMargin)
+      """.stripMargin,
+      "grouping_sets_1")
   }
 
   test("grouping sets #2") {
-    checkHiveQl(
-      "SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b GROUPING SETS (a, b) ORDER BY a, b")
-    checkHiveQl(
-      "SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b GROUPING SETS (a) ORDER BY a, b")
-    checkHiveQl(
-      "SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b GROUPING SETS (b) ORDER BY a, b")
-    checkHiveQl(
-      "SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b GROUPING SETS (()) ORDER BY a, b")
-    checkHiveQl(
+    checkSQL(
+      "SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b GROUPING SETS (a, b) ORDER BY a, b",
+      "grouping_sets_2_1")
+
+    checkSQL(
+      "SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b GROUPING SETS (a) ORDER BY a, b",
+      "grouping_sets_2_2")
+
+    checkSQL(
+      "SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b GROUPING SETS (b) ORDER BY a, b",
+      "grouping_sets_2_3")
+
+    checkSQL(
+      "SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b GROUPING SETS (()) ORDER BY a, b",
+      "grouping_sets_2_4")
+
+    checkSQL(
       s"""
          |SELECT a, b, sum(c) FROM parquet_t2 GROUP BY a, b
          |GROUPING SETS ((), (a), (a, b)) ORDER BY a, b
-      """.stripMargin)
+      """.stripMargin,
+      "grouping_sets_2_5")
   }
 
   test("cluster by") {
-    checkHiveQl("SELECT id FROM parquet_t0 CLUSTER BY id")
+    checkSQL("SELECT id FROM parquet_t0 CLUSTER BY id", "cluster_by")
   }
 
   test("distribute by") {
-    checkHiveQl("SELECT id FROM parquet_t0 DISTRIBUTE BY id")
+    checkSQL("SELECT id FROM parquet_t0 DISTRIBUTE BY id", "distribute_by")
   }
 
   test("distribute by with sort by") {
-    checkHiveQl("SELECT id FROM parquet_t0 DISTRIBUTE BY id SORT BY id")
+    checkSQL("SELECT id FROM parquet_t0 DISTRIBUTE BY id SORT BY id",
+      "distribute_by_with_sort_by")
   }
 
   test("SPARK-13720: sort by after having") {
-    checkHiveQl("SELECT COUNT(value) FROM parquet_t1 GROUP BY key HAVING MAX(key) > 0 SORT BY key")
+    checkSQL("SELECT COUNT(value) FROM parquet_t1 GROUP BY key HAVING MAX(key) > 0 SORT BY key",
+      "sort_by_after_having")
   }
 
   test("distinct aggregation") {
-    checkHiveQl("SELECT COUNT(DISTINCT id) FROM parquet_t0")
+    checkSQL("SELECT COUNT(DISTINCT id) FROM parquet_t0", "distinct_aggregation")
   }
 
   test("TABLESAMPLE") {
@@ -361,33 +505,34 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
     //    +- Subquery s
     //       +- Subquery parquet_t0
     //          +- Relation[id#2L] ParquetRelation
-    checkHiveQl("SELECT s.id FROM parquet_t0 TABLESAMPLE(100 PERCENT) s")
+    checkSQL("SELECT s.id FROM parquet_t0 TABLESAMPLE(100 PERCENT) s", "tablesample_1")
 
     // Project [id#2L]
     // +- Sample 0.0, 1.0, false, ...
     //    +- Subquery parquet_t0
     //       +- Relation[id#2L] ParquetRelation
-    checkHiveQl("SELECT * FROM parquet_t0 TABLESAMPLE(100 PERCENT)")
+    checkSQL("SELECT * FROM parquet_t0 TABLESAMPLE(100 PERCENT)", "tablesample_2")
 
     // Project [id#21L]
     // +- Sample 0.0, 1.0, false, ...
     //    +- MetastoreRelation default, t0, Some(s)
-    checkHiveQl("SELECT s.id FROM t0 TABLESAMPLE(100 PERCENT) s")
+    checkSQL("SELECT s.id FROM t0 TABLESAMPLE(100 PERCENT) s", "tablesample_3")
 
     // Project [id#24L]
     // +- Sample 0.0, 1.0, false, ...
     //    +- MetastoreRelation default, t0, None
-    checkHiveQl("SELECT * FROM t0 TABLESAMPLE(100 PERCENT)")
+    checkSQL("SELECT * FROM t0 TABLESAMPLE(100 PERCENT)", "tablesample_4")
 
     // When a sampling fraction is not 100%, the returned results are random.
     // Thus, added an always-false filter here to check if the generated plan can be successfully
     // executed.
-    checkHiveQl("SELECT s.id FROM parquet_t0 TABLESAMPLE(0.1 PERCENT) s WHERE 1=0")
-    checkHiveQl("SELECT * FROM parquet_t0 TABLESAMPLE(0.1 PERCENT) WHERE 1=0")
+    checkSQL("SELECT s.id FROM parquet_t0 TABLESAMPLE(0.1 PERCENT) s WHERE 1=0", "tablesample_5")
+    checkSQL("SELECT * FROM parquet_t0 TABLESAMPLE(0.1 PERCENT) WHERE 1=0", "tablesample_6")
   }
 
   test("multi-distinct columns") {
-    checkHiveQl("SELECT a, COUNT(DISTINCT b), COUNT(DISTINCT c), SUM(d) FROM parquet_t2 GROUP BY a")
+    checkSQL("SELECT a, COUNT(DISTINCT b), COUNT(DISTINCT c), SUM(d) FROM parquet_t2 GROUP BY a",
+      "multi_distinct")
   }
 
   test("persisted data source relations") {
@@ -395,48 +540,54 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
       val tableName = s"${format}_parquet_t0"
       withTable(tableName) {
         spark.range(10).write.format(format).saveAsTable(tableName)
-        checkHiveQl(s"SELECT id FROM $tableName")
+        checkSQL(s"SELECT id FROM $tableName", s"data_source_$tableName")
       }
     }
   }
 
   test("script transformation - schemaless") {
-    checkHiveQl("SELECT TRANSFORM (a, b, c, d) USING 'cat' FROM parquet_t2")
-    checkHiveQl("SELECT TRANSFORM (*) USING 'cat' FROM parquet_t2")
+    checkSQL("SELECT TRANSFORM (a, b, c, d) USING 'cat' FROM parquet_t2",
+      "script_transformation_1")
+    checkSQL("SELECT TRANSFORM (*) USING 'cat' FROM parquet_t2",
+      "script_transformation_2")
   }
 
   test("script transformation - alias list") {
-    checkHiveQl("SELECT TRANSFORM (a, b, c, d) USING 'cat' AS (d1, d2, d3, d4) FROM parquet_t2")
+    checkSQL("SELECT TRANSFORM (a, b, c, d) USING 'cat' AS (d1, d2, d3, d4) FROM parquet_t2",
+      "script_transformation_alias_list")
   }
 
   test("script transformation - alias list with type") {
-    checkHiveQl(
+    checkSQL(
       """FROM
         |(FROM parquet_t1 SELECT TRANSFORM(key, value) USING 'cat' AS (thing1 int, thing2 string)) t
         |SELECT thing1 + 1
-      """.stripMargin)
+      """.stripMargin,
+      "script_transformation_alias_list_with_type")
   }
 
   test("script transformation - row format delimited clause with only one format property") {
-    checkHiveQl(
+    checkSQL(
       """SELECT TRANSFORM (key) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
         |USING 'cat' AS (tKey) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
         |FROM parquet_t1
-      """.stripMargin)
+      """.stripMargin,
+      "script_transformation_row_format_one")
   }
 
   test("script transformation - row format delimited clause with multiple format properties") {
-    checkHiveQl(
+    checkSQL(
       """SELECT TRANSFORM (key)
         |ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\t'
         |USING 'cat' AS (tKey)
         |ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\t'
         |FROM parquet_t1
-      """.stripMargin)
+      """.stripMargin,
+      "script_transformation_row_format_multiple")
   }
 
   test("script transformation - row format serde clauses with SERDEPROPERTIES") {
-    checkHiveQl(
+    checkSQL(
       """SELECT TRANSFORM (key, value)
         |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
         |WITH SERDEPROPERTIES('field.delim' = '|')
@@ -444,17 +595,19 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
         |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
         |WITH SERDEPROPERTIES('field.delim' = '|')
         |FROM parquet_t1
-      """.stripMargin)
+      """.stripMargin,
+      "script_transformation_row_format_serde")
   }
 
   test("script transformation - row format serde clauses without SERDEPROPERTIES") {
-    checkHiveQl(
+    checkSQL(
       """SELECT TRANSFORM (key, value)
         |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
         |USING 'cat' AS (tKey, tValue)
         |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
         |FROM parquet_t1
-      """.stripMargin)
+      """.stripMargin,
+      "script_transformation_row_format_without_serde")
   }
 
   test("plans with non-SQL expressions") {
@@ -464,7 +617,7 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
 
   test("named expression in column names shouldn't be quoted") {
     def checkColumnNames(query: String, expectedColNames: String*): Unit = {
-      checkHiveQl(query)
+      checkSQL(query)
       assert(sql(query).columns === expectedColNames)
     }
 
@@ -521,21 +674,25 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   }
 
   test("window basic") {
-    checkHiveQl("SELECT MAX(value) OVER (PARTITION BY key % 3) FROM parquet_t1")
-    checkHiveQl(
+    checkSQL("SELECT MAX(value) OVER (PARTITION BY key % 3) FROM parquet_t1", "window_basic_1")
+
+    checkSQL(
       """
          |SELECT key, value, ROUND(AVG(key) OVER (), 2)
          |FROM parquet_t1 ORDER BY key
-      """.stripMargin)
-    checkHiveQl(
+      """.stripMargin,
+      "window_basic_2")
+
+    checkSQL(
       """
          |SELECT value, MAX(key + 1) OVER (PARTITION BY key % 5 ORDER BY key % 7) AS max
          |FROM parquet_t1
-      """.stripMargin)
+      """.stripMargin,
+      "window_basic_3")
   }
 
   test("multiple window functions in one expression") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT
         |  MAX(key) OVER (ORDER BY key DESC, value) / MIN(key) OVER (PARTITION BY key % 3)
@@ -544,15 +701,17 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   }
 
   test("regular expressions and window functions in one expression") {
-    checkHiveQl("SELECT MAX(key) OVER (PARTITION BY key % 3) + key FROM parquet_t1")
+    checkSQL("SELECT MAX(key) OVER (PARTITION BY key % 3) + key FROM parquet_t1",
+      "regular_expressions_and_window")
   }
 
   test("aggregate functions and window functions in one expression") {
-    checkHiveQl("SELECT MAX(c) + COUNT(a) OVER () FROM parquet_t2 GROUP BY a, b")
+    checkSQL("SELECT MAX(c) + COUNT(a) OVER () FROM parquet_t2 GROUP BY a, b",
+      "aggregate_functions_and_window")
   }
 
   test("window with different window specification") {
-    checkHiveQl(
+    checkSQL(
       """
          |SELECT key, value,
          |DENSE_RANK() OVER (ORDER BY key, value) AS dr,
@@ -562,45 +721,49 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   }
 
   test("window with the same window specification with aggregate + having") {
-    checkHiveQl(
+    checkSQL(
       """
          |SELECT key, value,
          |MAX(value) OVER (PARTITION BY key % 5 ORDER BY key DESC) AS max
          |FROM parquet_t1 GROUP BY key, value HAVING key > 5
-      """.stripMargin)
+      """.stripMargin,
+      "window_with_the_same_window_with_agg_having")
   }
 
   test("window with the same window specification with aggregate functions") {
-    checkHiveQl(
+    checkSQL(
       """
          |SELECT key, value,
          |MAX(value) OVER (PARTITION BY key % 5 ORDER BY key) AS max
          |FROM parquet_t1 GROUP BY key, value
-      """.stripMargin)
+      """.stripMargin,
+      "window_with_the_same_window_with_agg_functions")
   }
 
   test("window with the same window specification with aggregate") {
-    checkHiveQl(
+    checkSQL(
       """
          |SELECT key, value,
          |DENSE_RANK() OVER (DISTRIBUTE BY key SORT BY key, value) AS dr,
          |COUNT(key)
          |FROM parquet_t1 GROUP BY key, value
-      """.stripMargin)
+      """.stripMargin,
+      "window_with_the_same_window_with_agg")
   }
 
   test("window with the same window specification without aggregate and filter") {
-    checkHiveQl(
+    checkSQL(
       """
          |SELECT key, value,
          |DENSE_RANK() OVER (DISTRIBUTE BY key SORT BY key, value) AS dr,
          |COUNT(key) OVER(DISTRIBUTE BY key SORT BY key, value) AS ca
          |FROM parquet_t1
-      """.stripMargin)
+      """.stripMargin,
+      "window_with_the_same_window_with_agg_filter")
   }
 
   test("window clause") {
-    checkHiveQl(
+    checkSQL(
       """
          |SELECT key, MAX(value) OVER w1 AS MAX, MIN(value) OVER w2 AS min
          |FROM parquet_t1
@@ -609,7 +772,7 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   }
 
   test("special window functions") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT
         |  RANK() OVER w,
@@ -626,107 +789,120 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   }
 
   test("window with join") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT x.key, MAX(y.key) OVER (PARTITION BY x.key % 5 ORDER BY x.key)
         |FROM parquet_t1 x JOIN parquet_t1 y ON x.key = y.key
-      """.stripMargin)
+      """.stripMargin,
+      "window_with_join")
   }
 
   test("join 2 tables and aggregate function in having clause") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT COUNT(a.value), b.KEY, a.KEY
         |FROM parquet_t1 a, parquet_t1 b
         |GROUP BY a.KEY, b.KEY
         |HAVING MAX(a.KEY) > 0
-      """.stripMargin)
+      """.stripMargin,
+      "join_2_tables")
   }
 
   test("generator in project list without FROM clause") {
-    checkHiveQl("SELECT EXPLODE(ARRAY(1,2,3))")
-    checkHiveQl("SELECT EXPLODE(ARRAY(1,2,3)) AS val")
+    checkSQL("SELECT EXPLODE(ARRAY(1,2,3))", "generator_without_from_1")
+    checkSQL("SELECT EXPLODE(ARRAY(1,2,3)) AS val", "generator_without_from_2")
   }
 
   test("generator in project list with non-referenced table") {
-    checkHiveQl("SELECT EXPLODE(ARRAY(1,2,3)) FROM t0")
-    checkHiveQl("SELECT EXPLODE(ARRAY(1,2,3)) AS val FROM t0")
+    checkSQL("SELECT EXPLODE(ARRAY(1,2,3)) FROM t0", "generator_non_referenced_table_1")
+    checkSQL("SELECT EXPLODE(ARRAY(1,2,3)) AS val FROM t0", "generator_non_referenced_table_2")
   }
 
   test("generator in project list with referenced table") {
-    checkHiveQl("SELECT EXPLODE(arr) FROM parquet_t3")
-    checkHiveQl("SELECT EXPLODE(arr) AS val FROM parquet_t3")
+    checkSQL("SELECT EXPLODE(arr) FROM parquet_t3", "generator_referenced_table_1")
+    checkSQL("SELECT EXPLODE(arr) AS val FROM parquet_t3", "generator_referenced_table_2")
   }
 
   test("generator in project list with non-UDTF expressions") {
-    checkHiveQl("SELECT EXPLODE(arr), id FROM parquet_t3")
-    checkHiveQl("SELECT EXPLODE(arr) AS val, id as a FROM parquet_t3")
+    checkSQL("SELECT EXPLODE(arr), id FROM parquet_t3", "generator_non_udtf_1")
+    checkSQL("SELECT EXPLODE(arr) AS val, id as a FROM parquet_t3", "generator_non_udtf_2")
   }
 
   test("generator in lateral view") {
-    checkHiveQl("SELECT val, id FROM parquet_t3 LATERAL VIEW EXPLODE(arr) exp AS val")
-    checkHiveQl("SELECT val, id FROM parquet_t3 LATERAL VIEW OUTER EXPLODE(arr) exp AS val")
+    checkSQL("SELECT val, id FROM parquet_t3 LATERAL VIEW EXPLODE(arr) exp AS val",
+      "generator_in_lateral_view_1")
+    checkSQL("SELECT val, id FROM parquet_t3 LATERAL VIEW OUTER EXPLODE(arr) exp AS val",
+      "generator_in_lateral_view_2")
   }
 
   test("generator in lateral view with ambiguous names") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT exp.id, parquet_t3.id
         |FROM parquet_t3
         |LATERAL VIEW EXPLODE(arr) exp AS id
-      """.stripMargin)
-    checkHiveQl(
+      """.stripMargin,
+      "generator_with_ambiguous_names_1")
+
+    checkSQL(
       """
         |SELECT exp.id, parquet_t3.id
         |FROM parquet_t3
         |LATERAL VIEW OUTER EXPLODE(arr) exp AS id
-      """.stripMargin)
+      """.stripMargin,
+      "generator_with_ambiguous_names_2")
   }
 
   test("use JSON_TUPLE as generator") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT c0, c1, c2
         |FROM parquet_t3
         |LATERAL VIEW JSON_TUPLE(json, 'f1', 'f2', 'f3') jt
-      """.stripMargin)
-    checkHiveQl(
+      """.stripMargin,
+      "json_tuple_generator_1")
+
+    checkSQL(
       """
         |SELECT a, b, c
         |FROM parquet_t3
         |LATERAL VIEW JSON_TUPLE(json, 'f1', 'f2', 'f3') jt AS a, b, c
-      """.stripMargin)
+      """.stripMargin,
+      "json_tuple_generator_2")
   }
 
   test("nested generator in lateral view") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT val, id
         |FROM parquet_t3
         |LATERAL VIEW EXPLODE(arr2) exp1 AS nested_array
         |LATERAL VIEW EXPLODE(nested_array) exp1 AS val
-      """.stripMargin)
+      """.stripMargin,
+      "nested_generator_in_lateral_view_1")
 
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT val, id
         |FROM parquet_t3
         |LATERAL VIEW EXPLODE(arr2) exp1 AS nested_array
         |LATERAL VIEW OUTER EXPLODE(nested_array) exp1 AS val
-      """.stripMargin)
+      """.stripMargin,
+      "nested_generator_in_lateral_view_2")
   }
 
   test("generate with other operators") {
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT EXPLODE(arr) AS val, id
         |FROM parquet_t3
         |WHERE id > 2
         |ORDER BY val, id
         |LIMIT 5
-      """.stripMargin)
+      """.stripMargin,
+      "generate_with_other_1")
 
-    checkHiveQl(
+    checkSQL(
       """
         |SELECT val, id
         |FROM parquet_t3
@@ -735,24 +911,26 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
         |WHERE val > 2
         |ORDER BY val, id
         |LIMIT 5
-      """.stripMargin)
+      """.stripMargin,
+      "generate_with_other_2")
   }
 
   test("filter after subquery") {
-    checkHiveQl("SELECT a FROM (SELECT key + 1 AS a FROM parquet_t1) t WHERE a > 5")
+    checkSQL("SELECT a FROM (SELECT key + 1 AS a FROM parquet_t1) t WHERE a > 5",
+      "filter_after_subquery")
   }
 
   test("SPARK-14933 - select parquet table") {
     withTable("parquet_t") {
       sql("create table parquet_t stored as parquet as select 1 as c1, 'abc' as c2")
-      checkHiveQl("select * from parquet_t")
+      checkSQL("select * from parquet_t", "select_parquet_table")
     }
   }
 
   test("SPARK-14933 - select orc table") {
     withTable("orc_t") {
       sql("create table orc_t stored as orc as select 1 as c1, 'abc' as c2")
-      checkHiveQl("select * from orc_t")
+      checkSQL("select * from orc_t", "select_orc_table")
     }
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message