spark-commits mailing list archives

From r...@apache.org
Subject [5/8] spark git commit: [SPARK-14770][SQL] Remove unused queries in hive module test resources
Date Wed, 20 Apr 2016 23:30:31 GMT
http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_partition_failure.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_partition_failure.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_partition_failure.q
deleted file mode 100644
index c2074f6..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_partition_failure.q
+++ /dev/null
@@ -1,11 +0,0 @@
-create table mp (a string) partitioned by (b string, c string);
-
-alter table mp add partition (b='1', c='1');
-alter table mp add partition (b='1', c='2');
-alter table mp add partition (b='2', c='2');
-
-show partitions mp;
-
-set hive.exec.drop.ignorenonexistent=false;
--- Can't use DROP PARTITION if the partition doesn't exist and IF EXISTS isn't specified
-alter table mp drop partition (b='3');

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_partition_filter_failure.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_partition_filter_failure.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_partition_filter_failure.q
deleted file mode 100644
index df476ed..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_partition_filter_failure.q
+++ /dev/null
@@ -1,8 +0,0 @@
-create table ptestfilter1 (a string, b int) partitioned by (c string, d string);
-
-alter table ptestfilter1 add partition (c='US', d=1);
-show partitions ptestfilter1;
-
-set hive.exec.drop.ignorenonexistent=false;
-alter table ptestfilter1 drop partition (c='US', d<1);
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure1.q
deleted file mode 100644
index d47c08b..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure1.q
+++ /dev/null
@@ -1,3 +0,0 @@
-set hive.exec.drop.ignorenonexistent=false;
--- Can't use DROP TABLE if the table doesn't exist and IF EXISTS isn't specified
-DROP TABLE UnknownTable;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure2.q
deleted file mode 100644
index 631e4ff..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure2.q
+++ /dev/null
@@ -1,3 +0,0 @@
-CREATE VIEW xxx6 AS SELECT key FROM src;
--- Can't use DROP TABLE on a view
-DROP TABLE xxx6;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure3.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure3.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure3.q
deleted file mode 100644
index 534ce0b..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_table_failure3.q
+++ /dev/null
@@ -1,12 +0,0 @@
-create database dtf3;
-use dtf3; 
-
-create table drop_table_failure_temp(col STRING) partitioned by (p STRING);
-
-alter table drop_table_failure_temp add partition (p ='p1');
-alter table drop_table_failure_temp add partition (p ='p2');
-alter table drop_table_failure_temp add partition (p ='p3');
-
-alter table drop_table_failure_temp partition (p ='p3') ENABLE NO_DROP;
-
-drop table drop_table_failure_temp;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_view_failure1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_view_failure1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_view_failure1.q
deleted file mode 100644
index 79cb4e4..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_view_failure1.q
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-CREATE TABLE xxx1(key int);
-
--- Can't use DROP VIEW on a base table
-DROP VIEW xxx1;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_view_failure2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_view_failure2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_view_failure2.q
deleted file mode 100644
index 93bb162..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/drop_view_failure2.q
+++ /dev/null
@@ -1,3 +0,0 @@
-SET hive.exec.drop.ignorenonexistent=false;
--- Can't use DROP VIEW if the view doesn't exist and IF EXISTS isn't specified
-DROP VIEW UnknownView;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_alias_in_transform.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_alias_in_transform.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_alias_in_transform.q
deleted file mode 100644
index b2e8567..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_alias_in_transform.q
+++ /dev/null
@@ -1 +0,0 @@
-FROM src SELECT TRANSFORM (key, value) USING "awk -F'\001' '{print $0}'" AS (foo, foo);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_alias_in_transform_schema.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_alias_in_transform_schema.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_alias_in_transform_schema.q
deleted file mode 100644
index dabbc35..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_alias_in_transform_schema.q
+++ /dev/null
@@ -1 +0,0 @@
-FROM src SELECT TRANSFORM (key, value) USING "awk -F'\001' '{print $0}'" AS (foo STRING, foo STRING);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert1.q
deleted file mode 100644
index fcbc7d5..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert1.q
+++ /dev/null
@@ -1,7 +0,0 @@
-
-create table dest1_din1(key int, value string);
-
-from src
-insert overwrite table dest1_din1 select key, value
-insert overwrite table dest1_din1 select key, value;
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert2.q
deleted file mode 100644
index 4f79a03..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert2.q
+++ /dev/null
@@ -1,6 +0,0 @@
-
-create table dest1_din2(key int, value string) partitioned by (ds string);
-
-from src
-insert overwrite table dest1_din2 partition (ds='1') select key, value
-insert overwrite table dest1_din2 partition (ds='1') select key, value;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert3.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert3.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert3.q
deleted file mode 100644
index 7b271a5..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/duplicate_insert3.q
+++ /dev/null
@@ -1,4 +0,0 @@
-
-from src
-insert overwrite directory '${system:test.tmp.dir}/dest1' select key, value
-insert overwrite directory '${system:test.tmp.dir}/dest1' select key, value;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part1.q
deleted file mode 100644
index 9f0b6c7..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part1.q
+++ /dev/null
@@ -1,11 +0,0 @@
-set hive.exec.dynamic.partition=true;
-set hive.exec.dynamic.partition.mode=nostrict;
-set hive.exec.max.dynamic.partitions=2;
-
-
-create table dynamic_partition (key string) partitioned by (value string);
-
-insert overwrite table dynamic_partition partition(hr) select key, value from src;
-
-
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part2.q
deleted file mode 100644
index 00a9278..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part2.q
+++ /dev/null
@@ -1,11 +0,0 @@
-
-create table nzhang_part1 (key string, value string) partitioned by (ds string, hr string);
-
-set hive.exec.dynamic.partition=true;
-
-insert overwrite table nzhang_part1 partition(ds='11', hr) select key, value from srcpart where ds is not null;
-
-show partitions nzhang_part1;
-
-
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part3.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part3.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part3.q
deleted file mode 100644
index 7a8c58a..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part3.q
+++ /dev/null
@@ -1,9 +0,0 @@
-set hive.exec.max.dynamic.partitions=600;
-set hive.exec.max.dynamic.partitions.pernode=600;
-set hive.exec.dynamic.partition.mode=nonstrict;
-set hive.exec.dynamic.partition=true;
-set hive.exec.max.created.files=100;
-
-create table nzhang_part( key string) partitioned by (value string);
-
-insert overwrite table nzhang_part partition(value) select key, value from src;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part4.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part4.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part4.q
deleted file mode 100644
index 9aff7aa..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part4.q
+++ /dev/null
@@ -1,7 +0,0 @@
-create table nzhang_part4 (key string) partitioned by (ds string, hr string, value string);
-
-set hive.exec.dynamic.partition=true;
-
-insert overwrite table nzhang_part4 partition(value = 'aaa', ds='11', hr) select key, hr from srcpart where ds is not null;
-
-drop table nzhang_part4;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_empty.q.disabled
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_empty.q.disabled b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_empty.q.disabled
deleted file mode 100644
index a8fce59..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_empty.q.disabled
+++ /dev/null
@@ -1,24 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---     http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-
-set hive.exec.dynamic.partition=true;
-set hive.exec.dynamic.partition.mode=nonstrict;
-set hive.stats.autogether=false;
-set hive.error.on.empty.partition=true;
-
-create table dyn_err(key string, value string) partitioned by (ds string);
-
-insert overwrite table dyn_err partition(ds) select key, value, ds from srcpart where ds is not null and key = 'no exists';

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_max.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_max.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_max.q
deleted file mode 100644
index 6a7a625..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_max.q
+++ /dev/null
@@ -1,16 +0,0 @@
-USE default;
-
--- Test of hive.exec.max.dynamic.partitions
--- Set hive.exec.max.dynamic.partitions.pernode to a large value so it will be ignored
-
-CREATE TABLE max_parts(key STRING) PARTITIONED BY (value STRING);
-
-set hive.exec.dynamic.partition=true;
-set hive.exec.dynamic.partition.mode=nonstrict;
-set hive.exec.max.dynamic.partitions=10;
-set hive.exec.max.dynamic.partitions.pernode=1000;
-
-INSERT OVERWRITE TABLE max_parts PARTITION(value)
-SELECT key, value
-FROM src
-LIMIT 50;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_max_per_node.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_max_per_node.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_max_per_node.q
deleted file mode 100644
index a411ec5..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dyn_part_max_per_node.q
+++ /dev/null
@@ -1,15 +0,0 @@
-USE default;
-
--- Test of hive.exec.max.dynamic.partitions.pernode
-
-CREATE TABLE max_parts(key STRING) PARTITIONED BY (value STRING);
-
-set hive.exec.dynamic.partition=true;
-set hive.exec.dynamic.partition.mode=nonstrict;
-set hive.exec.max.dynamic.partitions=1000;
-set hive.exec.max.dynamic.partitions.pernode=10;
-
-INSERT OVERWRITE TABLE max_parts PARTITION(value)
-SELECT key, value
-FROM src
-LIMIT 50;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q
deleted file mode 100644
index 0ad99d1..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q
+++ /dev/null
@@ -1,17 +0,0 @@
-SET hive.metastore.partition.name.whitelist.pattern=[^9]*;
-set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifyTableDirectoryIsEmptyHook;
-
-set hive.exec.dynamic.partition=true;
-set hive.exec.dynamic.partition.mode=nonstrict;
-
-create table source_table like srcpart;
-
-create table dest_table like srcpart;
-
-load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11);
-
--- Tests creating dynamic partitions with characters not in the whitelist (i.e. 9)
--- If the directory is not empty the hook will throw an error, instead the error should come from the metastore
--- This shows that no dynamic partitions were created and left behind or had directories created
-
-insert overwrite table dest_table partition (ds, hr) select key, hr, ds, value from source_table where ds='2008-04-08' order by value asc;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_incomplete_partition.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_incomplete_partition.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_incomplete_partition.q
deleted file mode 100644
index ca60d04..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_incomplete_partition.q
+++ /dev/null
@@ -1,12 +0,0 @@
-CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING, hr STRING);
-CREATE TABLE exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING, hr STRING);
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
-ALTER TABLE exchange_part_test2 ADD PARTITION (ds='2013-04-05', hr='h1');
-ALTER TABLE exchange_part_test2 ADD PARTITION (ds='2013-04-05', hr='h2');
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
--- for exchange_part_test1 the value of ds is not given and the value of hr is given, thus this query will fail
-alter table exchange_part_test1 exchange partition (hr='h1') with table exchange_part_test2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists.q
deleted file mode 100644
index 7083edc..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists.q
+++ /dev/null
@@ -1,12 +0,0 @@
-CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING);
-CREATE TABLE exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING);
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
-ALTER TABLE exchange_part_test1 ADD PARTITION (ds='2013-04-05');
-ALTER TABLE exchange_part_test2 ADD PARTITION (ds='2013-04-05');
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
--- exchange_part_test1 table partition (ds='2013-04-05') already exists thus this query will fail
-alter table exchange_part_test1 exchange partition (ds='2013-04-05') with table exchange_part_test2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists2.q
deleted file mode 100644
index 6dfe81a..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists2.q
+++ /dev/null
@@ -1,13 +0,0 @@
-CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING, hr STRING);
-CREATE TABLE exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING, hr STRING);
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
-ALTER TABLE exchange_part_test1 ADD PARTITION (ds='2013-04-05', hr='1');
-ALTER TABLE exchange_part_test1 ADD PARTITION (ds='2013-04-05', hr='2');
-ALTER TABLE exchange_part_test2 ADD PARTITION (ds='2013-04-05', hr='3');
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
--- exchange_part_test1 table partition (ds='2013-04-05') already exists thus this query will fail
-alter table exchange_part_test1 exchange partition (ds='2013-04-05') with table exchange_part_test2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists3.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists3.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists3.q
deleted file mode 100644
index 60671e5..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_exists3.q
+++ /dev/null
@@ -1,13 +0,0 @@
-CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING, hr STRING);
-CREATE TABLE exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING, hr STRING);
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
-ALTER TABLE exchange_part_test1 ADD PARTITION (ds='2013-04-05', hr='1');
-ALTER TABLE exchange_part_test1 ADD PARTITION (ds='2013-04-05', hr='2');
-ALTER TABLE exchange_part_test2 ADD PARTITION (ds='2013-04-05', hr='1');
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
--- exchange_part_test2 table partition (ds='2013-04-05') already exists thus this query will fail
-alter table exchange_part_test1 exchange partition (ds='2013-04-05') with table exchange_part_test2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_missing.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_missing.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_missing.q
deleted file mode 100644
index 38c0eda..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_partition_missing.q
+++ /dev/null
@@ -1,6 +0,0 @@
-CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING);
-CREATE TABLE exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING);
-SHOW PARTITIONS exchange_part_test1;
-
--- exchange_part_test2 partition (ds='2013-04-05') does not exist thus this query will fail
-alter table exchange_part_test1 exchange partition (ds='2013-04-05') with table exchange_part_test2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_table_missing.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_table_missing.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_table_missing.q
deleted file mode 100644
index 7b926a3..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_table_missing.q
+++ /dev/null
@@ -1,2 +0,0 @@
--- t1 does not exist and the query fails
-alter table t1 exchange partition (ds='2013-04-05') with table t2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_table_missing2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_table_missing2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_table_missing2.q
deleted file mode 100644
index 48fcd74..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_table_missing2.q
+++ /dev/null
@@ -1,8 +0,0 @@
-CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING);
-SHOW PARTITIONS exchange_part_test1;
-
-ALTER TABLE exchange_part_test1 ADD PARTITION (ds='2013-04-05');
-SHOW PARTITIONS exchange_part_test1;
-
--- exchange_part_test2 table does not exist thus this query will fail
-alter table exchange_part_test1 exchange partition (ds='2013-04-05') with table exchange_part_test2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_test.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_test.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_test.q
deleted file mode 100644
index 23e86e9..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exchange_partition_neg_test.q
+++ /dev/null
@@ -1,11 +0,0 @@
-CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING);
-CREATE TABLE exchange_part_test2 (f1 string, f2 string) PARTITIONED BY (ds STRING);
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
-ALTER TABLE exchange_part_test1 ADD PARTITION (ds='2013-04-05');
-SHOW PARTITIONS exchange_part_test1;
-SHOW PARTITIONS exchange_part_test2;
-
--- exchange_part_test1 and exchange_part_test2 do not have the same scheme and thus they fail
-ALTER TABLE exchange_part_test1 EXCHANGE PARTITION (ds='2013-04-05') WITH TABLE exchange_part_test2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
deleted file mode 100644
index 6ffc33a..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
+++ /dev/null
@@ -1,12 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id")
-	stored as textfile
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'nosuchschema://nosuchauthority/ql/test/data/exports/exim_department';
-drop table exim_department;
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
deleted file mode 100644
index 970e646..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
+++ /dev/null
@@ -1,24 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id")
-	stored as textfile
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department identifier")
-	stored as textfile
-	tblproperties("maker"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
deleted file mode 100644
index 3589183..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
+++ /dev/null
@@ -1,38 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_employee ( emp_id int comment "employee id") 	
-	comment "employee table"
-	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="tn");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="ka");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="tn");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="ka");		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-export table exim_employee to 'ql/test/data/exports/exim_employee';
-drop table exim_employee;
-
-create database importer;
-use importer;
-
-create table exim_employee ( emp_id int comment "employee id") 	
-	comment "table of employees"
-	partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
-	stored as textfile	
-	tblproperties("maker"="krishna");
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="ka");			
-import from 'ql/test/data/exports/exim_employee';
-describe extended exim_employee;
-select * from exim_employee;
-drop table exim_employee;
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
deleted file mode 100644
index 45268c2..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
+++ /dev/null
@@ -1,23 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_key int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
deleted file mode 100644
index cad6c90..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
+++ /dev/null
@@ -1,23 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id", dep_name string) 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
deleted file mode 100644
index f5f904f..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
+++ /dev/null
@@ -1,23 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id bigint comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
deleted file mode 100644
index c56329c..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
+++ /dev/null
@@ -1,23 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as rcfile	
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
deleted file mode 100644
index afaedcd..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
+++ /dev/null
@@ -1,26 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" 
-		outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat" 
-		inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver" 
-		outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver" 	
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
deleted file mode 100644
index 230b28c..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
+++ /dev/null
@@ -1,24 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
-	stored as textfile
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
deleted file mode 100644
index c2e00a9..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
+++ /dev/null
@@ -1,28 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
-		with serdeproperties ("serialization.format"="0")
-	stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" 
-		outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" 
-		inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver" 
-		outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
deleted file mode 100644
index a6586ea..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
+++ /dev/null
@@ -1,24 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	clustered by (dep_id) into 10 buckets
-	stored as textfile
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
deleted file mode 100644
index 990a686..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
+++ /dev/null
@@ -1,25 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;	
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
-	stored as textfile
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_12_nonnative_export.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
deleted file mode 100644
index 289bcf0..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
+++ /dev/null
@@ -1,9 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
-	stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"	
-	tblproperties("creator"="krishna");
-export table exim_department to 'ql/test/data/exports/exim_department';	
-drop table exim_department;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
deleted file mode 100644
index 02537ef..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
+++ /dev/null
@@ -1,24 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"	
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;
-	
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
deleted file mode 100644
index 897c674..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
+++ /dev/null
@@ -1,25 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	partitioned by (dep_org string)
-	stored as textfile		
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;
-	
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
deleted file mode 100644
index 12013e5..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
+++ /dev/null
@@ -1,25 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	partitioned by (dep_org string)
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department partition (dep_org="hr");		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile		
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;
-	
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
deleted file mode 100644
index d8d2b80..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
+++ /dev/null
@@ -1,26 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	partitioned by (dep_org string)
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department partition (dep_org="hr");		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int comment "department id") 	
-	partitioned by (dep_mgr string)
-	stored as textfile		
-	tblproperties("creator"="krishna");
-import from 'ql/test/data/exports/exim_department';
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
-drop database importer;
-	
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
deleted file mode 100644
index 82dcce9..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
+++ /dev/null
@@ -1,30 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_employee ( emp_id int comment "employee id") 	
-	comment "employee table"
-	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="tn");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="ka");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="tn");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="ka");		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-export table exim_employee to 'ql/test/data/exports/exim_employee';
-drop table exim_employee;
-
-create database importer;
-use importer;
-import table exim_employee partition (emp_country="us") from 'ql/test/data/exports/exim_employee';
-describe extended exim_employee;
-select * from exim_employee;
-drop table exim_employee;
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
deleted file mode 100644
index d92efeb..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
+++ /dev/null
@@ -1,30 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_employee ( emp_id int comment "employee id") 	
-	comment "employee table"
-	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="tn");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="ka");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="tn");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="ka");		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-export table exim_employee to 'ql/test/data/exports/exim_employee';
-drop table exim_employee;
-
-create database importer;
-use importer;
-import table exim_employee partition (emp_country="us", emp_state="kl") from 'ql/test/data/exports/exim_employee';
-describe extended exim_employee;
-select * from exim_employee;
-drop table exim_employee;
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-
-drop database importer;
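exim_17_part_spec_underspec and exim_18_part_spec_missing above both target the partition spec accepted by IMPORT: the first supplies only one of the two partition columns, the second names a partition that is not in the export. By contrast, a fully specified partition that the export actually contains is the accepted form (export path illustrative):

    import table exim_employee partition (emp_country="us", emp_state="tn")
        from '/tmp/exports/exim_employee';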

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
deleted file mode 100644
index 12d827b..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
+++ /dev/null
@@ -1,23 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create  table exim_department ( dep_id int comment "department id") 	
-	stored as textfile
-	tblproperties("creator"="krishna");
-import external table exim_department from 'ql/test/data/exports/exim_department';
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-drop table exim_department;
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
deleted file mode 100644
index 726dee5..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
+++ /dev/null
@@ -1,30 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" into table exim_department;		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
-
-create table exim_department ( dep_id int comment "department id") 	
-	stored as textfile
-	location 'ql/test/data/tablestore/exim_department'
-	tblproperties("creator"="krishna");
-import table exim_department from 'ql/test/data/exports/exim_department'
-	location 'ql/test/data/tablestore2/exim_department';
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-drop table exim_department;
-dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
-
-
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
deleted file mode 100644
index d187c78..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
+++ /dev/null
@@ -1,35 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_employee ( emp_id int comment "employee id") 	
-	comment "employee table"
-	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="tn");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="ka");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="tn");	
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="us", emp_state="ka");		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-export table exim_employee to 'ql/test/data/exports/exim_employee';
-drop table exim_employee;
-
-create database importer;
-use importer;
-
-create table exim_employee ( emp_id int comment "employee id") 	
-	comment "employee table"
-	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
-	stored as textfile	
-	tblproperties("creator"="krishna");
-import external table exim_employee partition (emp_country="us", emp_state="tn") 
-	from 'ql/test/data/exports/exim_employee';
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-drop table exim_employee;
-
-drop database importer;
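exim_19 through exim_21 above check that an import into a pre-existing table is accepted only when that table is compatible with the export: same managed/external kind, a matching definition, and no conflicting LOCATION. A minimal compatible sketch, assuming the export at the given path was produced from an identical table definition (path illustrative):

    create table exim_department (dep_id int comment "department id") stored as textfile;
    import from '/tmp/exports/exim_department';   -- data lands in the existing, compatible table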

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_22_export_authfail.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_22_export_authfail.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_22_export_authfail.q
deleted file mode 100644
index b818686..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_22_export_authfail.q
+++ /dev/null
@@ -1,14 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int) stored as textfile;
-
-set hive.security.authorization.enabled=true;
-
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-
-set hive.security.authorization.enabled=false;
-drop table exim_department;
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q
deleted file mode 100644
index 4acefb9..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q
+++ /dev/null
@@ -1,22 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-
-create table exim_department ( dep_id int) stored as textfile;
-load data local inpath "../../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-create table exim_department ( dep_id int) stored as textfile;
-set hive.security.authorization.enabled=true;
-import from 'ql/test/data/exports/exim_department';
-
-set hive.security.authorization.enabled=false;
-drop table exim_department;
-drop database importer;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q
deleted file mode 100644
index 467014e..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q
+++ /dev/null
@@ -1,31 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-set hive.test.mode.nosamplelist=exim_department,exim_employee;
-
-create table exim_employee ( emp_id int comment "employee id") 	
-	comment "employee table"
-	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
-	stored as textfile	
-	tblproperties("creator"="krishna");
-load data local inpath "../../data/files/test.dat" 
-	into table exim_employee partition (emp_country="in", emp_state="tn");		
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-export table exim_employee to 'ql/test/data/exports/exim_employee';
-drop table exim_employee;
-
-create database importer;
-use importer;
-create table exim_employee ( emp_id int comment "employee id") 	
-	comment "employee table"
-	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
-	stored as textfile	
-	tblproperties("creator"="krishna");
-
-set hive.security.authorization.enabled=true;
-import from 'ql/test/data/exports/exim_employee';
-set hive.security.authorization.enabled=false;
-
-dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
-drop table exim_employee;
-drop database importer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q
deleted file mode 100644
index 595fa7e..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q
+++ /dev/null
@@ -1,23 +0,0 @@
-set hive.test.mode=true;
-set hive.test.mode.prefix=;
-set hive.test.mode.nosamplelist=exim_department,exim_employee;
-
-create table exim_department ( dep_id int) stored as textfile;
-load data local inpath "../../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-export table exim_department to 'ql/test/data/exports/exim_department';
-drop table exim_department;
-
-create database importer;
-use importer;
-
-set hive.security.authorization.enabled=true;
-import from 'ql/test/data/exports/exim_department';
-
-set hive.security.authorization.enabled=false;
-select * from exim_department;
-drop table exim_department;
-drop database importer;
-dfs -rmr target/tmp/ql/test/data/exports/exim_department;
-
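exim_22 through exim_25 above all follow the same pattern: authorization is switched on immediately before the EXPORT or IMPORT under test so that the statement fails its privilege check, then switched off again so cleanup can run. The shape, with an illustrative export path:

    set hive.security.authorization.enabled=true;
    import from '/tmp/exports/exim_department';    -- expected to fail: no privileges granted
    set hive.security.authorization.enabled=false;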

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/external1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/external1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/external1.q
deleted file mode 100644
index d56c955..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/external1.q
+++ /dev/null
@@ -1,3 +0,0 @@
-
-create external table external1(a int, b int) location 'invalidscheme://data.s3ndemo.hive/kv';
-describe external1;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/external2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/external2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/external2.q
deleted file mode 100644
index 0df85a0..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/external2.q
+++ /dev/null
@@ -1,4 +0,0 @@
-
-create external table external2(a int, b int) partitioned by (ds string);
-alter table external2 add partition (ds='2008-01-01') location 'invalidscheme://data.s3ndemo.hive/pkv/2008-01-01';
-describe external2 partition (ds='2008-01-01');
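external1 and external2 above use a LOCATION whose URI scheme ('invalidscheme://') cannot be resolved to a Hadoop FileSystem, which is what makes them negative tests. A location with a resolvable scheme is the accepted form (path illustrative):

    create external table external_ok (a int, b int) location 'hdfs:///tmp/external_ok';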

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fetchtask_ioexception.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fetchtask_ioexception.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fetchtask_ioexception.q
deleted file mode 100644
index 82230f7..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fetchtask_ioexception.q
+++ /dev/null
@@ -1,7 +0,0 @@
-CREATE TABLE fetchtask_ioexception (
-  KEY STRING,
-  VALUE STRING) STORED AS SEQUENCEFILE;
-
-LOAD DATA LOCAL INPATH '../../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception;
-
-SELECT * FROM fetchtask_ioexception;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/file_with_header_footer_negative.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/file_with_header_footer_negative.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/file_with_header_footer_negative.q
deleted file mode 100644
index 286cf1a..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/file_with_header_footer_negative.q
+++ /dev/null
@@ -1,13 +0,0 @@
-dfs ${system:test.dfs.mkdir} hdfs:///tmp/test_file_with_header_footer_negative/;
-
-dfs -copyFromLocal ../data/files/header_footer_table_1 hdfs:///tmp/test_file_with_header_footer_negative/header_footer_table_1;
-
-dfs -copyFromLocal ../data/files/header_footer_table_2 hdfs:///tmp/test_file_with_header_footer_negative/header_footer_table_2;
-
-CREATE EXTERNAL TABLE header_footer_table_1 (name string, message string, id int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LOCATION 'hdfs:///tmp/test_file_with_header_footer_negative/header_footer_table_1' tblproperties ("skip.header.line.count"="1", "skip.footer.line.count"="200");
-
-SELECT * FROM header_footer_table_1;
-
-DROP TABLE header_footer_table_1;
-
-dfs -rmr hdfs:///tmp/test_file_with_header_footer_negative;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_bad_class.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_bad_class.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_bad_class.q
deleted file mode 100644
index 33dd4fa..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_bad_class.q
+++ /dev/null
@@ -1,3 +0,0 @@
-CREATE TABLE dest1(key INT, value STRING) STORED AS
-  INPUTFORMAT 'ClassDoesNotExist'
-  OUTPUTFORMAT 'java.lang.Void';

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_void_input.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_void_input.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_void_input.q
deleted file mode 100644
index c514562..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_void_input.q
+++ /dev/null
@@ -1,8 +0,0 @@
-CREATE TABLE dest1(key INT, value STRING) STORED AS
-  INPUTFORMAT 'java.lang.Void'
-  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
-
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10;
-
-SELECT dest1.* FROM dest1;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_void_output.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_void_output.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_void_output.q
deleted file mode 100644
index a9cef1e..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fileformat_void_output.q
+++ /dev/null
@@ -1,6 +0,0 @@
-CREATE TABLE dest1(key INT, value STRING) STORED AS
-  INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-  OUTPUTFORMAT 'java.lang.Void';
-
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10;
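The three fileformat_* tests above pass class names that either do not exist or do not implement the Hadoop input/output format interfaces. For contrast, a table declaration whose explicit format classes are real implementations (table name illustrative):

    CREATE TABLE dest_fmt_ok (key INT, value STRING) STORED AS
      INPUTFORMAT  'org.apache.hadoop.mapred.TextInputFormat'
      OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';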

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fs_default_name1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fs_default_name1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fs_default_name1.q
deleted file mode 100644
index f50369b..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fs_default_name1.q
+++ /dev/null
@@ -1,2 +0,0 @@
-set fs.default.name='http://www.example.com;
-show tables;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fs_default_name2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fs_default_name2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fs_default_name2.q
deleted file mode 100644
index 485c3db..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/fs_default_name2.q
+++ /dev/null
@@ -1,2 +0,0 @@
-set fs.default.name='http://www.example.com;
-SELECT * FROM src;
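fs_default_name1 and fs_default_name2 above set fs.default.name to a malformed URI, so any subsequent command that touches the filesystem fails. A well-formed value has a filesystem scheme, host, and port, for example (host and port illustrative):

    set fs.default.name=hdfs://localhost:8020;
    show tables;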

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/genericFileFormat.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/genericFileFormat.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/genericFileFormat.q
deleted file mode 100644
index bd633b9..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/genericFileFormat.q
+++ /dev/null
@@ -1 +0,0 @@
-create table testFail (a int) stored as foo;
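genericFileFormat above uses an unrecognized STORED AS keyword; only the built-in keywords (TEXTFILE, SEQUENCEFILE, RCFILE, ORC, and so on, depending on the Hive version) are accepted:

    create table testPass (a int) stored as textfile;   -- accepted; 'stored as foo' is not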

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby2_map_skew_multi_distinct.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby2_map_skew_multi_distinct.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby2_map_skew_multi_distinct.q
deleted file mode 100644
index cecd9c6..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby2_map_skew_multi_distinct.q
+++ /dev/null
@@ -1,14 +0,0 @@
-set hive.map.aggr=true;
-set hive.groupby.skewindata=true;
-set mapred.reduce.tasks=31;
-
-CREATE TABLE dest1(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE;
-
-EXPLAIN
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
-
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
-
-SELECT dest1.* FROM dest1;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby2_multi_distinct.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby2_multi_distinct.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby2_multi_distinct.q
deleted file mode 100644
index e3b0066..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby2_multi_distinct.q
+++ /dev/null
@@ -1,13 +0,0 @@
-set hive.map.aggr=false;
-set hive.groupby.skewindata=true;
-
-CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE;
-
-EXPLAIN
-FROM src
-INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
-
-FROM src
-INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
-
-SELECT dest_g2.* FROM dest_g2;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby3_map_skew_multi_distinct.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby3_map_skew_multi_distinct.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby3_map_skew_multi_distinct.q
deleted file mode 100644
index 168aeb1..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby3_map_skew_multi_distinct.q
+++ /dev/null
@@ -1,36 +0,0 @@
-set hive.map.aggr=true;
-set hive.groupby.skewindata=true;
-set mapred.reduce.tasks=31;
-
-CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE, c10 DOUBLE, c11 DOUBLE) STORED AS TEXTFILE;
-
-EXPLAIN
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT
-  sum(substr(src.value,5)),
-  avg(substr(src.value,5)),
-  avg(DISTINCT substr(src.value,5)),
-  max(substr(src.value,5)),
-  min(substr(src.value,5)),
-  std(substr(src.value,5)),
-  stddev_samp(substr(src.value,5)),
-  variance(substr(src.value,5)),
-  var_samp(substr(src.value,5)),
-  sum(DISTINCT substr(src.value, 5)),
-  count(DISTINCT substr(src.value, 5));
-
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT
-  sum(substr(src.value,5)),
-  avg(substr(src.value,5)),
-  avg(DISTINCT substr(src.value,5)),
-  max(substr(src.value,5)),
-  min(substr(src.value,5)),
-  std(substr(src.value,5)),
-  stddev_samp(substr(src.value,5)),
-  variance(substr(src.value,5)),
-  var_samp(substr(src.value,5)),
-  sum(DISTINCT substr(src.value, 5)),
-  count(DISTINCT substr(src.value, 5));
-
-SELECT dest1.* FROM dest1;

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby3_multi_distinct.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby3_multi_distinct.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby3_multi_distinct.q
deleted file mode 100644
index 1a28477..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby3_multi_distinct.q
+++ /dev/null
@@ -1,36 +0,0 @@
-set hive.map.aggr=false;
-set hive.groupby.skewindata=true;
-
-CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE, c10 DOUBLE, c11 DOUBLE) STORED AS TEXTFILE;
-
-EXPLAIN
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT 
-  sum(substr(src.value,5)), 
-  avg(substr(src.value,5)), 
-  avg(DISTINCT substr(src.value,5)), 
-  max(substr(src.value,5)),
-  min(substr(src.value,5)), 
-  std(substr(src.value,5)),
-  stddev_samp(substr(src.value,5)),
-  variance(substr(src.value,5)),
-  var_samp(substr(src.value,5)),
-  sum(DISTINCT substr(src.value, 5)),
-  count(DISTINCT substr(src.value, 5));
-  
-
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT 
-  sum(substr(src.value,5)), 
-  avg(substr(src.value,5)), 
-  avg(DISTINCT substr(src.value,5)), 
-  max(substr(src.value,5)), 
-  min(substr(src.value,5)), 
-  std(substr(src.value,5)),
-  stddev_samp(substr(src.value,5)),
-  variance(substr(src.value,5)),
-  var_samp(substr(src.value,5)),
-  sum(DISTINCT substr(src.value, 5)),
-  count(DISTINCT substr(src.value, 5));
-
-SELECT dest1.* FROM dest1;
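The four groupby*_multi_distinct tests above rely on Hive's restriction that, when hive.groupby.skewindata=true, a query may not aggregate over more than one DISTINCT expression. The accepted single-distinct shape, for contrast:

    set hive.groupby.skewindata=true;
    SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5))
    FROM src
    GROUP BY substr(src.key,1,1);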

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_cube1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_cube1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_cube1.q
deleted file mode 100644
index a0bc177..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_cube1.q
+++ /dev/null
@@ -1,4 +0,0 @@
-set hive.map.aggr=false;
-
-SELECT key, count(distinct value) FROM src GROUP BY key with cube;
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_cube2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_cube2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_cube2.q
deleted file mode 100644
index f8ecb6a..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_cube2.q
+++ /dev/null
@@ -1,4 +0,0 @@
-set hive.map.aggr=true;
-
-SELECT key, value, count(distinct value) FROM src GROUP BY key, value with cube;
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_id1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_id1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_id1.q
deleted file mode 100644
index ac5b6f7..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_id1.q
+++ /dev/null
@@ -1,4 +0,0 @@
-CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-
-SELECT GROUPING__ID FROM T1;
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets1.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets1.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets1.q
deleted file mode 100644
index ec6b16b..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets1.q
+++ /dev/null
@@ -1,5 +0,0 @@
-CREATE TABLE T1(a STRING, b STRING, c STRING);
-
--- Check for empty grouping set
-SELECT * FROM T1 GROUP BY a GROUPING SETS (());
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets2.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets2.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets2.q
deleted file mode 100644
index c988e04..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets2.q
+++ /dev/null
@@ -1,4 +0,0 @@
-CREATE TABLE T1(a STRING, b STRING, c STRING);
-
--- Check for multiple empty grouping sets
-SELECT * FROM T1 GROUP BY b GROUPING SETS ((), (), ());

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets3.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets3.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets3.q
deleted file mode 100644
index 3e73552..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets3.q
+++ /dev/null
@@ -1,4 +0,0 @@
-CREATE TABLE T1(a STRING, b STRING, c STRING); 
-
--- Grouping sets expression is not in GROUP BY clause
-SELECT a FROM T1 GROUP BY a GROUPING SETS (a, b);

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets4.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets4.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets4.q
deleted file mode 100644
index cf6352c..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets4.q
+++ /dev/null
@@ -1,4 +0,0 @@
-CREATE TABLE T1(a STRING, b STRING, c STRING); 
-
--- Expression 'a' is not in GROUP BY clause
-SELECT a FROM T1 GROUP BY b GROUPING SETS (b);

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets5.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets5.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets5.q
deleted file mode 100644
index 7df3318..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets5.q
+++ /dev/null
@@ -1,5 +0,0 @@
-CREATE TABLE T1(a STRING, b STRING, c STRING);
-
--- Alias in GROUPING SETS
-SELECT a as c, count(*) FROM T1 GROUP BY c GROUPING SETS (c);
-

http://git-wip-us.apache.org/repos/asf/spark/blob/b28fe448/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets6.q
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets6.q b/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets6.q
deleted file mode 100644
index 2783047..0000000
--- a/sql/hive/src/test/resources/ql/src/test/queries/clientnegative/groupby_grouping_sets6.q
+++ /dev/null
@@ -1,8 +0,0 @@
-set hive.new.job.grouping.set.cardinality=2;
-
-CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE; 
-
--- Since 4 grouping sets would be generated for the query below, an additional MR job should be created
--- This is not allowed with distincts.
-SELECT a, b, count(distinct c) from T1 group by a, b with cube;
-
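The groupby_cube* and groupby_grouping_sets* tests above cover forms Hive rejects: DISTINCT aggregates in cube/grouping-sets plans that would require an extra MR job, empty grouping sets, GROUPING__ID without a GROUP BY, and grouping-set expressions or aliases that are not GROUP BY keys. For contrast, a grouping-sets query of the shape Hive accepts (reusing the T1 columns from the tests; the statement itself is illustrative):

    SELECT a, b, count(*)
    FROM T1
    GROUP BY a, b
    GROUPING SETS ((a, b), (a), (b));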

