From: kgyrtkirk@apache.org
To: commits@hive.apache.org
Date: Mon, 22 Jan 2018 08:01:14 -0000
Subject: [11/17] hive git commit: HIVE-18061: q.outs: be more selective with masking hdfs paths (Laszlo Bodor via Zoltan Haindrich)

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/llap/table_nonprintable.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/table_nonprintable.q.out b/ql/src/test/results/clientpositive/llap/table_nonprintable.q.out
index 3fa30f7..8221b8c 100644
--- a/ql/src/test/results/clientpositive/llap/table_nonprintable.q.out
+++ b/ql/src/test/results/clientpositive/llap/table_nonprintable.q.out
@@ -1,21 +1,22 @@
 Found 1 items
-#### A masked pattern was here ####
+-rw-r--r-- 3 ### USER ### ### GROUP ### 16 ### HDFS DATE ### hdfs://### HDFS PATH ###¢Bar/in1.txt
 Found 1 items
-#### A masked pattern was here ####
+-rw-r--r-- 3 ### USER ### ### GROUP ### 16 ### HDFS DATE ### hdfs://### HDFS PATH ###Foo/in1.txt
 Found 2 items
-#### A masked pattern was here ####
+drwxr-xr-x - ### USER ### ### GROUP ### 0 ### HDFS DATE ### hdfs://### HDFS PATH ###Foo
+drwxr-xr-x - ### USER ### ### GROUP ### 0 ### HDFS DATE ### hdfs://### HDFS PATH ###¢Bar
 PREHOOK: query: create external table table_external (c1 int, c2 int) partitioned by (day string)
-#### A masked pattern was here ####
+location 'hdfs://### HDFS PATH ###'
 PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: database:default
 PREHOOK: Output: default@table_external
 POSTHOOK: query: create external table table_external (c1 int, c2 int) partitioned by (day string)
-#### A masked pattern was here ####
+location 'hdfs://### HDFS PATH ###'
 POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@table_external
 PREHOOK: query: msck repair table table_external
@@ -28,7 +29,8 @@ Partitions not in metastore: table_external:day=¢Bar
 Repair: Cannot add partition table_external:day=Foo due to invalid characters in the name
 #### A masked pattern was here ####
 Found 2 items
-#### A masked pattern was here ####
+drwxr-xr-x - ### USER ### ### GROUP ### 0 ### HDFS DATE ### hdfs://### HDFS PATH ###Foo
+drwxr-xr-x - ### USER ### ### GROUP ### 0 ### HDFS DATE ### hdfs://### HDFS PATH ###¢Bar
 PREHOOK: query: show partitions table_external
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@table_external
@@ -40,12 +42,12 @@ PREHOOK: query: select * from table_external
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table_external
 PREHOOK: Input: default@table_external@day=¢Bar
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from table_external
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_external
 POSTHOOK: Input: default@table_external@day=¢Bar
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 NULL 35 ¢Bar
 48 NULL ¢Bar
 100 100 ¢Bar

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/llap/temp_table_external.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/temp_table_external.q.out b/ql/src/test/results/clientpositive/llap/temp_table_external.q.out
index 54954b5..910d4f0 100644
--- a/ql/src/test/results/clientpositive/llap/temp_table_external.q.out
+++ b/ql/src/test/results/clientpositive/llap/temp_table_external.q.out
@@ -1,22 +1,23 @@
 Found 1 items
-#### A masked pattern was here ####
+-rw-r--r-- 3 ### USER ### ### GROUP ### 16 ### HDFS DATE ### hdfs://### HDFS PATH ###
+PREHOOK: query: create temporary external table temp_table_external (c1 int, c2 int) location 'hdfs://### HDFS PATH ###'
 PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: database:default
 PREHOOK: Output: default@temp_table_external
-#### A masked pattern was here ####
+POSTHOOK: query: create temporary external table temp_table_external (c1 int, c2 int) location 'hdfs://### HDFS PATH ###'
 POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@temp_table_external
 PREHOOK: query: select * from temp_table_external
 PREHOOK: type: QUERY
 PREHOOK: Input: default@temp_table_external
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from temp_table_external
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@temp_table_external
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 NULL 35
 48 NULL
 100 100
@@ -29,4 +30,5 @@ POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@temp_table_external
 POSTHOOK: Output: default@temp_table_external
 Found 1 items
+-rw-r--r-- 3 ### USER ### ### GROUP ### 16 ### HDFS DATE ### hdfs://### HDFS PATH ###
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition.q.out b/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition.q.out
index f84c597..da93bb3 100644
--- a/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition.q.out
@@ -18,11 +18,11 @@ POSTHOOK: Lineage: dummy.i SCRIPT []
 PREHOOK: query: select * from dummy
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dummy
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from dummy
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dummy
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1
 PREHOOK: query: create table partunion1(id1 int) partitioned by (part1 string)
 PREHOOK: type: CREATETABLE
@@ -205,11 +205,11 @@ PREHOOK: query: select * from partunion1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partunion1
 PREHOOK: Input: default@partunion1@part1=2014
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from partunion1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partunion1
 POSTHOOK: Input: default@partunion1@part1=2014
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1 2014
 2 2014

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition_2.q.out b/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition_2.q.out
index 5834028..f2c02a8 100644
--- a/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_union_dynamic_partition_2.q.out
@@ -26,11 +26,11 @@ POSTHOOK: Lineage: dummy.i SCRIPT []
 PREHOOK: query: select * from dummy
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dummy
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from dummy
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dummy
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1
 PREHOOK: query: create table partunion1(id1 int) partitioned by (part1 string) stored as orc
 PREHOOK: type: CREATETABLE
@@ -181,7 +181,7 @@ STAGE PLANS:
 Move Operator
 files:
 hdfs directory: true
-#### A masked pattern was here ####
+ destination: hdfs://### HDFS PATH ###
 Stage: Stage-2
 Dependency Collection
@@ -232,7 +232,7 @@ STAGE PLANS:
 Move Operator
 files:
 hdfs directory: true
-#### A masked pattern was here ####
+ destination: hdfs://### HDFS PATH ###
 PREHOOK: query: insert into table partunion1 partition(part1)
 select 1 as id1, '2014' as part1 from dummy
@@ -253,12 +253,12 @@ PREHOOK: query: select * from partunion1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partunion1
 PREHOOK: Input: default@partunion1@part1=2014
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from partunion1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partunion1
 POSTHOOK: Input: default@partunion1@part1=2014
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1 2014
 2 2014
 PREHOOK: query: drop table dummy

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out b/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
index fe99c38..7f7e782 100644
--- a/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
@@ -255,11 +255,11 @@ POSTHOOK: Lineage: tmptable.value EXPRESSION [(src)s1.null, (src)s2.null, (src)s
 PREHOOK: query: select * from tmptable x sort by x.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tmptable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from tmptable x sort by x.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tmptable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 tst1 500
 tst2 500
 tst3 500
@@ -536,11 +536,11 @@ POSTHOOK: Lineage: tmptable12.value EXPRESSION [(src)s1.null, (src1)s2.null, (sr
 PREHOOK: query: select * from tmptable12 x sort by x.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tmptable12
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from tmptable12 x sort by x.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tmptable12
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 tst1 500
 tst2 25
 tst3 1000
@@ -632,12 +632,12 @@ PREHOOK: query: select unionsrc.key, unionsrc.value FROM (select s1.key as key,
 select s2.key as key, s2.value as value from src s2) unionsrc
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select unionsrc.key, unionsrc.value FROM (select s1.key as key, s1.value as value from src s1 UNION DISTINCT
 select s2.key as key, s2.value as value from src s2) unionsrc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0
 103 val_103
 105 val_105
@@ -1236,11 +1236,11 @@ POSTHOOK: Lineage: dest2.val2 EXPRESSION [(src)s1.null, (src)s2.FieldSchema(name
 PREHOOK: query: SELECT DEST1.* FROM DEST1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT DEST1.* FROM DEST1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 1
 10 1
 100 1
@@ -1554,11 +1554,11 @@ tst1 1
 PREHOOK: query: SELECT DEST2.* FROM DEST2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT DEST2.* FROM DEST2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0 1
 10 val_10 1
 100 val_100 1
@@ -2127,11 +2127,11 @@ POSTHOOK: Lineage: dest218.val2 EXPRESSION [(src)s1.null, (src)s2.FieldSchema(na
 PREHOOK: query: SELECT DEST118.* FROM DEST118 SORT BY DEST118.key, DEST118.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest118
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT DEST118.* FROM DEST118 SORT BY DEST118.key, DEST118.value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest118
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0
 10 val_10
 100 val_100
@@ -2445,11 +2445,11 @@ tst1 500
 PREHOOK: query: SELECT DEST218.* FROM DEST218 SORT BY DEST218.key, DEST218.val1, DEST218.val2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest218
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT DEST218.* FROM DEST218 SORT BY DEST218.key, DEST218.val1, DEST218.val2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest218
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0 val_0
 10 val_10 val_10
 100 val_100 val_100
@@ -3028,11 +3028,11 @@ POSTHOOK: Lineage: dest219.val2 EXPRESSION [(src)s1.null, (src)s2.FieldSchema(na
 PREHOOK: query: SELECT DEST119.* FROM DEST119 SORT BY DEST119.key, DEST119.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest119
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT DEST119.* FROM DEST119 SORT BY DEST119.key, DEST119.value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest119
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 1
 10 1
 100 1
@@ -3346,11 +3346,11 @@ tst1 1
 PREHOOK: query: SELECT DEST219.* FROM DEST219 SORT BY DEST219.key, DEST219.val1, DEST219.val2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest219
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT DEST219.* FROM DEST219 SORT BY DEST219.key, DEST219.val1, DEST219.val2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest219
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0 val_0
 10 val_10 val_10
 100 val_100 val_100
@@ -3778,9 +3778,9 @@ STAGE PLANS:
 tag: -1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [dst_union22_delta]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: ds=1
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -3795,6 +3795,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:string:string:string:string:string
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.dst_union22_delta
 numFiles 1
 numRows 500
@@ -3817,6 +3818,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:string:string:string:string:string
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.dst_union22_delta
 partition_columns ds
 partition_columns.types string
@@ -3853,9 +3855,9 @@ STAGE PLANS:
 value expressions: _col1 (type: string), _col2 (type: string)
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [a]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: ds=1
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -3870,6 +3872,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:string:string:string
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.dst_union22
 numFiles 1
 numRows 500
@@ -3892,6 +3895,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:string:string:string
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.dst_union22
 partition_columns ds
 partition_columns.types string
@@ -3928,9 +3932,9 @@ STAGE PLANS:
 value expressions: _col1 (type: string), _col2 (type: string)
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [dst_union22_delta]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: ds=1
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -3945,6 +3949,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:string:string:string:string:string
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.dst_union22_delta
 numFiles 1
 numRows 500
@@ -3967,6 +3972,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:string:string:string:string:string
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.dst_union22_delta
 partition_columns ds
 partition_columns.types string
@@ -3991,11 +3997,11 @@ STAGE PLANS:
 File Output Operator
 compressed: false
 GlobalTableId: 1
-#### A masked pattern was here ####
+ directory: hdfs://### HDFS PATH ###
 NumFilesPerFileSink: 1
 Static Partition Specification: ds=2/
 Statistics: Num rows: 332 Data size: 104486 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
+ Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -4006,6 +4012,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:string:string:string
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.dst_union22
 partition_columns ds
 partition_columns.types string
@@ -4054,10 +4061,10 @@ STAGE PLANS:
 File Output Operator
 compressed: false
 GlobalTableId: 0
-#### A masked pattern was here ####
+ directory: hdfs://### HDFS PATH ###
 NumFilesPerFileSink: 1
 Statistics: Num rows: 1 Data size: 1845 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
+ Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4119,7 +4126,7 @@ STAGE PLANS:
 partition:
 ds 2
 replace: true
-#### A masked pattern was here ####
+ source: hdfs://### HDFS PATH ###
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -4130,6 +4137,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:string:string:string
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.dst_union22
 partition_columns ds
 partition_columns.types string
@@ -4143,7 +4151,7 @@ STAGE PLANS:
 Stage: Stage-3
 Stats Work
 Basic Stats Work:
-#### A masked pattern was here ####
+ Stats Aggregation Key Prefix: hdfs://### HDFS PATH ###
 Column Stats Desc:
 Columns: k1, k2, k3, k4
 Column Types: string, string, string, string
@@ -4192,12 +4200,12 @@ PREHOOK: query: select * from dst_union22 where ds = '2'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dst_union22
 PREHOOK: Input: default@dst_union22@ds=2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from dst_union22 where ds = '2'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dst_union22
 POSTHOOK: Input: default@dst_union22@ds=2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 103 val_103 103 val_103 2
 113 val_113 113 val_113 2
 120 val_120 120 val_120 2
@@ -4628,7 +4636,7 @@ from (
 select key as key2, value as value2
 from src) s
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select s.key2, s.value2
 from (
 select transform(key, value) using 'cat' as (key2, value2)
@@ -4637,7 +4645,7 @@ from (
 select key as key2, value as value2
 from src) s
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0
 103 val_103
 105 val_105
@@ -5063,9 +5071,9 @@ STAGE PLANS:
 tag: -1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [src2]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src2
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5078,6 +5086,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src2
 numFiles 1
 numRows 309
@@ -5099,6 +5108,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src2
 numFiles 1
 numRows 309
@@ -5139,9 +5149,9 @@ STAGE PLANS:
 value expressions: _col1 (type: bigint)
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [src5]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src5
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5154,6 +5164,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src5
 numFiles 1
 numRows 309
@@ -5175,6 +5186,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src5
 numFiles 1
 numRows 309
@@ -5217,9 +5229,9 @@ STAGE PLANS:
 tag: -1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [src3]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src3
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5232,6 +5244,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src3
 numFiles 1
 numRows 309
@@ -5253,6 +5266,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src3
 numFiles 1
 numRows 309
@@ -5295,9 +5309,9 @@ STAGE PLANS:
 tag: -1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [src4]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src4
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5310,6 +5324,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src4
 numFiles 1
 numRows 309
@@ -5331,6 +5346,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src4
 numFiles 1
 numRows 309
@@ -5420,10 +5436,10 @@ STAGE PLANS:
 File Output Operator
 compressed: false
 GlobalTableId: 0
-#### A masked pattern was here ####
+ directory: hdfs://### HDFS PATH ###
 NumFilesPerFileSink: 1
 Statistics: Num rows: 77 Data size: 13919 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
+ Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5466,7 +5482,7 @@ PREHOOK: Input: default@src2
 PREHOOK: Input: default@src3
 PREHOOK: Input: default@src4
 PREHOOK: Input: default@src5
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select s.key, s.count from (
 select key, count from src2 where key < 10
 UNION DISTINCT
@@ -5481,7 +5497,7 @@ POSTHOOK: Input: default@src2
 POSTHOOK: Input: default@src3
 POSTHOOK: Input: default@src4
 POSTHOOK: Input: default@src5
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 1
 0 3
 2 1
@@ -5552,9 +5568,9 @@ STAGE PLANS:
 tag: -1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [src2]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src2
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5567,6 +5583,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src2
 numFiles 1
 numRows 309
@@ -5588,6 +5605,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src2
 numFiles 1
 numRows 309
@@ -5630,9 +5648,9 @@ STAGE PLANS:
 tag: -1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [src3]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src3
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5645,6 +5663,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src3
 numFiles 1
 numRows 309
@@ -5666,6 +5685,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src3
 numFiles 1
 numRows 309
@@ -5703,9 +5723,9 @@ STAGE PLANS:
 tag: 0
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [a]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src4
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5718,6 +5738,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src4
 numFiles 1
 numRows 309
@@ -5739,6 +5760,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src4
 numFiles 1
 numRows 309
@@ -5777,9 +5799,9 @@ STAGE PLANS:
 value expressions: _col1 (type: bigint)
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [b]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src5
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5792,6 +5814,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src5
 numFiles 1
 numRows 309
@@ -5813,6 +5836,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src5
 numFiles 1
 numRows 309
@@ -5859,10 +5883,10 @@ STAGE PLANS:
 File Output Operator
 compressed: false
 GlobalTableId: 0
-#### A masked pattern was here ####
+ directory: hdfs://### HDFS PATH ###
 NumFilesPerFileSink: 1
 Statistics: Num rows: 108 Data size: 19391 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
+ Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5930,7 +5954,7 @@ PREHOOK: Input: default@src2
 PREHOOK: Input: default@src3
 PREHOOK: Input: default@src4
 PREHOOK: Input: default@src5
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select s.key, s.count from (
 select key, count from src2 where key < 10
 UNION DISTINCT
@@ -5943,7 +5967,7 @@ POSTHOOK: Input: default@src2
 POSTHOOK: Input: default@src3
 POSTHOOK: Input: default@src4
 POSTHOOK: Input: default@src5
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 3
 2 1
 4 1
@@ -6013,9 +6037,9 @@ STAGE PLANS:
 tag: -1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [src2]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src2
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -6028,6 +6052,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src2
 numFiles 1
 numRows 309
@@ -6049,6 +6074,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src2
 numFiles 1
 numRows 309
@@ -6086,9 +6112,9 @@ STAGE PLANS:
 tag: 1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [b]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src5
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -6101,6 +6127,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src5
 numFiles 1
 numRows 309
@@ -6122,6 +6149,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src5
 numFiles 1
 numRows 309
@@ -6164,9 +6192,9 @@ STAGE PLANS:
 tag: -1
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [src3]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src3
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -6179,6 +6207,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src3
 numFiles 1
 numRows 309
@@ -6200,6 +6229,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src3
 numFiles 1
 numRows 309
@@ -6237,9 +6267,9 @@ STAGE PLANS:
 tag: 0
 auto parallelism: true
 Path -> Alias:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ### [a]
 Path -> Partition:
-#### A masked pattern was here ####
+ hdfs://### HDFS PATH ###
 Partition
 base file name: src4
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -6252,6 +6282,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src4
 numFiles 1
 numRows 309
@@ -6273,6 +6304,7 @@ STAGE PLANS:
 columns.comments
 columns.types string:bigint
 #### A masked pattern was here ####
+ location hdfs://### HDFS PATH ###
 name default.src4
 numFiles 1
 numRows 309
@@ -6319,10 +6351,10 @@ STAGE PLANS:
 File Output Operator
 compressed: false
 GlobalTableId: 0
-#### A masked pattern was here ####
+ directory: hdfs://### HDFS PATH ###
 NumFilesPerFileSink: 1
 Statistics: Num rows: 79 Data size: 14280 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
+ Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -6410,7 +6442,7 @@ PREHOOK: Input: default@src2
 PREHOOK: Input: default@src3
 PREHOOK: Input: default@src4
 PREHOOK: Input: default@src5
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select s.key, s.count from (
 select key, count from src2 where key < 10
 UNION DISTINCT
@@ -6423,7 +6455,7 @@ POSTHOOK: Input: default@src2
 POSTHOOK: Input: default@src3
 POSTHOOK: Input: default@src4
 POSTHOOK: Input: default@src5
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 1
 0 3
 2 1
@@ -6648,7 +6680,7 @@ STAGE PLANS:
 Move Operator
 files:
 hdfs directory: true
-#### A masked pattern was here ####
+ destination: hdfs://### HDFS PATH ###
 PREHOOK: query: EXPLAIN
 SELECT
@@ -6859,7 +6891,7 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT
 count(1) as counts,
 key,
@@ -6884,7 +6916,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1 0 val_0
 1 10 val_10
 1 100 val_100
@@ -7218,7 +7250,7 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT
 count(1) as counts,
 key,
@@ -7243,7 +7275,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1 0 val_0
 1 103 val_103
 1 105 val_105
@@ -7577,7 +7609,7 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT
 count(1) as counts,
 key,
@@ -7602,7 +7634,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1 0 val_0
 1 103 val_103
 1 105 val_105
@@ -8076,13 +8108,13 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@dim_pho
 PREHOOK: Input: default@jackson_sev_add
 PREHOOK: Input: default@jackson_sev_same
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select b.* from jackson_sev_same a join (select * from dim_pho UNION DISTINCT select * from jackson_sev_add)b on a.key=b.key and b.key=97
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dim_pho
 POSTHOOK: Input: default@jackson_sev_add
 POSTHOOK: Input: default@jackson_sev_same
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 97 val_97
 97 val_97
 PREHOOK: query: create table union_subq_union(key int, value string)
@@ -8364,11 +8396,11 @@ POSTHOOK: Lineage: union_subq_union.value EXPRESSION [(src)src.FieldSchema(name:
 PREHOOK: query: select * from union_subq_union order by key, value limit 20
 PREHOOK: type: QUERY
 PREHOOK: Input: default@union_subq_union
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from union_subq_union order by key, value limit 20
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@union_subq_union
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0
 2 val_2
 4 val_4
@@ -8624,11 +8656,11 @@ POSTHOOK: Lineage: union_subq_union29.value EXPRESSION [(src)src.FieldSchema(nam
 PREHOOK: query: select * from union_subq_union29 order by key, value limit 20
 PREHOOK: type: QUERY
 PREHOOK: Input: default@union_subq_union29
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from union_subq_union29 order by key, value limit 20
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@union_subq_union29
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0
 2 val_2
 4 val_4
@@ -8923,11 +8955,11 @@ POSTHOOK: Lineage: union_out.id EXPRESSION []
 PREHOOK: query: select * from union_out
 PREHOOK: type: QUERY
 PREHOOK: Input: default@union_out
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from union_out
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@union_out
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 4
 1
 2
@@ -9280,11 +9312,11 @@ POSTHOOK: Lineage: union_subq_union30.value EXPRESSION [(src)src.FieldSchema(nam
 PREHOOK: query: select * from union_subq_union30 order by key, value limit 20
 PREHOOK: type: QUERY
 PREHOOK: Input: default@union_subq_union30
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from union_subq_union30 order by key, value limit 20
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@union_subq_union30
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0
 2 val_2
 4 val_4
@@ -9630,11 +9662,11 @@ POSTHOOK: Lineage: t4.value EXPRESSION [(t1)t1.FieldSchema(name:value, type:stri
 PREHOOK: query: select * from t3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from t3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 1
 2 1
 4 1
@@ -9644,11 +9676,11 @@ POSTHOOK: Input: default@t3
 PREHOOK: query: select * from t4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t4
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from t4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t4
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 val_0 1
 val_2 1
 val_4 1
@@ -9980,11 +10012,11 @@ POSTHOOK: Lineage: t6.cnt EXPRESSION [(t1)t1.null, (t2)t2.null, ]
 PREHOOK: query: select * from t5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t5
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from t5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t5
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 3
 2 1
 4 1
@@ -9994,11 +10026,11 @@ POSTHOOK: Input: default@t5
 PREHOOK: query: select * from t6
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t6
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from t6
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t6
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 3
 2 1
 4 1
@@ -10325,11 +10357,11 @@ POSTHOOK: Lineage: t8.cnt EXPRESSION [(t1)t1.null, (t9)t9.null, ]
 PREHOOK: query: select * from t7
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t7
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from t7
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t7
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 1
 2 1
 4 1
@@ -10339,11 +10371,11 @@ POSTHOOK: Input: default@t7
 PREHOOK: query: select * from t8
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t8
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from t8
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t8
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 1
 2 1
 4 1
@@ -10444,7 +10476,7 @@ SELECT CAST(key AS BIGINT) AS key FROM t2) a
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1
 PREHOOK: Input: default@t2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM
 (SELECT CAST(key AS DOUBLE) AS key FROM t1
 UNION DISTINCT
@@ -10452,7 +10484,7 @@ SELECT CAST(key AS BIGINT) AS key FROM t2) a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1
 POSTHOOK: Input: default@t2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0.0
 2.0
 4.0
@@ -10588,7 +10620,7 @@ SELECT CAST(key AS DOUBLE) AS key FROM t2) a
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1
 PREHOOK: Input: default@t2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM
 (SELECT CAST(a.key AS BIGINT) AS key FROM t1 a JOIN t2 b ON a.key = b.key
 UNION DISTINCT
@@ -10596,7 +10628,7 @@ SELECT CAST(key AS DOUBLE) AS key FROM t2) a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1
 POSTHOOK: Input: default@t2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0.0
 2.0
 4.0
@@ -10732,7 +10764,7 @@ SELECT CAST(a.key AS BIGINT) AS key FROM t1 a JOIN t2 b ON a.key = b.key) a
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1
 PREHOOK: Input: default@t2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM
 (SELECT CAST(key AS DOUBLE) AS key FROM t2
 UNION DISTINCT
@@ -10740,7 +10772,7 @@ SELECT CAST(a.key AS BIGINT) AS key FROM t1 a JOIN t2 b ON a.key = b.key) a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1
 POSTHOOK: Input: default@t2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0.0
 2.0
 4.0
@@ -10876,7 +10908,7 @@ SELECT CAST(key AS DOUBLE) AS key, CAST(key AS STRING) AS value FROM t2) a
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1
 PREHOOK: Input: default@t2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM
 (SELECT CAST(a.key AS BIGINT) AS key, CAST(b.key AS CHAR(20)) AS value FROM t1 a JOIN t2 b ON a.key = b.key
 UNION DISTINCT
@@ -10884,7 +10916,7 @@ SELECT CAST(key AS DOUBLE) AS key, CAST(key AS STRING) AS value FROM t2) a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1
 POSTHOOK: Input: default@t2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0.0 0
 2.0 2
 4.0 4
@@ -11020,7 +11052,7 @@ SELECT CAST(a.key AS BIGINT) AS key, CAST(b.key AS VARCHAR(20)) AS value FROM t1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1
 PREHOOK: Input: default@t2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM
 (SELECT CAST(key AS DOUBLE) AS key, CAST(key AS STRING) AS value FROM t2
 UNION DISTINCT
@@ -11028,7 +11060,7 @@ SELECT CAST(a.key AS BIGINT) AS key, CAST(b.key AS VARCHAR(20)) AS value FROM t1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1
 POSTHOOK: Input: default@t2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0.0 0
 2.0 2
 4.0 4
@@ -11242,11 +11274,11 @@ POSTHOOK: Lineage: test_src.value EXPRESSION [(src)src.FieldSchema(name:value, t
 PREHOOK: query: SELECT COUNT(*) FROM test_src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@test_src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT COUNT(*) FROM test_src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 310
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE test_src
 SELECT key, value FROM (
@@ -11443,11 +11475,11 @@ POSTHOOK: Lineage: test_src.value EXPRESSION [(src)src.null, (src)src.FieldSchem
 PREHOOK: query: SELECT COUNT(*) FROM test_src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@test_src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT COUNT(*) FROM test_src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 310
 PREHOOK: query: -- union34.q
@@ -11719,7 +11751,7 @@ PREHOOK: Input: default@src10_1
 PREHOOK: Input: default@src10_2
 PREHOOK: Input: default@src10_3
 PREHOOK: Input: default@src10_4
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM (
 SELECT sub1.key,sub1.value FROM (SELECT * FROM src10_1) sub1 JOIN (SELECT * FROM src10_2) sub0 ON (sub0.key = sub1.key)
 UNION DISTINCT
@@ -11730,7 +11762,7 @@ POSTHOOK: Input: default@src10_1
 POSTHOOK: Input: default@src10_2
 POSTHOOK: Input: default@src10_3
 POSTHOOK: Input: default@src10_4
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 238 val_238
 484 val_484
 278 val_278
@@ -11918,7 +11950,7 @@ PREHOOK: Input: default@src10_1
 PREHOOK: Input: default@src10_2
 PREHOOK: Input: default@src10_3
 PREHOOK: Input: default@src10_4
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM (
 SELECT sub1.key,sub1.value FROM (SELECT * FROM src10_1) sub1 JOIN (SELECT * FROM src10_2) sub0 ON (sub0.key = sub1.key)
 UNION DISTINCT
@@ -11929,7 +11961,7 @@ POSTHOOK: Input: default@src10_1
 POSTHOOK: Input: default@src10_2
 POSTHOOK: Input: default@src10_3
 POSTHOOK: Input: default@src10_4
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 238 val_238
 484 val_484
 278 val_278

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/parallel_orderby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parallel_orderby.q.out b/ql/src/test/results/clientpositive/parallel_orderby.q.out
index 2b2bcc7..571362d 100644
--- a/ql/src/test/results/clientpositive/parallel_orderby.q.out
+++ b/ql/src/test/results/clientpositive/parallel_orderby.q.out
@@ -67,7 +67,7 @@ STAGE PLANS:
 Move Operator
 files:
 hdfs directory: true
-#### A masked pattern was here ####
+ destination: hdfs://### HDFS PATH ###
 Stage: Stage-3
 Create Table Operator:
@@ -131,11 +131,11 @@ Storage Desc Params:
 PREHOOK: query: select * from total_ordered
 PREHOOK: type: QUERY
 PREHOOK: Input: default@total_ordered
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from total_ordered
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@total_ordered
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 128 val_128
 128 val_128
 150 val_150
@@ -241,11 +241,11 @@ Storage Desc Params:
 PREHOOK: query: select * from total_ordered
 PREHOOK: type: QUERY
 PREHOOK: Input: default@total_ordered
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from total_ordered
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@total_ordered
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 128 val_128
 128 val_128
 150 val_150

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/perf/tez/query39.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query39.q.out b/ql/src/test/results/clientpositive/perf/tez/query39.q.out
index 8e91494..38d5c26 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query39.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query39.q.out
@@ -221,7 +221,7 @@ PREHOOK: Input: default@date_dim
 PREHOOK: Input: default@inventory
 PREHOOK: Input: default@item
 PREHOOK: Input: default@warehouse
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: with inv as
 (select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy
 ,stdev,mean, case mean when 0 then null else stdev/mean end cov
@@ -252,4 +252,4 @@ POSTHOOK: Input: default@date_dim
 POSTHOOK: Input: default@inventory
 POSTHOOK: Input: default@item
 POSTHOOK: Input: default@warehouse
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/root_dir_external_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/root_dir_external_table.q.out b/ql/src/test/results/clientpositive/root_dir_external_table.q.out
index 238a409..7ba2b5b 100644
--- a/ql/src/test/results/clientpositive/root_dir_external_table.q.out
+++ b/ql/src/test/results/clientpositive/root_dir_external_table.q.out
@@ -1,26 +1,28 @@
-#### A masked pattern was here ####
+PREHOOK: query: insert overwrite directory "hdfs://### HDFS PATH ###" select key from src where (key < 20) order by key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
+POSTHOOK: query: insert overwrite directory "hdfs://### HDFS PATH ###" select key from src where (key < 20) order by key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
+PREHOOK: query: create external table roottable (key string) row format delimited fields terminated by '\\t' stored as textfile location 'hdfs:///'
 PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: database:default
 PREHOOK: Output: default@roottable
-#### A masked pattern was here ####
+POSTHOOK: query: create external table roottable (key string) row format delimited fields terminated by '\\t' stored as textfile location 'hdfs://### HDFS PATH ###'
 POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@roottable
 PREHOOK: query: select count(*) from roottable
 PREHOOK: type: QUERY
 PREHOOK: Input: default@roottable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select count(*) from roottable
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@roottable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 20
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/scriptfile1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/scriptfile1.q.out b/ql/src/test/results/clientpositive/scriptfile1.q.out
index cf718cc..f105cf4 100644
--- a/ql/src/test/results/clientpositive/scriptfile1.q.out
+++ b/ql/src/test/results/clientpositive/scriptfile1.q.out
@@ -31,11 +31,11 @@ POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:strin
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 10 val_10
 100 val_100
 100 val_100

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out
index 91408df..9939e83 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out
@@ -195,7 +195,7 @@ PREHOOK: Input: default@bucket_big@day=day1/pri=1
 PREHOOK: Input: default@bucket_small
 PREHOOK: Input: default@bucket_small@pri=1
 PREHOOK: Input: default@bucket_small@pri=2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select
 a.key ,
 a.value ,
@@ -225,7 +225,7 @@ POSTHOOK: Input: default@bucket_big@day=day1/pri=1
 POSTHOOK: Input: default@bucket_small
 POSTHOOK: Input: default@bucket_small@pri=1
 POSTHOOK: Input: default@bucket_small@pri=2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0 val_0 day1 1
 0 val_0 val_0 day1 1
 0 val_0 val_0 day1 1

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out_spark
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out_spark b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out_spark
new file mode 100644
index 0000000..91408df
--- /dev/null
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_16.q.out_spark
@@ -0,0 +1,252 @@
+PREHOOK: query: CREATE TABLE stage_bucket_big
+(
+key BIGINT,
+value STRING
+)
+PARTITIONED BY (file_tag STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@stage_bucket_big
+POSTHOOK: query: CREATE TABLE stage_bucket_big
+(
+key BIGINT,
+value STRING
+)
+PARTITIONED BY (file_tag STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@stage_bucket_big
+PREHOOK: query: CREATE TABLE bucket_big
+(
+key BIGINT,
+value STRING
+)
+PARTITIONED BY (day STRING, pri bigint)
+clustered by (key) sorted by (key) into 12 buckets
+stored as RCFile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bucket_big
+POSTHOOK: query: CREATE TABLE bucket_big
+(
+key BIGINT,
+value STRING
+)
+PARTITIONED BY (day STRING, pri bigint)
+clustered by (key) sorted by (key) into 12 buckets
+stored as RCFile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@bucket_big
+PREHOOK: query: CREATE TABLE stage_bucket_small
+(
+key BIGINT,
+value string
+)
+PARTITIONED BY (file_tag STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@stage_bucket_small
+POSTHOOK: query: CREATE TABLE stage_bucket_small
+(
+key BIGINT,
+value string
+)
+PARTITIONED BY (file_tag STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@stage_bucket_small
+PREHOOK: query: CREATE TABLE bucket_small
+(
+key BIGINT,
+value string
+)
+PARTITIONED BY (pri bigint)
+clustered by (key) sorted by (key) into 12 buckets
+stored as RCFile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bucket_small
+POSTHOOK: query: CREATE TABLE bucket_small
+(
+key BIGINT,
+value string
+)
+PARTITIONED BY (pri bigint)
+clustered by (key) sorted by (key) into 12 buckets
+stored as RCFile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@bucket_small
+PREHOOK: query: load data local inpath '../../data/files/auto_sortmerge_join/big/000000_0' overwrite into table stage_bucket_small partition (file_tag='1')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@stage_bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/auto_sortmerge_join/big/000000_0' overwrite into table stage_bucket_small partition (file_tag='1')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@stage_bucket_small
+POSTHOOK: Output: default@stage_bucket_small@file_tag=1
+PREHOOK: query: load data local inpath '../../data/files/auto_sortmerge_join/big/000000_0' overwrite into table stage_bucket_small partition (file_tag='2')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@stage_bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/auto_sortmerge_join/big/000000_0' overwrite into table stage_bucket_small partition (file_tag='2')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@stage_bucket_small
+POSTHOOK: Output: default@stage_bucket_small@file_tag=2
+PREHOOK: query: insert overwrite table bucket_small partition(pri)
+select
+key,
+value,
+file_tag as pri
+from
+stage_bucket_small
+where file_tag between 1 and 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@stage_bucket_small
+PREHOOK: Input: default@stage_bucket_small@file_tag=1
+PREHOOK: Input: default@stage_bucket_small@file_tag=2
+PREHOOK: Output: default@bucket_small
+POSTHOOK: query: insert overwrite table bucket_small partition(pri)
+select
+key,
+value,
+file_tag as pri
+from
+stage_bucket_small
+where file_tag between 1 and 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stage_bucket_small
+POSTHOOK: Input: default@stage_bucket_small@file_tag=1
+POSTHOOK: Input: default@stage_bucket_small@file_tag=2
+POSTHOOK: Output: default@bucket_small@pri=1
+POSTHOOK: Output: default@bucket_small@pri=2
+POSTHOOK: Lineage: bucket_small PARTITION(pri=1).key SIMPLE [(stage_bucket_small)stage_bucket_small.FieldSchema(name:key, type:bigint, comment:null), ]
+POSTHOOK: Lineage: bucket_small PARTITION(pri=1).value SIMPLE [(stage_bucket_small)stage_bucket_small.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: bucket_small PARTITION(pri=2).key SIMPLE [(stage_bucket_small)stage_bucket_small.FieldSchema(name:key, type:bigint, comment:null), ]
+POSTHOOK: Lineage: bucket_small PARTITION(pri=2).value SIMPLE [(stage_bucket_small)stage_bucket_small.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: load data local inpath '../../data/files/auto_sortmerge_join/small/000000_0' overwrite into table stage_bucket_big partition (file_tag='1')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@stage_bucket_big
+POSTHOOK: query: load data local inpath '../../data/files/auto_sortmerge_join/small/000000_0' overwrite into table stage_bucket_big partition (file_tag='1')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@stage_bucket_big
+POSTHOOK: Output: default@stage_bucket_big@file_tag=1
+PREHOOK: query: insert overwrite table bucket_big partition(day,pri)
+select
+key,
+value,
+'day1' as day,
+1 as pri
+from
+stage_bucket_big
+where
+file_tag='1'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@stage_bucket_big
+PREHOOK: Input: default@stage_bucket_big@file_tag=1
+PREHOOK: Output: default@bucket_big
+POSTHOOK: query: insert overwrite table bucket_big partition(day,pri)
+select
+key,
+value,
+'day1' as day,
+1 as pri
+from
+stage_bucket_big
+where
+file_tag='1'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stage_bucket_big
+POSTHOOK: Input: default@stage_bucket_big@file_tag=1
+POSTHOOK: Output: default@bucket_big@day=day1/pri=1
+POSTHOOK: Lineage: bucket_big PARTITION(day=day1,pri=1).key SIMPLE [(stage_bucket_big)stage_bucket_big.FieldSchema(name:key, type:bigint, comment:null), ]
+POSTHOOK: Lineage: bucket_big PARTITION(day=day1,pri=1).value SIMPLE [(stage_bucket_big)stage_bucket_big.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: select
+a.key ,
+a.value ,
+b.value ,
+'day1' as day,
+1 as pri
+from
+(
+select
+key,
+value
+from bucket_big where day='day1'
+) a
+left outer join
+(
+select
+key,
+value
+from bucket_small
+where pri between 1 and 2
+) b
+on
+(a.key = b.key)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bucket_big
+POSTHOOK: Input: default@bucket_big@day=day1/pri=1
+POSTHOOK: Input: default@bucket_small
+POSTHOOK: Input: default@bucket_small@pri=1
+POSTHOOK: Input: default@bucket_small@pri=2
+#### A masked pattern was here ####
+0 val_0 val_0 day1 1
+0 val_0 val_0 day1 1
+0 val_0 val_0 day1 1
+0 val_0 val_0 day1 1
+0 val_0 val_0 day1 1
+0 val_0 val_0 day1 1
+103 val_103 val_103 day1 1
+103 val_103 val_103 day1 1
+103 val_103 val_103 day1 1
+103 val_103 val_103 day1 1
+169 val_169 val_169 day1 1
+169 val_169 val_169 day1 1
+169 val_169 val_169 day1 1
+169 val_169 val_169 day1 1
+169 val_169 val_169 day1 1
+169 val_169 val_169 day1 1
+169 val_169 val_169 day1 1
+169 val_169 val_169 day1 1
+172 val_172 val_172 day1 1
+172 val_172 val_172 day1 1
+172 val_172 val_172 day1 1
+172 val_172 val_172 day1 1
+374 val_374 val_374 day1 1
+374 val_374 val_374 day1 1

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket4.q.out b/ql/src/test/results/clientpositive/spark/bucket4.q.out
index 63683fc..4df95d5 100644
--- a/ql/src/test/results/clientpositive/spark/bucket4.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket4.q.out
@@ -46,9 +46,9 @@ STAGE PLANS:
                       value expressions: _col0 (type: string), _col1 (type: string)
                       auto parallelism: false
             Path -> Alias:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### [src]
             Path -> Partition:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ###
                 Partition
                   base file name: src
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -61,6 +61,7 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
+                    location hdfs://### HDFS PATH ###
                     name default.src
                     numFiles 1
                     numRows 500
@@ -82,6 +83,7 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
+                      location hdfs://### HDFS PATH ###
                       name default.src
                       numFiles 1
                       numRows 500
@@ -106,10 +108,10 @@ STAGE PLANS:
                 File Output Operator
                   compressed: false
                   GlobalTableId: 1
-#### A masked pattern was here ####
+                  directory: hdfs://### HDFS PATH ###
                   NumFilesPerFileSink: 2
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
+                  Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -123,6 +125,7 @@ STAGE PLANS:
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
+                        location hdfs://### HDFS PATH ###
                         name default.bucket4_1
                         numFiles 0
                         numRows 0
@@ -142,7 +145,7 @@ STAGE PLANS:
     Move Operator
       tables:
           replace: true
-#### A masked pattern was here ####
+          source: hdfs://### HDFS PATH ###
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -156,6 +159,7 @@ STAGE PLANS:
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
+                location hdfs://### HDFS PATH ###
                 name default.bucket4_1
                 numFiles 0
                 numRows 0
@@ -171,7 +175,7 @@ STAGE PLANS:
   Stage: Stage-2
     Stats Work
       Basic Stats Work:
-#### A masked pattern was here ####
+          Stats Aggregation Key Prefix: hdfs://### HDFS PATH ###
 
 PREHOOK: query: insert overwrite table bucket4_1
 select * from src
@@ -211,11 +215,11 @@ STAGE PLANS:
 PREHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucket4_1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucket4_1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0 val_0
 0 val_0
 0 val_0

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/bucket4.q.out_spark
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket4.q.out_spark b/ql/src/test/results/clientpositive/spark/bucket4.q.out_spark
new file mode 100644
index 0000000..63683fc
--- /dev/null
+++ b/ql/src/test/results/clientpositive/spark/bucket4.q.out_spark
@@ -0,0 +1,465 @@
+PREHOOK: query: CREATE TABLE bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bucket4_1
+POSTHOOK: query: CREATE TABLE bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@bucket4_1
+PREHOOK: query: explain extended
+insert overwrite table bucket4_1
+select * from src
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+insert overwrite table bucket4_1
+select * from src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  GatherStats: false
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: UDFToInteger(_col0) (type: int)
+                      null sort order: a
+                      sort order: +
+                      Map-reduce partition columns: UDFToInteger(_col0) (type: int)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      tag: -1
+                      value expressions: _col0 (type: string), _col1 (type: string)
+                      auto parallelism: false
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                    bucket_count -1
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.comments 'default','default'
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    numFiles 1
+                    numRows 500
+                    rawDataSize 5312
+                    serialization.ddl struct src { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 5812
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                      bucket_count -1
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      numFiles 1
+                      numRows 500
+                      rawDataSize 5312
+                      serialization.ddl struct src { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 5812
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 2
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                        SORTBUCKETCOLSPREFIX TRUE
+                        bucket_count 2
+                        bucket_field_name key
+                        column.name.delimiter ,
+                        columns key,value
+                        columns.comments 
+                        columns.types int:string
+#### A masked pattern was here ####
+                        name default.bucket4_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
+                        serialization.ddl struct bucket4_1 { i32 key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.bucket4_1
+                  TotalFiles: 2
+                  GatherStats: true
+                  MultiFileSpray: true
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                SORTBUCKETCOLSPREFIX TRUE
+                bucket_count 2
+                bucket_field_name key
+                column.name.delimiter ,
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.bucket4_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct bucket4_1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.bucket4_1
+
+  Stage: Stage-2
+    Stats Work
+      Basic Stats Work:
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table bucket4_1
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket4_1
+POSTHOOK: query: insert overwrite table bucket4_1
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket4_1
+POSTHOOK: Lineage: bucket4_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain
+select * from bucket4_1 tablesample (bucket 1 out of 2) s
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from bucket4_1 tablesample (bucket 1 out of 2) s
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: s
+          Filter Operator
+            predicate: (((hash(key) & 2147483647) % 2) = 0) (type: boolean)
+            Select Operator
+              expressions: key (type: int), value (type: string)
+              outputColumnNames: _col0, _col1
+              ListSink
+
+PREHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bucket4_1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bucket4_1
+#### A masked pattern was here ####
+0 val_0
+0 val_0
+0 val_0
+2 val_2
+4 val_4
+8 val_8
+10 val_10
+12 val_12
+12 val_12
+18 val_18
+18 val_18
+20 val_20
+24 val_24
+24 val_24
+26 val_26
+26 val_26
+28 val_28
+30 val_30
+34 val_34
+42 val_42
+42 val_42
+44 val_44
+54 val_54
+58 val_58
+58 val_58
+64 val_64
+66 val_66
+70 val_70
+70 val_70
+70 val_70
+72 val_72
+72 val_72
+74 val_74
+76 val_76
+76 val_76
+78 val_78
+80 val_80
+82 val_82
+84 val_84
+84 val_84
+86 val_86
+90 val_90
+90 val_90
+90 val_90
+92 val_92
+96 val_96
+98 val_98
+98 val_98
+100 val_100
+100 val_100
+104 val_104
+104 val_104
+114 val_114
+116 val_116
+118 val_118
+118 val_118
+120 val_120
+120 val_120
+126 val_126
+128 val_128
+128 val_128
+128 val_128
+134 val_134
+134 val_134
+136 val_136
+138 val_138
+138 val_138
+138 val_138
+138 val_138
+146 val_146
+146 val_146
+150 val_150
+152 val_152
+152 val_152
+156 val_156
+158 val_158
+160 val_160
+162 val_162
+164 val_164
+164 val_164
+166 val_166
+168 val_168
+170 val_170
+172 val_172
+172 val_172
+174 val_174
+174 val_174
+176 val_176
+176 val_176
+178 val_178
+180 val_180
+186 val_186
+190 val_190
+192 val_192
+194 val_194
+196 val_196
+200 val_200
+200 val_200
+202 val_202
+208 val_208
+208 val_208
+208 val_208
+214 val_214
+216 val_216
+216 val_216
+218 val_218
+222 val_222
+224 val_224
+224 val_224
+226 val_226
+228 val_228
+230 val_230
+230 val_230
+230 val_230
+230 val_230
+230 val_230
+238 val_238
+238 val_238
+242 val_242
+242 val_242
+244 val_244
+248 val_248
+252 val_252
+256 val_256
+256 val_256
+258 val_258
+260 val_260
+262 val_262
+266 val_266
+272 val_272
+272 val_272
+274 val_274
+278 val_278
+278 val_278
+280 val_280
+280 val_280
+282 val_282
+282 val_282
+284 val_284
+286 val_286
+288 val_288
+288 val_288
+292 val_292
+296 val_296
+298 val_298
+298 val_298
+298 val_298
+302 val_302
+306 val_306
+308 val_308
+310 val_310
+316 val_316
+316 val_316
+316 val_316
+318 val_318
+318 val_318
+318 val_318
+322 val_322
+322 val_322
+332 val_332
+336 val_336
+338 val_338
+342 val_342
+342 val_342
+344 val_344
+344 val_344
+348 val_348
+348 val_348
+348 val_348
+348 val_348
+348 val_348
+356 val_356
+360 val_360
+362 val_362
+364 val_364
+366 val_366
+368 val_368
+374 val_374
+378 val_378
+382 val_382
+382 val_382
+384 val_384
+384 val_384
+384 val_384
+386 val_386
+392 val_392
+394 val_394
+396 val_396
+396 val_396
+396 val_396
+400 val_400
+402 val_402
+404 val_404
+404 val_404
+406 val_406
+406 val_406
+406 val_406
+406 val_406
+414 val_414
+414 val_414
+418 val_418
+424 val_424
+424 val_424
+430 val_430
+430 val_430
+430 val_430
+432 val_432
+436 val_436
+438 val_438
+438 val_438
+438 val_438
+444 val_444
+446 val_446
+448 val_448
+452 val_452
+454 val_454
+454 val_454
+454 val_454
+458 val_458
+458 val_458
+460 val_460
+462 val_462
+462 val_462
+466 val_466
+466 val_466
+466 val_466
+468 val_468
+468 val_468
+468 val_468
+468 val_468
+470 val_470
+472 val_472
+478 val_478
+478 val_478
+480 val_480
+480 val_480
+480 val_480
+482 val_482
+484 val_484
+490 val_490
+492 val_492
+492 val_492
+494 val_494
+496 val_496
+498 val_498
+498 val_498
+498 val_498
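
Note on the bucket4 plan above: the `tablesample (bucket 1 out of 2)` fetch is realized as the printed Filter Operator predicate `(((hash(key) & 2147483647) % 2) = 0)` - the `& 2147483647` mask clears the sign bit so the modulus is non-negative, and bucket 1 of 2 corresponds to remainder 0. A minimal HiveQL sketch (illustration only, not part of this commit) that selects the same rows by writing that predicate by hand, assuming the same bucket4_1 test table and Hive's built-in hash() UDF:

    -- Equivalent of: select * from bucket4_1 tablesample (bucket 1 out of 2) s
    -- Keeps rows whose bucket hash of `key` maps to bucket 0 of 2,
    -- mirroring the Filter Operator predicate shown in the explain output.
    select *
    from bucket4_1
    where ((hash(key) & 2147483647) % 2) = 0;
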