hive-commits mailing list archives

From: gunt...@apache.org
Subject: svn commit: r1594114 - in /hive/trunk: itests/qtest/ ql/src/test/results/clientpositive/tez/
Date: Tue, 13 May 2014 01:35:28 GMT
Author: gunther
Date: Tue May 13 01:35:28 2014
New Revision: 1594114

URL: http://svn.apache.org/r1594114
Log:
HIVE-7037: Add additional tests for transform clauses with Tez (Gunther Hagleitner, reviewed by Vikram Dixit K)

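The transform clause covered by these tests streams each input row to an external command over stdin and parses the command's stdout back into the declared output columns. A minimal sketch of the pattern the new qfiles exercise (the output column aliases below are illustrative; 'cat' simply echoes its input):

  -- Minimal TRANSFORM sketch: each row of src is written to 'cat' as
  -- tab-separated text and read back into the two declared string columns.
  SELECT TRANSFORM(key, value)
         USING 'cat'
         AS (out_key STRING, out_value STRING)
  FROM src;
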
Added:
    hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var2.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/script_pipe.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/scriptfile1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/transform1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/transform2.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
Modified:
    hive/trunk/itests/qtest/pom.xml

Modified: hive/trunk/itests/qtest/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/itests/qtest/pom.xml?rev=1594114&r1=1594113&r2=1594114&view=diff
==============================================================================
--- hive/trunk/itests/qtest/pom.xml (original)
+++ hive/trunk/itests/qtest/pom.xml Tue May 13 01:35:28 2014
@@ -39,7 +39,7 @@
     <minimr.query.files>stats_counter_partitioned.q,list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q,stats_counter.q,auto_sortmerge_join_16.q,quotedid_smb.q,file_with_header_footer.q,external_table_with_space_in_location_path.q,root_dir_external_table.q,index_bitmap3.q,ql_rewrite_gbtoidx.q,index_bitmap_auto.q,udf_using.q</minimr.query.files>
     <minimr.query.negative.files>cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q,file_with_header_footer_negative.q,udf_local_resource.q</minimr.query.negative.files>
     <minitez.query.files>tez_fsstat.q,mapjoin_decimal.q,tez_join_tests.q,tez_joins_explain.q,mrr.q,tez_dml.q,tez_insert_overwrite_local_directory_1.q,tez_union.q,bucket_map_join_tez1.q,bucket_map_join_tez2.q,tez_schema_evolution.q</minitez.query.files>
-    <minitez.query.files.shared>cross_product_check_1.q,cross_product_check_2.q,dynpart_sort_opt_vectorization.q,dynpart_sort_optimization.q,orc_analyze.q,join0.q,join1.q,auto_join0.q,auto_join1.q,bucket2.q,bucket3.q,bucket4.q,count.q,create_merge_compressed.q,cross_join.q,ctas.q,custom_input_output_format.q,disable_merge_for_bucketing.q,enforce_order.q,filter_join_breaktask.q,filter_join_breaktask2.q,groupby1.q,groupby2.q,groupby3.q,having.q,insert1.q,insert_into1.q,insert_into2.q,leftsemijoin.q,limit_pushdown.q,load_dyn_part1.q,load_dyn_part2.q,load_dyn_part3.q,mapjoin_mapjoin.q,mapreduce1.q,mapreduce2.q,merge1.q,merge2.q,metadata_only_queries.q,sample1.q,subquery_in.q,subquery_exists.q,vectorization_15.q,ptf.q,stats_counter.q,stats_noscan_1.q,stats_counter_partitioned.q,union2.q,union3.q,union4.q,union5.q,union6.q,union7.q,union8.q,union9.q</minitez.query.files.shared>
+    <minitez.query.files.shared>cross_product_check_1.q,cross_product_check_2.q,dynpart_sort_opt_vectorization.q,dynpart_sort_optimization.q,orc_analyze.q,join0.q,join1.q,auto_join0.q,auto_join1.q,bucket2.q,bucket3.q,bucket4.q,count.q,create_merge_compressed.q,cross_join.q,ctas.q,custom_input_output_format.q,disable_merge_for_bucketing.q,enforce_order.q,filter_join_breaktask.q,filter_join_breaktask2.q,groupby1.q,groupby2.q,groupby3.q,having.q,insert1.q,insert_into1.q,insert_into2.q,leftsemijoin.q,limit_pushdown.q,load_dyn_part1.q,load_dyn_part2.q,load_dyn_part3.q,mapjoin_mapjoin.q,mapreduce1.q,mapreduce2.q,merge1.q,merge2.q,metadata_only_queries.q,sample1.q,subquery_in.q,subquery_exists.q,vectorization_15.q,ptf.q,stats_counter.q,stats_noscan_1.q,stats_counter_partitioned.q,union2.q,union3.q,union4.q,union5.q,union6.q,union7.q,union8.q,union9.q,transform1.q,transform2.q,transform_ppr1.q,transform_ppr2.q,script_env_var1.q,script_env_var2.q,script_pipe.q,scriptfile1.q</minitez.query.files.shared>
     <beeline.positive.exclude>add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rena
 me.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_o
 verwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q</beeline.positive.exclude>
   </properties>
 

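The pom change above only adds the new qfiles to the shared MiniTez list so that they also run under the Tez test driver. Interactively, the closest equivalent is switching the execution engine before running the same queries; a sketch, assuming a deployment where Tez is available (the query itself is illustrative, not taken from this commit):

  -- Sketch: run a TRANSFORM query on Tez rather than MapReduce.
  SET hive.execution.engine=tez;
  SELECT TRANSFORM(key, value) USING 'cat' AS (k, v) FROM src LIMIT 1;
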
Added: hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var1.q.out?rev=1594114&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var1.q.out Tue May 13 01:35:28 2014
@@ -0,0 +1,18 @@
+PREHOOK: query: -- Verifies that script operator ID environment variables have unique values
+-- in each instance of the script operator.
+SELECT count(1) FROM
+( SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS key FROM src LIMIT 1 UNION ALL
+  SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS key FROM src LIMIT 1 ) a GROUP BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Verifies that script operator ID environment variables have unique values
+-- in each instance of the script operator.
+SELECT count(1) FROM
+( SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS key FROM src LIMIT 1 UNION ALL
+  SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS key FROM src LIMIT 1 ) a GROUP BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1
+1

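The two result rows above (two groups, each counting 1) show that the two script operator instances exported different HIVE_SCRIPT_OPERATOR_ID values to their child processes. Stripped of the UNION ALL harness, the mechanism looks like this (a sketch, not part of the test file):

  -- 'sh' reads its single input row from stdin and executes it as a command,
  -- so the query returns the operator id that Hive exported to the child.
  SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS (op_id STRING)
  FROM src
  LIMIT 1;
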
Added: hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var2.q.out?rev=1594114&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var2.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/script_env_var2.q.out Tue May 13 01:35:28 2014
@@ -0,0 +1,16 @@
+PREHOOK: query: -- Same test as script_env_var1, but test setting the variable name
+SELECT count(1) FROM
+( SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1 UNION ALL
+  SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1 ) a GROUP BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Same test as script_env_var1, but test setting the variable name
+SELECT count(1) FROM
+( SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1 UNION ALL
+  SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1 ) a GROUP BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1
+1

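The .q.out above shows only the query; the renaming of the variable to MY_ID is presumably done in the .q file itself. A sketch of how that would look, assuming the hive.script.operator.id.env.var setting (default HIVE_SCRIPT_OPERATOR_ID) is what the test overrides:

  -- Assumption: hive.script.operator.id.env.var controls the name of the
  -- environment variable that carries the script operator id.
  SET hive.script.operator.id.env.var=MY_ID;
  SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS (op_id STRING)
  FROM src
  LIMIT 1;
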
Added: hive/trunk/ql/src/test/results/clientpositive/tez/script_pipe.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/script_pipe.q.out?rev=1594114&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/script_pipe.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/script_pipe.q.out Tue May 13 01:35:28 2014
@@ -0,0 +1,123 @@
+PREHOOK: query: -- Tests exception in ScriptOperator.close() by passing to the operator a small amount of data
+EXPLAIN SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Tests exception in ScriptOperator.close() by passing to the operator a small amount of data
+EXPLAIN SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Limit
+                      Number of rows: 1
+                      Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Extract
+                Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 1
+                  Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: true
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: -- Tests exception in ScriptOperator.processOp() by passing extra data needed to fill pipe buffer
+EXPLAIN SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value) USING 'head -n 1' as a,b,c,d FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Tests exception in ScriptOperator.processOp() by passing extra data needed to fill pipe buffer
+EXPLAIN SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value) USING 'head -n 1' as a,b,c,d FROM src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: head -n 1
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+PREHOOK: query: SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value) USING 'head -n 1' as a,b,c,d FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value) USING 'head -n 1' as a,b,c,d FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+238	val_238	238	val_238

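Both queries above succeed even though the child process stops reading early: 'true' exits without consuming any input (exercising ScriptOperator.close()), and 'head -n 1' closes its stdin after one line while Hive is still writing (exercising processOp() against a broken pipe). For those queries to pass, the .q file presumably allows partial consumption; a sketch of the relevant setting, as an assumption rather than a quote from the test:

  -- Assumption: hive.exec.script.allow.partial.consumption=true lets a script
  -- exit without reading all of its input instead of failing the query.
  SET hive.exec.script.allow.partial.consumption=true;
  SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp;
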
Added: hive/trunk/ql/src/test/results/clientpositive/tez/scriptfile1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/scriptfile1.q.out?rev=1594114&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/scriptfile1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/scriptfile1.q.out Tue May 13 01:35:28 2014
@@ -0,0 +1,50 @@
+PREHOOK: query: -- EXCLUDE_OS_WINDOWS
+CREATE TABLE dest1(key INT, value STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: -- EXCLUDE_OS_WINDOWS
+CREATE TABLE dest1(key INT, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'testgrep' AS (tkey, tvalue)
+  CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'testgrep' AS (tkey, tvalue)
+  CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+105	val_105
+310	val_310

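Unlike the inline 'cat' and 'sh' commands in the other tests, 'testgrep' is a user script that has to be shipped to the task nodes before USING can reference it by name; in Hive that is done with ADD FILE. A sketch of the usual pattern (the path below is illustrative, not the one the .q file actually uses):

  -- Ship the script to the cluster, then reference it by file name.
  ADD FILE /path/to/testgrep;
  FROM (
    FROM src
    SELECT TRANSFORM(src.key, src.value)
           USING 'testgrep' AS (tkey, tvalue)
    CLUSTER BY tkey
  ) tmap
  INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue;
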
Added: hive/trunk/ql/src/test/results/clientpositive/tez/transform1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/transform1.q.out?rev=1594114&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/transform1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/transform1.q.out Tue May 13 01:35:28 2014
@@ -0,0 +1,130 @@
+PREHOOK: query: create table transform1_t1(a string, b string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: create table transform1_t1(a string, b string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@transform1_t1
+PREHOOK: query: EXPLAIN
+SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: transform1_t1
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Select Operator
+                    expressions: a (type: string), b (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@transform1_t1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@transform1_t1
+#### A masked pattern was here ####
+PREHOOK: query: create table transform1_t2(col array<int>)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: create table transform1_t2(col array<int>)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@transform1_t2
+PREHOOK: query: insert overwrite table transform1_t2
+select array(1,2,3) from src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@transform1_t2
+POSTHOOK: query: insert overwrite table transform1_t2
+select array(1,2,3) from src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@transform1_t2
+POSTHOOK: Lineage: transform1_t2.col EXPRESSION []
+PREHOOK: query: EXPLAIN
+SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: transform1_t2.col EXPRESSION []
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: transform1_t2
+                  Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: '012' (type: string)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@transform1_t2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@transform1_t2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: transform1_t2.col EXPRESSION []
+[0,1,2]

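The final result above, [0,1,2], follows from the fact that TRANSFORM output columns can be declared with complex types: LazySimpleSerDe uses \002 (Ctrl-B) as its default collection-item delimiter, so the '0\0021\0022' line echoed back by 'cat' deserializes into a three-element array. A sketch of the same typed-output pattern applied to the table's own column (illustrative, not part of the test):

  -- The declared array<int> type tells Hive how to parse the script's output;
  -- elements are split on the default \002 collection delimiter.
  SELECT TRANSFORM(col) USING 'cat' AS (col array<int>)
  FROM transform1_t2;
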
Added: hive/trunk/ql/src/test/results/clientpositive/tez/transform2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/transform2.q.out?rev=1594114&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/transform2.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/transform2.q.out Tue May 13 01:35:28 2014
@@ -0,0 +1,11 @@
+PREHOOK: query: -- Transform with a function that has many parameters
+SELECT TRANSFORM(substr(key, 1, 2)) USING 'cat' FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Transform with a function that has many parameters
+SELECT TRANSFORM(substr(key, 1, 2)) USING 'cat' FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+23	NULL

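The trailing NULL in the result above is expected: with no AS clause, TRANSFORM output defaults to two STRING columns named key and value, split on the first tab, and 'cat' emits only a single field here, so the second column stays NULL. The default is equivalent to spelling the schema out explicitly (a sketch, not the test query itself):

  -- Equivalent explicit form of the default TRANSFORM output schema.
  SELECT TRANSFORM(substr(key, 1, 2)) USING 'cat' AS (key STRING, value STRING)
  FROM src LIMIT 1;
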
Added: hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out?rev=1594114&view=auto
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out (added) and hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out Tue May 13 01:35:28 2014 differ

Added: hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out?rev=1594114&view=auto
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out (added) and hive/trunk/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out Tue May 13 01:35:28 2014 differ

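The contents of the two transform_ppr .q.out files are not reproduced in this message; the archive only records that they were added. Judging by the names, they cover TRANSFORM over a partitioned input combined with partition pruning. A hedged sketch of that general shape, using the standard srcpart qtest fixture and its ds partition column as assumptions rather than the actual test queries:

  -- Sketch: the outer predicate on the partition column should be pushed past
  -- the transform subquery so only matching partitions of srcpart are read.
  SELECT t.tkey, t.tvalue
  FROM (
    FROM srcpart
    SELECT TRANSFORM(srcpart.ds, srcpart.key, srcpart.value)
           USING 'cat' AS (ds, tkey, tvalue)
  ) t
  WHERE t.ds = '2008-04-08';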

