hadoop-hive-commits mailing list archives

From pras...@apache.org
Subject svn commit: r771990 [1/8] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ data/conf/ hwi/ jdbc/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ ql/src/java/org/apache/hadoop/hive/ql...
Date Tue, 05 May 2009 22:17:50 GMT
Author: prasadc
Date: Tue May  5 22:17:36 2009
New Revision: 771990

URL: http://svn.apache.org/viewvc?rev=771990&view=rev
Log:
Add a pre execution hook to ql
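
This patch wires pre-execution hooks into the query lifecycle: class names listed
in hive.exec.pre.hooks are instantiated reflectively in Driver.execute(), and each
hook is invoked with the session state, the read/write entity sets collected during
semantic analysis, and the caller's UGI. The hooks package itself (PreExecute,
ReadEntity, WriteEntity, PreExecutePrinter) lands in a later part of this
eight-part commit, so only its call sites appear below; a minimal sketch of the
hook contract, inferred from the peh.run(...) call in the Driver.java hunk, would
look like:

    package org.apache.hadoop.hive.ql.hooks;

    import java.util.Set;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.hadoop.security.UserGroupInformation;

    // Sketch only: the shape implied by the Driver.execute() call site below;
    // the real interface ships in another part of r771990.
    public interface PreExecute {
      void run(SessionState sess, Set<ReadEntity> inputs,
               Set<WriteEntity> outputs, UserGroupInformation ugi)
        throws Exception;
    }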

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hadoop/hive/trunk/conf/hive-default.xml
    hadoop/hive/trunk/data/conf/hive-site.xml
    hadoop/hive/trunk/hwi/build.xml
    hadoop/hive/trunk/jdbc/build.xml
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/describe_xpath.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/fileformat_sequencefile.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/fileformat_text.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby_bigdata.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/implicit_cast1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input0.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input15.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input16.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input16_cc.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input19.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input21.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input22.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input2_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input3_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_cb_delim.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_columnarserde.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_dynamicserde.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_lazyserde.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part0.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testsequencefile.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/insert1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join0.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join15.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join16.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join17.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join18.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join19.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join20.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join21.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join23.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/noalias_subq1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullinput.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/order.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/order2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_clusterby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_constant_expr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby_join.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_multi_insert.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_random.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_transform.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_udf_case.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_union.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/quote1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rcfile_bigdata.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rcfile_columnar.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/regexp_extract.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/scriptfile1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/show_tables.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/showparts.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sort.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/subq.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/subq2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_10_trims.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_case.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_case_column_pruning.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_case_thrift.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_json.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_length.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_round.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_substr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_testlength.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_testlength2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_when.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union15.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union16.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union17.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union18.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union19.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union20.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union9.q.out
    hadoop/hive/trunk/service/build.xml
    hadoop/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue May  5 22:17:36 2009
@@ -27,6 +27,9 @@
     HIVE-420. Support regular expressions for column names
     (Zheng Shao via namit)
 
+    HIVE-463. Add a pre execution hook to ql
+    (Ashish Thusoo via prasadc)
+
   IMPROVEMENTS
     HIVE-389. Option to build without ivy (jssarma)
 

Modified: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue May  5 22:17:36 2009
@@ -65,6 +65,7 @@
     COMPRESSINTERMEDIATE("hive.exec.compress.intermediate", false),
     BYTESPERREDUCER("hive.exec.reducers.bytes.per.reducer", (long)(1000*1000*1000)),
     MAXREDUCERS("hive.exec.reducers.max", 999),
+    PREEXECHOOKS("hive.exec.pre.hooks", ""),
 
     // hadoop stuff
     HADOOPBIN("hadoop.bin.path", System.getenv("HADOOP_HOME") + "/bin/hadoop"),

Modified: hadoop/hive/trunk/conf/hive-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/conf/hive-default.xml?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/conf/hive-default.xml (original)
+++ hadoop/hive/trunk/conf/hive-default.xml Tue May  5 22:17:36 2009
@@ -142,4 +142,10 @@
   <description>This is the WAR file with the jsp content for Hive Web Interface</description>
 </property>
 
+<property>
+  <name>hive.exec.pre.hooks</name>
+  <value></value>
+  <description>Pre Execute Hook for Tests</description>
+</property>
+
 </configuration>

Modified: hadoop/hive/trunk/data/conf/hive-site.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/conf/hive-site.xml?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/data/conf/hive-site.xml (original)
+++ hadoop/hive/trunk/data/conf/hive-site.xml Tue May  5 22:17:36 2009
@@ -112,4 +112,10 @@
   <description>Location of the structured hive logs</description>
 </property>
 
+<property>
+  <name>hive.exec.pre.hooks</name>
+  <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter</value>
+  <description>Pre Execute Hook for Tests</description>
+</property>
+
 </configuration>
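
Driver.getPreExecHooks() (below) splits this value on commas and trims each entry,
so several hooks can be chained. A hedged example, where com.example.AuditHook is a
hypothetical stand-in for any user-supplied PreExecute implementation:

    <property>
      <name>hive.exec.pre.hooks</name>
      <!-- Comma-separated list; each class must implement PreExecute.
           com.example.AuditHook is illustrative only. -->
      <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter,com.example.AuditHook</value>
    </property>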

Modified: hadoop/hive/trunk/hwi/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/build.xml?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/build.xml (original)
+++ hadoop/hive/trunk/hwi/build.xml Tue May  5 22:17:36 2009
@@ -11,6 +11,17 @@
     <mkdir dir="${build.dir}/test/src"/>
   </target> 
 
+  <path id="test.classpath">
+    <pathelement location="${test.build.classes}" />
+    <pathelement location="" />
+    <pathelement location="${test.data.dir}/conf"/>
+    <pathelement location="${hive.conf.dir}"/>
+    <fileset dir="${test.src.data.dir}" includes="files/*.jar"/>
+    <fileset dir="${hive.root}" includes="testlibs/*.jar"/>
+    <pathelement location="${build.dir.hive}/ql/test/classes"/>
+    <path refid="classpath"/>
+  </path>
+
   <target name="compile" depends="hwi-init">
     <echo message="Compiling: ${name}"/>
     <javac

Modified: hadoop/hive/trunk/jdbc/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/build.xml?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/build.xml (original)
+++ hadoop/hive/trunk/jdbc/build.xml Tue May  5 22:17:36 2009
@@ -29,6 +29,17 @@
 
   <import file="../build-common.xml"/>
 
+  <path id="test.classpath">
+    <pathelement location="${test.build.classes}" />
+    <pathelement location="" />
+    <pathelement location="${test.data.dir}/conf"/>
+    <pathelement location="${hive.conf.dir}"/>
+    <fileset dir="${test.src.data.dir}" includes="files/*.jar"/>
+    <fileset dir="${hive.root}" includes="testlibs/*.jar"/>
+    <pathelement location="${build.dir.hive}/ql/test/classes"/>
+    <path refid="classpath"/>
+  </path>
+
   <target name="core-compile" depends="init">
     <javac
      encoding="${build.encoding}"

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Tue May  5 22:17:36 2009
@@ -40,10 +40,13 @@
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.PreExecute;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.plan.tableDesc;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -209,6 +212,27 @@
     return execute();
   }
 
+  private List<PreExecute> getPreExecHooks() throws Exception {
+    ArrayList<PreExecute> pehooks = new ArrayList<PreExecute>();
+    String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);
+    pestr = pestr.trim();
+    if (pestr.equals(""))
+      return pehooks;
+
+    String[] peClasses = pestr.split(",");
+    
+    for(String peClass: peClasses) {
+      try {
+        pehooks.add((PreExecute)Class.forName(peClass.trim()).newInstance());
+      } catch (ClassNotFoundException e) {
+        console.printError("Pre Exec Hook Class not found:" + e.getMessage());
+        throw e;
+      }
+    }
+    
+    return pehooks;
+  }
+  
   public int execute() {
     boolean noName = StringUtils.isEmpty(conf
         .getVar(HiveConf.ConfVars.HADOOPJOBNAME));
@@ -220,7 +244,7 @@
     conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
     conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, queryStr);
 
-    try {
+    try {      
       LOG.info("Starting command: " + queryStr);
 
       if (SessionState.get() != null)
@@ -229,6 +253,14 @@
       resStream = null;
 
       BaseSemanticAnalyzer sem = plan.getPlan();
+
+      // Get all the pre execution hooks and execute them.
+      for(PreExecute peh: getPreExecHooks()) {
+        peh.run(SessionState.get(), 
+                sem.getInputs(), sem.getOutputs(),
+                UserGroupInformation.getCurrentUGI());        
+      }
+      
       int jobs = countJobs(sem.getRootTasks());
       if (jobs > 0) {
         console.printInfo("Total MapReduce jobs = " + jobs);
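
The test configuration above registers org.apache.hadoop.hive.ql.hooks.PreExecutePrinter,
whose source is added in a later part of this commit. Judging from the updated
.q.out files below, it echoes the query string plus each input and output entity;
a rough reconstruction (entity toString() formatting assumed):

    package org.apache.hadoop.hive.ql.hooks;

    import java.util.Set;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
    import org.apache.hadoop.security.UserGroupInformation;

    // Sketch of the test hook, reconstructed from the "query:", "Input:" and
    // "Output:" lines visible in the .q.out diffs below.
    public class PreExecutePrinter implements PreExecute {
      public void run(SessionState sess, Set<ReadEntity> inputs,
                      Set<WriteEntity> outputs, UserGroupInformation ugi)
          throws Exception {
        // SessionState.getConsole() is made public in this commit so that
        // hooks can log through the session console.
        LogHelper console = SessionState.getConsole();
        console.printInfo("query: "
            + sess.getConf().getVar(HiveConf.ConfVars.HIVEQUERYSTRING));
        for (ReadEntity re : inputs)
          console.printInfo("Input: " + re);
        for (WriteEntity we : outputs)
          console.printInfo("Output: " + we);
      }
    }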

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java Tue May  5 22:17:36 2009
@@ -22,11 +22,14 @@
 import java.util.ArrayList;
 import java.util.Map;
 import java.util.HashMap;
+import java.util.Set;
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.plan.tableDesc;
@@ -49,6 +52,8 @@
      * @param currTask    the current task
      * @param currTopOp   the current top operator being traversed
      * @param currAliasId the current alias for the to operator
+     * @param inputs      the list of read entities
+     * @param outputs     the list of write entities
      */
     public GenMapRedCtx (Task<? extends Serializable>         currTask,
                          Operator<? extends Serializable>     currTopOp,
@@ -133,7 +138,18 @@
   private UnionOperator                        currUnionOp;
   private String                               currAliasId;
   private List<Operator<? extends Serializable>> rootOps;
-
+  
+  /**
+   * Set of read entities. This list is generated by the walker and is 
+   * passed to the hooks.
+   */
+  private Set<ReadEntity>                     inputs;
+  /**
+   * Set of write entities. This list is generated by the walker and is
+   * passed to the hooks.
+   */
+  private Set<WriteEntity>                    outputs;
+  
   /**
    * @param opTaskMap  reducer to task mapping
    * @param seenOps    operator already visited
@@ -142,6 +158,8 @@
    * @param mvTask     the final move task
    * @param scratchDir directory for temp destinations   
    * @param mapCurrCtx operator to task mappings
+   * @param inputs     the set of input tables/partitions generated by the walk
+   * @param outputs    the set of destinations generated by the walk
    */
   public GenMRProcContext (
     HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap,
@@ -150,7 +168,9 @@
     Task<? extends Serializable>           mvTask,
     List<Task<? extends Serializable>>     rootTasks,
     String scratchDir, int randomid, int pathid,
-    Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx) 
+    Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx,
+    Set<ReadEntity> inputs,
+    Set<WriteEntity> outputs) 
   {
 
     this.opTaskMap  = opTaskMap;
@@ -162,6 +182,8 @@
     this.randomid   = randomid;
     this.pathid     = pathid;
     this.mapCurrCtx = mapCurrCtx;
+    this.inputs = inputs;
+    this.outputs = outputs;
     currTask        = null;
     currTopOp       = null;
     currUnionOp     = null;
@@ -371,4 +393,18 @@
   public void setUnionTask(UnionOperator op, GenMRUnionCtx uTask) {
     unionTaskMap.put(op, uTask);
   }
+  
+  /**
+   * Get the input set.
+   */
+  public Set<ReadEntity> getInputs() {
+    return inputs;
+  }
+  
+  /**
+   * Get the output set.
+   */
+  public Set<WriteEntity> getOutputs() {
+    return outputs;
+  }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Tue May  5 22:17:36 2009
@@ -49,6 +49,8 @@
 import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.parse.*;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRUnionCtx;
@@ -237,6 +239,7 @@
       mapredWork plan, boolean local, GenMRProcContext opProcCtx) 
     throws SemanticException {
     ParseContext parseCtx = opProcCtx.getParseCtx();
+    Set<ReadEntity> inputs = opProcCtx.getInputs();
 
     if (!local) {
       // Generate the map work for this alias_id
@@ -256,6 +259,11 @@
       SamplePruner samplePruner = parseCtx.getAliasToSamplePruner().get(alias_id);
       
       for (Partition part : parts) {
+        if (part.getTable().isPartitioned())
+          inputs.add(new ReadEntity(part));
+        else
+          inputs.add(new ReadEntity(part.getTable()));
+        
         // Later the properties have to come from the partition as opposed
         // to from the table in order to support versioning.
         Path paths[];

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Tue May  5 22:17:36 2009
@@ -30,7 +30,10 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.metadata.Partition;
 
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 
 public abstract class BaseSemanticAnalyzer {
   protected String scratchDir;
@@ -242,6 +245,14 @@
     return taskTmpDir;
   }
   
+  public Set<ReadEntity> getInputs() {
+    return new LinkedHashSet<ReadEntity>();
+  }
+  
+  public Set<WriteEntity> getOutputs() {
+    return new LinkedHashSet<WriteEntity>();
+  }
+  
   public static class tableSpec {
     public String tableName;
     public Table tableHandle;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue May  5 22:17:36 2009
@@ -22,12 +22,12 @@
 import java.io.Serializable;
 import java.lang.reflect.Method;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Formatter;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -81,12 +81,9 @@
 import org.apache.hadoop.hive.ql.optimizer.Optimizer;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
-import org.apache.hadoop.hive.ql.optimizer.GenMRUnion1;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink3;
 import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
 import org.apache.hadoop.hive.ql.exec.*;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.aggregationDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
@@ -117,7 +114,6 @@
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -125,9 +121,11 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.InputFormat;
-import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+
 /**
  * Implementation of the semantic analyzer
  */
@@ -145,6 +143,15 @@
   private int destTableId;
   private UnionProcContext uCtx;
 
+  /**
+   * ReadEntitites that are passed to the hooks.
+   */
+  private Set<ReadEntity> inputs;
+  /**
+   * List of WriteEntities that are passed to the hooks.
+   */
+  private Set<WriteEntity> outputs;
+  
   private static class Phase1Ctx {
     String dest;
     int nextNum;
@@ -163,6 +170,9 @@
     opParseCtx = new HashMap<Operator<? extends Serializable>, OpParseContext>();
     this.destTableId = 1;
     this.uCtx = null;
+    
+    inputs = new LinkedHashSet<ReadEntity>();
+    outputs = new LinkedHashSet<WriteEntity>();
   }
   
 
@@ -2256,6 +2266,7 @@
         this.loadTableWork.add(new loadTableDesc(queryTmpdir, getTmpFileName(),
                                             table_desc,
                                             new HashMap<String, String>()));
+        outputs.add(new WriteEntity(dest_tab));
         break;
       }
     case QBMetaData.DEST_PARTITION:
@@ -2269,6 +2280,7 @@
         this.destTableId ++;
         
         this.loadTableWork.add(new loadTableDesc(queryTmpdir, getTmpFileName(), table_desc, dest_part.getSpec()));
+        outputs.add(new WriteEntity(dest_part));
         break;
       }
     case QBMetaData.DEST_LOCAL_FILE:
@@ -2295,10 +2307,12 @@
           currentTableId = this.destTableId;
           this.destTableId ++;
         }
+        boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE);
         this.loadFileWork.add(new loadFileDesc(queryTmpdir, dest_path,
-                                          (dest_type.intValue() == QBMetaData.DEST_DFS_FILE), cols));
+                                          isDfsDir, cols));
         table_desc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode),
             cols);
+        outputs.add(new WriteEntity(dest_path, !isDfsDir));
         break;
     }
     default:
@@ -3460,6 +3474,7 @@
       if (!tab.isPartitioned()) {
         if (qbParseInfo.getDestToWhereExpr().isEmpty())
           fetch = new fetchWork(tab.getPath(), Utilities.getTableDesc(tab), qb.getParseInfo().getOuterQueryLimit()); 
+        inputs.add(new ReadEntity(tab));
       }
       else {
         if (aliasToPruner.size() == 1) {
@@ -3480,6 +3495,7 @@
                   Partition part = iterParts.next();
                   listP.add(part.getPartitionPath());
                   partP.add(Utilities.getPartitionDesc(part));
+                  inputs.add(new ReadEntity(part));
                 }
                 fetch = new fetchWork(listP, partP, qb.getParseInfo().getOuterQueryLimit());
               }
@@ -3528,7 +3544,8 @@
         new HashMap<Operator<? extends Serializable>, Task<? extends Serializable>>(),
         new ArrayList<Operator<? extends Serializable>>(),
         getParseContext(), mvTask, this.rootTasks, this.scratchDir, this.randomid, this.pathid,
-        new HashMap<Operator<? extends Serializable>, GenMapRedCtx>());
+        new HashMap<Operator<? extends Serializable>, GenMapRedCtx>(),
+        inputs, outputs);
 
     // create a walker which walks the tree in a DFS manner while maintaining the operator stack. 
     // The dispatcher generates the plan from the operator tree
@@ -3799,4 +3816,12 @@
     return newParameters;
   }
   
+  @Override
+  public Set<ReadEntity> getInputs() {
+    return inputs;
+  }
+  
+  public Set<WriteEntity> getOutputs() {
+    return outputs;
+  }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Tue May  5 22:17:36 2009
@@ -284,7 +284,7 @@
   /**
    * initialize or retrieve console object for SessionState
    */
-  private static LogHelper getConsole() {
+  public static LogHelper getConsole() {
     if(_console == null) {
       Log LOG = LogFactory.getLog("SessionState");
       _console = new LogHelper(LOG);

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out Tue May  5 22:17:36 2009
@@ -1,2 +1,5 @@
+query: drop table altern1
+query: create table altern1(a int, b int) partitioned by (ds string)
+query: alter table altern1 replace columns(a int, b int, ds string)
 Invalid table columns : Partition column name ds conflicts with table columns.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out Tue May  5 22:17:36 2009
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE
 FAILED: Error in semantic analysis: Sampling Expression Needed for Non-Bucketed Table srcpart

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out Tue May  5 22:17:36 2009
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
 FAILED: Error in semantic analysis: line 8:14 Cannot have both Cluster By and Distribute By Clauses tkey

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out Tue May  5 22:17:36 2009
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
 FAILED: Error in semantic analysis: line 8:8 Cannot have both Cluster By and Sort By Clauses one

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out Tue May  5 22:17:36 2009
@@ -1 +1,2 @@
+query: DROP TABLE table_test_output_fomat
 FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out Tue May  5 22:17:36 2009
@@ -1,2 +1,3 @@
+query: describe src_thrift.$elem$
 FAILED: Error in metadata: java.lang.RuntimeException: cannot find field $elem$ from [public int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, public java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, public java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out Tue May  5 22:17:36 2009
@@ -1,2 +1,3 @@
+query: describe src_thrift.$key$
 FAILED: Error in metadata: java.lang.RuntimeException: cannot find field $key$ from [public int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, public java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, public java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out Tue May  5 22:17:36 2009
@@ -1,2 +1,3 @@
+query: describe src_thrift.lint.abc
 FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Error in getting fields from serde.Unknown type for abc
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out Tue May  5 22:17:36 2009
@@ -1,2 +1,3 @@
+query: describe src_thrift.mStringString.abc
 FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Error in getting fields from serde.Unknown type for abc
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out Tue May  5 22:17:36 2009
@@ -1,3 +1,6 @@
+query: drop table external1
+query: create external table external1(a int, b int) location 'invalidscheme://data.s3ndemo.hive/kv'
 FAILED: Error in metadata: MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+query: describe external1
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out Tue May  5 22:17:36 2009
@@ -1,3 +1,7 @@
+query: drop table external2
+query: create external table external2(a int, b int) partitioned by (ds string)
+query: alter table external2 add partition (ds='2008-01-01') location 'invalidscheme://data.s3ndemo.hive/pkv/2008-01-01'
 FAILED: Error in metadata: MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+query: describe external2 partition (ds='2008-01-01')
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out Tue May  5 22:17:36 2009
@@ -1 +1,8 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS
+  INPUTFORMAT 'java.lang.Void'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
+query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
+Input: default/src
+Output: default/dest1
 FAILED: Error in semantic analysis: line 3:20 Input Format must implement InputFormat dest1

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out Tue May  5 22:17:36 2009
@@ -1,2 +1,8 @@
+query: DROP TABLE inv_valid_tbl1
+query: CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table' 
+    PARTITIONED BY(aint DATETIME, country STRING) 
+    CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
+    ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol')
+    STORED AS SEQUENCEFILE
 FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition column name aint conflicts with table columns.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out Tue May  5 22:17:36 2009
@@ -1,2 +1,5 @@
+query: DROP TABLE T1
+query: CREATE TABLE T1(name STRING) STORED AS SEQUENCEFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
 Failed with exception Cannot load text files into a table stored as SequenceFile.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out Tue May  5 22:17:36 2009
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
 FAILED: Error in semantic analysis: line 4:44 Expression Not In Group By Key src

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out Tue May  5 22:17:36 2009
@@ -1 +1,2 @@
+query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
 FAILED: Error in semantic analysis: line 8:8 Cannot have both Order By and Sort By Clauses one

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out Tue May  5 22:17:36 2009
@@ -1,3 +1,6 @@
+query: EXPLAIN
+SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+FROM src
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '../data/scripts/error_script' (TOK_ALIASLIST tkey tvalue))))))
 
@@ -33,4 +36,8 @@
       limit: -1
 
 
+query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+FROM src
+Input: default/src
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/35582581/85175665.10000
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out Tue May  5 22:17:36 2009
@@ -1 +1,5 @@
+query: drop table t1
+query: drop table t2
+query: create table if not exists t1(r string, c string, v string)
+query: create table if not exists t2(s string, c string, v string)
 FAILED: Error in semantic analysis: Schema of both sides of union should match: Column v is of type string on first table and type double on second table

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out Tue May  5 22:17:36 2009
@@ -1,31 +1,49 @@
+query: drop table alter1
+query: create table alter1(a int, b int)
+query: describe extended alter1
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})	
+Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})	
+query: alter table alter1 set tblproperties ('a'='1', 'c'='3')
+query: describe extended alter1
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=rmurthy,c=3,last_modified_time=1238029930,a=1})	
+Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=athusoo,c=3,last_modified_time=1241277624,a=1})	
+query: alter table alter1 set tblproperties ('a'='1', 'c'='4', 'd'='3')
+query: describe extended alter1
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})	
+Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
+query: alter table alter1 set serdeproperties('s1'='9')
+query: describe extended alter1
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})	
+Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
+query: alter table alter1 set serdeproperties('s1'='10', 's2' ='20')
+query: describe extended alter1
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=10,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})	
+Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=10,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
+query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9')
+query: describe extended alter1
 a	string	from deserializer
 b	string	from deserializer
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})	
+Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
+query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'
+query: describe extended alter1
 a	string	from deserializer
 b	string	from deserializer
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:rmurthy,createTime:1238029930,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=rmurthy,c=4,last_modified_time=1238029930,a=1})	
+Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
+query: alter table alter1 replace columns (a int, b int, c string)
+query: describe alter1
 a	int	
 b	int	
 c	string	
+query: drop table alter1

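A second pattern, visible in the alter1 hunk above and the alter2 hunk below: the Detailed Table Information lines churn wholesale on regeneration, because they embed the test user (rmurthy vs. athusoo), epoch timestamps, and the absolute warehouse path of the checkout, alongside the one substantive change, the rename of IgnoreKeyTextOutputFormat to HiveIgnoreKeyTextOutputFormat. A test harness can keep such fields from invalidating golden files by masking them before diffing; a sketch under assumed names (GoldenFileNormalizer and the !!...!! tokens are illustrative, not part of this commit):

    // Illustrative only: masks the environment-specific fields so golden
    // files do not churn on user, timestamp, or checkout path.
    public final class GoldenFileNormalizer {
      private GoldenFileNormalizer() {}

      public static String normalize(String line) {
        return line
            .replaceAll("owner:[^,]+", "owner:!!USER!!")
            .replaceAll("(createTime|last_modified_time)([:=])\\d+", "$1$2!!TIME!!")
            .replaceAll("location:file:[^,)]+", "location:!!PATH!!");
      }
    }

Run over the lines above, the rmurthy and athusoo variants normalize to identical text, leaving only the output-format rename as a real difference.
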
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out Tue May  5 22:17:36 2009
@@ -1,36 +1,57 @@
+query: drop table alter2
+query: create table alter2(a int, b int) partitioned by (insertdate string)
+query: describe extended alter2
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
+Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
+query: show partitions alter2
+query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
+query: describe extended alter2
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
+Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
+query: show partitions alter2
 insertdate=2008-01-01
+query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
+query: describe extended alter2
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
+Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
+query: show partitions alter2
 insertdate=2008-01-01
 insertdate=2008-01-02
+query: drop table alter2
+query: create external table alter2(a int, b int) partitioned by (insertdate string)
+query: describe extended alter2
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
+Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
+query: show partitions alter2
+query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
+query: describe extended alter2
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
+Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
+query: show partitions alter2
 insertdate=2008-01-01
+query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
+query: describe extended alter2
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:rmurthy,createTime:1238029932,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/mnt/vol/devrs005.snc1/rmurthy/hive/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
+Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
+query: show partitions alter2
 insertdate=2008-01-01
 insertdate=2008-01-02
+query: drop table alter2

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
Files hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out (original) and hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out Tue May  5 22:17:36 2009 differ

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out Tue May  5 22:17:36 2009
@@ -1,3 +1,7 @@
+query: CREATE TABLE DEST1(Key INT, VALUE STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC_THRIFT)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (. (TOK_TABLE_OR_COL src_Thrift) LINT) 1)) (TOK_SELEXPR (. ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0) MYSTRING))) (TOK_WHERE (> ([ (. (TOK_TABLE_OR_COL src_thrift) liNT) 0) 0))))
 
@@ -46,6 +50,13 @@
                 name: dest1
 
 
+query: FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
+Input: default/src_thrift
+Output: default/dest1
+query: SELECT DEST1.* FROM Dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/256252840/400813971.10000
 2	1
 4	8
 6	27

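Beyond the Input:/Output: additions, the case_sensitivity output above documents that HiveQL resolves identifiers case-insensitively: DEST1/Dest1/dest1 and SRC_THRIFT/src_Thrift/src_thrift all name the same objects, and LINT/liNT the same column. One plausible way to get that behavior (a sketch, not the analyzer's actual code) is to key the resolver's symbol table on lower-cased names:

    // Sketch only: a case-insensitive symbol table in the style the
    // case_sensitivity test implies; not Hive's actual resolver.
    import java.util.HashMap;
    import java.util.Map;

    public final class CaseInsensitiveResolver {
      private final Map<String, Integer> columns = new HashMap<String, Integer>();

      public void put(String name, int position) {
        columns.put(name.toLowerCase(), position);  // store lower-cased
      }

      public Integer resolve(String name) {
        return columns.get(name.toLowerCase());     // "LINT" == "liNT" == "lint"
      }
    }

With this scheme, the mixed-case references in the query text all hit the same entry, which is why the golden output is unchanged by casing.
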
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out?rev=771990&r1=771989&r2=771990&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out Tue May  5 22:17:36 2009
@@ -1,3 +1,6 @@
+query: CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 STRING, c7 INT) STORED AS TEXTFILE
+query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (+ 3 2)) (TOK_SELEXPR (+ 3.0 2)) (TOK_SELEXPR (+ 3 2.0)) (TOK_SELEXPR (+ 3.0 2.0)) (TOK_SELEXPR (+ (+ 3 (TOK_FUNCTION TOK_INT 2.0)) (TOK_FUNCTION TOK_INT (TOK_FUNCTION TOK_SMALLINT 0)))) (TOK_SELEXPR (TOK_FUNCTION TOK_BOOLEAN 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT TRUE))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) key) 86))))
 
@@ -54,4 +57,10 @@
                 name: dest1
 
 
+query: FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
+Input: default/src
+Output: default/dest1
+query: select dest1.* FROM dest1
+Input: default/dest1
+Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/351550201/345617503.10000
 5	5.0	5.0	5.0	5	true	1
