hadoop-hive-commits mailing list archives

From: rmur...@apache.org
Subject: svn commit: r819792 [4/24] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ data/conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apac...
Date: Tue, 29 Sep 2009 01:25:30 GMT
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 10)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
 
@@ -57,12 +61,21 @@
       limit: -1
 
 
-query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1039157739/10000
+PREHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/10918634/10000
+POSTHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/10918634/10000
 10	val_10
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
+SELECT * FROM SRC x  where x.key = 20 CLUSTER BY key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 SELECT * FROM SRC x  where x.key = 20 CLUSTER BY key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key))))
 
@@ -120,12 +133,21 @@
       limit: -1
 
 
-query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/936932129/10000
+PREHOOK: query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/494894638/10000
+POSTHOOK: query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/494894638/10000
 20	val_20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key))))
 
@@ -183,12 +205,21 @@
       limit: -1
 
 
-query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1777006866/10000
+PREHOOK: query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/726529435/10000
+POSTHOOK: query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/726529435/10000
 20	val_20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
+SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
 
@@ -246,12 +277,21 @@
       limit: -1
 
 
-query: SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/332012199/10000
+PREHOOK: query: SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/424130746/10000
+POSTHOOK: query: SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/424130746/10000
 20	val_20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key))))
 
@@ -309,12 +349,21 @@
       limit: -1
 
 
-query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1467820372/10000
+PREHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/252561840/10000
+POSTHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/252561840/10000
 20	val_20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
 
@@ -372,12 +421,21 @@
       limit: -1
 
 
-query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1936271284/10000
+PREHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/175842831/10000
+POSTHOOK: query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/175842831/10000
 20	val_20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
+SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL v1))))
 
@@ -435,12 +493,21 @@
       limit: -1
 
 
-query: SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/554101432/10000
+PREHOOK: query: SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2554842/10000
+POSTHOOK: query: SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2554842/10000
 20	val_20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key)))) y)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL y) key) 20))))
 
@@ -501,12 +568,21 @@
       limit: -1
 
 
-query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1032907487/10000
+PREHOOK: query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/156651006/10000
+POSTHOOK: query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/156651006/10000
 20	val_20
-query: EXPLAIN 
+PREHOOK: query: EXPLAIN 
+SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key)  where x.key = 20 CLUSTER BY v1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN 
 SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key)  where x.key = 20 CLUSTER BY v1
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) key))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL v1))))
 
@@ -586,7 +662,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/2042815322/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/213322227/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col1
@@ -617,12 +693,21 @@
       limit: -1
 
 
-query: SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/251237187/10000
+PREHOOK: query: SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/458771054/10000
+POSTHOOK: query: SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/458771054/10000
 20	val_20	20
-query: EXPLAIN 
+PREHOOK: query: EXPLAIN 
+SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN 
 SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL v1))))
 
@@ -706,7 +791,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1924216417/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1040346447/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col1
@@ -739,12 +824,21 @@
       limit: -1
 
 
-query: SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/328575061/10000
+PREHOOK: query: SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1866708520/10000
+POSTHOOK: query: SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1866708520/10000
 20	val_20	20	val_20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key))))
 
@@ -828,7 +922,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/745749935/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1738200454/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -861,12 +955,21 @@
       limit: -1
 
 
-query: SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/66741381/10000
+PREHOOK: query: SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/550654126/10000
+POSTHOOK: query: SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/550654126/10000
 20	val_20	20	val_20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
+SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) key) yk)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key))))
 
@@ -946,7 +1049,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1745977063/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1291639540/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -977,11 +1080,16 @@
       limit: -1
 
 
-query: SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/26281356/10000
+PREHOOK: query: SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1737050750/10000
+POSTHOOK: query: SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1737050750/10000
 20	val_20	20
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT unioninput.*
 FROM (
   FROM src select src.key, src.value WHERE src.key < 100
@@ -989,6 +1097,16 @@
   FROM src SELECT src.* WHERE src.key > 100
 ) unioninput
 CLUSTER BY unioninput.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT unioninput.*
+FROM (
+  FROM src select src.key, src.value WHERE src.key < 100
+  UNION ALL
+  FROM src SELECT src.* WHERE src.key > 100
+) unioninput
+CLUSTER BY unioninput.key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) 100))))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF unioninput))) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL unioninput) key))))
 
@@ -1094,15 +1212,26 @@
       limit: -1
 
 
-query: SELECT unioninput.*
+PREHOOK: query: SELECT unioninput.*
+FROM (
+  FROM src select src.key, src.value WHERE src.key < 100
+  UNION ALL
+  FROM src SELECT src.* WHERE src.key > 100
+) unioninput
+CLUSTER BY unioninput.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1695374031/10000
+POSTHOOK: query: SELECT unioninput.*
 FROM (
   FROM src select src.key, src.value WHERE src.key < 100
   UNION ALL
   FROM src SELECT src.* WHERE src.key > 100
 ) unioninput
 CLUSTER BY unioninput.key
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/658971317/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1695374031/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out Tue Sep 29 01:25:15 2009
@@ -1,8 +1,20 @@
-query: drop table columnarserde_create_shortcut
-query: CREATE TABLE columnarserde_create_shortcut(a array<int>, b array<string>, c map<string,string>, d int, e string) STORED AS RCFILE
-query: EXPLAIN
+PREHOOK: query: drop table columnarserde_create_shortcut
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table columnarserde_create_shortcut
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE columnarserde_create_shortcut(a array<int>, b array<string>, c map<string,string>, d int, e string) STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE columnarserde_create_shortcut(a array<int>, b array<string>, c map<string,string>, d int, e string) STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@columnarserde_create_shortcut
+PREHOOK: query: EXPLAIN
 FROM src_thrift
 INSERT OVERWRITE TABLE columnarserde_create_shortcut SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE columnarserde_create_shortcut SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB columnarserde_create_shortcut)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) mstringstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) aint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) astring))) (TOK_DISTRIBUTEBY 1)))
 
@@ -69,13 +81,24 @@
               name: columnarserde_create_shortcut
 
 
-query: FROM src_thrift
+PREHOOK: query: FROM src_thrift
 INSERT OVERWRITE TABLE columnarserde_create_shortcut SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
-Input: default/src_thrift
-Output: default/columnarserde_create_shortcut
-query: SELECT columnarserde_create_shortcut.* FROM columnarserde_create_shortcut DISTRIBUTE BY 1
-Input: default/columnarserde_create_shortcut
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1249909626/10000
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@columnarserde_create_shortcut
+POSTHOOK: query: FROM src_thrift
+INSERT OVERWRITE TABLE columnarserde_create_shortcut SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@columnarserde_create_shortcut
+PREHOOK: query: SELECT columnarserde_create_shortcut.* FROM columnarserde_create_shortcut DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@columnarserde_create_shortcut
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1010466676/10000
+POSTHOOK: query: SELECT columnarserde_create_shortcut.* FROM columnarserde_create_shortcut DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@columnarserde_create_shortcut
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1010466676/10000
 [0,0,0]	["0","0","0"]	{"key_0":"value_0"}	1712634731	record_0
 [1,2,3]	["10","100","1000"]	{"key_1":"value_1"}	465985200	record_1
 [2,4,6]	["20","200","2000"]	{"key_2":"value_2"}	-751827638	record_2
@@ -87,9 +110,14 @@
 [8,16,24]	["80","800","8000"]	{"key_8":"value_8"}	1638581578	record_8
 [9,18,27]	["90","900","9000"]	{"key_9":"value_9"}	336964413	record_9
 null	null	{}	0	NULL
-query: SELECT columnarserde_create_shortcut.a[0], columnarserde_create_shortcut.b[0], columnarserde_create_shortcut.c['key2'], columnarserde_create_shortcut.d, columnarserde_create_shortcut.e FROM columnarserde_create_shortcut DISTRIBUTE BY 1
-Input: default/columnarserde_create_shortcut
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/467882709/10000
+PREHOOK: query: SELECT columnarserde_create_shortcut.a[0], columnarserde_create_shortcut.b[0], columnarserde_create_shortcut.c['key2'], columnarserde_create_shortcut.d, columnarserde_create_shortcut.e FROM columnarserde_create_shortcut DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@columnarserde_create_shortcut
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/236647428/10000
+POSTHOOK: query: SELECT columnarserde_create_shortcut.a[0], columnarserde_create_shortcut.b[0], columnarserde_create_shortcut.c['key2'], columnarserde_create_shortcut.d, columnarserde_create_shortcut.e FROM columnarserde_create_shortcut DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@columnarserde_create_shortcut
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/236647428/10000
 0	0	NULL	1712634731	record_0
 1	10	NULL	465985200	record_1
 2	20	NULL	-751827638	record_2
@@ -101,19 +129,44 @@
 8	80	NULL	1638581578	record_8
 9	90	NULL	336964413	record_9
 NULL	NULL	NULL	0	NULL
-query: drop table columnarserde_create_shortcut
-query: DROP TABLE columnShortcutTable
-query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE
-query: FROM src
+PREHOOK: query: drop table columnarserde_create_shortcut
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table columnarserde_create_shortcut
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@columnarserde_create_shortcut
+PREHOOK: query: DROP TABLE columnShortcutTable
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE columnShortcutTable
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@columnShortcutTable
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE columnShortcutTable SELECT src.key, src.value LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@columnshortcuttable
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE columnShortcutTable SELECT src.key, src.value LIMIT 10
-Input: default/src
-Output: default/columnshortcuttable
-query: describe columnShortcutTable
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@columnshortcuttable
+PREHOOK: query: describe columnShortcutTable
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe columnShortcutTable
+POSTHOOK: type: DESCTABLE
 key	string	from deserializer
 value	string	from deserializer
-query: SELECT columnShortcutTable.* FROM columnShortcutTable
-Input: default/columnshortcuttable
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2017736818/10000
+PREHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
+PREHOOK: type: QUERY
+PREHOOK: Input: default@columnshortcuttable
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/531162572/10000
+POSTHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@columnshortcuttable
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/531162572/10000
 238	val_238
 86	val_86
 311	val_311
@@ -124,10 +177,20 @@
 278	val_278
 98	val_98
 484	val_484
-query: ALTER TABLE columnShortcutTable ADD COLUMNS (c string)
-query: SELECT columnShortcutTable.* FROM columnShortcutTable
-Input: default/columnshortcuttable
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/984890513/10000
+PREHOOK: query: ALTER TABLE columnShortcutTable ADD COLUMNS (c string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: query: ALTER TABLE columnShortcutTable ADD COLUMNS (c string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@columnshortcuttable
+POSTHOOK: Output: default@columnshortcuttable
+PREHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
+PREHOOK: type: QUERY
+PREHOOK: Input: default@columnshortcuttable
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1510560377/10000
+POSTHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@columnshortcuttable
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1510560377/10000
 238	val_238	NULL
 86	val_86	NULL
 311	val_311	NULL
@@ -138,10 +201,20 @@
 278	val_278	NULL
 98	val_98	NULL
 484	val_484	NULL
-query: ALTER TABLE columnShortcutTable REPLACE COLUMNS (key int)
-query: SELECT columnShortcutTable.* FROM columnShortcutTable
-Input: default/columnshortcuttable
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1805361378/10000
+PREHOOK: query: ALTER TABLE columnShortcutTable REPLACE COLUMNS (key int)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: query: ALTER TABLE columnShortcutTable REPLACE COLUMNS (key int)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@columnshortcuttable
+POSTHOOK: Output: default@columnshortcuttable
+PREHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
+PREHOOK: type: QUERY
+PREHOOK: Input: default@columnshortcuttable
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1833240424/10000
+POSTHOOK: query: SELECT columnShortcutTable.* FROM columnShortcutTable
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@columnshortcuttable
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1833240424/10000
 238
 86
 311
@@ -152,4 +225,8 @@
 278
 98
 484
-query: DROP TABLE columnShortcutTable
+PREHOOK: query: DROP TABLE columnShortcutTable
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE columnShortcutTable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@columnshortcuttable

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out Tue Sep 29 01:25:15 2009
@@ -1,36 +1,93 @@
-query: DROP TABLE table1
-query: DROP TABLE table2
-query: DROP TABLE table3
-query: CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE
-query: DESCRIBE table1
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE table2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE table3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table1
+PREHOOK: query: DESCRIBE table1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE table1
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
-query: DESCRIBE EXTENDED table1
+PREHOOK: query: DESCRIBE EXTENDED table1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED table1
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
 	 	 
-Detailed Table Information	Table(tableName:table1,dbName:default,owner:athusoo,createTime:1241277708,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/table1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})	
-query: CREATE TABLE IF NOT EXISTS table1 (a STRING, b STRING) STORED AS TEXTFILE
-query: CREATE TABLE IF NOT EXISTS table2 (a STRING, b INT) STORED AS TEXTFILE
-query: DESCRIBE table2
+Detailed Table Information	Table(tableName:table1, dbName:default, owner:njain, createTime:1253779809, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})	
+PREHOOK: query: CREATE TABLE IF NOT EXISTS table1 (a STRING, b STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS table1 (a STRING, b STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table1
+PREHOOK: query: CREATE TABLE IF NOT EXISTS table2 (a STRING, b INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS table2 (a STRING, b INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table2
+PREHOOK: query: DESCRIBE table2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE table2
+POSTHOOK: type: DESCTABLE
 a	string	
 b	int	
-query: DESCRIBE EXTENDED table2
+PREHOOK: query: DESCRIBE EXTENDED table2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED table2
+POSTHOOK: type: DESCTABLE
 a	string	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:table2,dbName:default,owner:athusoo,createTime:1241277708,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/table2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})	
-query: CREATE TABLE table3 (a STRING, b STRING)
+Detailed Table Information	Table(tableName:table2, dbName:default, owner:njain, createTime:1253779809, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})	
+PREHOOK: query: CREATE TABLE table3 (a STRING, b STRING)
 ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
 STORED AS TEXTFILE
-query: DESCRIBE table3
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table3 (a STRING, b STRING)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table3
+PREHOOK: query: DESCRIBE table3
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE table3
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
-query: DESCRIBE EXTENDED table3
+PREHOOK: query: DESCRIBE EXTENDED table3
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED table3
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
 	 	 
-Detailed Table Information	Table(tableName:table3,dbName:default,owner:athusoo,createTime:1241277708,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/table3,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=9,field.delim=	}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
-query: DROP TABLE table1
-query: DROP TABLE table2
-query: DROP TABLE table3
+Detailed Table Information	Table(tableName:table3, dbName:default, owner:njain, createTime:1253779810, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table3, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=9,field.delim=	}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table1
+PREHOOK: query: DROP TABLE table2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table2
+PREHOOK: query: DROP TABLE table3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table3

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_escape.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_escape.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_escape.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_escape.q.out Tue Sep 29 01:25:15 2009
@@ -1,22 +1,57 @@
-query: DROP TABLE table1
-query: DROP TABLE table2
-query: DROP TABLE table3
-query: CREATE TABLE table1 (a STRING, b STRING)
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE table2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE table3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING)
 ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' ESCAPED BY '\\'
 STORED AS TEXTFILE
-query: DESCRIBE table1
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table1 (a STRING, b STRING)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' ESCAPED BY '\\'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table1
+PREHOOK: query: DESCRIBE table1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE table1
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
-query: DESCRIBE EXTENDED table1
+PREHOOK: query: DESCRIBE EXTENDED table1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED table1
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
 	 	 
-Detailed Table Information	Table(tableName:table1,dbName:default,owner:zshao,createTime:1245330503,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/zshao/tools/136-trunk-apache-hive/build/ql/test/data/warehouse/table1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=9,escape.delim=\,field.delim=	}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
-query: INSERT OVERWRITE TABLE table1 SELECT key, '\\\t\\' FROM src WHERE key = 86
-Input: default/src
-Output: default/table1
-query: SELECT * FROM table1
-Input: default/table1
-Output: file:/data/users/zshao/tools/136-trunk-apache-hive/build/ql/tmp/822936101/10000
+Detailed Table Information	Table(tableName:table1, dbName:default, owner:njain, createTime:1253779812, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=9,escape.delim=\,field.delim=	}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})
+PREHOOK: query: INSERT OVERWRITE TABLE table1 SELECT key, '\\\t\\' FROM src WHERE key = 86
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table1
+POSTHOOK: query: INSERT OVERWRITE TABLE table1 SELECT key, '\\\t\\' FROM src WHERE key = 86
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table1
+PREHOOK: query: SELECT * FROM table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1244934892/10000
+POSTHOOK: query: SELECT * FROM table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1244934892/10000
 86	\	\
-query: DROP TABLE table1
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: EXPLAIN
+CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
+POSTHOOK: type: CREATEFUNCTION
 ABSTRACT SYNTAX TREE:
   (TOK_CREATEFUNCTION test_avg 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage')
 
@@ -10,12 +14,22 @@
   Stage: Stage-0
 
 
-query: CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
-query: EXPLAIN
+PREHOOK: query: CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
+POSTHOOK: type: CREATEFUNCTION
+PREHOOK: query: EXPLAIN
+SELECT
+    test_avg(1),
+    test_avg(substr(value,5))
+FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 SELECT
     test_avg(1),
     test_avg(substr(value,5))
 FROM src
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION test_avg 1)) (TOK_SELEXPR (TOK_FUNCTION test_avg (TOK_FUNCTION substr (TOK_TABLE_OR_COL value) 5))))))
 
@@ -75,11 +89,22 @@
       limit: -1
 
 
-query: SELECT
+PREHOOK: query: SELECT
+    test_avg(1),
+    test_avg(substr(value,5))
+FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1951411616/10000
+POSTHOOK: query: SELECT
     test_avg(1),
     test_avg(substr(value,5))
 FROM src
-Input: default/src
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1700649603/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1951411616/10000
 1.0	260.182
-query: DROP TEMPORARY FUNCTIOn test_avg
+PREHOOK: query: DROP TEMPORARY FUNCTIOn test_avg
+PREHOOK: type: DROPFUNCTION
+POSTHOOK: query: DROP TEMPORARY FUNCTIOn test_avg
+POSTHOOK: type: DROPFUNCTION

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudf.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudf.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudf.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudf.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 CREATE TEMPORARY FUNCTION test_translate AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: EXPLAIN
+CREATE TEMPORARY FUNCTION test_translate AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate'
+POSTHOOK: type: CREATEFUNCTION
 ABSTRACT SYNTAX TREE:
   (TOK_CREATEFUNCTION test_translate 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate')
 
@@ -10,9 +14,29 @@
   Stage: Stage-0
 
 
-query: CREATE TEMPORARY FUNCTION test_translate AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate'
-query: CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 STRING, c7 STRING)
-query: FROM src 
+PREHOOK: query: CREATE TEMPORARY FUNCTION test_translate AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: CREATE TEMPORARY FUNCTION test_translate AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate'
+POSTHOOK: type: CREATEFUNCTION
+PREHOOK: query: CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 STRING, c7 STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 STRING, c7 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: FROM src 
+INSERT OVERWRITE TABLE dest1 
+SELECT 
+    test_translate('abc', 'a', 'b'),
+    test_translate('abc', 'ab', 'bc'),
+    test_translate(NULL, 'a', 'b'),
+    test_translate('a', NULL, 'b'),
+    test_translate('a', 'a', NULL),
+    test_translate('abc', 'ab', 'b'),
+    test_translate('abc', 'a', 'ab')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src 
 INSERT OVERWRITE TABLE dest1 
 SELECT 
     test_translate('abc', 'a', 'b'),
@@ -22,10 +46,19 @@
     test_translate('a', 'a', NULL),
     test_translate('abc', 'ab', 'b'),
     test_translate('abc', 'a', 'ab')
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1 LIMIT 1
-Input: default/dest1
-Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1518422046/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1 LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/161564459/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1 LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/161564459/10000
 bbc	bcc	NULL	NULL	NULL	bc	abc
-query: DROP TEMPORARY FUNCTION test_translate
+PREHOOK: query: DROP TEMPORARY FUNCTION test_translate
+PREHOOK: type: DROPFUNCTION
+POSTHOOK: query: DROP TEMPORARY FUNCTION test_translate
+POSTHOOK: type: DROPFUNCTION

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out Tue Sep 29 01:25:15 2009
@@ -1,36 +1,102 @@
-query: DROP TABLE table_test_output_format
-query: CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS
+PREHOOK: query: DROP TABLE table_test_output_format
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table_test_output_format
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
   OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
-query: FROM src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS
+  INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table_test_output_format
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE table_test_output_format SELECT src.key, src.value LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table_test_output_format
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE table_test_output_format SELECT src.key, src.value LIMIT 10
-Input: default/src
-Output: default/table_test_output_format
-query: describe table_test_output_format
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table_test_output_format
+PREHOOK: query: describe table_test_output_format
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe table_test_output_format
+POSTHOOK: type: DESCTABLE
 key	int	
 value	string	
-query: DROP TABLE table_test_output_format
-query: DROP TABLE table_test_output_format_sequencefile
-query: CREATE TABLE table_test_output_format_sequencefile(key INT, value STRING) STORED AS
+PREHOOK: query: DROP TABLE table_test_output_format
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table_test_output_format
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table_test_output_format
+PREHOOK: query: DROP TABLE table_test_output_format_sequencefile
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table_test_output_format_sequencefile
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table_test_output_format_sequencefile(key INT, value STRING) STORED AS
+  INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table_test_output_format_sequencefile(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
   OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
-query: FROM src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table_test_output_format_sequencefile
+PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE table_test_output_format_sequencefile SELECT src.key, src.value LIMIT 10
-Input: default/src
-Output: default/table_test_output_format_sequencefile
-query: describe table_test_output_format_sequencefile
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table_test_output_format_sequencefile
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE table_test_output_format_sequencefile SELECT src.key, src.value LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table_test_output_format_sequencefile
+PREHOOK: query: describe table_test_output_format_sequencefile
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe table_test_output_format_sequencefile
+POSTHOOK: type: DESCTABLE
 key	int	
 value	string	
-query: DROP TABLE table_test_output_format_sequencefile
-query: DROP TABLE table_test_output_format_hivesequencefile
-query: CREATE TABLE table_test_output_format_hivesequencefile(key INT, value STRING) STORED AS
+PREHOOK: query: DROP TABLE table_test_output_format_sequencefile
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table_test_output_format_sequencefile
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table_test_output_format_sequencefile
+PREHOOK: query: DROP TABLE table_test_output_format_hivesequencefile
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table_test_output_format_hivesequencefile
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table_test_output_format_hivesequencefile(key INT, value STRING) STORED AS
+  INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table_test_output_format_hivesequencefile(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
   OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-query: FROM src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table_test_output_format_hivesequencefile
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE table_test_output_format_hivesequencefile SELECT src.key, src.value LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table_test_output_format_hivesequencefile
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE table_test_output_format_hivesequencefile SELECT src.key, src.value LIMIT 10
-Input: default/src
-Output: default/table_test_output_format_hivesequencefile
-query: describe table_test_output_format_hivesequencefile
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table_test_output_format_hivesequencefile
+PREHOOK: query: describe table_test_output_format_hivesequencefile
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe table_test_output_format_hivesequencefile
+POSTHOOK: type: DESCTABLE
 key	int	
 value	string	
-query: DROP TABLE table_test_output_format_hivesequencefile
+PREHOOK: query: DROP TABLE table_test_output_format_hivesequencefile
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table_test_output_format_hivesequencefile
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table_test_output_format_hivesequencefile

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_like.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_like.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_like.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_like.q.out Tue Sep 29 01:25:15 2009
@@ -1,50 +1,129 @@
-query: DROP TABLE table1
-query: DROP TABLE table2
-query: DROP TABLE table3
-query: CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE
-query: DESCRIBE table1
-a	string	
-b	string	
-query: DESCRIBE EXTENDED table1
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE table2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE table3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table1
+PREHOOK: query: DESCRIBE table1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE table1
+POSTHOOK: type: DESCTABLE
+a	string	
+b	string	
+PREHOOK: query: DESCRIBE EXTENDED table1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED table1
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
 	 	 
-Detailed Table Information	Table(tableName:table1,dbName:default,owner:zshao,createTime:1243547901,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/zshao/tools/495-trunk-apache-hive/build/ql/test/data/warehouse/table1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})	
-query: CREATE TABLE table2 LIKE table1
-query: DESCRIBE table2
-a	string	
-b	string	
-query: DESCRIBE EXTENDED table2
+Detailed Table Information	Table(tableName:table1, dbName:default, owner:njain, createTime:1253779841, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})	
+PREHOOK: query: CREATE TABLE table2 LIKE table1
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table2 LIKE table1
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table2
+PREHOOK: query: DESCRIBE table2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE table2
+POSTHOOK: type: DESCTABLE
+a	string	
+b	string	
+PREHOOK: query: DESCRIBE EXTENDED table2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED table2
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
 	 	 
-Detailed Table Information	Table(tableName:table2,dbName:default,owner:zshao,createTime:1243547901,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/zshao/tools/495-trunk-apache-hive/build/ql/test/data/warehouse/table2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{EXTERNAL=FALSE})	
-query: CREATE TABLE IF NOT EXISTS table2 LIKE table1
-query: CREATE EXTERNAL TABLE IF NOT EXISTS table2 LIKE table1
-query: CREATE EXTERNAL TABLE IF NOT EXISTS table3 LIKE table1
-query: DESCRIBE table3
-a	string	
-b	string	
-query: DESCRIBE EXTENDED table3
+Detailed Table Information	Table(tableName:table2, dbName:default, owner:njain, createTime:1253779841, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE})	
+PREHOOK: query: CREATE TABLE IF NOT EXISTS table2 LIKE table1
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS table2 LIKE table1
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table2
+PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS table2 LIKE table1
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS table2 LIKE table1
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table2
+PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS table3 LIKE table1
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS table3 LIKE table1
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table3
+PREHOOK: query: DESCRIBE table3
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE table3
+POSTHOOK: type: DESCTABLE
+a	string	
+b	string	
+PREHOOK: query: DESCRIBE EXTENDED table3
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED table3
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
 	 	 
-Detailed Table Information	Table(tableName:table3,dbName:default,owner:zshao,createTime:1243547901,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/zshao/tools/495-trunk-apache-hive/build/ql/test/data/warehouse/table3,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{EXTERNAL=TRUE})	
-query: INSERT OVERWRITE TABLE table1 SELECT key, value FROM src WHERE key = 86
-Input: default/src
-Output: default/table1
-query: INSERT OVERWRITE TABLE table2 SELECT key, value FROM src WHERE key = 100
-Input: default/src
-Output: default/table2
-query: SELECT * FROM table1
-Input: default/table1
-Output: file:/data/users/zshao/tools/495-trunk-apache-hive/build/ql/tmp/2122743298/10000
+Detailed Table Information	Table(tableName:table3, dbName:default, owner:njain, createTime:1253779841, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table3, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE})	
+PREHOOK: query: INSERT OVERWRITE TABLE table1 SELECT key, value FROM src WHERE key = 86
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table1
+POSTHOOK: query: INSERT OVERWRITE TABLE table1 SELECT key, value FROM src WHERE key = 86
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table1
+PREHOOK: query: INSERT OVERWRITE TABLE table2 SELECT key, value FROM src WHERE key = 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@table2
+POSTHOOK: query: INSERT OVERWRITE TABLE table2 SELECT key, value FROM src WHERE key = 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@table2
+PREHOOK: query: SELECT * FROM table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/20199794/10000
+POSTHOOK: query: SELECT * FROM table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/20199794/10000
 86	val_86
-query: SELECT * FROM table2
-Input: default/table2
-Output: file:/data/users/zshao/tools/495-trunk-apache-hive/build/ql/tmp/1988589189/10000
+PREHOOK: query: SELECT * FROM table2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table2
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1027337637/10000
+POSTHOOK: query: SELECT * FROM table2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table2
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1027337637/10000
 100	val_100
 100	val_100
-query: DROP TABLE table1
-query: DROP TABLE table2
-query: DROP TABLE table3
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table1
+PREHOOK: query: DROP TABLE table2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table2
+PREHOOK: query: DROP TABLE table3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table3
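create_like.q exercises CREATE TABLE ... LIKE, which copies the source table's column layout and storage format but none of its data; the EXTERNAL variant surfaces in DESCRIBE EXTENDED as the EXTERNAL=TRUE table parameter seen above. A minimal sketch, reusing the test's own table names:

CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE;
CREATE TABLE table2 LIKE table1;                         -- same schema, no rows copied
CREATE EXTERNAL TABLE IF NOT EXISTS table3 LIKE table1;
DESCRIBE EXTENDED table3;                                -- shows parameters:{EXTERNAL=TRUE}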

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_nested_type.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_nested_type.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_nested_type.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_nested_type.q.out Tue Sep 29 01:25:15 2009
@@ -1,28 +1,59 @@
-query: DROP TABLE table1
-query: CREATE TABLE table1 (
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table1 (
        a STRING,
        b ARRAY<STRING>,
        c ARRAY<MAP<STRING,STRING>>,
        d MAP<STRING,ARRAY<STRING>>
        ) STORED AS TEXTFILE
-query: DESCRIBE table1
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table1 (
+       a STRING,
+       b ARRAY<STRING>,
+       c ARRAY<MAP<STRING,STRING>>,
+       d MAP<STRING,ARRAY<STRING>>
+       ) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table1
+PREHOOK: query: DESCRIBE table1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE table1
+POSTHOOK: type: DESCTABLE
 a	string	
 b	array<string>	
 c	array<map<string,string>>	
 d	map<string,array<string>>	
-query: DESCRIBE EXTENDED table1
+PREHOOK: query: DESCRIBE EXTENDED table1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED table1
+POSTHOOK: type: DESCTABLE
 a	string	
 b	array<string>	
 c	array<map<string,string>>	
 d	map<string,array<string>>	
 	 	 
-Detailed Table Information	Table(tableName:table1,dbName:default,owner:zshao,createTime:1246574836,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:array<string>,comment:null), FieldSchema(name:c,type:array<map<string,string>>,comment:null), FieldSchema(name:d,type:map<string,array<string>>,comment:null)],location:file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/table1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})	
-query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1
-query: SELECT * from table1
-Input: default/table1
-Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1990601936/10000
+Detailed Table Information	Table(tableName:table1, dbName:default, owner:njain, createTime:1253779851, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:array<string>, comment:null), FieldSchema(name:c, type:array<map<string,string>>, comment:null), FieldSchema(name:d, type:map<string,array<string>>, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})	
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1
+PREHOOK: type: LOAD
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@table1
+PREHOOK: query: SELECT * from table1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1594663066/10000
+POSTHOOK: query: SELECT * from table1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1594663066/10000
 a0	["b00","b01"]	[{"c001":"C001","c002":"C002"},{"c011":null,"c012":"C012"}]	{"d01":["d011","d012"],"d02":["d021","d022"]}
 a1	["b10"]	[{"c001":"C001","c002":"C002"}]	{"d01":["d011","d012"],"d02":null}
 a2	[]	[{"c001":null,"c002":"C002"},{"c011":"C011","c012":"C012"}]	{"d01":[null,"d012"],"d02":["d021","d022"]}
 a3	null	null	null
-query: DROP TABLE table1
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@table1
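create_nested_type.q covers DDL with nested collection types and loading a delimited text file into such a table. The sketch below repeats the test's DDL; the final SELECT is an illustrative addition, not part of the golden file, showing how array and map elements are addressed:

CREATE TABLE table1 (
  a STRING,
  b ARRAY<STRING>,
  c ARRAY<MAP<STRING,STRING>>,
  d MAP<STRING,ARRAY<STRING>>
) STORED AS TEXTFILE;
LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1;
SELECT a, b[0], d['d01'] FROM table1;   -- b[0] indexes the array, d['d01'] looks up the map key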

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_struct_table.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_struct_table.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_struct_table.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_struct_table.q.out Tue Sep 29 01:25:15 2009
@@ -1,13 +1,33 @@
-query: drop table abc
-query: create table abc(strct struct<a:int, b:string, c:string>)
+PREHOOK: query: drop table abc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table abc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table abc(strct struct<a:int, b:string, c:string>)
 row format delimited
   fields terminated by '\t'
   collection items terminated by '\001'
-query: load data local inpath '../data/files/kv1.txt'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table abc(strct struct<a:int, b:string, c:string>)
+row format delimited
+  fields terminated by '\t'
+  collection items terminated by '\001'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@abc
+PREHOOK: query: load data local inpath '../data/files/kv1.txt'
+overwrite into table abc
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt'
 overwrite into table abc
-query: SELECT strct, strct.a, strct.b FROM abc LIMIT 10
-Input: default/abc
-Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1624100365/10000
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@abc
+PREHOOK: query: SELECT strct, strct.a, strct.b FROM abc LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@abc
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/611539169/10000
+POSTHOOK: query: SELECT strct, strct.a, strct.b FROM abc LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@abc
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/611539169/10000
 {"a":238,"b":"val_238","c":null}	238	val_238
 {"a":86,"b":"val_86","c":null}	86	val_86
 {"a":311,"b":"val_311","c":null}	311	val_311
@@ -18,4 +38,8 @@
 {"a":278,"b":"val_278","c":null}	278	val_278
 {"a":98,"b":"val_98","c":null}	98	val_98
 {"a":484,"b":"val_484","c":null}	484	val_484
-query: drop table abc
+PREHOOK: query: drop table abc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table abc
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@abc
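create_struct_table.q declares a single STRUCT column with DELIMITED row format and reads its fields back with dot notation; everything in the sketch below comes from the test itself:

CREATE TABLE abc (strct STRUCT<a:INT, b:STRING, c:STRING>)
ROW FORMAT DELIMITED
  FIELDS TERMINATED BY '\t'
  COLLECTION ITEMS TERMINATED BY '\001';
LOAD DATA LOCAL INPATH '../data/files/kv1.txt' OVERWRITE INTO TABLE abc;
SELECT strct, strct.a, strct.b FROM abc LIMIT 10;   -- strct.a and strct.b address the struct fields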

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_udaf.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_udaf.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_udaf.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_udaf.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 CREATE TEMPORARY FUNCTION test_max AS 'org.apache.hadoop.hive.ql.udf.UDAFTestMax'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: EXPLAIN
+CREATE TEMPORARY FUNCTION test_max AS 'org.apache.hadoop.hive.ql.udf.UDAFTestMax'
+POSTHOOK: type: CREATEFUNCTION
 ABSTRACT SYNTAX TREE:
   (TOK_CREATEFUNCTION test_max 'org.apache.hadoop.hive.ql.udf.UDAFTestMax')
 
@@ -10,13 +14,33 @@
   Stage: Stage-0
 
 
-query: CREATE TEMPORARY FUNCTION test_max AS 'org.apache.hadoop.hive.ql.udf.UDAFTestMax'
-query: CREATE TABLE dest1(col INT)
-query: FROM src INSERT OVERWRITE TABLE dest1 SELECT test_max(length(src.value))
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/208230949/10000
+PREHOOK: query: CREATE TEMPORARY FUNCTION test_max AS 'org.apache.hadoop.hive.ql.udf.UDAFTestMax'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: CREATE TEMPORARY FUNCTION test_max AS 'org.apache.hadoop.hive.ql.udf.UDAFTestMax'
+POSTHOOK: type: CREATEFUNCTION
+PREHOOK: query: CREATE TABLE dest1(col INT)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(col INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT test_max(length(src.value))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT test_max(length(src.value))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1967995374/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1967995374/10000
 7
-query: DROP TEMPORARY FUNCTION test_max
+PREHOOK: query: DROP TEMPORARY FUNCTION test_max
+PREHOOK: type: DROPFUNCTION
+POSTHOOK: query: DROP TEMPORARY FUNCTION test_max
+POSTHOOK: type: DROPFUNCTION
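create_udaf.q runs the full lifecycle of a temporary aggregate function: register the implementing class, call it in a query, then drop it. UDAFTestMax is a test UDAF in the Hive source tree, registered here only for the test; a minimal sketch:

CREATE TEMPORARY FUNCTION test_max AS 'org.apache.hadoop.hive.ql.udf.UDAFTestMax';
SELECT test_max(length(src.value)) FROM src;   -- aggregates over the whole table, returns one row
DROP TEMPORARY FUNCTION test_max;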


