hadoop-common-commits mailing list archives

From: dhr...@apache.org
Subject: svn commit: r712905 [8/38] - in /hadoop/core/trunk: ./ src/contrib/hive/ src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/ src/contrib/hive/common/src/java/org/apache/hadoop/hive/conf/ src/contrib/hive/conf/ src/contrib/hive/data/files/ src/con...
Date: Tue, 11 Nov 2008 01:50:18 GMT
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join4.q?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join4.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join4.q Mon Nov 10 17:50:06 2008
@@ -10,5 +10,5 @@
  ON (a.c1 = b.c3)
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
-INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+SELECT c.c1, c.c2, c.c3, c.c4
 

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join5.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join5.q?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join5.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join5.q Mon Nov 10 17:50:06 2008
@@ -10,6 +10,6 @@
  ON (a.c1 = b.c3)
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
-INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+SELECT c.c1, c.c2, c.c3, c.c4
 
 

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join6.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join6.q?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join6.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join6.q Mon Nov 10 17:50:06 2008
@@ -10,7 +10,7 @@
  ON (a.c1 = b.c3)
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
-INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+SELECT c.c1, c.c2, c.c3, c.c4
 
 
 

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join7.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join7.q?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join7.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join7.q Mon Nov 10 17:50:06 2008
@@ -15,7 +15,7 @@
  ON (a.c1 = c.c5)
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6
 ) c
-INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6
+SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6
 
 
 

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join8.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join8.q?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join8.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/join8.q Mon Nov 10 17:50:06 2008
@@ -10,5 +10,5 @@
  ON (a.c1 = b.c3)
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
-INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL
+SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL
 

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/sample1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/sample1.q?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/sample1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/sample1.q Mon Nov 10 17:50:06 2008
@@ -1,5 +1,5 @@
 -- no input pruning, no sample filter
-INSERT OVERWRITE TABLE dest1 SELECT s.*
-FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s
+SELECT s.*
+FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON rand()) s
 WHERE s.ds='2008-04-08' and s.hr='11'
 

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/udf1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/udf1.q?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/udf1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/udf1.q Mon Nov 10 17:50:06 2008
@@ -1,4 +1,4 @@
-FROM src INSERT OVERWRITE TABLE dest1 SELECT 'a' LIKE '%a%', 'b' LIKE '%a%', 'ab' LIKE '%a%', 'ab' LIKE '%a_',
+FROM src SELECT 'a' LIKE '%a%', 'b' LIKE '%a%', 'ab' LIKE '%a%', 'ab' LIKE '%a_',
   '%_' LIKE '\%\_', 'ab' LIKE '\%\_', 'ab' LIKE '_a%', 'ab' LIKE 'a',
   '' RLIKE '.*', 'a' RLIKE '[ab]', '' RLIKE '[ab]', 'hadoop' RLIKE '[a-z]*', 'hadoop' RLIKE 'o*',
   REGEXP_REPLACE('abc', 'b', 'c'), REGEXP_REPLACE('abc', 'z', 'a'), REGEXP_REPLACE('abbbb', 'bb', 'b'), REGEXP_REPLACE('hadoop', '(.)[a-z]*', '$1ive')

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/bad_sample_clause.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/bad_sample_clause.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/bad_sample_clause.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/bad_sample_clause.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Sampling Expression Needed for Non-Bucketed Table srcpart
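
This new negative test pairs with the sample1.q change above: sampling a table that is not bucketed now requires an explicit ON expression in the TABLESAMPLE clause. A minimal sketch of the two forms, assuming the standard srcpart test table (partitioned but not bucketed):

    -- rejected: no sampling expression on a non-bucketed table
    SELECT s.* FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s WHERE s.ds='2008-04-08' AND s.hr='11';

    -- accepted: an explicit ON expression supplies the bucketing
    SELECT s.* FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON rand()) s WHERE s.ds='2008-04-08' AND s.hr='11';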

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input1.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input1.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input1.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 1:7 Invalid Table Alias a

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input2.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input2.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input2.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input2.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 1:7 Invalid Table Alias a

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input_testxpath4.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input_testxpath4.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input_testxpath4.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/input_testxpath4.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 3:42 Invalid Table Alias lintstring

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,2 @@
+FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition collumn name aint conflicts with table columns.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/invalid_tbl_name.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/invalid_tbl_name.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/invalid_tbl_name.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/invalid_tbl_name.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,2 @@
+FAILED: Parse Error: line 1:20 mismatched input '-' expecting EOF
+

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/joinneg.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/joinneg.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/joinneg.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/joinneg.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 5:12 Invalid Table Alias b

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,2 @@
+Failed with exception Cannot load text files into a table stored as SequenceFile.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/notable_alias3.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/notable_alias3.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/notable_alias3.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/notable_alias3.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 2:44 Expression Not In Group By Key key

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/notable_alias4.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/notable_alias4.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/notable_alias4.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/notable_alias4.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Column key Found in more than One Tables/Subqueries

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/alter1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/alter1.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/alter1.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/alter1.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,28 @@
+a	int
+b	int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+a	int
+b	int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=njain,c=3,last_modified_time=1225994182,a=1})
+a	int
+b	int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+a	int
+b	int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+a	int
+b	int
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s2=20,s1=10,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+a	string	'from deserializer'
+b	string	'from deserializer'
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+a	string	'from deserializer'
+b	string	'from deserializer'
+Detailed Table Information:
+Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
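
The alter1.q.out output above is a sequence of DESCRIBE EXTENDED alter1 results taken after successive ALTER TABLE statements. A rough reconstruction of the kind of statements that would produce the parameter changes shown (the test script itself is not part of this hunk, so the exact statements and ordering are inferred from the output):

    CREATE TABLE alter1(a INT, b INT);
    ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='3');
    ALTER TABLE alter1 SET TBLPROPERTIES ('c'='4', 'd'='3');
    ALTER TABLE alter1 SET SERDEPROPERTIES ('s1'='9');
    ALTER TABLE alter1 SET SERDEPROPERTIES ('s1'='10', 's2'='20');
    ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9');
    ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe';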

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out Mon Nov 10 17:50:06 2008
@@ -10,22 +10,28 @@
     Map Reduce
       Alias -> Map Operator Tree:
         src_thrift 
-            Filter Operator
-              predicate:
-                  expr: (lint[0] > 0)
-                  type: Boolean
-              Select Operator
-                expressions:
-                      expr: lint[1]
-                      type: int
-                      expr: lintstring[0].MYSTRING
-                      type: string
-                File Output Operator
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
-                      name: dest1
+            Select Operator
+              expressions:
+                    expr: lint
+                    type: array<int>
+                    expr: lintstring
+                    type: array<struct{myint:int,mystring:string}>
+              Filter Operator
+                predicate:
+                    expr: (0[0] > 0)
+                    type: boolean
+                Select Operator
+                  expressions:
+                        expr: 0[1]
+                        type: int
+                        expr: 1[0].MYSTRING
+                        type: string
+                  File Output Operator
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                        name: dest1
 
   Stage: Stage-0
     Move Operator

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out Mon Nov 10 17:50:06 2008
@@ -10,32 +10,36 @@
     Map Reduce
       Alias -> Map Operator Tree:
         src 
-            Filter Operator
-              predicate:
-                  expr: (key = 86)
-                  type: Boolean
-              Select Operator
-                expressions:
-                      expr: (3 + 2)
-                      type: int
-                      expr: (3.0 + UDFToDouble(2))
-                      type: double
-                      expr: (UDFToDouble(3) + 2.0)
-                      type: double
-                      expr: (3.0 + 2.0)
-                      type: double
-                      expr: (3 + UDFToInteger(2.0))
-                      type: int
-                      expr: UDFToBoolean(1)
-                      type: Boolean
-                      expr: UDFToInteger(true)
-                      type: int
-                File Output Operator
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
-                      name: dest1
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              Filter Operator
+                predicate:
+                    expr: (0 = 86)
+                    type: boolean
+                Select Operator
+                  expressions:
+                        expr: (3 + 2)
+                        type: int
+                        expr: (3.0 + UDFToDouble(2))
+                        type: double
+                        expr: (UDFToDouble(3) + 2.0)
+                        type: double
+                        expr: (3.0 + 2.0)
+                        type: double
+                        expr: (3 + UDFToInteger(2.0))
+                        type: int
+                        expr: UDFToBoolean(1)
+                        type: boolean
+                        expr: UDFToInteger(true)
+                        type: int
+                  File Output Operator
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                        name: dest1
 
   Stage: Stage-0
     Move Operator

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out Mon Nov 10 17:50:06 2008
@@ -15,7 +15,9 @@
               key expressions:
                     expr: key
                     type: string
-              # partition fields: -1
+              Map-reduce partition columns:
+                    expr: rand()
+                    type: double
               tag: -1
               value expressions:
                     expr: substr(value, 4)
@@ -23,29 +25,32 @@
       Reduce Operator Tree:
         Group By Operator
         
-              expr: sum(VALUE.0)
+              expr: sum(UDFToDouble(VALUE.0))
           keys:
                 expr: KEY.0
                 type: string
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/748679827/1407352694.10001 
+        /tmp/hive-njain/250776234/130709293.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
             value expressions:
                   expr: 1
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
         
@@ -53,13 +58,13 @@
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
+                  type: double
             File Output Operator
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out Mon Nov 10 17:50:06 2008
@@ -15,7 +15,9 @@
               key expressions:
                     expr: key
                     type: string
-              # partition fields: -1
+              Map-reduce partition columns:
+                    expr: rand()
+                    type: double
               tag: -1
               value expressions:
                     expr: substr(value, 4)
@@ -23,29 +25,32 @@
       Reduce Operator Tree:
         Group By Operator
         
-              expr: sum(VALUE.0)
+              expr: sum(UDFToDouble(VALUE.0))
           keys:
                 expr: KEY.0
                 type: string
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/7427260/341902671.10001 
+        /tmp/hive-njain/1162348581/1600132674.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
             value expressions:
                   expr: 1
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
         
@@ -53,13 +58,13 @@
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
+                  type: double
             Limit
               File Output Operator
                 table:

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_map.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_map.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_map.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,401 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4)))) (TOK_GROUPBY (TOK_COLREF src key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Group By Operator
+            
+                  expr: sum(UDFToDouble(substr(value, 4)))
+              keys:
+                    expr: key
+                    type: string
+              mode: hash
+              Reduce Output Operator
+                key expressions:
+                      expr: 0
+                      type: string
+                Map-reduce partition columns:
+                      expr: rand()
+                      type: double
+                tag: -1
+                value expressions:
+                      expr: 1
+                      type: double
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial2
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/126291708/5299613.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: double
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: unknown
+          Select Operator
+            expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: double
+            File Output Operator
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+            replace:
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
+
+
+0	0.0
+10	10.0
+100	200.0
+103	206.0
+104	208.0
+105	105.0
+11	11.0
+111	111.0
+113	226.0
+114	114.0
+116	116.0
+118	236.0
+119	357.0
+12	24.0
+120	240.0
+125	250.0
+126	126.0
+128	384.0
+129	258.0
+131	131.0
+133	133.0
+134	268.0
+136	136.0
+137	274.0
+138	552.0
+143	143.0
+145	145.0
+146	292.0
+149	298.0
+15	30.0
+150	150.0
+152	304.0
+153	153.0
+155	155.0
+156	156.0
+157	157.0
+158	158.0
+160	160.0
+162	162.0
+163	163.0
+164	328.0
+165	330.0
+166	166.0
+167	501.0
+168	168.0
+169	676.0
+17	17.0
+170	170.0
+172	344.0
+174	348.0
+175	350.0
+176	352.0
+177	177.0
+178	178.0
+179	358.0
+18	36.0
+180	180.0
+181	181.0
+183	183.0
+186	186.0
+187	561.0
+189	189.0
+19	19.0
+190	190.0
+191	382.0
+192	192.0
+193	579.0
+194	194.0
+195	390.0
+196	196.0
+197	394.0
+199	597.0
+2	2.0
+20	20.0
+200	400.0
+201	201.0
+202	202.0
+203	406.0
+205	410.0
+207	414.0
+208	624.0
+209	418.0
+213	426.0
+214	214.0
+216	432.0
+217	434.0
+218	218.0
+219	438.0
+221	442.0
+222	222.0
+223	446.0
+224	448.0
+226	226.0
+228	228.0
+229	458.0
+230	1150.0
+233	466.0
+235	235.0
+237	474.0
+238	476.0
+239	478.0
+24	48.0
+241	241.0
+242	484.0
+244	244.0
+247	247.0
+248	248.0
+249	249.0
+252	252.0
+255	510.0
+256	512.0
+257	257.0
+258	258.0
+26	52.0
+260	260.0
+262	262.0
+263	263.0
+265	530.0
+266	266.0
+27	27.0
+272	544.0
+273	819.0
+274	274.0
+275	275.0
+277	1108.0
+278	556.0
+28	28.0
+280	560.0
+281	562.0
+282	564.0
+283	283.0
+284	284.0
+285	285.0
+286	286.0
+287	287.0
+288	576.0
+289	289.0
+291	291.0
+292	292.0
+296	296.0
+298	894.0
+30	30.0
+302	302.0
+305	305.0
+306	306.0
+307	614.0
+308	308.0
+309	618.0
+310	310.0
+311	933.0
+315	315.0
+316	948.0
+317	634.0
+318	954.0
+321	642.0
+322	644.0
+323	323.0
+325	650.0
+327	981.0
+33	33.0
+331	662.0
+332	332.0
+333	666.0
+335	335.0
+336	336.0
+338	338.0
+339	339.0
+34	34.0
+341	341.0
+342	684.0
+344	688.0
+345	345.0
+348	1740.0
+35	105.0
+351	351.0
+353	706.0
+356	356.0
+360	360.0
+362	362.0
+364	364.0
+365	365.0
+366	366.0
+367	734.0
+368	368.0
+369	1107.0
+37	74.0
+373	373.0
+374	374.0
+375	375.0
+377	377.0
+378	378.0
+379	379.0
+382	764.0
+384	1152.0
+386	386.0
+389	389.0
+392	392.0
+393	393.0
+394	394.0
+395	790.0
+396	1188.0
+397	794.0
+399	798.0
+4	4.0
+400	400.0
+401	2005.0
+402	402.0
+403	1209.0
+404	808.0
+406	1624.0
+407	407.0
+409	1227.0
+41	41.0
+411	411.0
+413	826.0
+414	828.0
+417	1251.0
+418	418.0
+419	419.0
+42	84.0
+421	421.0
+424	848.0
+427	427.0
+429	858.0
+43	43.0
+430	1290.0
+431	1293.0
+432	432.0
+435	435.0
+436	436.0
+437	437.0
+438	1314.0
+439	878.0
+44	44.0
+443	443.0
+444	444.0
+446	446.0
+448	448.0
+449	449.0
+452	452.0
+453	453.0
+454	1362.0
+455	455.0
+457	457.0
+458	916.0
+459	918.0
+460	460.0
+462	924.0
+463	926.0
+466	1398.0
+467	467.0
+468	1872.0
+469	2345.0
+47	47.0
+470	470.0
+472	472.0
+475	475.0
+477	477.0
+478	956.0
+479	479.0
+480	1440.0
+481	481.0
+482	482.0
+483	483.0
+484	484.0
+485	485.0
+487	487.0
+489	1956.0
+490	490.0
+491	491.0
+492	984.0
+493	493.0
+494	494.0
+495	495.0
+496	496.0
+497	497.0
+498	1494.0
+5	15.0
+51	102.0
+53	53.0
+54	54.0
+57	57.0
+58	116.0
+64	64.0
+65	65.0
+66	66.0
+67	134.0
+69	69.0
+70	210.0
+72	144.0
+74	74.0
+76	152.0
+77	77.0
+78	78.0
+8	8.0
+80	80.0
+82	82.0
+83	166.0
+84	168.0
+85	85.0
+86	86.0
+87	87.0
+9	9.0
+90	270.0
+92	92.0
+95	190.0
+96	96.0
+97	194.0
+98	196.0
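
For reference, the query behind groupby1_map.q.out can be read off the ABSTRACT SYNTAX TREE printed at the top of the file; a sketch of the equivalent HiveQL follows (the "_map" variants presumably run with map-side aggregation enabled, e.g. via hive.map.aggr, which is why Stage-1 starts with a hash-mode Group By Operator on the map side instead of a plain Reduce Output Operator):

    FROM src
    INSERT OVERWRITE TABLE dest1
    SELECT src.key, sum(substr(src.value, 4))
    GROUP BY src.key;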

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out Mon Nov 10 17:50:06 2008
@@ -17,37 +17,44 @@
                     type: string
                     expr: substr(value, 4)
                     type: string
-              # partition fields: 2147483647
+              Map-reduce partition columns:
+                    expr: substr(key, 0, 1)
+                    type: string
+                    expr: substr(value, 4)
+                    type: string
               tag: -1
       Reduce Operator Tree:
         Group By Operator
         
               expr: count(DISTINCT KEY.1)
-              expr: sum(KEY.1)
+              expr: sum(UDFToDouble(KEY.1))
           keys:
                 expr: KEY.0
                 type: string
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/307368091/808162418.10001 
+        /tmp/hive-njain/282197599/271187968.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
             value expressions:
                   expr: 1
-                  type: string
+                  type: bigint
                   expr: 2
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
         
@@ -56,14 +63,14 @@
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
-                  expr: concat(0, 2)
+                  type: bigint
+                  expr: concat(0, UDFToString(2))
                   type: string
             File Output Operator
               table:

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out Mon Nov 10 17:50:06 2008
@@ -15,7 +15,9 @@
               key expressions:
                     expr: key
                     type: string
-              # partition fields: -1
+              Map-reduce partition columns:
+                    expr: rand()
+                    type: double
               tag: -1
               value expressions:
                     expr: substr(value, 4)
@@ -23,29 +25,32 @@
       Reduce Operator Tree:
         Group By Operator
         
-              expr: sum(VALUE.0)
+              expr: sum(UDFToDouble(VALUE.0))
           keys:
                 expr: KEY.0
                 type: string
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/606295988/175965730.10002 
+        /tmp/hive-njain/4007501/112626006.10002 
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
             value expressions:
                   expr: 1
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
         
@@ -53,13 +58,13 @@
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
+                  type: double
             Limit
               File Output Operator
                 table:

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_map.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_map.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_map.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,117 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (TOK_COLREF src key) 0 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (TOK_COLREF src key) 0 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4))))) (TOK_GROUPBY (TOK_FUNCTION substr (TOK_COLREF src key) 0 1))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Group By Operator
+            
+                  expr: count(DISTINCT substr(value, 4))
+                  expr: sum(UDFToDouble(substr(value, 4)))
+              keys:
+                    expr: substr(key, 0, 1)
+                    type: string
+                    expr: substr(value, 4)
+                    type: string
+              mode: hash
+              Reduce Output Operator
+                key expressions:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                Map-reduce partition columns:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                tag: -1
+                value expressions:
+                      expr: 2
+                      type: bigint
+                      expr: 3
+                      type: double
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: count(DISTINCT KEY.1)
+              expr: sum(VALUE.1)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial2
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/781819455/901894899.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: bigint
+                  expr: 2
+                  type: double
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: count(VALUE.0)
+              expr: sum(VALUE.1)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: unknown
+          Select Operator
+            expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: bigint
+                  expr: concat(0, UDFToString(2))
+                  type: string
+            File Output Operator
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+            replace:
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
+
+
+0	1	00.0
+1	71	116414.0
+2	69	225571.0
+3	62	332004.0
+4	74	452763.0
+5	6	5397.0
+6	5	6398.0
+7	6	7735.0
+8	8	8762.0
+9	7	91047.0

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out Mon Nov 10 17:50:06 2008
@@ -11,44 +11,50 @@
     Map Reduce
       Alias -> Map Operator Tree:
         src 
-            Reduce Output Operator
-              key expressions:
-                    expr: substr(value, 4)
+            Select Operator
+              expressions:
+                    expr: value
                     type: string
-              # partition fields: 2147483647
-              tag: -1
+              Reduce Output Operator
+                key expressions:
+                      expr: substr(0, 4)
+                      type: string
+                Map-reduce partition columns:
+                      expr: substr(0, 4)
+                      type: string
+                tag: -1
       Reduce Operator Tree:
         Group By Operator
         
-              expr: avg(DISTINCT KEY.0)
-              expr: sum(KEY.0)
-              expr: avg(KEY.0)
-              expr: min(KEY.0)
-              expr: max(KEY.0)
+              expr: avg(DISTINCT UDFToDouble(KEY.0))
+              expr: sum(UDFToDouble(KEY.0))
+              expr: avg(UDFToDouble(KEY.0))
+              expr: min(UDFToDouble(KEY.0))
+              expr: max(UDFToDouble(KEY.0))
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/629715569/118113569.10001 
+        /tmp/hive-njain/67781830/202058716.10001 
           Reduce Output Operator
-            # partition fields: 0
             tag: -1
             value expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
+                  type: double
                   expr: 2
                   type: string
                   expr: 3
-                  type: string
+                  type: double
                   expr: 4
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
         
@@ -57,19 +63,19 @@
               expr: avg(VALUE.2)
               expr: min(VALUE.3)
               expr: max(VALUE.4)
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 1
-                  type: string
+                  type: double
                   expr: 2
                   type: string
                   expr: 0
                   type: string
                   expr: 4
-                  type: string
+                  type: double
                   expr: 3
-                  type: string
+                  type: double
             File Output Operator
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3_map.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3_map.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3_map.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,119 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTIONDI avg (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION substr (TOK_COLREF src value) 4))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION substr (TOK_COLREF src value) 4))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Select Operator
+              expressions:
+                    expr: value
+                    type: string
+              Group By Operator
+              
+                    expr: avg(DISTINCT UDFToDouble(substr(0, 4)))
+                    expr: sum(UDFToDouble(substr(0, 4)))
+                    expr: avg(UDFToDouble(substr(0, 4)))
+                    expr: min(UDFToDouble(substr(0, 4)))
+                    expr: max(UDFToDouble(substr(0, 4)))
+                keys:
+                      expr: substr(0, 4)
+                      type: string
+                mode: hash
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                  Map-reduce partition columns:
+                        expr: 0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: 1
+                        type: string
+                        expr: 2
+                        type: double
+                        expr: 3
+                        type: string
+                        expr: 4
+                        type: double
+                        expr: 5
+                        type: double
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: avg(DISTINCT UDFToDouble(KEY.0))
+              expr: sum(VALUE.1)
+              expr: avg(VALUE.2)
+              expr: min(VALUE.3)
+              expr: max(VALUE.4)
+          mode: partial2
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/130240402/83496104.10001 
+          Reduce Output Operator
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: double
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: double
+                  expr: 4
+                  type: double
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: avg(VALUE.0)
+              expr: sum(VALUE.1)
+              expr: avg(VALUE.2)
+              expr: min(VALUE.3)
+              expr: max(VALUE.4)
+          mode: unknown
+          Select Operator
+            expressions:
+                  expr: 1
+                  type: double
+                  expr: 2
+                  type: string
+                  expr: 0
+                  type: string
+                  expr: 4
+                  type: double
+                  expr: 3
+                  type: double
+            File Output Operator
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+            replace:
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
+
+
+130091.0	260.182	256.10355987055016	498.0	0.0

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out Mon Nov 10 17:50:06 2008
@@ -11,12 +11,18 @@
     Map Reduce
       Alias -> Map Operator Tree:
         src 
-            Reduce Output Operator
-              key expressions:
-                    expr: substr(key, 0, 1)
+            Select Operator
+              expressions:
+                    expr: key
                     type: string
-              # partition fields: -1
-              tag: -1
+              Reduce Output Operator
+                key expressions:
+                      expr: substr(0, 0, 1)
+                      type: string
+                Map-reduce partition columns:
+                      expr: rand()
+                      type: double
+                tag: -1
       Reduce Operator Tree:
         Group By Operator
           keys:
@@ -25,25 +31,28 @@
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/1561965178/525265780.10001 
+        /tmp/hive-njain/213545057/773995409.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
       Reduce Operator Tree:
         Group By Operator
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4_map.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4_map.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4_map.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,53 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Select Operator
+              Group By Operator
+              
+                    expr: count(1)
+                mode: hash
+                Reduce Output Operator
+                  Map-reduce partition columns:
+                        expr: rand()
+                        type: double
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: bigint
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: count(VALUE.0)
+          mode: unknown
+          Select Operator
+            expressions:
+                  expr: 0
+                  type: bigint
+            File Output Operator
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+            replace:
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
+
+
+500
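For reference, the abstract syntax tree at the top of this added groupby4_map.q.out corresponds to a simple count over src written into dest1. A minimal sketch of such a query follows; the table names src and dest1 are taken from the plan above, while enabling map-side aggregation via hive.map.aggr is an assumption suggested by the file name, not something shown in the output.

    -- Sketch only: src/dest1 come from the plan above;
    -- the hive.map.aggr setting is an assumption.
    set hive.map.aggr=true;
    FROM src
    INSERT OVERWRITE TABLE dest1 SELECT count(1);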

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out Mon Nov 10 17:50:06 2008
@@ -15,7 +15,9 @@
               key expressions:
                     expr: key
                     type: string
-              # partition fields: -1
+              Map-reduce partition columns:
+                    expr: rand()
+                    type: double
               tag: -1
               value expressions:
                     expr: substr(value, 4)
@@ -23,29 +25,32 @@
       Reduce Operator Tree:
         Group By Operator
         
-              expr: sum(VALUE.0)
+              expr: sum(UDFToDouble(VALUE.0))
           keys:
                 expr: KEY.0
                 type: string
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/888102295/1013886705.10001 
+        /tmp/hive-njain/485297652/61546074.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
             value expressions:
                   expr: 1
-                  type: string
+                  type: double
       Reduce Operator Tree:
         Group By Operator
         
@@ -53,13 +58,13 @@
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0
                   type: string
                   expr: 1
-                  type: string
+                  type: double
             File Output Operator
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5_map.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,56 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_COLREF src key))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              Group By Operator
+              
+                    expr: sum(UDFToDouble(0))
+                mode: hash
+                Reduce Output Operator
+                  Map-reduce partition columns:
+                        expr: rand()
+                        type: double
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: double
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          mode: unknown
+          Select Operator
+            expressions:
+                  expr: 0
+                  type: double
+            File Output Operator
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+            replace:
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
+
+
+130091.0
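Similarly, the abstract syntax tree in this added groupby5_map.q.out describes a sum over src.key into dest1. A minimal sketch of that query, under the same assumption about the map-side aggregation setting, is:

    -- Sketch only: derived from the abstract syntax tree above;
    -- the hive.map.aggr setting is an assumption.
    set hive.map.aggr=true;
    FROM src
    INSERT OVERWRITE TABLE dest1 SELECT sum(src.key);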

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out Mon Nov 10 17:50:06 2008
@@ -11,12 +11,18 @@
     Map Reduce
       Alias -> Map Operator Tree:
         src 
-            Reduce Output Operator
-              key expressions:
-                    expr: substr(value, 4, 1)
+            Select Operator
+              expressions:
+                    expr: value
                     type: string
-              # partition fields: -1
-              tag: -1
+              Reduce Output Operator
+                key expressions:
+                      expr: substr(0, 4, 1)
+                      type: string
+                Map-reduce partition columns:
+                      expr: rand()
+                      type: double
+                tag: -1
       Reduce Operator Tree:
         Group By Operator
           keys:
@@ -25,25 +31,28 @@
           mode: partial1
           File Output Operator
             table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-njain/256745338/35530060.10001 
+        /tmp/hive-njain/911936039/66288606.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
                   type: string
-            # partition fields: 1
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
             tag: -1
       Reduce Operator Tree:
         Group By Operator
           keys:
                 expr: KEY.0
                 type: string
-          mode: partial2
+          mode: unknown
           Select Operator
             expressions:
                   expr: 0

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input0.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input0.q.out?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input0.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input0.q.out Mon Nov 10 17:50:06 2008
@@ -0,0 +1,512 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+238	val_238
+86	val_86
+311	val_311
+27	val_27
+165	val_165
+409	val_409
+255	val_255
+278	val_278
+98	val_98
+484	val_484
+265	val_265
+193	val_193
+401	val_401
+150	val_150
+273	val_273
+224	val_224
+369	val_369
+66	val_66
+128	val_128
+213	val_213
+146	val_146
+406	val_406
+429	val_429
+374	val_374
+152	val_152
+469	val_469
+145	val_145
+495	val_495
+37	val_37
+327	val_327
+281	val_281
+277	val_277
+209	val_209
+15	val_15
+82	val_82
+403	val_403
+166	val_166
+417	val_417
+430	val_430
+252	val_252
+292	val_292
+219	val_219
+287	val_287
+153	val_153
+193	val_193
+338	val_338
+446	val_446
+459	val_459
+394	val_394
+237	val_237
+482	val_482
+174	val_174
+413	val_413
+494	val_494
+207	val_207
+199	val_199
+466	val_466
+208	val_208
+174	val_174
+399	val_399
+396	val_396
+247	val_247
+417	val_417
+489	val_489
+162	val_162
+377	val_377
+397	val_397
+309	val_309
+365	val_365
+266	val_266
+439	val_439
+342	val_342
+367	val_367
+325	val_325
+167	val_167
+195	val_195
+475	val_475
+17	val_17
+113	val_113
+155	val_155
+203	val_203
+339	val_339
+0	val_0
+455	val_455
+128	val_128
+311	val_311
+316	val_316
+57	val_57
+302	val_302
+205	val_205
+149	val_149
+438	val_438
+345	val_345
+129	val_129
+170	val_170
+20	val_20
+489	val_489
+157	val_157
+378	val_378
+221	val_221
+92	val_92
+111	val_111
+47	val_47
+72	val_72
+4	val_4
+280	val_280
+35	val_35
+427	val_427
+277	val_277
+208	val_208
+356	val_356
+399	val_399
+169	val_169
+382	val_382
+498	val_498
+125	val_125
+386	val_386
+437	val_437
+469	val_469
+192	val_192
+286	val_286
+187	val_187
+176	val_176
+54	val_54
+459	val_459
+51	val_51
+138	val_138
+103	val_103
+239	val_239
+213	val_213
+216	val_216
+430	val_430
+278	val_278
+176	val_176
+289	val_289
+221	val_221
+65	val_65
+318	val_318
+332	val_332
+311	val_311
+275	val_275
+137	val_137
+241	val_241
+83	val_83
+333	val_333
+180	val_180
+284	val_284
+12	val_12
+230	val_230
+181	val_181
+67	val_67
+260	val_260
+404	val_404
+384	val_384
+489	val_489
+353	val_353
+373	val_373
+272	val_272
+138	val_138
+217	val_217
+84	val_84
+348	val_348
+466	val_466
+58	val_58
+8	val_8
+411	val_411
+230	val_230
+208	val_208
+348	val_348
+24	val_24
+463	val_463
+431	val_431
+179	val_179
+172	val_172
+42	val_42
+129	val_129
+158	val_158
+119	val_119
+496	val_496
+0	val_0
+322	val_322
+197	val_197
+468	val_468
+393	val_393
+454	val_454
+100	val_100
+298	val_298
+199	val_199
+191	val_191
+418	val_418
+96	val_96
+26	val_26
+165	val_165
+327	val_327
+230	val_230
+205	val_205
+120	val_120
+131	val_131
+51	val_51
+404	val_404
+43	val_43
+436	val_436
+156	val_156
+469	val_469
+468	val_468
+308	val_308
+95	val_95
+196	val_196
+288	val_288
+481	val_481
+457	val_457
+98	val_98
+282	val_282
+197	val_197
+187	val_187
+318	val_318
+318	val_318
+409	val_409
+470	val_470
+137	val_137
+369	val_369
+316	val_316
+169	val_169
+413	val_413
+85	val_85
+77	val_77
+0	val_0
+490	val_490
+87	val_87
+364	val_364
+179	val_179
+118	val_118
+134	val_134
+395	val_395
+282	val_282
+138	val_138
+238	val_238
+419	val_419
+15	val_15
+118	val_118
+72	val_72
+90	val_90
+307	val_307
+19	val_19
+435	val_435
+10	val_10
+277	val_277
+273	val_273
+306	val_306
+224	val_224
+309	val_309
+389	val_389
+327	val_327
+242	val_242
+369	val_369
+392	val_392
+272	val_272
+331	val_331
+401	val_401
+242	val_242
+452	val_452
+177	val_177
+226	val_226
+5	val_5
+497	val_497
+402	val_402
+396	val_396
+317	val_317
+395	val_395
+58	val_58
+35	val_35
+336	val_336
+95	val_95
+11	val_11
+168	val_168
+34	val_34
+229	val_229
+233	val_233
+143	val_143
+472	val_472
+322	val_322
+498	val_498
+160	val_160
+195	val_195
+42	val_42
+321	val_321
+430	val_430
+119	val_119
+489	val_489
+458	val_458
+78	val_78
+76	val_76
+41	val_41
+223	val_223
+492	val_492
+149	val_149
+449	val_449
+218	val_218
+228	val_228
+138	val_138
+453	val_453
+30	val_30
+209	val_209
+64	val_64
+468	val_468
+76	val_76
+74	val_74
+342	val_342
+69	val_69
+230	val_230
+33	val_33
+368	val_368
+103	val_103
+296	val_296
+113	val_113
+216	val_216
+367	val_367
+344	val_344
+167	val_167
+274	val_274
+219	val_219
+239	val_239
+485	val_485
+116	val_116
+223	val_223
+256	val_256
+263	val_263
+70	val_70
+487	val_487
+480	val_480
+401	val_401
+288	val_288
+191	val_191
+5	val_5
+244	val_244
+438	val_438
+128	val_128
+467	val_467
+432	val_432
+202	val_202
+316	val_316
+229	val_229
+469	val_469
+463	val_463
+280	val_280
+2	val_2
+35	val_35
+283	val_283
+331	val_331
+235	val_235
+80	val_80
+44	val_44
+193	val_193
+321	val_321
+335	val_335
+104	val_104
+466	val_466
+366	val_366
+175	val_175
+403	val_403
+483	val_483
+53	val_53
+105	val_105
+257	val_257
+406	val_406
+409	val_409
+190	val_190
+406	val_406
+401	val_401
+114	val_114
+258	val_258
+90	val_90
+203	val_203
+262	val_262
+348	val_348
+424	val_424
+12	val_12
+396	val_396
+201	val_201
+217	val_217
+164	val_164
+431	val_431
+454	val_454
+478	val_478
+298	val_298
+125	val_125
+431	val_431
+164	val_164
+424	val_424
+187	val_187
+382	val_382
+5	val_5
+70	val_70
+397	val_397
+480	val_480
+291	val_291
+24	val_24
+351	val_351
+255	val_255
+104	val_104
+70	val_70
+163	val_163
+438	val_438
+119	val_119
+414	val_414
+200	val_200
+491	val_491
+237	val_237
+439	val_439
+360	val_360
+248	val_248
+479	val_479
+305	val_305
+417	val_417
+199	val_199
+444	val_444
+120	val_120
+429	val_429
+169	val_169
+443	val_443
+323	val_323
+325	val_325
+277	val_277
+230	val_230
+478	val_478
+178	val_178
+468	val_468
+310	val_310
+317	val_317
+333	val_333
+493	val_493
+460	val_460
+207	val_207
+249	val_249
+265	val_265
+480	val_480
+83	val_83
+136	val_136
+353	val_353
+172	val_172
+214	val_214
+462	val_462
+233	val_233
+406	val_406
+133	val_133
+175	val_175
+189	val_189
+454	val_454
+375	val_375
+401	val_401
+421	val_421
+407	val_407
+384	val_384
+256	val_256
+26	val_26
+134	val_134
+67	val_67
+384	val_384
+379	val_379
+18	val_18
+462	val_462
+492	val_492
+100	val_100
+298	val_298
+9	val_9
+341	val_341
+498	val_498
+146	val_146
+458	val_458
+362	val_362
+186	val_186
+285	val_285
+348	val_348
+167	val_167
+18	val_18
+273	val_273
+183	val_183
+281	val_281
+344	val_344
+97	val_97
+469	val_469
+315	val_315
+84	val_84
+28	val_28
+37	val_37
+448	val_448
+152	val_152
+348	val_348
+307	val_307
+194	val_194
+414	val_414
+477	val_477
+222	val_222
+126	val_126
+90	val_90
+169	val_169
+403	val_403
+400	val_400
+200	val_200
+97	val_97

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out Mon Nov 10 17:50:06 2008
@@ -13,7 +13,7 @@
             Filter Operator
               predicate:
                   expr: (key < 100)
-                  type: Boolean
+                  type: boolean
               Select Operator
                 expressions:
                       expr: key

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11_limit.q.out?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11_limit.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11_limit.q.out Mon Nov 10 17:50:06 2008
@@ -13,7 +13,7 @@
             Filter Operator
               predicate:
                   expr: (key < 100)
-                  type: Boolean
+                  type: boolean
               Select Operator
                 expressions:
                       expr: key


