hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1610806 [13/13] - in /hive/branches/cbo: ./ bin/ bin/ext/ common/ common/src/java/org/apache/hadoop/hive/ant/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/conf/ conf/ data/files/ hcatalog/core/src/mai...
Date Tue, 15 Jul 2014 18:50:55 GMT
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/nullformat.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/nullformat.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/nullformat.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/nullformat.q.out Tue Jul 15 18:50:51 2014
@@ -51,6 +51,7 @@ STAGE PLANS:
           columns: a string, b string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: null_tab1
 
 PREHOOK: query: CREATE TABLE null_tab1(a STRING, b STRING) ROW FORMAT DELIMITED NULL DEFINED AS 'fooNull'

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/nullformatCTAS.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/nullformatCTAS.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/nullformatCTAS.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/nullformatCTAS.q.out Tue Jul 15 18:50:51 2014
@@ -95,6 +95,7 @@ STAGE PLANS:
           columns: a string, b string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: null_tab3
 
   Stage: Stage-2

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/parallel_orderby.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/parallel_orderby.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/parallel_orderby.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/parallel_orderby.q.out Tue Jul 15 18:50:51 2014
@@ -74,6 +74,7 @@ STAGE PLANS:
           columns: key string, value string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: total_ordered
 
   Stage: Stage-2

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/skewjoin_noskew.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/skewjoin_noskew.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/skewjoin_noskew.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/skewjoin_noskew.q.out Tue Jul 15 18:50:51 2014
@@ -146,6 +146,7 @@ STAGE PLANS:
           columns: key string, value string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: noskew
 
   Stage: Stage-3

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/smb_mapjoin9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/smb_mapjoin9.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/smb_mapjoin9.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/smb_mapjoin9.q.out Tue Jul 15 18:50:51 2014
@@ -349,6 +349,7 @@ STAGE PLANS:
           columns: k1 int, value string, ds string, k2 int
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: smb_mapjoin9_results
 
   Stage: Stage-3

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/subquery_exists_having.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/subquery_exists_having.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/subquery_exists_having.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/subquery_exists_having.q.out Tue Jul 15 18:50:51 2014
@@ -153,6 +153,170 @@ POSTHOOK: Input: default@src
 96	1
 97	2
 98	2
+PREHOOK: query: -- no agg, corr
+explain
+select b.key, count(*)
+from src b
+group by b.key
+having exists
+  (select a.key
+  from src a
+  where a.key = b.key and a.value > 'val_9'
+  )
+PREHOOK: type: QUERY
+POSTHOOK: query: -- no agg, corr
+explain
+select b.key, count(*)
+from src b
+group by b.key
+having exists
+  (select a.key
+  from src a
+  where a.key = b.key and a.value > 'val_9'
+  )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: key
+                Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count()
+                  keys: key (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: bigint)
+          TableScan
+            alias: a
+            Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((value > 'val_9') and key is not null) (type: boolean)
+              Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col1
+                Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  keys: _col1 (type: string)
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Demux Operator
+          Statistics: Num rows: 34 Data size: 3908 Basic stats: COMPLETE Column stats: NONE
+          Group By Operator
+            aggregations: count(VALUE._col0)
+            keys: KEY._col0 (type: string)
+            mode: mergepartial
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 17 Data size: 1954 Basic stats: COMPLETE Column stats: NONE
+            Mux Operator
+              Statistics: Num rows: 51 Data size: 5862 Basic stats: COMPLETE Column stats: NONE
+              Join Operator
+                condition map:
+                     Left Semi Join 0 to 1
+                condition expressions:
+                  0 {KEY.reducesinkkey0} {VALUE._col0}
+                  1 
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Filter Operator
+                  predicate: (1 = 1) (type: boolean)
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: bigint)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          Mux Operator
+            Statistics: Num rows: 51 Data size: 5862 Basic stats: COMPLETE Column stats: NONE
+            Join Operator
+              condition map:
+                   Left Semi Join 0 to 1
+              condition expressions:
+                0 {KEY.reducesinkkey0} {VALUE._col0}
+                1 
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Filter Operator
+                predicate: (1 = 1) (type: boolean)
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: bigint)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select b.key, count(*)
+from src b
+group by b.key
+having exists
+  (select a.key
+  from src a
+  where a.key = b.key and a.value > 'val_9'
+  )
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select b.key, count(*)
+from src b
+group by b.key
+having exists
+  (select a.key
+  from src a
+  where a.key = b.key and a.value > 'val_9'
+  )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+90	3
+92	1
+95	2
+96	1
+97	2
+98	2
 PREHOOK: query: -- view test
 create view cv1 as 
 select b.key, count(*) as c

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/subquery_in_having.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/subquery_in_having.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/subquery_in_having.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/subquery_in_having.q.out Tue Jul 15 18:50:51 2014
@@ -582,6 +582,159 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
+PREHOOK: query: -- agg, non corr
+explain
+select p_mfgr, avg(p_size)
+from part b
+group by b.p_mfgr
+having b.p_mfgr in
+   (select p_mfgr
+    from part
+    group by p_mfgr
+    having max(p_size) - min(p_size) < 20
+   )
+PREHOOK: type: QUERY
+POSTHOOK: query: -- agg, non corr
+explain
+select p_mfgr, avg(p_size)
+from part b
+group by b.p_mfgr
+having b.p_mfgr in
+   (select p_mfgr
+    from part
+    group by p_mfgr
+    having max(p_size) - min(p_size) < 20
+   )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 30 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: p_mfgr is not null (type: boolean)
+              Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: p_mfgr (type: string), p_size (type: int)
+                outputColumnNames: p_mfgr, p_size
+                Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: avg(p_size)
+                  keys: p_mfgr (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>)
+          TableScan
+            alias: part
+            Statistics: Num rows: 30 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: p_mfgr is not null (type: boolean)
+              Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: p_mfgr (type: string), p_size (type: int)
+                outputColumnNames: p_mfgr, p_size
+                Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: max(p_size), min(p_size)
+                  keys: p_mfgr (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: int), _col2 (type: int)
+      Reduce Operator Tree:
+        Demux Operator
+          Statistics: Num rows: 30 Data size: 3172 Basic stats: COMPLETE Column stats: NONE
+          Group By Operator
+            aggregations: avg(VALUE._col0)
+            keys: KEY._col0 (type: string)
+            mode: mergepartial
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+            Mux Operator
+              Statistics: Num rows: 20 Data size: 2114 Basic stats: COMPLETE Column stats: NONE
+              Join Operator
+                condition map:
+                     Left Semi Join 0 to 1
+                condition expressions:
+                  0 {KEY.reducesinkkey0} {VALUE._col0}
+                  1 
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Filter Operator
+                  predicate: (1 = 1) (type: boolean)
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: double)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          Group By Operator
+            aggregations: max(VALUE._col0), min(VALUE._col1)
+            keys: KEY._col0 (type: string)
+            mode: mergepartial
+            outputColumnNames: _col0, _col1, _col2
+            Statistics: Num rows: 15 Data size: 1586 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((_col1 - _col2) < 20) (type: boolean)
+              Statistics: Num rows: 5 Data size: 528 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: _col0 (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 5 Data size: 528 Basic stats: COMPLETE Column stats: NONE
+                Mux Operator
+                  Statistics: Num rows: 20 Data size: 2114 Basic stats: COMPLETE Column stats: NONE
+                  Join Operator
+                    condition map:
+                         Left Semi Join 0 to 1
+                    condition expressions:
+                      0 {KEY.reducesinkkey0} {VALUE._col0}
+                      1 
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Filter Operator
+                      predicate: (1 = 1) (type: boolean)
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      Select Operator
+                        expressions: _col0 (type: string), _col1 (type: double)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: -- join on agg
 select b.key, min(b.value)
 from src b

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/temp_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/temp_table.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/temp_table.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/temp_table.q.out Tue Jul 15 18:50:51 2014
@@ -58,6 +58,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
 #### A masked pattern was here ####
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: foo
           isTemporary: true
 
@@ -161,6 +162,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
 #### A masked pattern was here ####
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: bar
           isTemporary: true
 

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/tez/ctas.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/tez/ctas.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/tez/ctas.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/tez/ctas.q.out Tue Jul 15 18:50:51 2014
@@ -89,6 +89,7 @@ STAGE PLANS:
           columns: k string, value string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: nzhang_CTAS1
 
   Stage: Stage-3
@@ -233,6 +234,7 @@ STAGE PLANS:
           columns: key string, value string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: nzhang_ctas2
 
   Stage: Stage-3
@@ -588,6 +590,7 @@ STAGE PLANS:
           field delimiter: ,
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: nzhang_ctas4
 
   Stage: Stage-3
@@ -677,7 +680,8 @@ TOK_CREATETABLE
             ','
          TOK_TABLEROWFORMATLINES
             '\012'
-   TOK_TBLTEXTFILE
+   TOK_FILEFORMAT_GENERIC
+      textfile
    TOK_QUERY
       TOK_FROM
          TOK_TABREF
@@ -848,6 +852,7 @@ STAGE PLANS:
           line delimiter: 
 
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: nzhang_ctas5
 
   Stage: Stage-3

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/tez/tez_dml.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/tez/tez_dml.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/tez/tez_dml.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/tez/tez_dml.q.out Tue Jul 15 18:50:51 2014
@@ -81,6 +81,7 @@ STAGE PLANS:
           columns: value string, cnt bigint
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: tmp_src
 
   Stage: Stage-3

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/truncate_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/truncate_table.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/truncate_table.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/truncate_table.q.out Tue Jul 15 18:50:51 2014
@@ -80,6 +80,38 @@ POSTHOOK: query: load data local inpath 
 POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+PREHOOK: query: analyze table src_truncate     compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_truncate
+PREHOOK: Output: default@src_truncate
+POSTHOOK: query: analyze table src_truncate     compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_truncate
+POSTHOOK: Output: default@src_truncate
+PREHOOK: query: analyze table srcpart_truncate partition(ds,hr) compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+PREHOOK: Output: default@srcpart_truncate
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+POSTHOOK: query: analyze table srcpart_truncate partition(ds,hr) compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+POSTHOOK: Output: default@srcpart_truncate
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
 PREHOOK: query: -- truncate non-partitioned table
 explain TRUNCATE TABLE src_truncate
 PREHOOK: type: TRUNCATETABLE
@@ -109,6 +141,15 @@ POSTHOOK: query: select * from src_trunc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_truncate
 #### A masked pattern was here ####
+PREHOOK: query: select count (*) from src_truncate
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_truncate
+#### A masked pattern was here ####
+POSTHOOK: query: select count (*) from src_truncate
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_truncate
+#### A masked pattern was here ####
+0
 PREHOOK: query: -- truncate a partition
 explain TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11')
 PREHOOK: type: TRUNCATETABLE
@@ -143,6 +184,17 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart_truncate
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
+PREHOOK: query: select count(*) from srcpart_truncate where ds='2008-04-08' and hr='11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from srcpart_truncate where ds='2008-04-08' and hr='11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+0
 PREHOOK: query: -- truncate partitions with partial spec
 explain TRUNCATE TABLE srcpart_truncate partition (ds, hr='12')
 PREHOOK: type: TRUNCATETABLE
@@ -181,6 +233,19 @@ POSTHOOK: Input: default@srcpart_truncat
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
+PREHOOK: query: select count(*) from srcpart_truncate where hr='12'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from srcpart_truncate where hr='12'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+0
 PREHOOK: query: -- truncate partitioned table
 explain TRUNCATE TABLE srcpart_truncate
 PREHOOK: type: TRUNCATETABLE
@@ -224,3 +289,20 @@ POSTHOOK: Input: default@srcpart_truncat
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
+PREHOOK: query: select count(*) from srcpart_truncate
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from srcpart_truncate
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+0

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/udf_format_number.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/udf_format_number.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/udf_format_number.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/udf_format_number.q.out Tue Jul 15 18:50:51 2014
@@ -188,3 +188,24 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 12,332.1230	12,332.0996	-12,332
+PREHOOK: query: -- decimals
+SELECT format_number(12332.123456BD, 4),
+    format_number(12332.123456BD, 2),
+    format_number(12332.1BD, 4),
+    format_number(-12332.2BD, 0),
+    format_number(CAST(12332.567 AS DECIMAL(8, 1)), 4)
+FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- decimals
+SELECT format_number(12332.123456BD, 4),
+    format_number(12332.123456BD, 2),
+    format_number(12332.1BD, 4),
+    format_number(-12332.2BD, 0),
+    format_number(CAST(12332.567 AS DECIMAL(8, 1)), 4)
+FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+12,332.1235	12,332.12	12,332.1000	-12,332	12,332.6000

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/udf_in_file.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/udf_in_file.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/udf_in_file.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/udf_in_file.q.out Tue Jul 15 18:50:51 2014
@@ -3,17 +3,48 @@ PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION in_file
 POSTHOOK: type: DESCFUNCTION
 in_file(str, filename) - Returns true if str appears in the file
+PREHOOK: query: CREATE TABLE value_src (str_val char(3), ch_val STRING, vch_val varchar(10),
+                        str_val_neg char(3), ch_val_neg STRING, vch_val_neg varchar(10))
+       ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: CREATE TABLE value_src (str_val char(3), ch_val STRING, vch_val varchar(10),
+                        str_val_neg char(3), ch_val_neg STRING, vch_val_neg varchar(10))
+       ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@value_src
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in_file.dat' INTO TABLE value_src
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@value_src
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in_file.dat' INTO TABLE value_src
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@value_src
 PREHOOK: query: EXPLAIN
-SELECT in_file("303", "../../data/files/test2.dat"),
+SELECT in_file(str_val, "../../data/files/test2.dat"),
+       in_file(ch_val, "../../data/files/test2.dat"),
+       in_file(vch_val, "../../data/files/test2.dat"),
+       in_file(str_val_neg, "../../data/files/test2.dat"),
+       in_file(ch_val_neg, "../../data/files/test2.dat"),
+       in_file(vch_val_neg, "../../data/files/test2.dat"),
+       in_file("303", "../../data/files/test2.dat"),
        in_file("304", "../../data/files/test2.dat"),
        in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
-FROM src LIMIT 1
+FROM value_src LIMIT 1
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
-SELECT in_file("303", "../../data/files/test2.dat"),
+SELECT in_file(str_val, "../../data/files/test2.dat"),
+       in_file(ch_val, "../../data/files/test2.dat"),
+       in_file(vch_val, "../../data/files/test2.dat"),
+       in_file(str_val_neg, "../../data/files/test2.dat"),
+       in_file(ch_val_neg, "../../data/files/test2.dat"),
+       in_file(vch_val_neg, "../../data/files/test2.dat"),
+       in_file("303", "../../data/files/test2.dat"),
        in_file("304", "../../data/files/test2.dat"),
        in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
-FROM src LIMIT 1
+FROM value_src LIMIT 1
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -24,18 +55,18 @@ STAGE PLANS:
     Map Reduce
       Map Operator Tree:
           TableScan
-            alias: src
-            Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+            alias: value_src
+            Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
             Select Operator
-              expressions: in_file('303', '../../data/files/test2.dat') (type: boolean), in_file('304', '../../data/files/test2.dat') (type: boolean), in_file(UDFToString(null), '../../data/files/test2.dat') (type: boolean)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+              expressions: in_file(str_val, '../../data/files/test2.dat') (type: boolean), in_file(ch_val, '../../data/files/test2.dat') (type: boolean), in_file(vch_val, '../../data/files/test2.dat') (type: boolean), in_file(str_val_neg, '../../data/files/test2.dat') (type: boolean), in_file(ch_val_neg, '../../data/files/test2.dat') (type: boolean), in_file(vch_val_neg, '../../data/files/test2.dat') (type: boolean), in_file('303', '../../data/files/test2.dat') (type: boolean), in_file('304', '../../data/files/test2.dat') (type: boolean), in_file(UDFToString(null), '../../data/files/test2.dat') (type: boolean)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+              Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
               Limit
                 Number of rows: 1
-                Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+                  Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -47,18 +78,30 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT in_file("303", "../../data/files/test2.dat"),
+PREHOOK: query: SELECT in_file(str_val, "../../data/files/test2.dat"),
+       in_file(ch_val, "../../data/files/test2.dat"),
+       in_file(vch_val, "../../data/files/test2.dat"),
+       in_file(str_val_neg, "../../data/files/test2.dat"),
+       in_file(ch_val_neg, "../../data/files/test2.dat"),
+       in_file(vch_val_neg, "../../data/files/test2.dat"),
+       in_file("303", "../../data/files/test2.dat"),
        in_file("304", "../../data/files/test2.dat"),
        in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
-FROM src LIMIT 1
+FROM value_src LIMIT 1
 PREHOOK: type: QUERY
-PREHOOK: Input: default@src
+PREHOOK: Input: default@value_src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT in_file("303", "../../data/files/test2.dat"),
+POSTHOOK: query: SELECT in_file(str_val, "../../data/files/test2.dat"),
+       in_file(ch_val, "../../data/files/test2.dat"),
+       in_file(vch_val, "../../data/files/test2.dat"),
+       in_file(str_val_neg, "../../data/files/test2.dat"),
+       in_file(ch_val_neg, "../../data/files/test2.dat"),
+       in_file(vch_val_neg, "../../data/files/test2.dat"),
+       in_file("303", "../../data/files/test2.dat"),
        in_file("304", "../../data/files/test2.dat"),
        in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
-FROM src LIMIT 1
+FROM value_src LIMIT 1
 POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
+POSTHOOK: Input: default@value_src
 #### A masked pattern was here ####
-true	false	NULL
+true	true	true	false	false	false	true	false	NULL

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/union25.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/union25.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/union25.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/union25.q.out Tue Jul 15 18:50:51 2014
@@ -208,6 +208,7 @@ STAGE PLANS:
           columns: counts bigint, key string, value string
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: tmp_unionall
 
   Stage: Stage-3

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/union_top_level.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/union_top_level.q.out?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/union_top_level.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/union_top_level.q.out Tue Jul 15 18:50:51 2014
@@ -579,6 +579,7 @@ STAGE PLANS:
           columns: key string, value int
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: union_top
 
   Stage: Stage-3

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java Tue Jul 15 18:50:51 2014
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -443,7 +444,10 @@ public class CLIService extends Composit
 
  // create the given Path if it doesn't exist and make it writable
   private void setupStagingDir(String dirPath, boolean isLocal) throws IOException {
-    Path scratchDir = new Path(dirPath);
+    Path scratchDir = getStaticPath(new Path(dirPath));
+    if (scratchDir == null) {
+      return;
+    }
     FileSystem fs;
     if (isLocal) {
       fs = FileSystem.getLocal(hiveConf);
@@ -480,4 +484,16 @@ public class CLIService extends Composit
     sessionManager.getSession(sessionHandle).renewDelegationToken(authFactory, tokenStr);
     LOG.info(sessionHandle  + ": renewDelegationToken()");
   }
+
+  // DOWNLOADED_RESOURCES_DIR, for example, defaults to ${system:java.io.tmpdir}/${hive.session.id}_resources;
+  // ${system:java.io.tmpdir} would already be evaluated here, but ${hive.session.id} would not be.
+  // For that case, this returns the evaluated parts only, in this case "/tmp".
+  // What about ${hive.session.id}_resources/${system:java.io.tmpdir}? Just don't do that.
+  private Path getStaticPath(Path path) {
+    Path current = path;
+    for (; current != null && SystemVariables.containsVar(current.getName());
+        current = current.getParent()) {
+    }
+    return current;
+  }
 }

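[Editor's note on the getStaticPath() helper above: it walks from the leaf of the configured directory toward the root, dropping each component whose name still contains an unevaluated ${...} variable, and returns the first fully evaluated ancestor (or null if nothing is evaluated). A minimal, runnable sketch of the same walk-up, using java.nio.file.Path in place of Hadoop's Path and a simplified stand-in for SystemVariables.containsVar():

import java.nio.file.Path;
import java.nio.file.Paths;

public class StaticPathSketch {
  // Stand-in for SystemVariables.containsVar(): detects an unevaluated ${...}.
  static boolean containsVar(String name) {
    return name.contains("${");
  }

  // Walk toward the root until the current component's name is fully evaluated.
  static Path getStaticPath(Path path) {
    Path current = path;
    while (current != null && current.getFileName() != null
        && containsVar(current.getFileName().toString())) {
      current = current.getParent();
    }
    return current;
  }

  public static void main(String[] args) {
    // ${system:java.io.tmpdir}/${hive.session.id}_resources with the tmpdir
    // already evaluated to /tmp: only the static prefix /tmp survives.
    System.out.println(getStaticPath(Paths.get("/tmp/${hive.session.id}_resources")));
  }
}

So setupStagingDir() above prepares only the static prefix ("/tmp" in this sketch) and, via the null check, skips paths whose every component can only be resolved per session.]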
Modified: hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java Tue Jul 15 18:50:51 2014
@@ -53,8 +53,8 @@ public class TestExecutor extends Thread
   private final BlockingQueue<Test> mTestQueue;
   private final PTest.Builder mPTestBuilder;
   private ExecutionContext mExecutionContext;
-
   private boolean execute;
+
   public TestExecutor(ExecutionContextConfiguration executionContextConfiguration,
       ExecutionContextProvider executionContextProvider,
       BlockingQueue<Test> testQueue, PTest.Builder pTestBuilder) {
@@ -111,10 +111,12 @@ public class TestExecutor extends Thread
             testConfiguration.setPatch(startRequest.getPatchURL());
             testConfiguration.setJiraName(startRequest.getJiraName());
             testConfiguration.setClearLibraryCache(startRequest.isClearLibraryCache());
+            LocalCommandFactory localCommandFactory = new LocalCommandFactory(logger);
             PTest ptest = mPTestBuilder.build(testConfiguration, mExecutionContext,
                 test.getStartRequest().getTestHandle(), logDir,
-                new LocalCommandFactory(logger), new SSHCommandExecutor(logger),
-                new RSyncCommandExecutor(logger), logger);
+                localCommandFactory, new SSHCommandExecutor(logger),
+                new RSyncCommandExecutor(logger, mExecutionContextConfiguration.getMaxRsyncThreads(),
+                  localCommandFactory), logger);
             int result = ptest.run();
             if(result == Constants.EXIT_CODE_SUCCESS) {
               test.setStatus(Status.ok());

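[Editor's note: this call site and the one in PTest.java below now share a single LocalCommandFactory and hand RSyncCommandExecutor a thread cap (maxRsyncThreads, see ExecutionContextConfiguration further down). A minimal sketch of the general technique — bounding concurrent rsync-style commands with a fixed pool; the class below is illustrative, not the real RSyncCommandExecutor API:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Illustrative only: cap how many rsync commands run at once, the way a
// maxRsyncThreads setting would.
public class BoundedRsyncSketch {
  private final ExecutorService pool;

  public BoundedRsyncSketch(int maxRsyncThreads) {
    this.pool = Executors.newFixedThreadPool(maxRsyncThreads);
  }

  public void executeAll(List<String> commands) throws InterruptedException {
    for (final String cmd : commands) {
      pool.submit(new Runnable() {
        public void run() {
          System.out.println("would run: " + cmd); // real code would exec rsync
        }
      });
    }
    pool.shutdown();
    pool.awaitTermination(1, TimeUnit.MINUTES);
  }

  public static void main(String[] args) throws InterruptedException {
    new BoundedRsyncSketch(10).executeAll(
        Arrays.asList("rsync a host1:/a", "rsync b host2:/b"));
  }
}]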
Modified: hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java Tue Jul 15 18:50:51 2014
@@ -345,9 +345,10 @@ public class PTest {
           executionContextProvider = executionContextConfiguration
               .getExecutionContextProvider();
           executionContext = executionContextProvider.createExecutionContext();
+          LocalCommandFactory localCommandFactory = new LocalCommandFactory(LOG);
           PTest ptest = new PTest(conf, executionContext, buildTag, logDir,
-              new LocalCommandFactory(LOG), new SSHCommandExecutor(LOG),
-              new RSyncCommandExecutor(LOG), LOG);
+              localCommandFactory, new SSHCommandExecutor(LOG),
+              new RSyncCommandExecutor(LOG, 10, localCommandFactory), LOG);
           exitCode = ptest.run();
         } finally {
           if(executionContext != null) {

Modified: hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/ExecutionContextConfiguration.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/ExecutionContextConfiguration.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/ExecutionContextConfiguration.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/ExecutionContextConfiguration.java Tue Jul 15 18:50:51 2014
@@ -37,11 +37,14 @@ public class ExecutionContextConfigurati
   public static final String WORKING_DIRECTORY = "workingDirectory";
   public static final String PROFILE_DIRECTORY = "profileDirectory";
   public static final String MAX_LOG_DIRS_PER_PROFILE = "maxLogDirectoriesPerProfile";
+  private static final String MAX_RSYNC_THREADS = "maxRsyncThreads";
+  private static final int MAX_RSYNC_THREADS_DEFAULT = 10;
   private final ExecutionContextProvider mExecutionContextProvider;
   private final String mWorkingDirectory;
   private final String mGlobalLogDirectory;
   private final String mProfileDirectory;
   private final int mMaxLogDirectoriesPerProfile;
+  private final int mMaxRsyncThreads;
 
   @VisibleForTesting
   public ExecutionContextConfiguration(Context context)
@@ -52,6 +55,7 @@ public class ExecutionContextConfigurati
     Preconditions.checkArgument(!mProfileDirectory.isEmpty(), PROFILE_DIRECTORY + " is required");
     mGlobalLogDirectory = Dirs.create(new File(mWorkingDirectory, "logs")).getAbsolutePath();
     mMaxLogDirectoriesPerProfile = context.getInteger(MAX_LOG_DIRS_PER_PROFILE, 10);
+    mMaxRsyncThreads = context.getInteger(MAX_RSYNC_THREADS, MAX_RSYNC_THREADS_DEFAULT);
     String executionContextProviderBuilder = context.getString("executionContextProvider",
         FixedExecutionContextProvider.Builder.class.getName()).trim();
     try {
@@ -66,6 +70,9 @@ public class ExecutionContextConfigurati
       throw Throwables.propagate(e);
     }
   }
+  public int getMaxRsyncThreads() {
+    return mMaxRsyncThreads;
+  }
   public int getMaxLogDirectoriesPerProfile() {
     return mMaxLogDirectoriesPerProfile;
   }

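[Editor's note: the new maxRsyncThreads knob follows this file's existing pattern — read an integer from the context and fall back to a compiled-in default of 10 when the property is absent. A small sketch of that lookup-with-default behavior, with java.util.Properties standing in for the ptest Context (whose getInteger() is not shown in this hunk):

import java.util.Properties;

public class ConfigDefaultSketch {
  // Mirrors context.getInteger(MAX_RSYNC_THREADS, MAX_RSYNC_THREADS_DEFAULT).
  static int getInteger(Properties props, String key, int defaultValue) {
    String value = props.getProperty(key);
    return value == null ? defaultValue : Integer.parseInt(value.trim());
  }

  public static void main(String[] args) {
    Properties props = new Properties();
    System.out.println(getInteger(props, "maxRsyncThreads", 10)); // 10 (default)
    props.setProperty("maxRsyncThreads", "4");
    System.out.println(getInteger(props, "maxRsyncThreads", 10)); // 4
  }
}]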
Modified: hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java Tue Jul 15 18:50:51 2014
@@ -21,12 +21,24 @@ package org.apache.hive.ptest.execution.
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.log4j.ConsoleAppender;
+import org.apache.log4j.PatternLayout;
 import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Splitter;
@@ -97,6 +109,8 @@ public class TestParser {
     return result;
   }
   private List<QFileTestBatch> parseQFileTests() {
+    Map<String, Properties> properties = parseQTestProperties();
+
     Splitter splitter = Splitter.on(" ").trimResults().omitEmptyStrings();
     List<QFileTestBatch> result = Lists.newArrayList();
     for(String alias : context.getString("qFileTests", "").split(" ")) {
@@ -111,22 +125,25 @@ public class TestParser {
       for(String excludedTestGroup : splitter.split(testContext.getString("exclude", ""))) {
         excludedTests.addAll(Arrays.asList(testContext.
             getString(Joiner.on(".").join("groups", excludedTestGroup), "").trim().split(" ")));
+        expandTestProperties(excludedTests, properties);
       }
       Set<String> isolatedTests = Sets.newHashSet();
       for(String ioslatedTestGroup : splitter.split(testContext.getString("isolate", ""))) {
         isolatedTests.addAll(Arrays.asList(testContext.
             getString(Joiner.on(".").join("groups", ioslatedTestGroup), "").trim().split(" ")));
+        expandTestProperties(isolatedTests, properties);
       }
 
       Set<String> includedTests = Sets.newHashSet();
       for(String includedTestGroup : splitter.split(testContext.getString("include", ""))) {
         includedTests.addAll(Arrays.asList(testContext.
             getString(Joiner.on(".").join("groups", includedTestGroup), "").trim().split(" ")));
+        expandTestProperties(includedTests, properties);
       }
-      if(!includedTests.isEmpty() && !excludedTests.isEmpty()) {
-        throw new IllegalArgumentException(String.format("Included and excluded mutally exclusive." +
-            " Included = %s, excluded = %s", includedTests.toString(), excludedTests.toString()));
-      }
+
+      //excluded overrides included
+      includedTests.removeAll(excludedTests);
+
       result.addAll(createQFileTestBatches(
           driver,
           checkNotNull(testContext.getString("queryFilesProperty"), "queryFilesProperty").trim(),
@@ -175,6 +192,72 @@ public class TestParser {
     return testBatches;
   }
 
+  /**
+   * @return properties loaded from files specified in qFileTests.propertyFiles.${fileName}=${filePath}
+   */
+  private Map<String, Properties> parseQTestProperties() {
+    Map<String, String> propFiles = context.getSubProperties("qFileTests.propertyFiles.");
+    Map<String, Properties> propertyMap = new HashMap<String, Properties>();
+    for (String propFile : propFiles.keySet()) {
+      Properties properties = new Properties();
+      String path = sourceDirectory + File.separator + propFiles.get(propFile);
+      FileInputStream fis = null;
+      try {
+        fis = new FileInputStream(path);
+        properties.load(fis);
+      } catch (IOException e) {
+        logger.warn("Error processing Qtest property file", e);
+        throw new IllegalArgumentException("Error processing Qtest property file: " + path);
+      } finally {
+        try {
+          if (fis != null) {
+            fis.close();
+          }
+        } catch (IOException e) { //ignore
+        }
+      }
+      propertyMap.put(propFile, properties);
+      logger.info("Loaded Qtest property file: " + path);
+    }
+    return propertyMap;
+  }
+
+  /**
+   * If any of given tests are of the form: ${fileName}.${property} (test list within a property file),
+   * then expand them.  Then remove those markers from the list of tests.
+   */
+  private void expandTestProperties(Set<String> tests, Map<String, Properties> propMap) {
+    Set<String> toRemove = new HashSet<String>();
+    Set<String> toAdd = new HashSet<String>();
+
+    String pattern = "([^\\.]*)\\.\\$\\{([^}]*)}";
+    Pattern r = Pattern.compile(pattern);
+    for (String test : tests) {
+      Matcher m = r.matcher(test);
+      if (m.find()) {
+        toRemove.add(test);
+        logger.info("Expanding qfile property: " + test);
+        String propName = m.group(1);
+        String propValue = m.group(2);
+        Properties props = propMap.get(propName);
+        if (props == null) {
+          logger.warn("No properties found for: " + propName);
+          throw new IllegalArgumentException("No properties found for: " + propName);
+        }
+        String result = (String) props.get(propValue);
+        if (result == null || result.isEmpty()) {
+          logger.warn("No properties found in file: " + propName + " for property: " + propValue);
+          throw new IllegalArgumentException("No properties found in file: " + propName + " for property: " + propValue);
+        }
+        Iterable<String> splits = Splitter.on(',').trimResults().omitEmptyStrings().split(result);
+        for (String split : splits) {
+          toAdd.add(split);
+        }
+      }
+    }
+    tests.removeAll(toRemove);
+    tests.addAll(toAdd);
+  }
 
   public Supplier<List<TestBatch>> parse() {
     return new Supplier<List<TestBatch>>() {
@@ -184,4 +267,25 @@ public class TestParser {
       }
     };
   }
+
+  /**
+   * Manually test this against any property file.
+   * @param args
+   * @throws Exception
+   */
+  public static void main(String[] args) throws Exception {
+    if (args.length < 1) {
+      throw new IllegalArgumentException("Enter the property file location");
+    }
+    Logger log = LoggerFactory
+        .getLogger(TestParser.class);
+    File workingDir = new File("../..");
+    File testConfigurationFile = new File(args[0]);
+    TestConfiguration conf = TestConfiguration.fromFile(testConfigurationFile, log);
+    TestParser testParser = new TestParser(conf.getContext(), "test", workingDir, log);
+    List<TestBatch> testBatches = testParser.parse().get();
+    for (TestBatch testBatch : testBatches) {
+      System.out.println(testBatch.getTestArguments());
+    }
+  }
 }

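[Editor's note: the expansion added to TestParser above hinges on the pattern ([^\.]*)\.\$\{([^}]*)}: group 1 names a property file loaded by parseQTestProperties() and group 2 names a key inside it whose comma-separated value is the real test list. A self-contained sketch of that match step — the alias and key below are made-up examples:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ExpandSketch {
  public static void main(String[] args) {
    // Same pattern as TestParser.expandTestProperties():
    // group(1) = property-file alias, group(2) = property key.
    Pattern r = Pattern.compile("([^\\.]*)\\.\\$\\{([^}]*)}");
    Matcher m = r.matcher("mainProperties.${minimr.query.files}");
    if (m.find()) {
      System.out.println("file alias:   " + m.group(1)); // mainProperties
      System.out.println("property key: " + m.group(2)); // minimr.query.files
      // The real code then looks up group(2) in the Properties loaded for
      // group(1) and splits the comma-separated value into test names.
    }
  }
}]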
Modified: hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java Tue Jul 15 18:50:51 2014
@@ -34,6 +34,8 @@ import org.jclouds.compute.domain.NodeMe
 import org.jclouds.compute.domain.NodeMetadata.Status;
 import org.jclouds.compute.domain.Template;
 import org.jclouds.logging.log4j.config.Log4JLoggingModule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Predicate;
 import com.google.common.base.Strings;
@@ -41,6 +43,8 @@ import com.google.common.collect.Immutab
 import com.google.common.collect.Sets;
 
 public class CloudComputeService {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(CloudComputeService.class);
   private final ComputeServiceContext mComputeServiceContext;
   private final ComputeService mComputeService;
   private final String mInstanceType;
@@ -49,9 +53,12 @@ public class CloudComputeService {
   private final String mImageId;
   private final String mkeyPair;
   private final String mSecurityGroup;
-  private final float mMaxBid;
+  /**
+   * Maximum spot-instance bid; when null, jclouds requests on-demand instances instead.
+   */
+  private final Float mMaxBid;
   public CloudComputeService(String apiKey, String accessKey, String instanceType, String groupName,
-      String imageId, String keyPair, String securityGroup, float maxBid) {
+      String imageId, String keyPair, String securityGroup, Float maxBid) {
     mInstanceType = instanceType;
     mGroupName = groupName;
     mImageId = imageId;
@@ -90,15 +97,23 @@ public class CloudComputeService {
         return nodeMetadata.getStatus() == Status.RUNNING && isPTestHost(nodeMetadata);
       }
       private boolean isPTestHost(NodeMetadata node) {
+        String result = "false - not a ptest host";
         if(groupName.equalsIgnoreCase(node.getGroup())) {
+          result = "true due to group " + groupName;
+          LOG.debug("Found node: " + node + ", Result: " + result);
           return true;
         }
         if(Strings.nullToEmpty(node.getName()).startsWith(groupName)) {
+          result = "true due to name " + groupName;
+          LOG.debug("Found node: " + node + ", Result: " + result);
           return true;
         }
         if(node.getTags().contains(groupTag)) {
+          result = "true due to tag " + groupName;
+          LOG.debug("Found node: " + node + ", Result: " + result);
           return true;
         }
+        LOG.debug("Found node: " + node + ", Result: " + result);
         return false;
       }
     };

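The float-to-Float change above makes the spot bid optional end to end. As a hedged sketch only: with jclouds' aws-ec2 provider, a nullable bid would typically be applied through AWSEC2TemplateOptions.spotPrice, and simply skipping the call when the bid is null leaves the template requesting on-demand capacity (whether this class wires it exactly this way is outside the visible diff):

    import org.jclouds.aws.ec2.compute.AWSEC2TemplateOptions;
    import org.jclouds.compute.domain.Template;

    public class SpotBidSketch {
      // Assumed usage: set a spot bid only when one was configured; leaving
      // the options untouched yields on-demand instances, matching the new
      // javadoc on mMaxBid.
      public static void applyMaxBid(Template template, Float maxBid) {
        if (maxBid != null) {
          template.getOptions().as(AWSEC2TemplateOptions.class).spotPrice(maxBid);
        }
      }
    }
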
Modified: hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java Tue Jul 15 18:50:51 2014
@@ -24,6 +24,7 @@ import java.io.RandomAccessFile;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashSet;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
@@ -58,6 +59,7 @@ public class CloudExecutionContextProvid
   public static final String API_KEY = "apiKey";
   public static final String ACCESS_KEY = "accessKey";
   public static final String NUM_HOSTS = "numHosts";
+  public static final String MAX_HOSTS_PER_CREATE_REQUEST = "maxHostsPerCreateRequest";
   public static final String GROUP_NAME = "groupName";
   public static final String IMAGE_ID = "imageId";
   public static final String KEY_PAIR = "keyPair";
@@ -74,9 +76,11 @@ public class CloudExecutionContextProvid
   private final String[] mSlaveLocalDirs;
   private final int mNumThreads;
   private final int mNumHosts;
+  private final int mMaxHostsPerCreateRequest;
   private final long mRetrySleepInterval;
   private final CloudComputeService mCloudComputeService;
   private final Map<String, Long> mTerminatedHosts;
+  private final Map<String, Long> mLiveHosts;
   private final ExecutorService mTerminationExecutor;
   private final File mWorkingDir;
   private final SSHCommandExecutor mSSHCommandExecutor;
@@ -85,8 +89,9 @@ public class CloudExecutionContextProvid
   CloudExecutionContextProvider(String dataDir,
       int numHosts, CloudComputeService cloudComputeService, SSHCommandExecutor sshCommandExecutor,
       String workingDirectory, String privateKey, String user, String[] slaveLocalDirs, int numThreads,
-      long retrySleepInterval) throws IOException {
+      long retrySleepInterval, int maxHostsPerCreateRequest) throws IOException {
     mNumHosts = numHosts;
+    mMaxHostsPerCreateRequest = maxHostsPerCreateRequest;
     mCloudComputeService = cloudComputeService;
     mPrivateKey = privateKey;
     mUser = user;
@@ -95,6 +100,7 @@ public class CloudExecutionContextProvid
     mRetrySleepInterval = retrySleepInterval;
     mSSHCommandExecutor = sshCommandExecutor;
     mWorkingDir = Dirs.create(new File(workingDirectory, "working"));
+    mLiveHosts = Collections.synchronizedMap(new HashMap<String, Long>());
     mTerminatedHosts = Collections
         .synchronizedMap(new LinkedHashMap<String, Long>() {
           private static final long serialVersionUID = 1L;
@@ -110,6 +116,7 @@ public class CloudExecutionContextProvid
   }
 
   private void initialize() throws IOException {
+    LOG.info("CloudExecutionContextProvider maxHostsPerCreateRequest = " + mMaxHostsPerCreateRequest);
     Set<String> hosts = Sets.newHashSet();
     String host = null;
     mHostLog.seek(0); // should already be true
@@ -164,7 +171,7 @@ public class CloudExecutionContextProvid
       terminate(hostsToTerminate, true);
       Set<NodeMetadata> nodes = createNodes(hostsToTerminate.size());
       for (NodeMetadata node : nodes) {
-        executionContext.addHost(new Host(node.getHostname(), mUser, mSlaveLocalDirs,
+        executionContext.addHost(new Host(publicIp(node), mUser, mSlaveLocalDirs,
             mNumThreads));
       }
     }
@@ -179,8 +186,8 @@ public class CloudExecutionContextProvid
       Set<NodeMetadata> nodes = createNodes(mNumHosts);
       Set<Host> hosts = Sets.newHashSet();
       for (NodeMetadata node : nodes) {
-        hosts.add(new Host(node.getHostname(), mUser, mSlaveLocalDirs,
-            mNumThreads));
+        hosts.add(new Host(publicIp(node), mUser, mSlaveLocalDirs,
+          mNumThreads));
       }
       return new ExecutionContext(this, hosts, mWorkingDir.getAbsolutePath(),
           mPrivateKey);
@@ -204,7 +211,7 @@ public class CloudExecutionContextProvid
       boolean error = false;
       LOG.info("Attempting to create " + numRequired + " nodes");
       try {
-        result.addAll(mCloudComputeService.createNodes(Math.min(2, numRequired)));
+        result.addAll(mCloudComputeService.createNodes(Math.min(mMaxHostsPerCreateRequest, numRequired)));
       } catch (RunNodesException e) {
         error = true;
         LOG.warn("Error creating nodes", e);
@@ -212,6 +219,9 @@ public class CloudExecutionContextProvid
         result.addAll(e.getSuccessfulNodes());
       }
       result = verifyHosts(result);
+      for (NodeMetadata node : result) {
+        mLiveHosts.put(publicIpOrHostname(node), System.currentTimeMillis());
+      }
       LOG.info("Successfully created " + result.size() + " nodes");
       numRequired = numHosts - result.size();
       if (numRequired > 0) {
@@ -247,6 +257,22 @@ CloudExecutionContextProvid
     }
   }
 
+  private static String publicIpOrHostname(NodeMetadata node) {
+    Set<String> publicIps = node.getPublicAddresses();
+    if (publicIps.size() == 1) {
+      return Iterables.getOnlyElement(publicIps);
+    }
+    return node.getHostname();
+  }
+
+  private static String publicIp(NodeMetadata node) {
+    Set<String> publicIps = node.getPublicAddresses();
+    if (publicIps.size() == 1) {
+      return Iterables.getOnlyElement(publicIps);
+    }
+    throw new IllegalStateException("Node does not have exactly one public ip: " + node);
+  }
+
   private Set<NodeMetadata> verifyHosts(Set<? extends NodeMetadata> hosts)
       throws CreateHostsFailedException {
     final Set<NodeMetadata> result = Collections.synchronizedSet(new HashSet<NodeMetadata>());
@@ -258,7 +285,8 @@ public class CloudExecutionContextProvid
           executorService.submit(new Runnable() {
             @Override
             public void run() {
-              SSHCommand command = new SSHCommand(mSSHCommandExecutor, mPrivateKey, mUser, node.getHostname(), 0, "pkill -f java");
+              String ip = publicIpOrHostname(node);
+              SSHCommand command = new SSHCommand(mSSHCommandExecutor, mPrivateKey, mUser, ip, 0, "pkill -f java");
               mSSHCommandExecutor.execute(command);
               if(command.getExitCode() == Constants.EXIT_CODE_UNKNOWN ||
                   command.getException() != null) {
@@ -293,10 +321,13 @@ public class CloudExecutionContextProvid
       terminatedHosts.putAll(mTerminatedHosts);
     }
     for (NodeMetadata node : getRunningNodes()) {
-      if (terminatedHosts.containsKey(node.getHostname())) {
+      String ip = publicIpOrHostname(node);
+      if (terminatedHosts.containsKey(ip)) {
         terminateInternal(node);
         LOG.warn("Found zombie node: " + node + " previously terminated at "
-            + new Date(terminatedHosts.get(node.getHostname())));
+            + new Date(terminatedHosts.get(ip)));
+      } else if(!mLiveHosts.containsKey(ip)) {
+        LOG.warn("Found zombie node: " + node + " previously unknown to ptest");
       }
     }
   }
@@ -318,6 +349,7 @@ public class CloudExecutionContextProvid
 
   private void terminateInternal(final NodeMetadata node) {
     LOG.info("Submitting termination for " + node);
+    mLiveHosts.remove(publicIpOrHostname(node));
     mTerminationExecutor.submit(new Runnable() {
       @Override
       public void run() {
@@ -328,9 +360,10 @@ public class CloudExecutionContextProvid
          Thread.currentThread().interrupt();
         }
         try {
-          LOG.info("Terminating " + node.getHostname());
-          if (!mTerminatedHosts.containsKey(node.getHostname())) {
-            mTerminatedHosts.put(node.getHostname(), System.currentTimeMillis());
+          String ip = publicIpOrHostname(node);
+          LOG.info("Terminating " + ip);
+          if (!mTerminatedHosts.containsKey(ip)) {
+            mTerminatedHosts.put(ip, System.currentTimeMillis());
           }
           mCloudComputeService.destroyNode(node.getId());
         } catch (Exception e) {
@@ -343,8 +376,9 @@ public class CloudExecutionContextProvid
   private void persistHostnamesToLog(Set<? extends NodeMetadata> nodes) {
     for (NodeMetadata node : nodes) {
       try {
-        if(!Strings.nullToEmpty(node.getHostname()).trim().isEmpty()) {
-          mHostLog.writeBytes(node.getHostname() + "\n");
+        String ip = publicIpOrHostname(node);
+        if(!Strings.nullToEmpty(ip).trim().isEmpty()) {
+          mHostLog.writeBytes(ip + "\n");
         }
       } catch (IOException e) {
         Throwables.propagate(e);
@@ -364,7 +398,8 @@ public class CloudExecutionContextProvid
     LOG.info("Requesting termination of " + hosts);
     Set<NodeMetadata> nodesToTerminate = Sets.newHashSet();
     for (NodeMetadata node : getRunningNodes()) {
-      if (hosts.contains(node.getHostname())) {
+      String ip = publicIpOrHostname(node);
+      if (hosts.contains(ip)) {
         nodesToTerminate.add(node);
       }
     }
@@ -391,6 +426,7 @@ public class CloudExecutionContextProvid
         API_KEY + " is required");
     String accessKey = Preconditions.checkNotNull(
         context.getString(ACCESS_KEY), ACCESS_KEY + " is required");
+    int maxHostsPerCreateRequest = context.getInteger(MAX_HOSTS_PER_CREATE_REQUEST, 2);
     Integer numHosts = context.getInteger(NUM_HOSTS, 8);
     Preconditions.checkArgument(numHosts > 0, NUM_HOSTS
         + " must be greater than zero");
@@ -401,10 +437,9 @@ public class CloudExecutionContextProvid
         KEY_PAIR + " is required");
     String securityGroup = Preconditions.checkNotNull(
         context.getString(SECURITY_GROUP), SECURITY_GROUP + " is required");
-    Float maxBid = Preconditions.checkNotNull(context.getFloat(MAX_BID),
-        MAX_BID + " is required");
-    Preconditions.checkArgument(maxBid > 0, MAX_BID
-        + " must be greater than zero");
+    Float maxBid = context.getFloat(MAX_BID);
+    Preconditions.checkArgument(maxBid == null || maxBid > 0, MAX_BID
+        + " must be null or greater than zero");
     String privateKey = Preconditions.checkNotNull(
         context.getString(PRIVATE_KEY), PRIVATE_KEY + " is required");
     String user = context.getString(USERNAME, "hiveptest");
@@ -417,7 +452,7 @@ public class CloudExecutionContextProvid
         instanceType, groupName, imageId, keyPair, securityGroup, maxBid);
     CloudExecutionContextProvider service = new CloudExecutionContextProvider(
         dataDir, numHosts, cloudComputeService, new SSHCommandExecutor(LOG), workingDirectory,
-        privateKey, user, localDirs, numThreads, 60);
+        privateKey, user, localDirs, numThreads, 60, maxHostsPerCreateRequest);
     return service;
   }
 }

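Two behavioral notes on the hunks above. First, the helper pair is deliberately asymmetric: publicIpOrHostname() falls back to the hostname and is used for logging and bookkeeping, while publicIp() throws because an execution Host must have exactly one reachable public address. Second, maxHostsPerCreateRequest replaces the hard-coded cap of 2 in createNodes(); condensed from the method above (retry, sleep, and RunNodesException handling elided), the request loop now looks roughly like this:

    // Condensed control flow, not the full method: request nodes in batches
    // no larger than mMaxHostsPerCreateRequest until enough verified hosts exist.
    Set<NodeMetadata> result = Sets.newHashSet();
    int numRequired = numHosts;
    while (numRequired > 0) {
      result.addAll(verifyHosts(mCloudComputeService.createNodes(
          Math.min(mMaxHostsPerCreateRequest, numRequired))));
      numRequired = numHosts - result.size();
    }
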
Modified: hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java Tue Jul 15 18:50:51 2014
@@ -30,20 +30,20 @@ import org.slf4j.Logger;
 
 
 public class RSyncCommandExecutor {
-
   private final Logger mLogger;
+  private final int mMaxRsyncThreads;
   private final LocalCommandFactory mLocalCommandFactory;
   private final Semaphore mSemaphore;
   private volatile boolean mShutdown;
 
-  public RSyncCommandExecutor(Logger logger, LocalCommandFactory localCommandFactory) {
+  public RSyncCommandExecutor(Logger logger, int maxRsyncThreads, LocalCommandFactory localCommandFactory) {
     mLogger = logger;
+    mMaxRsyncThreads = Math.min(Runtime.getRuntime().availableProcessors() * 5, maxRsyncThreads);
     mLocalCommandFactory = localCommandFactory;
-    mSemaphore = new Semaphore(Math.min(Runtime.getRuntime().availableProcessors() * 5, 10));
+    mSemaphore = new Semaphore(mMaxRsyncThreads);
     mShutdown = false;
-  }
-  public RSyncCommandExecutor(Logger logger) {
-    this(logger, new LocalCommandFactory(logger));
+    mLogger.info("RSyncCommandExecutor has " + mMaxRsyncThreads + " threads on " + Runtime.getRuntime()
+      .availableProcessors() + " cpus");
   }
 
   /**
@@ -105,4 +105,4 @@ public class RSyncCommandExecutor {
   public void shutdownNow() {
     this.mShutdown = true;
   }
-}
\ No newline at end of file
+}

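The constructor rework above turns the previously hard-coded rsync concurrency limit of 10 into the maxRsyncThreads parameter, still capped at five times the CPU count, with the Semaphore enforcing the limit. A minimal runnable illustration of the same bounded-concurrency pattern (the sleeping task is a stand-in for a real rsync process):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Semaphore;
    import java.util.concurrent.TimeUnit;

    public class ThrottleSketch {
      public static void main(String[] args) throws Exception {
        final Semaphore slots = new Semaphore(2);  // e.g. maxRsyncThreads = 2
        ExecutorService pool = Executors.newFixedThreadPool(8);
        for (int i = 0; i < 8; i++) {
          final int id = i;
          pool.submit(new Runnable() {
            @Override
            public void run() {
              try {
                slots.acquire();  // blocks while two transfers are in flight
                try {
                  System.out.println("rsync " + id + " running");
                  Thread.sleep(100);  // stand-in for the rsync process
                } finally {
                  slots.release();
                }
              } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
              }
            }
          });
        }
        pool.shutdown();
        pool.awaitTermination(10, TimeUnit.SECONDS);
      }
    }
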
Modified: hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java Tue Jul 15 18:50:51 2014
@@ -34,7 +34,7 @@ public class MockRSyncCommandExecutor ex
   private final List<String> mCommands;
   private final Map<String, Queue<Integer>> mFailures;
   public MockRSyncCommandExecutor(Logger logger) {
-    super(logger);
+    super(logger, 0, null);
     mCommands = Lists.newArrayList();
     mFailures = Maps.newHashMap();
   }

Modified: hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/conf/TestTestParser.java Tue Jul 15 18:50:51 2014
@@ -19,7 +19,10 @@
 package org.apache.hive.ptest.execution.conf;
 
 import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
 import java.util.List;
+import java.util.Properties;
 
 import junit.framework.Assert;
 
@@ -46,6 +49,7 @@ public class TestTestParser {
   private File unitTestDir1;
   private File unitTestDir2;
   private File qFileTestDir;
+  private File propertyDir;
 
   @Before
   public void setup() throws Exception {
@@ -56,6 +60,7 @@ public class TestTestParser {
         "test", "classes")));
     unitTestDir2 = Dirs.create(new File(baseDir, Joiner.on("/").join("source", "build", "2", "units", "test", "classes")));
     qFileTestDir = Dirs.create(new File(baseDir, Joiner.on("/").join("source", "qfiles")));
+    propertyDir = Dirs.create(new File(baseDir, Joiner.on("/").join("source", "props")));
     Assert.assertTrue((new File(unitTestDir1, "TestA.class")).createNewFile());
     Assert.assertTrue((new File(unitTestDir2, "TestB.class")).createNewFile());
     Assert.assertTrue((new File(unitTestDir1, "TestC.class")).createNewFile());
@@ -64,10 +69,20 @@ public class TestTestParser {
     Assert.assertTrue((new File(qFileTestDir, ".svn")).mkdirs());
     Assert.assertTrue((new File(qFileTestDir, "dir.q")).mkdirs());
     Assert.assertTrue((new File(qFileTestDir, "normal.q")).createNewFile());
+    Assert.assertTrue((new File(qFileTestDir, "normal2.q")).createNewFile());
+    Assert.assertTrue((new File(qFileTestDir, "normal3.q")).createNewFile());
+    Assert.assertTrue((new File(qFileTestDir, "normal4.q")).createNewFile());
     Assert.assertTrue((new File(qFileTestDir, "excluded.q")).createNewFile());
     Assert.assertTrue((new File(qFileTestDir, "isolated.q")).createNewFile());
     Assert.assertTrue((new File(qFileTestDir, "included.q")).createNewFile());
 
+    Properties normalProp = new Properties();
+    normalProp.setProperty("normal.one.group", "normal.q,normal2.q");
+    normalProp.setProperty("normal.two.group", "normal3.q,normal4.q");
+    normalProp.setProperty("excluded.group", "excluded.q");
+    normalProp.setProperty("isolated.group", "isolated.q");
+    normalProp.setProperty("included.group", "included.q");
+    serialize("normal.properties", normalProp);
   }
   @After
   public void teardown() {
@@ -107,4 +122,35 @@ public class TestTestParser {
     List<TestBatch> testBatches = testParser.parse().get();
     Assert.assertEquals(4, testBatches.size());
   }
+  @Test
+  public void testParsePropertyFile() throws Exception {
+    context.put("unitTests.directories", "build/1 build/2");
+    context.put("unitTests.include", "TestA TestB");
+    context.put("unitTests.isolate", "TestB");
+    context.put("qFileTests", "f");
+    context.put("qFileTests.propertyFiles.prop",
+      "props" + File.separator + "normal.properties");
+    context.put("qFileTest.f.driver", DRIVER);
+    context.put("qFileTest.f.directory", "qfiles");
+    context.put("qFileTest.f.include", "included");
+    context.put("qFileTest.f.isolate", "isolated");
+    context.put("qFileTest.f.exclude", "excluded");
+    context.put("qFileTest.f.queryFilesProperty", "qfile");
+    context.put("qFileTest.f.groups.included", "prop.${normal.one.group} prop.${normal.two.group} prop.${isolated.group}");
+    context.put("qFileTest.f.groups.isolated", "prop.${isolated.group}");
+    context.put("qFileTest.f.groups.excluded", "prop.${excluded.group}");
+    testParser = new TestParser(context, "testcase", workingDirectory, LOG);
+    List<TestBatch> testBatches = testParser.parse().get();
+    Assert.assertEquals(4, testBatches.size());
+  }
+
+  private void serialize(String propFileName, Properties props) throws Exception {
+    File f = new File(propertyDir, propFileName);
+    OutputStream out = new FileOutputStream(f);
+    try {
+      props.store(out, null);
+    } finally {
+      out.close();
+    }
+  }
 }

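For reference, the normal.properties file serialized by the setup above contains entries equivalent to the following (Properties.store also writes a timestamp comment, and key order is not guaranteed):

    normal.one.group=normal.q,normal2.q
    normal.two.group=normal3.q,normal4.q
    excluded.group=excluded.q
    isolated.group=isolated.q
    included.group=included.q

In testParsePropertyFile, prop.${normal.one.group} therefore expands to normal.q and normal2.q, so the four resulting batches line up with the plain-qfile test above it.
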
Modified: hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java Tue Jul 15 18:50:51 2014
@@ -80,8 +80,11 @@ public class TestCloudExecutionContextPr
     when(template.getImage()).thenReturn(mock(Image.class));
     when(template.getHardware()).thenReturn(mock(Hardware.class));
     when(node1.getHostname()).thenReturn("node1");
+    when(node1.getPublicAddresses()).thenReturn(Collections.singleton("1.1.1.1"));
     when(node2.getHostname()).thenReturn("node2");
+    when(node2.getPublicAddresses()).thenReturn(Collections.singleton("1.1.1.2"));
     when(node3.getHostname()).thenReturn("node3");
+    when(node3.getPublicAddresses()).thenReturn(Collections.singleton("1.1.1.3"));
     runNodesException = new RunNodesException("", 2, template,
         Collections.singleton(node1), Collections.<String, Exception>emptyMap(),
         Collections.singletonMap(node2, new Exception("For testing")));
@@ -105,12 +108,12 @@ public class TestCloudExecutionContextPr
       }
     });
     CloudExecutionContextProvider provider = new CloudExecutionContextProvider(dataDir, NUM_NODES,
-        cloudComputeService, sshCommandExecutor, workingDir, PRIVATE_KEY, USER, SLAVE_DIRS, 1, 0);
+        cloudComputeService, sshCommandExecutor, workingDir, PRIVATE_KEY, USER, SLAVE_DIRS, 1, 0, 1);
     ExecutionContext executionContext = provider.createExecutionContext();
     Set<String> hosts = Sets.newHashSet();
     for(Host host : executionContext.getHosts()) {
       hosts.add(host.getName());
     }
-    Assert.assertEquals(Sets.newHashSet("node1", "node3"), hosts);
+    Assert.assertEquals(Sets.newHashSet("1.1.1.1", "1.1.1.3"), hosts);
   }
 }

Modified: hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/ssh/TestRSyncCommandExecutor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/ssh/TestRSyncCommandExecutor.java?rev=1610806&r1=1610805&r2=1610806&view=diff
==============================================================================
--- hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/ssh/TestRSyncCommandExecutor.java (original)
+++ hive/branches/cbo/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/ssh/TestRSyncCommandExecutor.java Tue Jul 15 18:50:51 2014
@@ -50,7 +50,7 @@ public class TestRSyncCommandExecutor {
   public void testShutdownBeforeWaitFor() throws Exception {
     LocalCommand localCommand = mock(LocalCommand.class);
     localCommandFactory.setInstance(localCommand);
-    RSyncCommandExecutor executor = new RSyncCommandExecutor(LOG, localCommandFactory);
+    RSyncCommandExecutor executor = new RSyncCommandExecutor(LOG, 1, localCommandFactory);
     Assert.assertFalse(executor.isShutdown());
     executor.shutdownNow();
     RSyncCommand command = new RSyncCommand(executor, "privateKey", "user", "host", 1, "local", "remote", RSyncCommand.Type.FROM_LOCAL);
@@ -66,7 +66,7 @@ public class TestRSyncCommandExecutor {
   public void testShutdownDuringWaitFor() throws Exception {
     LocalCommand localCommand = mock(LocalCommand.class);
     localCommandFactory.setInstance(localCommand);
-    final RSyncCommandExecutor executor = new RSyncCommandExecutor(LOG, localCommandFactory);
+    final RSyncCommandExecutor executor = new RSyncCommandExecutor(LOG, 1, localCommandFactory);
     Assert.assertFalse(executor.isShutdown());
     when(localCommand.getExitCode()).thenAnswer(new Answer<Integer>() {
       @Override
@@ -84,4 +84,4 @@ public class TestRSyncCommandExecutor {
     }
     verify(localCommand, never()).kill();
   }
-}
\ No newline at end of file
+}


