hive-commits mailing list archives

From: gunt...@apache.org
Subject: svn commit: r1545564 [22/22] - in /hive/branches/tez: ./ ant/ beeline/ bin/ cli/ common/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org...
Date: Tue, 26 Nov 2013 08:19:34 GMT
Modified: hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml (original)
+++ hive/branches/tez/ql/src/test/results/compiler/plan/udf4.q.xml Tue Nov 26 08:19:25 2013
@@ -197,22 +197,6 @@
                   </void> 
                  </object> 
                 </void> 
-                <void property="counterNames"> 
-                 <object class="java.util.ArrayList"> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_FS_2_NUM_INPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_FS_2_NUM_OUTPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_FS_2_TIME_TAKEN</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_FS_2_FATAL_ERROR</string> 
-                  </void> 
-                 </object> 
-                </void> 
                 <void property="operatorId"> 
                  <string>FS_2</string> 
                 </void> 
@@ -1103,17 +1087,7 @@
                  </object> 
                 </void> 
                 <void property="genericUDF"> 
-                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
-                  <void property="operator"> 
-                   <boolean>true</boolean> 
-                  </void> 
-                  <void property="udfClassName"> 
-                   <string>org.apache.hadoop.hive.ql.udf.UDFOPPlus</string> 
-                  </void> 
-                  <void property="udfName"> 
-                   <string>+</string> 
-                  </void> 
-                 </object> 
+                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus"/> 
                 </void> 
                 <void property="typeInfo"> 
                  <object idref="PrimitiveTypeInfo2"/> 
@@ -1148,17 +1122,7 @@
                  </object> 
                 </void> 
                 <void property="genericUDF"> 
-                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
-                  <void property="operator"> 
-                   <boolean>true</boolean> 
-                  </void> 
-                  <void property="udfClassName"> 
-                   <string>org.apache.hadoop.hive.ql.udf.UDFOPPlus</string> 
-                  </void> 
-                  <void property="udfName"> 
-                   <string>+</string> 
-                  </void> 
-                 </object> 
+                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus"/> 
                 </void> 
                 <void property="typeInfo"> 
                  <object idref="PrimitiveTypeInfo2"/> 
@@ -1399,22 +1363,6 @@
               </void> 
              </object> 
             </void> 
-            <void property="counterNames"> 
-             <object class="java.util.ArrayList"> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_NUM_INPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_NUM_OUTPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_TIME_TAKEN</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_FATAL_ERROR</string> 
-              </void> 
-             </object> 
-            </void> 
             <void property="operatorId"> 
              <string>SEL_1</string> 
             </void> 
@@ -1751,22 +1699,6 @@
           </void> 
          </object> 
         </void> 
-        <void property="counterNames"> 
-         <object class="java.util.ArrayList"> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_INPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_OUTPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_TIME_TAKEN</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_FATAL_ERROR</string> 
-          </void> 
-         </object> 
-        </void> 
         <void property="neededColumnIDs"> 
          <object class="java.util.ArrayList"/> 
         </void> 

Modified: hive/branches/tez/ql/src/test/results/compiler/plan/udf6.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/compiler/plan/udf6.q.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/compiler/plan/udf6.q.xml (original)
+++ hive/branches/tez/ql/src/test/results/compiler/plan/udf6.q.xml Tue Nov 26 08:19:25 2013
@@ -217,22 +217,6 @@
                   </void> 
                  </object> 
                 </void> 
-                <void property="counterNames"> 
-                 <object class="java.util.ArrayList"> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_FS_2_NUM_INPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_FS_2_NUM_OUTPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_FS_2_TIME_TAKEN</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_FS_2_FATAL_ERROR</string> 
-                  </void> 
-                 </object> 
-                </void> 
                 <void property="operatorId"> 
                  <string>FS_2</string> 
                 </void> 
@@ -407,22 +391,6 @@
               </void> 
              </object> 
             </void> 
-            <void property="counterNames"> 
-             <object class="java.util.ArrayList"> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_NUM_INPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_NUM_OUTPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_TIME_TAKEN</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_FATAL_ERROR</string> 
-              </void> 
-             </object> 
-            </void> 
             <void property="operatorId"> 
              <string>SEL_1</string> 
             </void> 
@@ -487,22 +455,6 @@
           </void> 
          </object> 
         </void> 
-        <void property="counterNames"> 
-         <object class="java.util.ArrayList"> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_INPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_OUTPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_TIME_TAKEN</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_FATAL_ERROR</string> 
-          </void> 
-         </object> 
-        </void> 
         <void property="neededColumnIDs"> 
          <object class="java.util.ArrayList"/> 
         </void> 

Modified: hive/branches/tez/ql/src/test/results/compiler/plan/udf_case.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/compiler/plan/udf_case.q.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/compiler/plan/udf_case.q.xml (original)
+++ hive/branches/tez/ql/src/test/results/compiler/plan/udf_case.q.xml Tue Nov 26 08:19:25 2013
@@ -221,22 +221,6 @@
                       </void> 
                      </object> 
                     </void> 
-                    <void property="counterNames"> 
-                     <object class="java.util.ArrayList"> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_FS_3_NUM_INPUT_ROWS</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_FS_3_NUM_OUTPUT_ROWS</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_FS_3_TIME_TAKEN</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_FS_3_FATAL_ERROR</string> 
-                      </void> 
-                     </object> 
-                    </void> 
                     <void property="operatorId"> 
                      <string>FS_3</string> 
                     </void> 
@@ -302,22 +286,6 @@
                   </void> 
                  </object> 
                 </void> 
-                <void property="counterNames"> 
-                 <object class="java.util.ArrayList"> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_LIM_2_NUM_INPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_LIM_2_NUM_OUTPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_LIM_2_TIME_TAKEN</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_LIM_2_FATAL_ERROR</string> 
-                  </void> 
-                 </object> 
-                </void> 
                 <void property="operatorId"> 
                  <string>LIM_2</string> 
                 </void> 
@@ -540,22 +508,6 @@
               </void> 
              </object> 
             </void> 
-            <void property="counterNames"> 
-             <object class="java.util.ArrayList"> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_NUM_INPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_NUM_OUTPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_TIME_TAKEN</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_FATAL_ERROR</string> 
-              </void> 
-             </object> 
-            </void> 
             <void property="operatorId"> 
              <string>SEL_1</string> 
             </void> 
@@ -587,22 +539,6 @@
           </void> 
          </object> 
         </void> 
-        <void property="counterNames"> 
-         <object class="java.util.ArrayList"> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_INPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_OUTPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_TIME_TAKEN</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_FATAL_ERROR</string> 
-          </void> 
-         </object> 
-        </void> 
         <void property="neededColumnIDs"> 
          <object class="java.util.ArrayList"/> 
         </void> 

Modified: hive/branches/tez/ql/src/test/results/compiler/plan/udf_when.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/compiler/plan/udf_when.q.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/compiler/plan/udf_when.q.xml (original)
+++ hive/branches/tez/ql/src/test/results/compiler/plan/udf_when.q.xml Tue Nov 26 08:19:25 2013
@@ -221,22 +221,6 @@
                       </void> 
                      </object> 
                     </void> 
-                    <void property="counterNames"> 
-                     <object class="java.util.ArrayList"> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_FS_3_NUM_INPUT_ROWS</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_FS_3_NUM_OUTPUT_ROWS</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_FS_3_TIME_TAKEN</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_FS_3_FATAL_ERROR</string> 
-                      </void> 
-                     </object> 
-                    </void> 
                     <void property="operatorId"> 
                      <string>FS_3</string> 
                     </void> 
@@ -302,22 +286,6 @@
                   </void> 
                  </object> 
                 </void> 
-                <void property="counterNames"> 
-                 <object class="java.util.ArrayList"> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_LIM_2_NUM_INPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_LIM_2_NUM_OUTPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_LIM_2_TIME_TAKEN</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_LIM_2_FATAL_ERROR</string> 
-                  </void> 
-                 </object> 
-                </void> 
                 <void property="operatorId"> 
                  <string>LIM_2</string> 
                 </void> 
@@ -620,22 +588,6 @@
               </void> 
              </object> 
             </void> 
-            <void property="counterNames"> 
-             <object class="java.util.ArrayList"> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_NUM_INPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_NUM_OUTPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_TIME_TAKEN</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_SEL_1_FATAL_ERROR</string> 
-              </void> 
-             </object> 
-            </void> 
             <void property="operatorId"> 
              <string>SEL_1</string> 
             </void> 
@@ -667,22 +619,6 @@
           </void> 
          </object> 
         </void> 
-        <void property="counterNames"> 
-         <object class="java.util.ArrayList"> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_INPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_OUTPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_TIME_TAKEN</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_FATAL_ERROR</string> 
-          </void> 
-         </object> 
-        </void> 
         <void property="neededColumnIDs"> 
          <object class="java.util.ArrayList"/> 
         </void> 

Modified: hive/branches/tez/ql/src/test/results/compiler/plan/union.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/compiler/plan/union.q.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/compiler/plan/union.q.xml (original)
+++ hive/branches/tez/ql/src/test/results/compiler/plan/union.q.xml Tue Nov 26 08:19:25 2013
@@ -90,22 +90,6 @@
                               </void> 
                              </object> 
                             </void> 
-                            <void property="counterNames"> 
-                             <object class="java.util.ArrayList"> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FS_12_NUM_INPUT_ROWS</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FS_12_NUM_OUTPUT_ROWS</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FS_12_TIME_TAKEN</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FS_12_FATAL_ERROR</string> 
-                              </void> 
-                             </object> 
-                            </void> 
                             <void property="operatorId"> 
                              <string>FS_12</string> 
                             </void> 
@@ -167,22 +151,6 @@
                         <void property="conf"> 
                          <object class="org.apache.hadoop.hive.ql.plan.TableScanDesc"/> 
                         </void> 
-                        <void property="counterNames"> 
-                         <object class="java.util.ArrayList"> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_TS_11_NUM_INPUT_ROWS</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_TS_11_NUM_OUTPUT_ROWS</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_TS_11_TIME_TAKEN</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_TS_11_FATAL_ERROR</string> 
-                          </void> 
-                         </object> 
-                        </void> 
                         <void property="neededColumnIDs"> 
                          <object class="java.util.ArrayList"> 
                           <void method="add"> 
@@ -749,22 +717,6 @@
                               </void> 
                              </object> 
                             </void> 
-                            <void property="counterNames"> 
-                             <object class="java.util.ArrayList"> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FS_8_NUM_INPUT_ROWS</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FS_8_NUM_OUTPUT_ROWS</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FS_8_TIME_TAKEN</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FS_8_FATAL_ERROR</string> 
-                              </void> 
-                             </object> 
-                            </void> 
                             <void property="operatorId"> 
                              <string>FS_8</string> 
                             </void> 
@@ -841,22 +793,6 @@
                           </void> 
                          </object> 
                         </void> 
-                        <void property="counterNames"> 
-                         <object class="java.util.ArrayList"> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_SEL_7_NUM_INPUT_ROWS</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_SEL_7_NUM_OUTPUT_ROWS</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_SEL_7_TIME_TAKEN</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_SEL_7_FATAL_ERROR</string> 
-                          </void> 
-                         </object> 
-                        </void> 
                         <void property="operatorId"> 
                          <string>SEL_7</string> 
                         </void> 
@@ -920,22 +856,6 @@
                     <void property="conf"> 
                      <object class="org.apache.hadoop.hive.ql.plan.UnionDesc"/> 
                     </void> 
-                    <void property="counterNames"> 
-                     <object class="java.util.ArrayList"> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_UNION_6_NUM_INPUT_ROWS</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_UNION_6_NUM_OUTPUT_ROWS</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_UNION_6_TIME_TAKEN</string> 
-                      </void> 
-                      <void method="add"> 
-                       <string>CNTR_NAME_UNION_6_FATAL_ERROR</string> 
-                      </void> 
-                     </object> 
-                    </void> 
                     <void property="operatorId"> 
                      <string>UNION_6</string> 
                     </void> 
@@ -1012,22 +932,6 @@
                           </void> 
                          </object> 
                         </void> 
-                        <void property="counterNames"> 
-                         <object class="java.util.ArrayList"> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_SEL_5_NUM_INPUT_ROWS</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_SEL_5_TIME_TAKEN</string> 
-                          </void> 
-                          <void method="add"> 
-                           <string>CNTR_NAME_SEL_5_FATAL_ERROR</string> 
-                          </void> 
-                         </object> 
-                        </void> 
                         <void property="operatorId"> 
                          <string>SEL_5</string> 
                         </void> 
@@ -1091,22 +995,6 @@
                               </void> 
                              </object> 
                             </void> 
-                            <void property="counterNames"> 
-                             <object class="java.util.ArrayList"> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FIL_10_NUM_INPUT_ROWS</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FIL_10_NUM_OUTPUT_ROWS</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FIL_10_TIME_TAKEN</string> 
-                              </void> 
-                              <void method="add"> 
-                               <string>CNTR_NAME_FIL_10_FATAL_ERROR</string> 
-                              </void> 
-                             </object> 
-                            </void> 
                             <void property="operatorId"> 
                              <string>FIL_10</string> 
                             </void> 
@@ -1131,22 +1019,6 @@
                                   </void> 
                                  </object> 
                                 </void> 
-                                <void property="counterNames"> 
-                                 <object class="java.util.ArrayList"> 
-                                  <void method="add"> 
-                                   <string>CNTR_NAME_TS_3_NUM_INPUT_ROWS</string> 
-                                  </void> 
-                                  <void method="add"> 
-                                   <string>CNTR_NAME_TS_3_NUM_OUTPUT_ROWS</string> 
-                                  </void> 
-                                  <void method="add"> 
-                                   <string>CNTR_NAME_TS_3_TIME_TAKEN</string> 
-                                  </void> 
-                                  <void method="add"> 
-                                   <string>CNTR_NAME_TS_3_FATAL_ERROR</string> 
-                                  </void> 
-                                 </object> 
-                                </void> 
                                 <void property="neededColumnIDs"> 
                                  <object class="java.util.ArrayList"> 
                                   <void method="add"> 
@@ -1418,22 +1290,6 @@
                   </void> 
                  </object> 
                 </void> 
-                <void property="counterNames"> 
-                 <object class="java.util.ArrayList"> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_SEL_2_NUM_INPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_SEL_2_TIME_TAKEN</string> 
-                  </void> 
-                  <void method="add"> 
-                   <string>CNTR_NAME_SEL_2_FATAL_ERROR</string> 
-                  </void> 
-                 </object> 
-                </void> 
                 <void property="operatorId"> 
                  <string>SEL_2</string> 
                 </void> 
@@ -1529,22 +1385,6 @@
               </void> 
              </object> 
             </void> 
-            <void property="counterNames"> 
-             <object class="java.util.ArrayList"> 
-              <void method="add"> 
-               <string>CNTR_NAME_FIL_9_NUM_INPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_FIL_9_NUM_OUTPUT_ROWS</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_FIL_9_TIME_TAKEN</string> 
-              </void> 
-              <void method="add"> 
-               <string>CNTR_NAME_FIL_9_FATAL_ERROR</string> 
-              </void> 
-             </object> 
-            </void> 
             <void property="operatorId"> 
              <string>FIL_9</string> 
             </void> 
@@ -1609,22 +1449,6 @@
           </void> 
          </object> 
         </void> 
-        <void property="counterNames"> 
-         <object class="java.util.ArrayList"> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_INPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_NUM_OUTPUT_ROWS</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_TIME_TAKEN</string> 
-          </void> 
-          <void method="add"> 
-           <string>CNTR_NAME_TS_0_FATAL_ERROR</string> 
-          </void> 
-         </object> 
-        </void> 
         <void property="neededColumnIDs"> 
          <object class="java.util.ArrayList"> 
           <void method="add"> 

Modified: hive/branches/tez/serde/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/pom.xml (original)
+++ hive/branches/tez/serde/pom.xml Tue Nov 26 08:19:25 2013
@@ -32,6 +32,7 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
     <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java Tue Nov 26 08:19:25 2013
@@ -139,38 +139,15 @@ public class BytesRefArrayWritable imple
       return sizeDiff;
     }
     for (int i = 0; i < valid; i++) {
-      if (other.contains(bytesRefWritables[i])) {
-        continue;
-      } else {
-        return 1;
+      int res = bytesRefWritables[i].compareTo(other.bytesRefWritables[i]);
+      if (res != 0) {
+        return res;
       }
     }
     return 0;
   }
 
   /**
-   * Returns <tt>true</tt> if this instance contains one or more the specified
-   * BytesRefWritable.
-   *
-   * @param bytesRefWritable
-   *          BytesRefWritable element to be tested
-   * @return <tt>true</tt> if contains the specified element
-   * @throws IllegalArgumentException
-   *           if the specified element is null
-   */
-  public boolean contains(BytesRefWritable bytesRefWritable) {
-    if (bytesRefWritable == null) {
-      throw new IllegalArgumentException("Argument can not be null.");
-    }
-    for (int i = 0; i < valid; i++) {
-      if (bytesRefWritables[i].equals(bytesRefWritable)) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  /**
    * {@inheritDoc}
    */
   @Override

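The rewritten compareTo above drops the one-sided contains() scan in favor of a positional, element-by-element comparison, so the result is a proper ordering instead of always returning 1 on the first mismatch. A minimal standalone sketch of the same pattern on plain int arrays (illustrative only, not Hive's API):

// Illustrative only: positional comparison of two equally-typed sequences,
// mirroring the pattern used in the patched compareTo.
public class PositionalCompare {

  static int compare(int[] a, int[] b) {
    int sizeDiff = a.length - b.length;
    if (sizeDiff != 0) {
      return sizeDiff;        // shorter sequence orders first, as in the patch
    }
    for (int i = 0; i < a.length; i++) {
      int res = Integer.compare(a[i], b[i]);
      if (res != 0) {
        return res;           // first differing element decides the order
      }
    }
    return 0;                 // same length, same elements
  }

  public static void main(String[] args) {
    System.out.println(compare(new int[]{1, 2, 3}, new int[]{1, 2, 4})); // negative
    System.out.println(compare(new int[]{1, 2, 4}, new int[]{1, 2, 3})); // positive
    System.out.println(compare(new int[]{1, 2}, new int[]{1, 2}));       // 0
  }
}
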
Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java Tue Nov 26 08:19:25 2013
@@ -67,6 +67,7 @@ public abstract class ColumnarStructBase
       if (col != null) {
         rawBytesField = col;
         inited = false;
+        fieldSkipped = false;
       } else {
         // select columns that actually do not exist in the file.
         fieldSkipped = true;

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java Tue Nov 26 08:19:25 2013
@@ -80,4 +80,14 @@ public interface PrimitiveObjectInspecto
    */
   boolean preferWritable();
 
+  /**
+   * The precision of the underlying data.
+   */
+  int precision();
+
+  /**
+   * The scale of the underlying data.
+   */
+  int scale();
+
 }

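The two methods added above let callers ask any primitive inspector for the numeric precision and scale of the data it describes. A minimal sketch of how a caller might use them, assuming the patched serde jar is on the classpath; writableIntObjectInspector is the existing factory singleton, and the expected values follow from the default implementation added to AbstractPrimitiveObjectInspector in the next hunk:

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Sketch: query the new precision()/scale() methods through an int inspector.
public class PrecisionScaleDemo {
  public static void main(String[] args) {
    PrimitiveObjectInspector intOI =
        PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    // With this patch an int inspector should report precision 10 and scale 0,
    // per the HiveDecimalUtils mapping used by the default implementation.
    System.out.println(intOI.precision() + "," + intOI.scale());
  }
}
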
Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java Tue Nov 26 08:19:25 2013
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 
 /**
@@ -86,4 +87,20 @@ public abstract class AbstractPrimitiveO
     return this.typeInfo;
   }
 
+  /**
+   * Default implementation.  Maybe overridden by exact types.
+   */
+  @Override
+  public int precision() {
+    return HiveDecimalUtils.getPrecisionForType(typeInfo);
+  }
+
+  /**
+   * Default implementation.  Maybe overridden by exact types.
+   */
+  @Override
+  public int scale() {
+    return HiveDecimalUtils.getScaleForType(typeInfo);
+  }
+
 }

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantByteObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantByteObjectInspector.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantByteObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantByteObjectInspector.java Tue Nov 26 08:19:25 2013
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import java.math.BigDecimal;
 
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 
 /**
  * A WritableConstantByteObjectInspector is a WritableByteObjectInspector
@@ -34,6 +35,7 @@ public class WritableConstantByteObjectI
   protected WritableConstantByteObjectInspector() {
     super();
   }
+
   WritableConstantByteObjectInspector(ByteWritable value) {
     super();
     this.value = value;
@@ -43,4 +45,10 @@ public class WritableConstantByteObjectI
   public ByteWritable getWritableConstantValue() {
     return value;
   }
+
+  @Override
+  public int precision() {
+    return BigDecimal.valueOf(value.get()).precision();
+  }
+
 }

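The constant inspectors now derive precision from the literal value itself via BigDecimal rather than from the widest value the type can hold. A quick illustration of what that BigDecimal call returns (plain JDK, no Hive dependencies):

import java.math.BigDecimal;

// precision() counts the significant decimal digits of the value, ignoring sign.
public class ConstantPrecisionDemo {
  public static void main(String[] args) {
    System.out.println(BigDecimal.valueOf(7).precision());     // 1
    System.out.println(BigDecimal.valueOf(1234).precision());  // 4
    System.out.println(BigDecimal.valueOf(-1000).precision()); // 4
  }
}
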
Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java Tue Nov 26 08:19:25 2013
@@ -56,4 +56,14 @@ implements ConstantObjectInspector {
     return new HiveDecimalWritable(dec);
   }
 
+  @Override
+  public int precision() {
+    return value.getHiveDecimal().precision();
+  }
+
+  @Override
+  public int scale() {
+    return value.getHiveDecimal().scale();
+  }
+
 }

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantIntObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantIntObjectInspector.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantIntObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantIntObjectInspector.java Tue Nov 26 08:19:25 2013
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import java.math.BigDecimal;
 
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.io.IntWritable;
 
 /**
@@ -34,6 +35,7 @@ public class WritableConstantIntObjectIn
   protected WritableConstantIntObjectInspector() {
     super();
   }
+
   WritableConstantIntObjectInspector(IntWritable value) {
     super();
     this.value = value;
@@ -43,4 +45,10 @@ public class WritableConstantIntObjectIn
   public IntWritable getWritableConstantValue() {
     return value;
   }
+
+  @Override
+  public int precision() {
+    return BigDecimal.valueOf(value.get()).precision();
+  }
+
 }

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantLongObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantLongObjectInspector.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantLongObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantLongObjectInspector.java Tue Nov 26 08:19:25 2013
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import java.math.BigDecimal;
 
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.io.LongWritable;
 
 /**
@@ -34,6 +35,7 @@ public class WritableConstantLongObjectI
   protected WritableConstantLongObjectInspector() {
     super();
   }
+
   WritableConstantLongObjectInspector(LongWritable value) {
     super();
     this.value = value;
@@ -43,4 +45,10 @@ public class WritableConstantLongObjectI
   public LongWritable getWritableConstantValue() {
     return value;
   }
+
+  @Override
+  public int precision() {
+    return BigDecimal.valueOf(value.get()).precision();
+  }
+
 }

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantShortObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantShortObjectInspector.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantShortObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantShortObjectInspector.java Tue Nov 26 08:19:25 2013
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import java.math.BigDecimal;
 
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 
 /**
  * A WritableConstantShortObjectInspector is a WritableShortObjectInspector
@@ -34,6 +35,7 @@ public class WritableConstantShortObject
   protected WritableConstantShortObjectInspector() {
     super();
   }
+
   WritableConstantShortObjectInspector(ShortWritable value) {
     super();
     this.value = value;
@@ -43,4 +45,10 @@ public class WritableConstantShortObject
   public ShortWritable getWritableConstantValue() {
     return value;
   }
+
+  @Override
+  public int precision() {
+    return BigDecimal.valueOf(value.get()).precision();
+  }
+
 }

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java Tue Nov 26 08:19:25 2013
@@ -67,10 +67,7 @@ public class HiveDecimalUtils {
   }
 
   /**
-   * Get the precision of double type can be tricky. While a double may have more digits than
-   * a HiveDecimal can hold, in reality those numbers are of no practical use. Thus, we assume
-   * that a double can have at most HiveDecimal.MAX_PRECISION, which is generous enough. This
-   * implies that casting a double to a decimal type is always valid.
+   * Need to keep consistent with JdbcColumn.columnPrecision
    *
    */
   public static int getPrecisionForType(PrimitiveTypeInfo typeInfo) {
@@ -78,7 +75,9 @@ public class HiveDecimalUtils {
     case DECIMAL:
       return ((DecimalTypeInfo)typeInfo).precision();
     case FLOAT:
-      return 23;
+      return 7;
+    case DOUBLE:
+      return 15;
     case BYTE:
       return 3;
     case SHORT:
@@ -87,16 +86,15 @@ public class HiveDecimalUtils {
       return 10;
     case LONG:
       return 19;
+    case VOID:
+      return 1;
     default:
       return HiveDecimal.MAX_PRECISION;
     }
   }
 
   /**
-   * Get the scale of double type can be tricky. While a double may have more decimal digits than
-   * HiveDecimal, in reality those numbers are of no practical use. Thus, we assume that a double
-   * can have at most HiveDecimal.MAX_SCALE, which is generous enough. This implies implies that
-   * casting a double to a decimal type is always valid.
+   * Need to keep consistent with JdbcColumn.columnScale()
    *
    */
   public static int getScaleForType(PrimitiveTypeInfo typeInfo) {
@@ -105,13 +103,13 @@ public class HiveDecimalUtils {
       return ((DecimalTypeInfo)typeInfo).scale();
     case FLOAT:
       return 7;
+    case DOUBLE:
+      return 15;
     case BYTE:
-      return 0;
     case SHORT:
-      return 0;
     case INT:
-      return 0;
     case LONG:
+    case VOID:
       return 0;
     default:
       return HiveDecimal.MAX_SCALE;

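The revised switches above align Hive's per-type precision and scale with JdbcColumn. A small sketch that probes the new mapping, assuming the patched serde jar is on the classpath; the expected values in the comment are read straight from the switch statements above:

import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Sketch: probe the revised per-type precision/scale mapping.
public class TypePrecisionScaleDemo {
  public static void main(String[] args) {
    // float -> 7,7   double -> 15,15   int -> 10,0
    System.out.println(HiveDecimalUtils.getPrecisionForType(TypeInfoFactory.floatTypeInfo)
        + "," + HiveDecimalUtils.getScaleForType(TypeInfoFactory.floatTypeInfo));
    System.out.println(HiveDecimalUtils.getPrecisionForType(TypeInfoFactory.doubleTypeInfo)
        + "," + HiveDecimalUtils.getScaleForType(TypeInfoFactory.doubleTypeInfo));
    System.out.println(HiveDecimalUtils.getPrecisionForType(TypeInfoFactory.intTypeInfo)
        + "," + HiveDecimalUtils.getScaleForType(TypeInfoFactory.intTypeInfo));
  }
}
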
Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java Tue Nov 26 08:19:25 2013
@@ -64,8 +64,8 @@ public final class TypeInfoFactory {
   /**
    * A DecimalTypeInfo instance that has max precision and max scale.
    */
-  public static final DecimalTypeInfo decimalTypeInfo = new DecimalTypeInfo(HiveDecimal.MAX_PRECISION,
-      HiveDecimal.MAX_SCALE);
+  public static final DecimalTypeInfo decimalTypeInfo = new DecimalTypeInfo(HiveDecimal.SYSTEM_DEFAULT_PRECISION,
+      HiveDecimal.SYSTEM_DEFAULT_SCALE);
 
   public static final PrimitiveTypeInfo unknownTypeInfo = new PrimitiveTypeInfo("unknown");
 

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java Tue Nov 26 08:19:25 2013
@@ -29,7 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -395,7 +395,6 @@ public final class TypeInfoUtils {
       PrimitiveTypeEntry typeEntry =
           PrimitiveObjectInspectorUtils.getTypeEntryFromTypeName(t.text);
       if (typeEntry != null && typeEntry.primitiveCategory != PrimitiveCategory.UNKNOWN ) {
-        String qualifiedTypeName = typeEntry.typeName;
         String[] params = parseParams();
         switch (typeEntry.primitiveCategory) {
         case CHAR:
@@ -405,40 +404,41 @@ public final class TypeInfoUtils {
                 + " type is specified without length: " + typeInfoString);
           }
 
+          int length = 1;
           if (params.length == 1) {
-            int length = Integer.valueOf(params[0]);
+            length = Integer.valueOf(params[0]);
             if (typeEntry.primitiveCategory == PrimitiveCategory.VARCHAR) {
               BaseCharUtils.validateVarcharParameter(length);
+              return TypeInfoFactory.getVarcharTypeInfo(length);
             } else {
               BaseCharUtils.validateCharParameter(length);
+              return TypeInfoFactory.getCharTypeInfo(length);
             }
-            qualifiedTypeName = BaseCharTypeInfo.getQualifiedName(typeEntry.typeName, length);
           } else if (params.length > 1) {
             throw new IllegalArgumentException(
                 "Type " + typeEntry.typeName+ " only takes one parameter, but " +
                 params.length + " is seen");
           }
-
-          break;
         case DECIMAL:
+          int precision = HiveDecimal.USER_DEFAULT_PRECISION;
+          int scale = HiveDecimal.USER_DEFAULT_SCALE;
           if (params == null || params.length == 0) {
-            throw new IllegalArgumentException( "Decimal type is specified without length: " + typeInfoString);
-          }
-
-          if (params.length == 2) {
-            int precision = Integer.valueOf(params[0]);
-            int scale = Integer.valueOf(params[1]);
+            // It's possible that old metadata still refers to "decimal" as a column type w/o
+            // precision/scale. In this case, the default (10,0) is assumed. Thus, do nothing here.
+          } else if (params.length == 2) {
+            // New metadata always have two parameters.
+            precision = Integer.valueOf(params[0]);
+            scale = Integer.valueOf(params[1]);
             HiveDecimalUtils.validateParameter(precision, scale);
-            qualifiedTypeName = DecimalTypeInfo.getQualifiedName(precision, scale);
-          } else if (params.length > 1) {
-            throw new IllegalArgumentException("Type varchar only takes one parameter, but " +
+          } else if (params.length > 2) {
+            throw new IllegalArgumentException("Type decimal only takes two parameter, but " +
                 params.length + " is seen");
           }
 
-          break;
+          return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
+        default:
+          return TypeInfoFactory.getPrimitiveTypeInfo(typeEntry.typeName);
         }
-
-        return TypeInfoFactory.getPrimitiveTypeInfo(qualifiedTypeName);
       }
 
       // Is this a list type?

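With the parser change above, a bare "decimal" read from old metadata now resolves to the user default (10,0) instead of throwing, while qualified forms keep their explicit precision and scale. A hedged sketch of that behavior, assuming the patched serde jar is on the classpath:

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

// Sketch: decimal type strings with and without explicit precision/scale.
public class DecimalTypeStringDemo {
  public static void main(String[] args) {
    // Old metadata: bare "decimal" falls back to the user default (10,0).
    DecimalTypeInfo legacy =
        (DecimalTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("decimal");
    // New metadata: explicit parameters are validated and preserved.
    DecimalTypeInfo qualified =
        (DecimalTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("decimal(20,4)");
    System.out.println(legacy.precision() + "," + legacy.scale());       // 10,0
    System.out.println(qualified.precision() + "," + qualified.scale()); // 20,4
  }
}
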
Modified: hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java (original)
+++ hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java Tue Nov 26 08:19:25 2013
@@ -25,6 +25,7 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.TreeMap;
 
+import junit.framework.Assert;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
@@ -38,6 +39,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.SimpleMapEqualComparer;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.LongWritable;
 
 public class TestLazyBinaryColumnarSerDe extends TestCase {
 
@@ -238,5 +240,86 @@ public class TestLazyBinaryColumnarSerDe
     }
   }
 
+  private static class BeforeStruct {
+    Long l1;
+    Long l2;
+  }
+
+  private static class AfterStruct {
+    Long l1;
+    Long l2;
+    Long l3;
+  }
+
+  /**
+   * HIVE-5788
+   * <p>
+   * Background: in cases of "add column", table metadata changes but data does not.  Columns
+   * missing from the data but which are required by metadata are interpreted as null.
+   * <p>
+   * This tests the use-case of altering columns of a table with already some data, then adding more data
+   * in the new schema, and seeing if this serde can to read both types of data from the resultant table.
+   * @throws SerDeException
+   */
+  public void testHandlingAlteredSchemas() throws SerDeException {
+    StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory
+        .getReflectionObjectInspector(BeforeStruct.class,
+            ObjectInspectorOptions.JAVA);
+    String cols = ObjectInspectorUtils.getFieldNames(oi);
+    Properties props = new Properties();
+    props.setProperty(serdeConstants.LIST_COLUMNS, cols);
+    props.setProperty(serdeConstants.LIST_COLUMN_TYPES,
+        ObjectInspectorUtils.getFieldTypes(oi));
+
+    // serialize some data in the schema before it is altered.
+    LazyBinaryColumnarSerDe serde = new LazyBinaryColumnarSerDe();
+    serde.initialize(new Configuration(), props);
+
+    BeforeStruct bs1 = new BeforeStruct();
+    bs1.l1 = 1L;
+    bs1.l2 = 2L;
+    BytesRefArrayWritable braw1 = (BytesRefArrayWritable) serde.serialize(bs1,
+        oi);
+
+    // alter table add column: change the metadata
+    oi = (StructObjectInspector) ObjectInspectorFactory
+        .getReflectionObjectInspector(AfterStruct.class,
+            ObjectInspectorOptions.JAVA);
+    cols = ObjectInspectorUtils.getFieldNames(oi);
+    props = new Properties();
+    props.setProperty(serdeConstants.LIST_COLUMNS, cols);
+    props.setProperty(serdeConstants.LIST_COLUMN_TYPES,
+        ObjectInspectorUtils.getFieldTypes(oi));
+    serde = new LazyBinaryColumnarSerDe();
+    serde.initialize(new Configuration(), props);
 
-}
+    // serialize some data in the schema after it is altered.
+    AfterStruct as = new AfterStruct();
+    as.l1 = 11L;
+    as.l2 = 12L;
+    as.l3 = 13L;
+    BytesRefArrayWritable braw2 = (BytesRefArrayWritable) serde.serialize(as,
+        oi);
+
+    // fetch operator
+    serde = new LazyBinaryColumnarSerDe();
+    serde.initialize(new Configuration(), props);
+
+    //fetch the row inserted before schema is altered and verify
+    LazyBinaryColumnarStruct struct1 = (LazyBinaryColumnarStruct) serde
+        .deserialize(braw1);
+    oi = (StructObjectInspector) serde.getObjectInspector();
+    List<Object> objs1 = oi.getStructFieldsDataAsList(struct1);
+    Assert.assertEquals(((LongWritable) objs1.get(0)).get(), 1L);
+    Assert.assertEquals(((LongWritable) objs1.get(1)).get(), 2L);
+    Assert.assertNull(objs1.get(2));
+
+    //fetch the row inserted after schema is altered and verify
+    LazyBinaryColumnarStruct struct2 = (LazyBinaryColumnarStruct) serde
+        .deserialize(braw2);
+    List<Object> objs2 = struct2.getFieldsAsList();
+    Assert.assertEquals(((LongWritable) objs2.get(0)).get(), 11L);
+    Assert.assertEquals(((LongWritable) objs2.get(1)).get(), 12L);
+    Assert.assertEquals(((LongWritable) objs2.get(2)).get(), 13L);
+  }
+}
\ No newline at end of file

Modified: hive/branches/tez/service/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/service/pom.xml (original)
+++ hive/branches/tez/service/pom.xml Tue Nov 26 08:19:25 2013
@@ -32,6 +32,7 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
     <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/SessionManager.java Tue Nov 26 08:19:25 2013
@@ -18,9 +18,9 @@
 
 package org.apache.hive.service.cli.session;
 
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -41,11 +41,15 @@ import org.apache.hive.service.cli.opera
  *
  */
 public class SessionManager extends CompositeService {
+
   private static final Log LOG = LogFactory.getLog(CompositeService.class);
+
   private HiveConf hiveConf;
-  private final Map<SessionHandle, HiveSession> handleToSession = new HashMap<SessionHandle, HiveSession>();
-  private OperationManager operationManager = new OperationManager();
-  private static final Object sessionMapLock = new Object();
+
+  private final Map<SessionHandle, HiveSession> handleToSession =
+      new ConcurrentHashMap<SessionHandle, HiveSession>();
+  private final OperationManager operationManager = new OperationManager();
+
   private ThreadPoolExecutor backgroundOperationPool;
 
   public SessionManager() {
@@ -55,7 +59,6 @@ public class SessionManager extends Comp
   @Override
   public synchronized void init(HiveConf hiveConf) {
     this.hiveConf = hiveConf;
-    operationManager = new OperationManager();
     int backgroundPoolSize = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_ASYNC_EXEC_THREADS);
     LOG.info("HiveServer2: Async execution thread pool size: " + backgroundPoolSize);
     int backgroundPoolQueueSize = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_ASYNC_EXEC_WAIT_QUEUE_SIZE);
@@ -113,9 +116,9 @@ public class SessionManager extends Comp
     }
     session.setSessionManager(this);
     session.setOperationManager(operationManager);
-    synchronized(sessionMapLock) {
-      handleToSession.put(session.getSessionHandle(), session);
-    }
+
+    handleToSession.put(session.getSessionHandle(), session);
+
     try {
       executeSessionHooks(session);
     } catch (Exception e) {
@@ -125,10 +128,7 @@ public class SessionManager extends Comp
   }
 
   public void closeSession(SessionHandle sessionHandle) throws HiveSQLException {
-    HiveSession session;
-    synchronized(sessionMapLock) {
-      session = handleToSession.remove(sessionHandle);
-    }
+    HiveSession session = handleToSession.remove(sessionHandle);
     if (session == null) {
       throw new HiveSQLException("Session does not exist!");
     }
@@ -136,10 +136,7 @@ public class SessionManager extends Comp
   }
 
   public HiveSession getSession(SessionHandle sessionHandle) throws HiveSQLException {
-    HiveSession session;
-    synchronized(sessionMapLock) {
-      session = handleToSession.get(sessionHandle);
-    }
+    HiveSession session = handleToSession.get(sessionHandle);
     if (session == null) {
       throw new HiveSQLException("Invalid SessionHandle: " + sessionHandle);
     }
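
Aside on the SessionManager change above: it drops the explicit sessionMapLock in favor of a ConcurrentHashMap, whose single-key put/get/remove are already thread-safe. A minimal sketch of that pattern follows; SessionRegistry, Handle, and Session are illustrative stand-ins, not the Hive classes.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative stand-ins for SessionHandle and HiveSession; not the Hive classes.
class Handle {}
class Session {}

class SessionRegistry {
  // ConcurrentHashMap makes single-key put/get/remove thread-safe on their own,
  // so no separate lock object is needed around them.
  private final Map<Handle, Session> handleToSession =
      new ConcurrentHashMap<Handle, Session>();

  void open(Handle handle, Session session) {
    handleToSession.put(handle, session);
  }

  Session lookup(Handle handle) {
    Session session = handleToSession.get(handle);
    if (session == null) {
      throw new IllegalStateException("Invalid handle: " + handle);
    }
    return session;
  }

  Session close(Handle handle) {
    Session session = handleToSession.remove(handle);
    if (session == null) {
      throw new IllegalStateException("Session does not exist!");
    }
    return session;
  }

  public static void main(String[] args) {
    SessionRegistry registry = new SessionRegistry();
    Handle handle = new Handle();
    registry.open(handle, new Session());
    registry.close(handle);
  }
}

Compound check-then-act sequences would still need their own coordination (for example putIfAbsent), but the patch only relies on the single-key operations shown here.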

Modified: hive/branches/tez/shims/0.20/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/0.20/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/0.20/pom.xml (original)
+++ hive/branches/tez/shims/0.20/pom.xml Tue Nov 26 08:19:25 2013
@@ -33,6 +33,7 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
     <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive.shims</groupId>

Modified: hive/branches/tez/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/tez/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Tue Nov 26 08:19:25 2013
@@ -572,6 +572,11 @@ public class Hadoop20Shims implements Ha
     throwKerberosUnsupportedError();
   }
 
+  @Override
+  public boolean isLoginKeytabBased() throws IOException {
+    return false;
+  }
+
   private void throwKerberosUnsupportedError() throws UnsupportedOperationException{
     throw new UnsupportedOperationException("Kerberos login is not supported" +
         " in this hadoop version (" + VersionInfo.getVersion() + ")");

Modified: hive/branches/tez/shims/0.20S/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/0.20S/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/0.20S/pom.xml (original)
+++ hive/branches/tez/shims/0.20S/pom.xml Tue Nov 26 08:19:25 2013
@@ -33,6 +33,7 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
     <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive.shims</groupId>

Modified: hive/branches/tez/shims/0.23/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/0.23/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/0.23/pom.xml (original)
+++ hive/branches/tez/shims/0.23/pom.xml Tue Nov 26 08:19:25 2013
@@ -33,6 +33,7 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
     <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive.shims</groupId>

Modified: hive/branches/tez/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java (original)
+++ hive/branches/tez/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java Tue Nov 26 08:19:25 2013
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.shims.Hado
 
 import java.io.IOException;
 import java.net.URI;
+import java.security.PrivilegedExceptionAction;
 
 public class WebHCatJTShim23 implements WebHCatJTShim {
   private JobClient jc;
@@ -31,10 +32,23 @@ public class WebHCatJTShim23 implements 
   /**
    * Create a connection to the Job Tracker.
    */
-  public WebHCatJTShim23(Configuration conf, final UserGroupInformation ugi)
+  public WebHCatJTShim23(final Configuration conf, final UserGroupInformation ugi)
           throws IOException {
-
-    jc = new JobClient(conf);
+    try {
+      jc = ugi.doAs(new PrivilegedExceptionAction<JobClient>() {
+        public JobClient run() throws IOException, InterruptedException {
+          return ugi.doAs(new PrivilegedExceptionAction<JobClient>() {
+            public JobClient run() throws IOException {
+              //create this in doAs() so that it gets a security context based on the passed-in 'ugi'
+              return new JobClient(conf);
+            }
+          });
+        }
+      });
+    }
+    catch (InterruptedException ex) {
+      throw new RuntimeException("Failed to create JobClient", ex);
+    }
   }
 
   /**
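
Aside on the WebHCatJTShim23 change above: constructing the JobClient inside ugi.doAs makes it pick up the caller's security context rather than the login user's. The essential shape of that pattern is sketched below; UgiScopedJobClientFactory and its create method are illustrative names, not part of the patch, and a single doAs level is typically sufficient for this purpose.

import java.io.IOException;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiScopedJobClientFactory {
  public static JobClient create(final Configuration conf, final UserGroupInformation ugi)
      throws IOException {
    try {
      return ugi.doAs(new PrivilegedExceptionAction<JobClient>() {
        public JobClient run() throws IOException {
          // Constructed inside doAs so RPC credentials come from 'ugi', not the login user.
          return new JobClient(conf);
        }
      });
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted while creating JobClient", e);
    }
  }
}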

Modified: hive/branches/tez/shims/assembly/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/assembly/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/assembly/pom.xml (original)
+++ hive/branches/tez/shims/assembly/pom.xml Tue Nov 26 08:19:25 2013
@@ -32,30 +32,36 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
     <dependency>
       <groupId>org.apache.hive.shims</groupId>
       <artifactId>hive-shims-common</artifactId>
       <version>${project.version}</version>
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hive.shims</groupId>
       <artifactId>hive-shims-0.20</artifactId>
       <version>${project.version}</version>
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hive.shims</groupId>
       <artifactId>hive-shims-common-secure</artifactId>
       <version>${project.version}</version>
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hive.shims</groupId>
       <artifactId>hive-shims-0.20S</artifactId>
       <version>${project.version}</version>
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.hive.shims</groupId>
       <artifactId>hive-shims-0.23</artifactId>
       <version>${project.version}</version>
+      <optional>true</optional>
     </dependency>
   </dependencies>
 

Modified: hive/branches/tez/shims/common-secure/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/common-secure/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/common-secure/pom.xml (original)
+++ hive/branches/tez/shims/common-secure/pom.xml Tue Nov 26 08:19:25 2013
@@ -33,6 +33,7 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
     <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive.shims</groupId>

Modified: hive/branches/tez/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/branches/tez/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Tue Nov 26 08:19:25 2013
@@ -537,6 +537,11 @@ public abstract class HadoopShimsSecure 
   }
 
   @Override
+  public boolean isLoginKeytabBased() throws IOException {
+    return UserGroupInformation.isLoginKeytabBased();
+  }
+
+  @Override
   abstract public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception;
 
   @Override

Modified: hive/branches/tez/shims/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/common/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/common/pom.xml (original)
+++ hive/branches/tez/shims/common/pom.xml Tue Nov 26 08:19:25 2013
@@ -33,6 +33,7 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
     <!-- inter-project -->
     <dependency>
       <groupId>commons-logging</groupId>

Modified: hive/branches/tez/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/tez/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Tue Nov 26 08:19:25 2013
@@ -303,6 +303,13 @@ public interface HadoopShims {
    */
   public void reLoginUserFromKeytab() throws IOException;
 
+  /**
+   * Check whether the login of the current UGI is keytab based.
+   * @return true if the current user logged in from a keytab, false otherwise
+   * @throws IOException
+   */
+  public boolean isLoginKeytabBased() throws IOException;
+
   /**
    * Move the directory/file to trash. In case of the symlinks or mount points, the file is
    * moved to the trashbin in the actual volume of the path p being deleted
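
Aside on the new shim method above: it gives callers a version-independent way to ask whether the process logged in from a keytab; the 0.20 shim simply reports false, while the secure shims delegate to UserGroupInformation. A hypothetical caller might look like the sketch below; KerberosRefresher and refreshIfKeytabLogin are made-up names, and only isLoginKeytabBased() and reLoginUserFromKeytab() come from the HadoopShims interface shown here.

import java.io.IOException;

import org.apache.hadoop.hive.shims.HadoopShims;

// Illustrative helper: a relogin only makes sense when the original login used a keytab.
public final class KerberosRefresher {
  private KerberosRefresher() {}

  public static void refreshIfKeytabLogin(HadoopShims shims) throws IOException {
    if (shims.isLoginKeytabBased()) {
      // reLoginUserFromKeytab() is already part of the HadoopShims interface (see above).
      shims.reLoginUserFromKeytab();
    }
  }
}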

Modified: hive/branches/tez/testutils/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/testutils/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/testutils/pom.xml (original)
+++ hive/branches/tez/testutils/pom.xml Tue Nov 26 08:19:25 2013
@@ -32,6 +32,7 @@
   </properties>
 
   <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
     <!-- inter-project -->
     <dependency>
       <groupId>com.google.code.tempus-fugit</groupId>

Modified: hive/branches/tez/testutils/ptest2/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/testutils/ptest2/pom.xml?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
Binary files - no diff available.

Modified: hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java (original)
+++ hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java Tue Nov 26 08:19:25 2013
@@ -35,6 +35,7 @@ import org.jclouds.compute.domain.Templa
 import org.jclouds.logging.log4j.config.Log4JLoggingModule;
 
 import com.google.common.base.Predicate;
+import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Sets;
 
@@ -77,18 +78,32 @@ public class CloudComputeService {
     result.addAll(mComputeService.createNodesInGroup(mGroupName, count, template));
     return result;
   }
+  static Predicate<ComputeMetadata> createFilterPTestPredicate(final String groupName,
+      final String groupTag) {
+    return new Predicate<ComputeMetadata>() {
+      @Override
+      public boolean apply(ComputeMetadata computeMetadata) {
+        NodeMetadata nodeMetadata = (NodeMetadata) computeMetadata;
+        return nodeMetadata.getStatus() == Status.RUNNING && isPTestHost(nodeMetadata);
+      }
+      private boolean isPTestHost(NodeMetadata node) {
+        if(groupName.equalsIgnoreCase(node.getGroup())) {
+          return true;
+        }
+        if(Strings.nullToEmpty(node.getName()).startsWith(groupName)) {
+          return true;
+        }
+        if(node.getTags().contains(groupTag)) {
+          return true;
+        }
+        return false;
+      }
+    };
+  }
   public Set<NodeMetadata> listRunningNodes(){
     Set<NodeMetadata> result = Sets.newHashSet();
-    result.addAll(mComputeService
-        .listNodesDetailsMatching(new Predicate<ComputeMetadata>() {
-          @Override
-          public boolean apply(ComputeMetadata computeMetadata) {
-            NodeMetadata nodeMetadata = (NodeMetadata) computeMetadata;
-            return nodeMetadata.getStatus() == Status.RUNNING
-                && (mGroupName.equalsIgnoreCase(nodeMetadata.getGroup()) ||
-                    nodeMetadata.getTags().contains(mGroupTag));
-          }
-        }));
+    result.addAll(mComputeService.listNodesDetailsMatching(
+        createFilterPTestPredicate(mGroupName, mGroupTag)));
     return result;
   }
   public void destroyNode(String nodeId) {
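
Aside on the CloudComputeService change above: extracting createFilterPTestPredicate into a package-private static method makes the RUNNING/group/name/tag filter testable without a live jclouds compute service. A rough sketch of such a test follows; TestCreateFilterPTestPredicate is hypothetical, it assumes JUnit and Mockito (which the ptest2 tests already use), that the test class sits in the same package as CloudComputeService, and that Status resolves to jclouds' NodeMetadata.Status.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Collections;

import org.jclouds.compute.domain.ComputeMetadata;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.compute.domain.NodeMetadata.Status;
import org.junit.Assert;
import org.junit.Test;

import com.google.common.base.Predicate;

public class TestCreateFilterPTestPredicate {
  @Test
  public void runningNodeMatchingGroupIsAccepted() {
    // Mock a running node whose group matches the ptest group name.
    NodeMetadata node = mock(NodeMetadata.class);
    when(node.getStatus()).thenReturn(Status.RUNNING);
    when(node.getGroup()).thenReturn("ptest");
    when(node.getName()).thenReturn("ptest-slave-1");
    when(node.getTags()).thenReturn(Collections.<String>emptySet());

    Predicate<ComputeMetadata> filter =
        CloudComputeService.createFilterPTestPredicate("ptest", "group-tag");
    Assert.assertTrue(filter.apply(node));
  }
}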

Modified: hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java (original)
+++ hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java Tue Nov 26 08:19:25 2013
@@ -74,7 +74,6 @@ public class CloudExecutionContextProvid
   private final String[] mSlaveLocalDirs;
   private final int mNumThreads;
   private final int mNumHosts;
-  private final int mNumRetries;
   private final long mRetrySleepInterval;
   private final CloudComputeService mCloudComputeService;
   private final Map<String, Long> mTerminatedHosts;
@@ -86,14 +85,13 @@ public class CloudExecutionContextProvid
   CloudExecutionContextProvider(String dataDir,
       int numHosts, CloudComputeService cloudComputeService, SSHCommandExecutor sshCommandExecutor,
       String workingDirectory, String privateKey, String user, String[] slaveLocalDirs, int numThreads,
-      int numRetries, long retrySleepInterval) throws IOException {
+      long retrySleepInterval) throws IOException {
     mNumHosts = numHosts;
     mCloudComputeService = cloudComputeService;
     mPrivateKey = privateKey;
     mUser = user;
     mSlaveLocalDirs = slaveLocalDirs;
     mNumThreads = numThreads;
-    mNumRetries = numRetries;
     mRetrySleepInterval = retrySleepInterval;
     mSSHCommandExecutor = sshCommandExecutor;
     mWorkingDir = Dirs.create(new File(workingDirectory, "working"));
@@ -191,35 +189,33 @@ public class CloudExecutionContextProvid
     }
   }
 
-  private Set<NodeMetadata> createNodes(int numHosts)
+  private Set<NodeMetadata> createNodes(final int numHosts)
       throws CreateHostsFailedException {
     Set<NodeMetadata> result = Sets.newHashSet();
     int attempts = 0;
     int numRequired = numHosts;
-    RunNodesException exception = null;
     do {
       LOG.info("Attempting to create " + numRequired + " nodes");
       try {
-        result.addAll(verifyHosts(mCloudComputeService.createNodes(numRequired)));
+        result.addAll(mCloudComputeService.createNodes(numRequired));
       } catch (RunNodesException e) {
-        exception = e;
         LOG.warn("Error creating nodes", e);
         terminateInternal(e.getNodeErrors().keySet());
-        result.addAll(verifyHosts(e.getSuccessfulNodes()));
+        result.addAll(e.getSuccessfulNodes());
       }
+      result = verifyHosts(result);
       LOG.info("Successfully created " + result.size() + " nodes");
       numRequired = numHosts - result.size();
       if(numRequired > 0) {
         try {
-          TimeUnit.SECONDS.sleep(mRetrySleepInterval);
+          TimeUnit.SECONDS.sleep(++attempts * mRetrySleepInterval);
         } catch(InterruptedException e) {
           throw new CreateHostsFailedException("Interrupted while trying to create hosts", e);
         }
       }
-    } while(attempts++ < mNumRetries && numRequired > 0);
-    if(result.size() < numHosts) {
-      throw new CreateHostsFailedException("Error creating nodes", exception);
-    }
+    } while(numRequired > 0);
+    Preconditions.checkState(result.size() >= numHosts,
+        "Results should always be >= numHosts " + numHosts + " => " + result.size());
     return result;
   }
 
@@ -240,39 +236,37 @@ public class CloudExecutionContextProvid
 
   private Set<NodeMetadata> verifyHosts(Set<? extends NodeMetadata> hosts)
       throws CreateHostsFailedException {
-    persistHostnamesToLog(hosts);
     final Set<NodeMetadata> result = Collections.synchronizedSet(new HashSet<NodeMetadata>());
-    ExecutorService executorService = Executors.newFixedThreadPool(Math.min(hosts.size(), 25));
-    try {
-      for(final NodeMetadata node : hosts) {
-        executorService.submit(new Runnable() {
-          @Override
-          public void run() {
-            SSHCommand command = new SSHCommand(mSSHCommandExecutor, mPrivateKey, mUser, node.getHostname(), 0, "pkill -f java");
-            mSSHCommandExecutor.execute(command);
-            if(command.getExitCode() == Constants.EXIT_CODE_UNKNOWN ||
-                command.getException() != null) {
-              if(command.getException() == null) {
-                LOG.error("Node " + node.getHostname() + " is bad on startup");
+    if(!hosts.isEmpty()) {
+      persistHostnamesToLog(hosts);
+      ExecutorService executorService = Executors.newFixedThreadPool(Math.min(hosts.size(), 25));
+      try {
+        for(final NodeMetadata node : hosts) {
+          executorService.submit(new Runnable() {
+            @Override
+            public void run() {
+              SSHCommand command = new SSHCommand(mSSHCommandExecutor, mPrivateKey, mUser, node.getHostname(), 0, "pkill -f java");
+              mSSHCommandExecutor.execute(command);
+              if(command.getExitCode() == Constants.EXIT_CODE_UNKNOWN ||
+                  command.getException() != null) {
+                LOG.error("Node " + node + " is bad on startup", command.getException());
+                terminateInternal(node);
               } else {
-                LOG.error("Node " + node.getHostname() + " is bad on startup", command.getException());
+                result.add(node);
               }
-              terminateInternal(node);
-            } else {
-              result.add(node);
             }
-          }
-        });
-      }
-      executorService.shutdown();
-      if(!executorService.awaitTermination(10, TimeUnit.MINUTES)) {
-        LOG.error("Verify command still executing on a host after 10 minutes");
-      }
-    } catch (InterruptedException e) {
-      throw new CreateHostsFailedException("Interrupted while trying to create hosts", e);
-    } finally {
-      if(!executorService.isShutdown()) {
-        executorService.shutdownNow();
+          });
+        }
+        executorService.shutdown();
+        if(!executorService.awaitTermination(10, TimeUnit.MINUTES)) {
+          LOG.error("Verify command still executing on a host after 10 minutes");
+        }
+      } catch (InterruptedException e) {
+        throw new CreateHostsFailedException("Interrupted while trying to create hosts", e);
+      } finally {
+        if(!executorService.isShutdown()) {
+          executorService.shutdownNow();
+        }
       }
     }
     return result;
@@ -310,7 +304,7 @@ public class CloudExecutionContextProvid
   }
 
   private void terminateInternal(final NodeMetadata node) {
-    LOG.info("Submitting termination for " + node.getHostname());
+    LOG.info("Submitting termination for " + node);
     mTerminationExecutor.submit(new Runnable() {
       @Override
       public void run() {
@@ -404,7 +398,7 @@ public class CloudExecutionContextProvid
         instanceType, groupName, imageId, keyPair, securityGroup, maxBid);
     CloudExecutionContextProvider service = new CloudExecutionContextProvider(
         dataDir, numHosts, cloudComputeService, new SSHCommandExecutor(LOG), workingDirectory,
-        privateKey, user, localDirs, numThreads, 10, 10);
+        privateKey, user, localDirs, numThreads, 60);
     return service;
   }
 }
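
Aside on the reworked createNodes loop above: it now retries until enough hosts exist, sleeping attempts * retrySleepInterval seconds between rounds and re-verifying the accumulated host set on each pass. The core loop shape is isolated below; LinearBackoffProvisioner, Factory, and provisionOnce are placeholders, not the ptest classes.

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;

public class LinearBackoffProvisioner {

  /** Provisions a batch of hosts; may return fewer than requested or fail outright. */
  interface Factory {
    Set<String> provisionOnce(int count) throws Exception;
  }

  static Set<String> provision(Factory factory, int required, long sleepIntervalSeconds)
      throws InterruptedException {
    Set<String> result = new HashSet<String>();
    int attempts = 0;
    int missing = required;
    do {
      try {
        result.addAll(factory.provisionOnce(missing));
      } catch (Exception e) {
        // The real provider salvages partially created nodes and terminates the failed
        // ones here; the sketch simply retries on the next pass.
      }
      missing = required - result.size();
      if (missing > 0) {
        // Pre-increment so the waits grow linearly: 1x, 2x, 3x ... the base interval.
        TimeUnit.SECONDS.sleep(++attempts * sleepIntervalSeconds);
      }
    } while (missing > 0);
    return result;
  }
}

Because the loop no longer bounds the number of attempts, the closing Preconditions.checkState in the patch can only fail if verification somehow shrinks the set below the requested count.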

Modified: hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java (original)
+++ hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/context/TestCloudExecutionContextProvider.java Tue Nov 26 08:19:25 2013
@@ -92,7 +92,7 @@ public class TestCloudExecutionContextPr
   }
 
   @org.junit.Test
-  public void testRetrySucceeds() throws Exception {
+  public void testRetry() throws Exception {
     when(cloudComputeService.createNodes(anyInt())).then(new Answer<Set<NodeMetadata>>() {
       int count = 0;
       @Override
@@ -105,7 +105,7 @@ public class TestCloudExecutionContextPr
       }
     });
     CloudExecutionContextProvider provider = new CloudExecutionContextProvider(dataDir, NUM_NODES,
-        cloudComputeService, sshCommandExecutor, workingDir, PRIVATE_KEY, USER, SLAVE_DIRS, 1, 1, 0);
+        cloudComputeService, sshCommandExecutor, workingDir, PRIVATE_KEY, USER, SLAVE_DIRS, 1, 0);
     ExecutionContext executionContext = provider.createExecutionContext();
     Set<String> hosts = Sets.newHashSet();
     for(Host host : executionContext.getHosts()) {
@@ -113,11 +113,4 @@ public class TestCloudExecutionContextPr
     }
     Assert.assertEquals(Sets.newHashSet("node1", "node3"), hosts);
   }
-  @org.junit.Test(expected=CreateHostsFailedException.class)
-  public void testRetryFails() throws Exception {
-    when(cloudComputeService.createNodes(anyInt())).thenThrow(runNodesException);
-    CloudExecutionContextProvider provider = new CloudExecutionContextProvider(dataDir, NUM_NODES,
-        cloudComputeService, sshCommandExecutor, workingDir, PRIVATE_KEY, USER, SLAVE_DIRS, 1, 1, 0);
-    provider.createExecutionContext();
-  }
 }


