hive-commits mailing list archives

From br...@apache.org
Subject svn commit: r1529308 [7/10] - in /hive/branches/maven: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/src/test/ bin/ common/ common/src/java/org/apache/hadoop/hive/conf/ common/...
Date Fri, 04 Oct 2013 21:30:46 GMT
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java Fri Oct  4 21:30:38 2013
@@ -18,12 +18,13 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
+import java.util.ArrayDeque;
 import java.util.ArrayList;
+import java.util.Deque;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.Stack;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
@@ -31,16 +32,16 @@ import org.apache.hadoop.hive.ql.exec.PT
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.LeadLagInfo;
 import org.apache.hadoop.hive.ql.parse.WindowingExprNodeEvaluatorFactory;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.BoundaryDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFExpressionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFInputDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFQueryInputDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.ShapeDetails;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.ValueBoundaryDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFrameDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFunctionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PTFInputDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PTFQueryInputDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.ShapeDetails;
+import org.apache.hadoop.hive.ql.plan.ptf.ValueBoundaryDef;
+import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;
+import org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
@@ -76,7 +77,7 @@ public class PTFDeserializer {
   }
 
   public void initializePTFChain(PartitionedTableFunctionDef tblFnDef) throws HiveException {
-    Stack<PTFInputDef> ptfChain = new Stack<PTFInputDef>();
+    Deque<PTFInputDef> ptfChain = new ArrayDeque<PTFInputDef>();
     PTFInputDef currentDef = tblFnDef;
     while (currentDef != null) {
       ptfChain.push(currentDef);
@@ -188,8 +189,8 @@ public class PTFDeserializer {
   }
 
   static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException {
-    ArrayList<PTFExpressionDef> args = def.getArgs();
-    ArrayList<ObjectInspector> argOIs = new ArrayList<ObjectInspector>();
+    List<PTFExpressionDef> args = def.getArgs();
+    List<ObjectInspector> argOIs = new ArrayList<ObjectInspector>();
     ObjectInspector[] funcArgOIs = null;
 
     if (args != null) {

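The PTFDeserializer hunk above replaces java.util.Stack with java.util.Deque/ArrayDeque in initializePTFChain. Below is a minimal, self-contained sketch of the same pattern, assuming plain Strings in place of PTFInputDef (the class and variable names are illustrative only); ArrayDeque gives the same LIFO push/pop behaviour as Stack without Stack's legacy synchronization and Vector inheritance.

import java.util.ArrayDeque;
import java.util.Deque;

// Sketch of the Stack -> Deque swap: build a chain outermost-first, then pop innermost-first.
public class DequeAsStackSketch {
  public static void main(String[] args) {
    Deque<String> ptfChain = new ArrayDeque<String>();

    // Push definitions while walking from the outermost function toward the query input.
    for (String def : new String[] {"outerFn", "innerFn", "queryInput"}) {
      ptfChain.push(def);
    }

    // Pop yields them innermost-first, exactly as Stack.pop() would.
    while (!ptfChain.isEmpty()) {
      System.out.println(ptfChain.pop());   // queryInput, innerFn, outerFn
    }
  }
}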
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java Fri Oct  4 21:30:38 2013
@@ -30,7 +30,7 @@ public class HadoopDefaultAuthenticator 
 
   private String userName;
   private List<String> groupNames;
-
+  
   private Configuration conf;
 
   @Override
@@ -58,7 +58,7 @@ public class HadoopDefaultAuthenticator 
           "Can not initialize HadoopDefaultAuthenticator.");
     }
 
-    this.userName = ugi.getUserName();
+    this.userName = ShimLoader.getHadoopShims().getShortUserName(ugi);
     if (ugi.getGroupNames() != null) {
       this.groupNames = Arrays.asList(ugi.getGroupNames());
     }

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java Fri Oct  4 21:30:38 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringUnaryUDF.IUDFUnaryString;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -39,7 +40,7 @@ import org.apache.hadoop.io.Text;
     + "  'H1'\n"
     + "  > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n"
     + "  '46616365626F6F6B'")
-public class UDFHex extends UDF {
+public class UDFHex extends UDF implements IUDFUnaryString {
   private final Text result = new Text();
   private byte[] value = new byte[16];
 

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java Fri Oct  4 21:30:38 2013
@@ -40,9 +40,9 @@ import org.apache.hadoop.hive.ql.parse.W
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.PTFDesc;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFExpressionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFInputDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PTFInputDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -101,7 +101,7 @@ public class NPath extends TableFunction
       {
         int sz = syFnRes.nextRow - (pItr.getIndex() - 1);
         Object selectListInput = NPath.getSelectListInput(iRow,
-            tDef.getInput().getOutputShape().getOI(), pItr, sz);
+            tableDef.getInput().getOutputShape().getOI(), pItr, sz);
         ArrayList<Object> oRow = new ArrayList<Object>();
         for(ExprNodeEvaluator resExprEval : resultExprInfo.resultExprEvals)
         {
@@ -161,7 +161,7 @@ public class NPath extends TableFunction
       NPath evaluator = (NPath) getEvaluator();
       PartitionedTableFunctionDef tDef = evaluator.getTableDef();
 
-      ArrayList<PTFExpressionDef> args = tDef.getArgs();
+      List<PTFExpressionDef> args = tDef.getArgs();
       int argsNum = args == null ? 0 : args.size();
 
       if ( argsNum < 4 )
@@ -199,7 +199,7 @@ public class NPath extends TableFunction
      * validate and setup patternStr
      */
     private void validateAndSetupPatternStr(NPath evaluator,
-        ArrayList<PTFExpressionDef> args) throws SemanticException {
+        List<PTFExpressionDef> args) throws SemanticException {
       PTFExpressionDef symboPatternArg = args.get(0);
       ObjectInspector symbolPatternArgOI = symboPatternArg.getOI();
 
@@ -219,7 +219,7 @@ public class NPath extends TableFunction
      * validate and setup SymbolInfo
      */
     private void validateAndSetupSymbolInfo(NPath evaluator,
-        ArrayList<PTFExpressionDef> args,
+        List<PTFExpressionDef> args,
         int argsNum) throws SemanticException {
       int symbolArgsSz = argsNum - 2;
       if ( symbolArgsSz % 2 != 0)
@@ -263,7 +263,7 @@ public class NPath extends TableFunction
      * validate and setup resultExprStr
      */
     private void validateAndSetupResultExprStr(NPath evaluator,
-        ArrayList<PTFExpressionDef> args,
+        List<PTFExpressionDef> args,
         int argsNum) throws SemanticException {
       PTFExpressionDef resultExprArg = args.get(argsNum - 1);
       ObjectInspector resultExprArgOI = resultExprArg.getOI();
@@ -303,7 +303,7 @@ public class NPath extends TableFunction
         NPath evaluator = (NPath) getEvaluator();
         PartitionedTableFunctionDef tDef = evaluator.getTableDef();
 
-        ArrayList<PTFExpressionDef> args = tDef.getArgs();
+        List<PTFExpressionDef> args = tDef.getArgs();
         int argsNum = args.size();
 
         validateAndSetupPatternStr(evaluator, args);

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java Fri Oct  4 21:30:38 2013
@@ -18,43 +18,37 @@
 
 package org.apache.hadoop.hive.ql.udf.ptf;
 
-import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.PTFDesc;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
-public class Noop extends TableFunctionEvaluator
-{
+public class Noop extends TableFunctionEvaluator {
 
   @Override
-  public PTFPartition execute(PTFPartition iPart) throws HiveException
-  {
+  public PTFPartition execute(PTFPartition iPart) throws HiveException {
     return iPart;
   }
 
   @Override
-  protected void execute(PTFPartitionIterator<Object> pItr, PTFPartition oPart)
-  {
+  protected void execute(PTFPartitionIterator<Object> pItr, PTFPartition oPart) {
     throw new UnsupportedOperationException();
   }
 
-  public static class NoopResolver extends TableFunctionResolver
-  {
+  public static class NoopResolver extends TableFunctionResolver {
 
     @Override
-    protected TableFunctionEvaluator createEvaluator(PTFDesc ptfDesc, PartitionedTableFunctionDef tDef)
-    {
+    protected TableFunctionEvaluator createEvaluator(PTFDesc ptfDesc, PartitionedTableFunctionDef tDef) {
       return new Noop();
     }
 
     @Override
-    public void setupOutputOI() throws SemanticException
-    {
+    public void setupOutputOI() throws SemanticException {
       StructObjectInspector OI = getEvaluator().getTableDef().getInput().getOutputShape().getOI();
       setOutputOI(OI);
     }
@@ -75,7 +69,7 @@ public class Noop extends TableFunctionE
      * Set to null only because carryForwardNames is true.
      */
     @Override
-    public ArrayList<String> getOutputColumnNames() {
+    public List<String> getOutputColumnNames() {
       return null;
     }
 

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java Fri Oct  4 21:30:38 2013
@@ -24,7 +24,7 @@ import org.apache.hadoop.hive.ql.exec.PT
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.PTFDesc;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
 public class NoopWithMap extends Noop

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java Fri Oct  4 21:30:38 2013
@@ -24,7 +24,7 @@ import org.apache.hadoop.hive.ql.exec.PT
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.PTFDesc;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
@@ -46,8 +46,7 @@ import org.apache.hadoop.hive.serde2.obj
  * </ol>
  *
  */
-public abstract class TableFunctionEvaluator
-{
+public abstract class TableFunctionEvaluator {
   /*
    * how is this different from the OutpuShape set on the TableDef.
    * This is the OI of the object coming out of the PTF.
@@ -59,52 +58,45 @@ public abstract class TableFunctionEvalu
    * same comment as OI applies here.
    */
   transient protected StructObjectInspector rawInputOI;
-  protected PartitionedTableFunctionDef tDef;
+  protected PartitionedTableFunctionDef tableDef;
   protected PTFDesc ptfDesc;
   boolean transformsRawInput;
   transient protected PTFPartition outputPartition;
 
-  static{
+  static {
+    //TODO is this a bug? The field is not named outputOI it is named OI
     PTFUtils.makeTransient(TableFunctionEvaluator.class, "outputOI", "rawInputOI");
   }
 
-  public StructObjectInspector getOutputOI()
-  {
+  public StructObjectInspector getOutputOI() {
     return OI;
   }
 
-  protected void setOutputOI(StructObjectInspector outputOI)
-  {
+  protected void setOutputOI(StructObjectInspector outputOI) {
     OI = outputOI;
   }
 
-  public PartitionedTableFunctionDef getTableDef()
-  {
-    return tDef;
+  public PartitionedTableFunctionDef getTableDef() {
+    return tableDef;
   }
 
-  public void setTableDef(PartitionedTableFunctionDef tDef)
-  {
-    this.tDef = tDef;
+  public void setTableDef(PartitionedTableFunctionDef tDef) {
+    this.tableDef = tDef;
   }
 
-  protected PTFDesc getQueryDef()
-  {
+  protected PTFDesc getQueryDef() {
     return ptfDesc;
   }
 
-  protected void setQueryDef(PTFDesc ptfDesc)
-  {
+  protected void setQueryDef(PTFDesc ptfDesc) {
     this.ptfDesc = ptfDesc;
   }
 
-  public StructObjectInspector getRawInputOI()
-  {
+  public StructObjectInspector getRawInputOI() {
     return rawInputOI;
   }
 
-  protected void setRawInputOI(StructObjectInspector rawInputOI)
-  {
+  protected void setRawInputOI(StructObjectInspector rawInputOI) {
     this.rawInputOI = rawInputOI;
   }
 
@@ -117,17 +109,15 @@ public abstract class TableFunctionEvalu
   }
 
   public PTFPartition execute(PTFPartition iPart)
-      throws HiveException
-  {
+      throws HiveException {
     PTFPartitionIterator<Object> pItr = iPart.iterator();
     PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, pItr);
 
     if ( outputPartition == null ) {
       outputPartition = PTFPartition.create(ptfDesc.getCfg(),
-          tDef.getOutputShape().getSerde(),
-          OI, tDef.getOutputShape().getOI());
-    }
-    else {
+          tableDef.getOutputShape().getSerde(),
+          OI, tableDef.getOutputShape().getOI());
+    } else {
       outputPartition.reset();
     }
 
@@ -137,17 +127,14 @@ public abstract class TableFunctionEvalu
 
   protected abstract void execute(PTFPartitionIterator<Object> pItr, PTFPartition oPart) throws HiveException;
 
-  public PTFPartition transformRawInput(PTFPartition iPart) throws HiveException
-  {
-    if ( !isTransformsRawInput())
-    {
-      throw new HiveException(String.format("Internal Error: mapExecute called on function (%s)that has no Map Phase", tDef.getName()));
+  public PTFPartition transformRawInput(PTFPartition iPart) throws HiveException {
+    if (!isTransformsRawInput()) {
+      throw new HiveException(String.format("Internal Error: mapExecute called on function (%s)that has no Map Phase", tableDef.getName()));
     }
     return _transformRawInput(iPart);
   }
 
-  protected PTFPartition _transformRawInput(PTFPartition iPart) throws HiveException
-  {
+  protected PTFPartition _transformRawInput(PTFPartition iPart) throws HiveException {
     return null;
   }
 }

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java Fri Oct  4 21:30:38 2013
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.ptf;
 
-import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.PTFDesc;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
@@ -53,8 +53,7 @@ import org.apache.hadoop.hive.serde2.obj
  * </ol>
  */
 @SuppressWarnings("deprecation")
-public abstract class TableFunctionResolver
-{
+public abstract class TableFunctionResolver {
   TableFunctionEvaluator evaluator;
   PTFDesc ptfDesc;
 
@@ -65,8 +64,7 @@ public abstract class TableFunctionResol
    *   the transformsRawInput boolean.
    */
   public void initialize(HiveConf cfg, PTFDesc ptfDesc, PartitionedTableFunctionDef tDef)
-      throws SemanticException
-  {
+      throws SemanticException {
     this.ptfDesc = ptfDesc;
 
     evaluator = createEvaluator(ptfDesc, tDef);
@@ -79,16 +77,14 @@ public abstract class TableFunctionResol
    * called during deserialization of a QueryDef during runtime.
    */
   public void initialize(PTFDesc ptfDesc, PartitionedTableFunctionDef tDef, TableFunctionEvaluator evaluator)
-      throws HiveException
-  {
+      throws HiveException {
     this.evaluator = evaluator;
     this.ptfDesc = ptfDesc;
     evaluator.setTableDef(tDef);
     evaluator.setQueryDef(ptfDesc);
   }
 
-  public TableFunctionEvaluator getEvaluator()
-  {
+  public TableFunctionEvaluator getEvaluator() {
     return evaluator;
   }
 
@@ -105,7 +101,7 @@ public abstract class TableFunctionResol
    * A PTF Function must provide the 'external' names of the columns in its Output.
    *
    */
-  public abstract ArrayList<String> getOutputColumnNames() throws SemanticException;
+  public abstract List<String> getOutputColumnNames() throws SemanticException;
 
 
   /**
@@ -127,10 +123,8 @@ public abstract class TableFunctionResol
    * - subsequent to this call, a call to getRawInputOI call on the {@link TableFunctionEvaluator} must return the OI
    *   of the output of this function.
    */
-  public void setupRawInputOI() throws SemanticException
-  {
-    if (!transformsRawInput())
-    {
+  public void setupRawInputOI() throws SemanticException {
+    if (!transformsRawInput()) {
       return;
     }
     throw new SemanticException(
@@ -141,9 +135,8 @@ public abstract class TableFunctionResol
    * A PTF Function must provide the 'external' names of the columns in the transformed Raw Input.
    *
    */
-  public ArrayList<String> getRawInputColumnNames() throws SemanticException {
-    if (!transformsRawInput())
-    {
+  public List<String> getRawInputColumnNames() throws SemanticException {
+    if (!transformsRawInput()) {
       return null;
     }
     throw new SemanticException(
@@ -153,10 +146,8 @@ public abstract class TableFunctionResol
   /*
    * Same responsibility as initializeOI, but for the RawInput.
    */
-  public void initializeRawInputOI() throws HiveException
-  {
-    if (!transformsRawInput())
-    {
+  public void initializeRawInputOI() throws HiveException {
+    if (!transformsRawInput()) {
       return;
     }
     throw new HiveException(
@@ -166,21 +157,18 @@ public abstract class TableFunctionResol
   /*
    * callback method used by subclasses to set the RawInputOI on the Evaluator.
    */
-  protected void setRawInputOI(StructObjectInspector rawInputOI)
-  {
+  protected void setRawInputOI(StructObjectInspector rawInputOI) {
     evaluator.setRawInputOI(rawInputOI);
   }
 
   /*
    * callback method used by subclasses to set the OutputOI on the Evaluator.
    */
-  protected void setOutputOI(StructObjectInspector outputOI)
-  {
+  protected void setOutputOI(StructObjectInspector outputOI) {
     evaluator.setOutputOI(outputOI);
   }
 
-  public PTFDesc getPtfDesc()
-  {
+  public PTFDesc getPtfDesc() {
     return ptfDesc;
   }
 

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java Fri Oct  4 21:30:38 2013
@@ -31,13 +31,13 @@ import org.apache.hadoop.hive.ql.parse.S
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction;
 import org.apache.hadoop.hive.ql.plan.PTFDesc;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.BoundaryDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFExpressionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.ValueBoundaryDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFrameDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFunctionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.ValueBoundaryDef;
+import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;
+import org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef;
+import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/util/JavaDataModel.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/util/JavaDataModel.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/util/JavaDataModel.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/util/JavaDataModel.java Fri Oct  4 21:30:38 2013
@@ -27,99 +27,137 @@ import org.apache.hadoop.hive.ql.udf.gen
 public enum JavaDataModel {
 
   JAVA32 {
+    @Override
     public int object() {
       return JAVA32_OBJECT;
     }
 
+    @Override
     public int array() {
       return JAVA32_ARRAY;
     }
 
+    @Override
     public int ref() {
       return JAVA32_REF;
     }
 
+    @Override
     public int hashMap(int entry) {
       // base  = JAVA32_OBJECT + PRIMITIVES1 * 4 + JAVA32_FIELDREF * 3 + JAVA32_ARRAY;
       // entry = JAVA32_OBJECT + JAVA32_FIELDREF + PRIMITIVES1
       return 64 + 24 * entry;
     }
 
+    @Override
+    public int hashMapEntry() {
+      return 24;
+    }
+
+    @Override
     public int hashSet(int entry) {
       // hashMap += JAVA32_OBJECT
       return 80 + 24 * entry;
     }
 
+    @Override
     public int linkedHashMap(int entry) {
       // hashMap += JAVA32_FIELDREF + PRIMITIVES1
       // hashMap.entry += JAVA32_FIELDREF * 2
       return 72 + 32 * entry;
     }
 
+    @Override
     public int linkedList(int entry) {
       // base  = JAVA32_OBJECT + PRIMITIVES1 * 2 + JAVA32_FIELDREF;
       // entry = JAVA32_OBJECT + JAVA32_FIELDREF * 2
       return 28 + 24 * entry;
     }
 
+    @Override
     public int arrayList() {
       // JAVA32_OBJECT + PRIMITIVES1 * 2 + JAVA32_ARRAY;
       return 44;
     }
+
+    @Override
+    public int memoryAlign() {
+      return 8;
+    }
   }, JAVA64 {
+    @Override
     public int object() {
       return JAVA64_OBJECT;
     }
 
+    @Override
     public int array() {
       return JAVA64_ARRAY;
     }
 
+    @Override
     public int ref() {
       return JAVA64_REF;
     }
 
+    @Override
     public int hashMap(int entry) {
       // base  = JAVA64_OBJECT + PRIMITIVES1 * 4 + JAVA64_FIELDREF * 3 + JAVA64_ARRAY;
       // entry = JAVA64_OBJECT + JAVA64_FIELDREF + PRIMITIVES1
       return 112 + 44 * entry;
     }
 
+    @Override
+    public int hashMapEntry() {
+      return 44;
+    }
+
+    @Override
     public int hashSet(int entry) {
       // hashMap += JAVA64_OBJECT
       return 144 + 44 * entry;
     }
 
+    @Override
     public int linkedHashMap(int entry) {
       // hashMap += JAVA64_FIELDREF + PRIMITIVES1
       // hashMap.entry += JAVA64_FIELDREF * 2
       return 128 + 60 * entry;
     }
 
+    @Override
     public int linkedList(int entry) {
       // base  = JAVA64_OBJECT + PRIMITIVES1 * 2 + JAVA64_FIELDREF;
       // entry = JAVA64_OBJECT + JAVA64_FIELDREF * 2
       return 48 + 48 * entry;
     }
 
+    @Override
     public int arrayList() {
       // JAVA64_OBJECT + PRIMITIVES1 * 2 + JAVA64_ARRAY;
       return 80;
     }
+
+    @Override
+    public int memoryAlign() {
+      return 8;
+    }
   };
 
   public abstract int object();
   public abstract int array();
   public abstract int ref();
   public abstract int hashMap(int entry);
+  public abstract int hashMapEntry();
   public abstract int hashSet(int entry);
   public abstract int linkedHashMap(int entry);
   public abstract int linkedList(int entry);
   public abstract int arrayList();
+  public abstract int memoryAlign();
 
   // ascii string
   public int lengthFor(String string) {
-    return object() + primitive1() * 3 + array() + string.length();
+    return lengthForStringOfLength(string.length());
   }
 
   public int lengthFor(NumericHistogram histogram) {
@@ -161,6 +199,10 @@ public enum JavaDataModel {
     return PRIMITIVES2;
   }
 
+  public static int alignUp(int value, int align) {
+    return (value + align - 1) & ~(align - 1);
+  }
+
   public static final int JAVA32_META = 12;
   public static final int JAVA32_ARRAY_META = 16;
   public static final int JAVA32_REF = 4;
@@ -176,6 +218,8 @@ public enum JavaDataModel {
   public static final int PRIMITIVES1 = 4;      // void, boolean, byte, short, int, float
   public static final int PRIMITIVES2 = 8;      // long, double
 
+  public static final int PRIMITIVE_BYTE = 1;    // byte
+
   private static JavaDataModel current;
 
   public static JavaDataModel get() {
@@ -200,4 +244,56 @@ public enum JavaDataModel {
     }
     return ((size + 8) >> 3) << 3;
   }
+
+  private int lengthForPrimitiveArrayOfSize(int primitiveSize, int length) {
+    return alignUp(array() + primitiveSize*length, memoryAlign());
+  }
+
+  public int lengthForByteArrayOfSize(int length) {
+    return lengthForPrimitiveArrayOfSize(PRIMITIVE_BYTE, length);
+  }
+  public int lengthForObjectArrayOfSize(int length) {
+    return lengthForPrimitiveArrayOfSize(ref(), length);
+  }
+  public int lengthForLongArrayOfSize(int length) {
+    return lengthForPrimitiveArrayOfSize(primitive2(), length);
+  }
+  public int lengthForDoubleArrayOfSize(int length) {
+    return lengthForPrimitiveArrayOfSize(primitive2(), length);
+  }
+  public int lengthForIntArrayOfSize(int length) {
+    return lengthForPrimitiveArrayOfSize(primitive1(), length);
+  }
+  public int lengthForBooleanArrayOfSize(int length) {
+    return lengthForPrimitiveArrayOfSize(PRIMITIVE_BYTE, length);
+  }
+
+  public int lengthOfDecimal() {
+    // object overhead + 8 bytes for intCompact + 4 bytes for precision
+    // + 4 bytes for scale + size of BigInteger
+    return object() + 2 * primitive2() + lengthOfBigInteger();
+  }
+
+  private int lengthOfBigInteger() {
+    // object overhead + 4 bytes for bitCount + 4 bytes for bitLength
+    // + 4 bytes for firstNonzeroByteNum + 4 bytes for firstNonzeroIntNum +
+    // + 4 bytes for lowestSetBit + 5 bytes for size of magnitude (since max precision
+    // is only 38 for HiveDecimal) + 7 bytes of padding (since java memory allocations
+    // are 8 byte aligned)
+    return object() + 4 * primitive2();
+  }
+
+  public int lengthOfTimestamp() {
+    // object overhead + 4 bytes for int (nanos) + 4 bytes of padding
+    return object() + primitive2();
+  }
+
+  public int lengthOfDate() {
+    // object overhead + 8 bytes for long (fastTime) + 16 bytes for cdate
+    return object() + 3 * primitive2();
+  }
+
+  public int lengthForStringOfLength(int strLen) {
+    return object() + primitive1() * 3 + array() + strLen;
+  }
 }

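The JavaDataModel hunk above adds memoryAlign(), hashMapEntry(), and an alignUp helper used by the new lengthFor*ArrayOfSize methods. Here is a standalone sketch of the alignUp arithmetic, assuming an illustrative 16-byte array header (the real value comes from array() and differs per data model); alignUp only works when align is a power of two, which the 8 returned by memoryAlign() satisfies.

// Sketch of the alignUp formula added in this commit (power-of-two alignment only).
public class AlignUpSketch {
  static int alignUp(int value, int align) {
    return (value + align - 1) & ~(align - 1);
  }

  public static void main(String[] args) {
    // e.g. a byte[5] with an assumed 16-byte array header: 16 + 5 = 21, padded up to 24.
    System.out.println(alignUp(16 + 5, 8));   // 24
    System.out.println(alignUp(24, 8));       // 24 (already aligned)
    System.out.println(alignUp(25, 8));       // 32
  }
}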
Modified: hive/branches/maven/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto (original)
+++ hive/branches/maven/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto Fri Oct  4 21:30:38 2013
@@ -15,6 +15,8 @@ message DoubleStatistics {
 message StringStatistics {
   optional string minimum = 1;
   optional string maximum = 2;
+  // sum will store the total length of all strings in a stripe
+  optional sint64 sum = 3;
 }
 
 message BucketStatistics {
@@ -33,6 +35,11 @@ message DateStatistics {
   optional sint32 maximum = 2;
 }
 
+message BinaryStatistics {
+  // sum will store the total binary blob length in a stripe
+  optional sint64 sum = 1;
+}
+
 message ColumnStatistics {
   optional uint64 numberOfValues = 1;
   optional IntegerStatistics intStatistics = 2;
@@ -41,6 +48,7 @@ message ColumnStatistics {
   optional BucketStatistics bucketStatistics = 5;
   optional DecimalStatistics decimalStatistics = 6;
   optional DateStatistics dateStatistics = 7;
+  optional BinaryStatistics binaryStatistics = 8;
 }
 
 message RowIndexEntry {

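The protobuf change above adds a sum field to StringStatistics (and a new BinaryStatistics message) holding the total length of the values in a stripe; this is why the TestOrcFile and TestOrcNullOptimization expectations later in this commit gain a "sum: N" suffix. Below is a self-contained sketch of the intended semantics, not ORC's internal statistics classes, using the same rows the new testStringAndBinaryStatistics writes ("foo", "bar", null, "hi").

// Sketch only: accumulate count/min/max/sum the way the new statistic is defined.
public class StringSumStatsSketch {
  public static void main(String[] args) {
    String[] stripeValues = {"foo", "bar", null, "hi"};
    long count = 0, sum = 0;
    String min = null, max = null;

    for (String v : stripeValues) {
      if (v == null) {
        continue;                    // nulls count toward neither the count nor the sum
      }
      count++;
      sum += v.length();
      min = (min == null || v.compareTo(min) < 0) ? v : min;
      max = (max == null || v.compareTo(max) > 0) ? v : max;
    }
    // Prints: count: 3 min: bar max: hi sum: 8  (3 + 3 + 2 characters)
    System.out.println("count: " + count + " min: " + min + " max: " + max + " sum: " + sum);
  }
}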
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Fri Oct  4 21:30:38 2013
@@ -59,12 +59,15 @@ import org.apache.hadoop.hive.cli.CliDri
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.common.io.CachingPrintStream;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
+import org.apache.hadoop.hive.ql.exec.vector.util.AllVectorTypesRecord;
+import org.apache.hadoop.hive.ql.exec.vector.util.OrcFileGenerator;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -108,7 +111,7 @@ public class QTestUtil {
   public static final HashSet<String> srcTables = new HashSet<String>
     (Arrays.asList(new String [] {
         "src", "src1", "srcbucket", "srcbucket2", "src_json", "src_thrift",
-        "src_sequencefile", "srcpart"
+        "src_sequencefile", "srcpart", "alltypesorc"
       }));
 
   private ParseDriver pd;
@@ -221,6 +224,11 @@ public class QTestUtil {
       convertPathsFromWindowsToHdfs();
     }
 
+    String vectorizationEnabled = System.getProperty("test.vectorization.enabled");
+    if(vectorizationEnabled != null && vectorizationEnabled.equalsIgnoreCase("true")) {
+      conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, true);
+    }
+
     // Plug verifying metastore in for testing.
     conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
         "org.apache.hadoop.hive.metastore.VerifyingObjectStore");
@@ -260,6 +268,11 @@ public class QTestUtil {
 
     String orgScratchDir = conf.getVar(HiveConf.ConfVars.SCRATCHDIR);
     conf.setVar(HiveConf.ConfVars.SCRATCHDIR, getHdfsUriString(orgScratchDir));
+
+    if (miniMr) {
+      String orgAuxJarFolder = conf.getAuxJars();
+      conf.setAuxJars(getHdfsUriString("file://" + orgAuxJarFolder));
+    }
   }
 
   private String getHdfsUriString(String uriStr) {
@@ -302,12 +315,7 @@ public class QTestUtil {
 
     testFiles = dataDir;
 
-    String ow = System.getProperty("test.output.overwrite");
-    if ((ow != null) && ow.equalsIgnoreCase("true")) {
-      overWrite = true;
-    } else {
-      overWrite = false;
-    }
+    overWrite = "true".equalsIgnoreCase(System.getProperty("test.output.overwrite"));
 
     setup = new QTestSetup();
     setup.preTest(conf);
@@ -516,7 +524,8 @@ public class QTestUtil {
     for (String s : new String[] {"src", "src1", "src_json", "src_thrift",
         "src_sequencefile", "srcpart", "srcbucket", "srcbucket2", "dest1",
         "dest2", "dest3", "dest4", "dest4_sequencefile", "dest_j1", "dest_j2",
-        "dest_g1", "dest_g2", "fetchtask_ioexception"}) {
+        "dest_g1", "dest_g2", "fetchtask_ioexception",
+        AllVectorTypesRecord.TABLE_NAME}) {
       db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, s);
     }
 
@@ -658,7 +667,17 @@ public class QTestUtil {
     fpath = new Path(testFiles, "json.txt");
     runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
         + "' INTO TABLE src_json");
+
+    FileSystem localFs = FileSystem.getLocal(conf);
+    // create and load data into orc table
+    fpath = new Path(testFiles, AllVectorTypesRecord.TABLE_NAME);
+
+    runCreateTableCmd(AllVectorTypesRecord.TABLE_CREATE_COMMAND);
+    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toUri().getPath()
+        + "' INTO  TABLE "+AllVectorTypesRecord.TABLE_NAME);
+
     conf.setBoolean("hive.test.init.phase", false);
+
   }
 
   public void init() throws Exception {
@@ -1068,6 +1087,10 @@ public class QTestUtil {
     in = new BufferedReader(new FileReader(fname));
     out = new BufferedWriter(new FileWriter(fname + ".orig"));
     while (null != (line = in.readLine())) {
+      // Ignore the empty lines on windows
+      if(line.isEmpty() && Shell.WINDOWS) {
+        continue;
+      }
       out.write(line);
       out.write('\n');
     }
@@ -1506,4 +1529,15 @@ public class QTestUtil {
         + "or try \"ant test ... -Dtest.silent=false\" to get more logs.");
     System.err.flush();
   }
+
+  public static String ensurePathEndsInSlash(String path) {
+    if(path == null) {
+      throw new NullPointerException("Path cannot be null");
+    }
+    if(path.endsWith(File.separator)) {
+      return path;
+    } else {
+      return path + File.separator;
+    }
+  }
 }

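The QTestUtil hunk above adds an ensurePathEndsInSlash helper that normalizes a directory path to end in the platform separator. A short standalone sketch with the same logic copied out (the class name PathSlashSketch and the example paths are hypothetical):

import java.io.File;

// Same logic as the new QTestUtil.ensurePathEndsInSlash, copied so it runs standalone.
public class PathSlashSketch {
  static String ensurePathEndsInSlash(String path) {
    if (path == null) {
      throw new NullPointerException("Path cannot be null");
    }
    return path.endsWith(File.separator) ? path : path + File.separator;
  }

  public static void main(String[] args) {
    System.out.println(ensurePathEndsInSlash("/tmp/hive"));    // "/tmp/hive/" on Unix
    System.out.println(ensurePathEndsInSlash("/tmp/hive/"));   // already terminated, unchanged
  }
}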
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java Fri Oct  4 21:30:38 2013
@@ -41,7 +41,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.io.FSRecordWriter;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.InputFormatChecker;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -521,7 +521,7 @@ public class TestInputOutputFormat {
     }
     SerDe serde = new OrcSerde();
     HiveOutputFormat<?, ?> outFormat = new OrcOutputFormat();
-    FileSinkOperator.RecordWriter writer =
+    FSRecordWriter writer =
         outFormat.getHiveRecordWriter(conf, testFilePath, MyRow.class, true,
             properties, Reporter.NULL);
     writer.write(serde.serialize(new MyRow(1,2), inspector));
@@ -686,7 +686,7 @@ public class TestInputOutputFormat {
     JobConf job = new JobConf(conf);
     Properties properties = new Properties();
     HiveOutputFormat<?, ?> outFormat = new OrcOutputFormat();
-    FileSinkOperator.RecordWriter writer =
+    FSRecordWriter writer =
         outFormat.getHiveRecordWriter(conf, testFilePath, MyRow.class, true,
             properties, Reporter.NULL);
     writer.close(true);
@@ -731,7 +731,7 @@ public class TestInputOutputFormat {
     }
     SerDe serde = new OrcSerde();
     HiveOutputFormat<?, ?> outFormat = new OrcOutputFormat();
-    FileSinkOperator.RecordWriter writer =
+    FSRecordWriter writer =
         outFormat.getHiveRecordWriter(conf, testFilePath, StringRow.class,
             true, properties, Reporter.NULL);
     writer.write(serde.serialize(new StringRow("owen"), inspector));

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Fri Oct  4 21:30:38 2013
@@ -18,12 +18,28 @@
 
 package org.apache.hadoop.hive.ql.io.orc;
 
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertNull;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
-import org.apache.hadoop.hive.ql.io.sarg.TestSearchArgumentImpl;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -55,26 +71,25 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;
 
-import java.io.File;
-import java.io.IOException;
-import java.math.BigInteger;
-import java.nio.ByteBuffer;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import static junit.framework.Assert.*;
-import static junit.framework.Assert.assertEquals;
-
 /**
  * Tests for the top level reader/streamFactory of ORC files.
  */
 public class TestOrcFile {
 
+  public static class SimpleStruct {
+    BytesWritable bytes1;
+    Text string1;
+
+    SimpleStruct(BytesWritable b1, String s1) {
+      this.bytes1 = b1;
+      if(s1 == null) {
+        this.string1 = null;
+      } else {
+        this.string1 = new Text(s1);
+      }
+    }
+  }
+
   public static class InnerStruct {
     int int1;
     Text string1 = new Text();
@@ -132,48 +147,6 @@ public class TestOrcFile {
     }
   }
 
-  public static class AllTypesRow {
-    Boolean boolean1;
-    Byte byte1;
-    Short short1;
-    Integer int1;
-    Long long1;
-    Float float1;
-    Double double1;
-    BytesWritable bytes1;
-    Text string1;
-    MiddleStruct middle;
-    List<InnerStruct> list = new ArrayList<InnerStruct>();
-    Map<Text, InnerStruct> map = new HashMap<Text, InnerStruct>();
-    Timestamp ts;
-    HiveDecimal decimal1;
-
-    AllTypesRow(Boolean b1, Byte b2, Short s1, Integer i1, Long l1, Float f1,
-           Double d1,
-           BytesWritable b3, String s2, MiddleStruct m1,
-           List<InnerStruct> l2, Map<Text, InnerStruct> m2,
-           Timestamp ts1, HiveDecimal decimal) {
-      this.boolean1 = b1;
-      this.byte1 = b2;
-      this.short1 = s1;
-      this.int1 = i1;
-      this.long1 = l1;
-      this.float1 = f1;
-      this.double1 = d1;
-      this.bytes1 = b3;
-      if (s2 == null) {
-        this.string1 = null;
-      } else {
-        this.string1 = new Text(s2);
-      }
-      this.middle = m1;
-      this.list = l2;
-      this.map = m2;
-      this.ts = ts1;
-      this.decimal1 = decimal;
-    }
-  }
-
   private static InnerStruct inner(int i, String s) {
     return new InnerStruct(i, s);
   }
@@ -231,39 +204,6 @@ public class TestOrcFile {
   }
 
   @Test
-  public void testWriteFormat_0_11() throws Exception {
-    ObjectInspector inspector;
-    synchronized (TestOrcFile.class) {
-      inspector = ObjectInspectorFactory
-          .getReflectionObjectInspector(AllTypesRow.class,
-              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
-    }
-    conf.set("hive.exec.orc.write.format", "0.11");
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
-    for(int i = 0; i < 7500; i++) {
-      if (i % 2 == 0) {
-        writer.addRow(new AllTypesRow(false, (byte) 1, (short) 1024, 65536,
-            Long.MAX_VALUE, (float) 1.0, -15.0, bytes(0, 1, 2, 3, 4), "hi",
-            new MiddleStruct(inner(1, "bye"), inner(2, "sigh")), list(
-                inner(3, "good"), inner(4, "bad")), map(), Timestamp
-                .valueOf("2000-03-12 15:00:00"), new HiveDecimal(
-                "12345678.6547456")));
-      } else {
-        writer.addRow(new AllTypesRow(true, (byte) 100, (short) 2048, 65536,
-            Long.MAX_VALUE, (float) 2.0, -5.0, bytes(), "bye",
-            new MiddleStruct(inner(1, "bye"), inner(2, "sigh")), list(
-                inner(100000000, "cat"), inner(-100000, "in"),
-                inner(1234, "hat")),
-            map(inner(5, "chani"), inner(1, "mauddib")), Timestamp
-                .valueOf("2000-03-12 15:00:01"), new HiveDecimal(
-                "12345678.6547457")));
-      }
-    }
-    writer.close();
-  }
-
-  @Test
   public void testReadFormat_0_11() throws Exception {
     Path resourceDir = new Path(System.getProperty("test.build.resources", "ql"
         + File.separator + "src" + File.separator + "test" + File.separator
@@ -319,7 +259,7 @@ public class TestOrcFile {
     assertEquals("count: 7500 min: -15.0 max: -5.0 sum: -75000.0",
         stats[7].toString());
 
-    assertEquals("count: 7500 min: bye max: hi", stats[9].toString());
+    assertEquals("count: 7500 min: bye max: hi sum: 0", stats[9].toString());
 
     // check the inspectors
     StructObjectInspector readerInspector = (StructObjectInspector) reader
@@ -515,6 +455,93 @@ public class TestOrcFile {
   }
 
   @Test
+  public void testStringAndBinaryStatistics() throws Exception {
+
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (SimpleStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
+    writer.addRow(new SimpleStruct(bytes(0,1,2,3,4), "foo"));
+    writer.addRow(new SimpleStruct(bytes(0,1,2,3), "bar"));
+    writer.addRow(new SimpleStruct(bytes(0,1,2,3,4,5), null));
+    writer.addRow(new SimpleStruct(null, "hi"));
+    writer.close();
+    Reader reader = OrcFile.createReader(fs, testFilePath);
+
+    // check the stats
+    ColumnStatistics[] stats = reader.getStatistics();
+    assertEquals(4, stats[0].getNumberOfValues());
+    assertEquals("count: 4", stats[0].toString());
+
+    assertEquals(3, stats[1].getNumberOfValues());
+    assertEquals(15, ((BinaryColumnStatistics) stats[1]).getSum());
+    assertEquals("count: 3 sum: 15", stats[1].toString());
+
+    assertEquals(3, stats[2].getNumberOfValues());
+    assertEquals("bar", ((StringColumnStatistics) stats[2]).getMinimum());
+    assertEquals("hi", ((StringColumnStatistics) stats[2]).getMaximum());
+    assertEquals(8, ((StringColumnStatistics) stats[2]).getSum());
+    assertEquals("count: 3 min: bar max: hi sum: 8",
+        stats[2].toString());
+
+    // check the inspectors
+    StructObjectInspector readerInspector =
+        (StructObjectInspector) reader.getObjectInspector();
+    assertEquals(ObjectInspector.Category.STRUCT,
+        readerInspector.getCategory());
+    assertEquals("struct<bytes1:binary,string1:string>",
+        readerInspector.getTypeName());
+    List<? extends StructField> fields =
+        readerInspector.getAllStructFieldRefs();
+    BinaryObjectInspector bi = (BinaryObjectInspector) readerInspector.
+        getStructFieldRef("bytes1").getFieldObjectInspector();
+    StringObjectInspector st = (StringObjectInspector) readerInspector.
+        getStructFieldRef("string1").getFieldObjectInspector();
+    RecordReader rows = reader.rows(null);
+    Object row = rows.next(null);
+    assertNotNull(row);
+    // check the contents of the first row
+    assertEquals(bytes(0,1,2,3,4), bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals("foo", st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(1))));
+
+    // check the contents of second row
+    assertEquals(true, rows.hasNext());
+    row = rows.next(row);
+    assertEquals(bytes(0,1,2,3), bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals("bar", st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(1))));
+
+    // check the contents of second row
+    assertEquals(true, rows.hasNext());
+    row = rows.next(row);
+    assertEquals(bytes(0,1,2,3,4,5), bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(0))));
+    assertNull(st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(1))));
+
+    // check the contents of second row
+    assertEquals(true, rows.hasNext());
+    row = rows.next(row);
+    assertNull(bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals("hi", st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(1))));
+
+    // handle the close up
+    assertEquals(false, rows.hasNext());
+    rows.close();
+  }
+
+  @Test
   public void test1() throws Exception {
     ObjectInspector inspector;
     synchronized (TestOrcFile.class) {
@@ -567,7 +594,7 @@ public class TestOrcFile {
     assertEquals("count: 2 min: -15.0 max: -5.0 sum: -20.0",
         stats[7].toString());
 
-    assertEquals("count: 2 min: bye max: hi", stats[9].toString());
+    assertEquals("count: 2 min: bye max: hi sum: 5", stats[9].toString());
 
     // check the inspectors
     StructObjectInspector readerInspector =

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java Fri Oct  4 21:30:38 2013
@@ -125,7 +125,7 @@ public class TestOrcNullOptimization {
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMinimum());
     assertEquals(19998,
                  ((StringColumnStatistics) stats[2]).getNumberOfValues());
-    assertEquals("count: 19998 min: a max: a",
+    assertEquals("count: 19998 min: a max: a sum: 19998",
         stats[2].toString());
 
     // check the inspectors
@@ -229,7 +229,7 @@ public class TestOrcNullOptimization {
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMinimum());
     assertEquals(20000,
                  ((StringColumnStatistics) stats[2]).getNumberOfValues());
-    assertEquals("count: 20000 min: a max: b",
+    assertEquals("count: 20000 min: a max: b sum: 20000",
         stats[2].toString());
 
     // check the inspectors
@@ -329,7 +329,7 @@ public class TestOrcNullOptimization {
     assertEquals("h", ((StringColumnStatistics) stats[2]).getMaximum());
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMinimum());
     assertEquals(7, ((StringColumnStatistics) stats[2]).getNumberOfValues());
-    assertEquals("count: 7 min: a max: h",
+    assertEquals("count: 7 min: a max: h sum: 7",
         stats[2].toString());
 
     // check the inspectors

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/udf/Rot13OutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/udf/Rot13OutputFormat.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/udf/Rot13OutputFormat.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/udf/Rot13OutputFormat.java Fri Oct  4 21:30:38 2013
@@ -1,7 +1,10 @@
 package org.apache.hadoop.hive.ql.io.udf;
 
+import java.io.IOException;
+import java.util.Properties;
+
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
+import org.apache.hadoop.hive.ql.io.FSRecordWriter;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -11,27 +14,24 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.Progressable;
 
-import java.io.IOException;
-import java.util.Properties;
-
 public class Rot13OutputFormat
   extends HiveIgnoreKeyTextOutputFormat<LongWritable,Text> {
 
   @Override
-  public RecordWriter
+  public FSRecordWriter
     getHiveRecordWriter(JobConf jc,
                         Path outPath,
                         Class<? extends Writable> valueClass,
                         boolean isCompressed,
                         Properties tableProperties,
                         Progressable progress) throws IOException {
-    final RecordWriter result =
+    final FSRecordWriter result =
       super.getHiveRecordWriter(jc,outPath,valueClass,isCompressed,
         tableProperties,progress);
     final Reporter reporter = (Reporter) progress;
     reporter.setStatus("got here");
     System.out.println("Got a reporter " + reporter);
-    return new RecordWriter() {
+    return new FSRecordWriter() {
       @Override
       public void write(Writable w) throws IOException {
         if (w instanceof Text) {

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java Fri Oct  4 21:30:38 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.metada
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -47,8 +48,8 @@ public class TestHiveMetaStoreChecker ex
   private FileSystem fs;
   private HiveMetaStoreChecker checker = null;
 
-  private final String dbName = "dbname";
-  private final String tableName = "tablename";
+  private final String dbName = "testhivemetastorechecker_db";
+  private final String tableName = "testhivemetastorechecker_table";
 
   private final String partDateName = "partdate";
   private final String partCityName = "partcity";
@@ -76,17 +77,25 @@ public class TestHiveMetaStoreChecker ex
     part2.put(partCityName, "stockholm");
     parts.add(part2);
 
+    // cleanup just in case something is left over from a previous run
+    dropDbTable();
+  }
+
+  private void dropDbTable() {
     // cleanup
-    hive.dropTable(dbName, tableName, true, true);
     try {
-      hive.dropDatabase(dbName);
+      hive.dropTable(dbName, tableName, true, true);
+      hive.dropDatabase(dbName, true, true, true);
     } catch (NoSuchObjectException e) {
       // ignore
+    } catch (HiveException e) {
+      // ignore
     }
   }
 
   @Override
   protected void tearDown() throws Exception {
+    dropDbTable();
     super.tearDown();
     Hive.closeCurrent();
   }
@@ -97,19 +106,19 @@ public class TestHiveMetaStoreChecker ex
     CheckResult result = new CheckResult();
     checker.checkMetastore(dbName, null, null, result);
     // we haven't added anything so should return an all ok
-    assertTrue(result.getTablesNotInMs().isEmpty());
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     // check table only, should not exist in ms
     result = new CheckResult();
     checker.checkMetastore(dbName, tableName, null, result);
     assertEquals(1, result.getTablesNotInMs().size());
     assertEquals(tableName, result.getTablesNotInMs().get(0));
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     Database db = new Database();
     db.setName(dbName);
@@ -125,18 +134,18 @@ public class TestHiveMetaStoreChecker ex
     // first check all (1) tables
     result = new CheckResult();
     checker.checkMetastore(dbName, null, null, result);
-    assertTrue(result.getTablesNotInMs().isEmpty());
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     // then let's check the one we know about
     result = new CheckResult();
     checker.checkMetastore(dbName, tableName, null, result);
-    assertTrue(result.getTablesNotInMs().isEmpty());
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     // remove the table folder
     fs = table.getPath().getFileSystem(hive.getConf());
@@ -145,26 +154,27 @@ public class TestHiveMetaStoreChecker ex
     // now this shouldn't find the path on the fs
     result = new CheckResult();
     checker.checkMetastore(dbName, tableName, null, result);
-    assertTrue(result.getTablesNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
     assertEquals(1, result.getTablesNotOnFs().size());
     assertEquals(tableName, result.getTablesNotOnFs().get(0));
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     // put it back and one additional table
     fs.mkdirs(table.getPath());
     Path fakeTable = table.getPath().getParent().suffix(
         Path.SEPARATOR + "faketable");
     fs.mkdirs(fakeTable);
+    fs.deleteOnExit(fakeTable);
 
     // find the extra table
     result = new CheckResult();
     checker.checkMetastore(dbName, null, null, result);
     assertEquals(1, result.getTablesNotInMs().size());
     assertEquals(fakeTable.getName(), result.getTablesNotInMs().get(0));
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     // create a new external table
     hive.dropTable(dbName, tableName);
@@ -174,10 +184,10 @@ public class TestHiveMetaStoreChecker ex
     // should return all ok
     result = new CheckResult();
     checker.checkMetastore(dbName, null, null, result);
-    assertTrue(result.getTablesNotInMs().isEmpty());
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
   }
 
   public void testPartitionsCheck() throws HiveException, MetaException,
@@ -203,10 +213,10 @@ public class TestHiveMetaStoreChecker ex
     CheckResult result = new CheckResult();
     checker.checkMetastore(dbName, tableName, null, result);
     // all is well
-    assertTrue(result.getTablesNotInMs().isEmpty());
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     List<Partition> partitions = hive.getPartitions(table);
     assertEquals(2, partitions.size());
@@ -218,24 +228,24 @@ public class TestHiveMetaStoreChecker ex
     result = new CheckResult();
     checker.checkMetastore(dbName, tableName, null, result);
     // missing one partition on fs
-    assertTrue(result.getTablesNotInMs().isEmpty());
-    assertTrue(result.getTablesNotOnFs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
     assertEquals(1, result.getPartitionsNotOnFs().size());
     assertEquals(partToRemove.getName(), result.getPartitionsNotOnFs().get(0)
         .getPartitionName());
     assertEquals(partToRemove.getTable().getTableName(), result
         .getPartitionsNotOnFs().get(0).getTableName());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     List<Map<String, String>> partsCopy = new ArrayList<Map<String, String>>();
     partsCopy.add(partitions.get(1).getSpec());
     // check only the partition that exists, all should be well
     result = new CheckResult();
     checker.checkMetastore(dbName, tableName, partsCopy, result);
-    assertTrue(result.getTablesNotInMs().isEmpty());
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotInMs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
 
     // put the other one back
     fs.mkdirs(partToRemovePath);
@@ -244,15 +254,60 @@ public class TestHiveMetaStoreChecker ex
     Path fakePart = new Path(table.getDataLocation().toString(),
         "fakepartition=fakevalue");
     fs.mkdirs(fakePart);
+    fs.deleteOnExit(fakePart);
 
     checker.checkMetastore(dbName, tableName, null, result);
     // one extra partition
-    assertTrue(result.getTablesNotInMs().isEmpty());
-    assertTrue(result.getTablesNotOnFs().isEmpty());
-    assertTrue(result.getPartitionsNotOnFs().isEmpty());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
     assertEquals(1, result.getPartitionsNotInMs().size());
     assertEquals(fakePart.getName(), result.getPartitionsNotInMs().get(0)
         .getPartitionName());
+
+    // cleanup
+    hive.dropTable(dbName, tableName, true, true);
+    hive.createTable(table);
+    result = new CheckResult();
+    checker.checkMetastore(dbName, null, null, result);
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotInMs());
+    assertEquals(Collections.<String>emptyList(), result.getTablesNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotOnFs());
+    assertEquals(Collections.<String>emptyList(), result.getPartitionsNotInMs());
+    System.err.println("Test completed - partition check");
+
   }
 
+  public void testDataDeletion() throws HiveException, MetaException,
+      IOException, TException, AlreadyExistsException, NoSuchObjectException {
+
+    Database db = new Database();
+    db.setName(dbName);
+    hive.createDatabase(db);
+
+    Table table = new Table(dbName, tableName);
+    table.setDbName(dbName);
+    table.setInputFormatClass(TextInputFormat.class);
+    table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
+    table.setPartCols(partCols);
+
+    hive.createTable(table);
+    table = hive.getTable(dbName, tableName);
+
+    Path fakeTable = table.getPath().getParent().suffix(
+        Path.SEPARATOR + "faketable");
+    fs = fakeTable.getFileSystem(hive.getConf());
+    fs.mkdirs(fakeTable);
+    fs.deleteOnExit(fakeTable);
+
+    Path fakePart = new Path(table.getDataLocation().toString(),
+        "fakepartition=fakevalue");
+    fs.mkdirs(fakePart);
+    fs.deleteOnExit(fakePart);
+
+    hive.dropTable(dbName, tableName, true, true);
+    assertFalse(fs.exists(fakePart));
+    hive.dropDatabase(dbName);
+    assertFalse(fs.exists(fakeTable));
+  }
 }
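One readability note on the new dropDbTable() cleanup: the boolean arguments are easy to misread. The sketch below spells out what each flag is assumed to mean; the parameter names are my reading of Hive's metadata API at this revision, not something stated in the commit.

    // Assumed signatures: Hive.dropTable(String db, String table, boolean deleteData,
    // boolean ignoreUnknownTable) and Hive.dropDatabase(String db, boolean deleteData,
    // boolean ignoreUnknownDb, boolean cascade).
    hive.dropTable(dbName, tableName,
        /* deleteData         */ true,
        /* ignoreUnknownTable */ true);
    hive.dropDatabase(dbName,
        /* deleteData      */ true,
        /* ignoreUnknownDb */ true,
        /* cascade         */ true);

Passing the ignore flags is what lets setUp() call this unconditionally even when nothing is left over from a previous run.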

Modified: hive/branches/maven/ql/src/test/queries/clientnegative/deletejar.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/deletejar.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/deletejar.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/deletejar.q Fri Oct  4 21:30:38 2013
@@ -1,4 +1,4 @@
 
-ADD JAR ../data/files/TestSerDe.jar;
-DELETE JAR ../data/files/TestSerDe.jar;
+ADD JAR ../build/ql/test/TestSerDe.jar;
+DELETE JAR ../build/ql/test/TestSerDe.jar;
 CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;

Modified: hive/branches/maven/ql/src/test/queries/clientnegative/invalid_columns.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/invalid_columns.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/invalid_columns.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/invalid_columns.q Fri Oct  4 21:30:38 2013
@@ -1,4 +1,4 @@
-ADD JAR ../data/files/TestSerDe.jar;
+ADD JAR ../build/ql/test/TestSerDe.jar;
 CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' 
 STORED AS TEXTFILE
 TBLPROPERTIES('columns'='valid_colname,invalid.colname');

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/alter1.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/alter1.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/alter1.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/alter1.q Fri Oct  4 21:30:38 2013
@@ -15,7 +15,7 @@ describe extended alter1;
 alter table alter1 set serdeproperties('s1'='10', 's2' ='20');
 describe extended alter1;
 
-add jar ../data/files/TestSerDe.jar;
+add jar ../build/ql/test/TestSerDe.jar;
 alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9');
 describe extended alter1;
 
@@ -56,7 +56,7 @@ DESCRIBE EXTENDED alter1;
 ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20');
 DESCRIBE EXTENDED alter1;
 
-add jar ../data/files/TestSerDe.jar;
+add jar ../build/ql/test/TestSerDe.jar;
 ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9');
 DESCRIBE EXTENDED alter1;
 

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/alter_partition_coltype.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/alter_partition_coltype.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/alter_partition_coltype.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/alter_partition_coltype.q Fri Oct  4 21:30:38 2013
@@ -24,6 +24,8 @@ select count(*) from alter_coltype where
 -- alter partition key column data type for ts column.
 alter table alter_coltype partition column (ts double);
 
+alter table alter_coltype partition column (dt string);
+
 -- load a new partition using new data type.
 insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1;
 

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/input16.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/input16.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/input16.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/input16.q Fri Oct  4 21:30:38 2013
@@ -1,6 +1,6 @@
 -- TestSerDe is a user defined serde where the default delimiter is Ctrl-B
 DROP TABLE INPUT16;
-ADD JAR ../data/files/TestSerDe.jar;
+ADD JAR ../build/ql/test/TestSerDe.jar;
 CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16;
 SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/input16_cc.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/input16_cc.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/input16_cc.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/input16_cc.q Fri Oct  4 21:30:38 2013
@@ -4,7 +4,7 @@ set hive.input.format=org.apache.hadoop.
 -- the user is overwriting it with ctrlC
 
 DROP TABLE INPUT16_CC;
-ADD JAR ../data/files/TestSerDe.jar;
+ADD JAR ../build/ql/test/TestSerDe.jar;
 CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'  with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC;
 SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/orc_create.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/orc_create.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/orc_create.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/orc_create.q Fri Oct  4 21:30:38 2013
@@ -1,6 +1,8 @@
 DROP TABLE orc_create;
 DROP TABLE orc_create_complex;
 DROP TABLE orc_create_staging;
+DROP TABLE orc_create_people_staging;
+DROP TABLE orc_create_people;
 
 CREATE TABLE orc_create_staging (
   str STRING,
@@ -38,6 +40,8 @@ set hive.default.fileformat=orc;
 CREATE TABLE orc_create (key INT, value STRING)
    PARTITIONED BY (ds string);
 
+set hive.default.fileformat=text;
+
 DESCRIBE FORMATTED orc_create;
 
 CREATE TABLE orc_create_complex (
@@ -61,6 +65,39 @@ SELECT mp from orc_create_complex;
 SELECT lst from orc_create_complex;
 SELECT strct from orc_create_complex;
 
+CREATE TABLE orc_create_people_staging (
+  id int,
+  first_name string,
+  last_name string,
+  address string,
+  state string);
+
+LOAD DATA LOCAL INPATH '../data/files/orc_create_people.txt'
+  OVERWRITE INTO TABLE orc_create_people_staging;
+
+CREATE TABLE orc_create_people (
+  id int,
+  first_name string,
+  last_name string,
+  address string)
+PARTITIONED BY (state string)
+STORED AS orc;
+
+set hive.exec.dynamic.partition.mode=nonstrict;
+
+INSERT OVERWRITE TABLE orc_create_people PARTITION (state)
+  SELECT * FROM orc_create_people_staging;
+
+SET hive.optimize.index.filter=true;
+-- test predicate push down with partition pruning
+SELECT COUNT(*) FROM orc_create_people where id < 10 and state = 'Ca';
+
+-- test predicate push down with no column projection
+SELECT id, first_name, last_name, address
+  FROM orc_create_people WHERE id > 90;
+
 DROP TABLE orc_create;
 DROP TABLE orc_create_complex;
 DROP TABLE orc_create_staging;
+DROP TABLE orc_create_people_staging;
+DROP TABLE orc_create_people;
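The orc_create_people block added above depends on two session settings: nonstrict dynamic partitioning for the INSERT ... PARTITION (state) statement, and hive.optimize.index.filter for the predicate push down checks. As a hedged aside, the same flags can be set programmatically on a HiveConf when driving such statements from Java; the helper class below is hypothetical and only mirrors the `set` lines in the .q file.

    import org.apache.hadoop.hive.conf.HiveConf;

    // Hypothetical helper -- mirrors the session settings added to orc_create.q.
    public class OrcCreateSettings {
      public static HiveConf withOrcTestSettings() {
        HiveConf conf = new HiveConf();
        // allow INSERT OVERWRITE ... PARTITION (state) with no static partition key
        conf.set("hive.exec.dynamic.partition.mode", "nonstrict");
        // push predicates like "id < 10" down into the ORC reader
        conf.setBoolean("hive.optimize.index.filter", true);
        return conf;
      }
    }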

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q Fri Oct  4 21:30:38 2013
@@ -18,6 +18,7 @@ explain select src.key, sum(src.key) FRO
 explain select src.key, src.value FROM src JOIN src1 ON src.key = src1.key order by src.key, src.value;
 -- mGBY-RS-rGBY-mGBY-RS-rGBY
 explain from (select key, value from src group by key, value) s select s.key group by s.key;
+explain select key, count(distinct value) from (select key, value from src group by key, value) t group by key;
 
 select key, sum(key) from (select * from src distribute by key sort by key, value) Q1 group by key;
 select key, sum(key), lower(value) from (select * from src order by key) Q1 group by key, lower(value);
@@ -26,6 +27,7 @@ select key, sum(key) as value from src g
 select src.key, sum(src.key) FROM src JOIN src1 ON src.key = src1.key group by src.key, src.value;
 select src.key, src.value FROM src JOIN src1 ON src.key = src1.key order by src.key, src.value;
 from (select key, value from src group by key, value) s select s.key group by s.key;
+select key, count(distinct value) from (select key, value from src group by key, value) t group by key;
 
 set hive.map.aggr=false;
 
@@ -41,6 +43,7 @@ explain select src.key, sum(src.key) FRO
 explain select src.key, src.value FROM src JOIN src1 ON src.key = src1.key order by src.key, src.value;
 -- RS-GBY-RS-GBY
 explain from (select key, value from src group by key, value) s select s.key group by s.key;
+explain select key, count(distinct value) from (select key, value from src group by key, value) t group by key;
 
 select key, sum(key) from (select * from src distribute by key sort by key, value) Q1 group by key;
 select key, sum(key), lower(value) from (select * from src order by key) Q1 group by key, lower(value);
@@ -49,3 +52,4 @@ select key, sum(key) as value from src g
 select src.key, sum(src.key) FROM src JOIN src1 ON src.key = src1.key group by src.key, src.value;
 select src.key, src.value FROM src JOIN src1 ON src.key = src1.key order by src.key, src.value;
 from (select key, value from src group by key, value) s select s.key group by s.key;
+select key, count(distinct value) from (select key, value from src group by key, value) t group by key;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/stats_partscan_1.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/stats_partscan_1.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/stats_partscan_1.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/stats_partscan_1.q Fri Oct  4 21:30:38 2013
@@ -7,6 +7,11 @@ set mapred.min.split.size.per.node=256;
 set mapred.min.split.size.per.rack=256;
 set mapred.max.split.size=256;
 
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- This test uses mapred.min.split.size/mapred.max.split.size to control the
+-- number of input splits, which is not effective on Hadoop 0.20.
+-- stats_partscan_1_23.q is the same test as this one but has a different result.
+
 -- test analyze table ... compute statistics partialscan
 
 -- 1. prepare data

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/union_null.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/union_null.q?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/union_null.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/union_null.q Fri Oct  4 21:30:38 2013
@@ -1,2 +1,5 @@
 -- HIVE-2901
 select x from (select value as x from src union all select NULL as x from src)a limit 10;
+
+-- HIVE-4837
+select * from (select null as N from src1 group by key UNION ALL select null as N from src1 group by key ) a;

Modified: hive/branches/maven/ql/src/test/resources/orc-file-dump-dictionary-threshold.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/resources/orc-file-dump-dictionary-threshold.out?rev=1529308&r1=1529307&r2=1529308&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/resources/orc-file-dump-dictionary-threshold.out (original)
+++ hive/branches/maven/ql/src/test/resources/orc-file-dump-dictionary-threshold.out Fri Oct  4 21:30:38 2013
@@ -8,71 +8,71 @@ Statistics:
   Column 0: count: 21000
   Column 1: count: 21000 min: -2147390285 max: 2147453086 sum: 109128518326
   Column 2: count: 21000 min: -9222731174895935707 max: 9222919052987871506
-  Column 3: count: 21000 min: Darkness,-230 max: worst-54-290-346-648-908-996-1038-1080-1560-1584-1620-1744-1770-1798-1852-1966-2162-2244-2286-2296-2534-2660-3114-3676-3788-4068-4150-4706-4744-5350-5420-5582-5696-5726-6006-6020-6024-6098-6184-6568-6636-6802-6994-7004-7318-7498-7758-7780-7798-7920-7952-7960-7988-8232-8256-8390-8416-8478-8620-8840-8984-9038-9128-9236-9248-9344-9594-9650-9714-9928-9938-10178-10368-10414-10502-10732-10876-11008-11158-11410-11722-11836-11964-12054-12096-12126-12136-12202-12246-12298-12616-12774-12782-12790-12802-12976-13216-13246-13502-13766-14454-14974-15004-15124-15252-15294-15356-15530-15610-16316-16936-17024-17122-17214-17310-17528-17682-17742-17870-17878-18010-18410-18524-18788-19204-19254-19518-19596-19786-19874-19904-20390-20752-20936
+  Column 3: count: 21000 min: Darkness,-230 max: worst-54-290-346-648-908-996-1038-1080-1560-1584-1620-1744-1770-1798-1852-1966-2162-2244-2286-2296-2534-2660-3114-3676-3788-4068-4150-4706-4744-5350-5420-5582-5696-5726-6006-6020-6024-6098-6184-6568-6636-6802-6994-7004-7318-7498-7758-7780-7798-7920-7952-7960-7988-8232-8256-8390-8416-8478-8620-8840-8984-9038-9128-9236-9248-9344-9594-9650-9714-9928-9938-10178-10368-10414-10502-10732-10876-11008-11158-11410-11722-11836-11964-12054-12096-12126-12136-12202-12246-12298-12616-12774-12782-12790-12802-12976-13216-13246-13502-13766-14454-14974-15004-15124-15252-15294-15356-15530-15610-16316-16936-17024-17122-17214-17310-17528-17682-17742-17870-17878-18010-18410-18524-18788-19204-19254-19518-19596-19786-19874-19904-20390-20752-20936 sum: 6910238
 
 Stripes:
-  Stripe: offset: 3 data: 102311 rows: 4000 tail: 68 index: 217
+  Stripe: offset: 3 data: 102311 rows: 4000 tail: 68 index: 224
     Stream: column 0 section ROW_INDEX start: 3 length 10
     Stream: column 1 section ROW_INDEX start: 13 length 36
     Stream: column 2 section ROW_INDEX start: 49 length 39
-    Stream: column 3 section ROW_INDEX start: 88 length 132
-    Stream: column 1 section DATA start: 220 length 16022
-    Stream: column 2 section DATA start: 16242 length 32028
-    Stream: column 3 section DATA start: 48270 length 50887
-    Stream: column 3 section LENGTH start: 99157 length 3374
+    Stream: column 3 section ROW_INDEX start: 88 length 139
+    Stream: column 1 section DATA start: 227 length 16022
+    Stream: column 2 section DATA start: 16249 length 32028
+    Stream: column 3 section DATA start: 48277 length 50887
+    Stream: column 3 section LENGTH start: 99164 length 3374
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
     Encoding column 3: DIRECT_V2
-  Stripe: offset: 102599 data: 284999 rows: 5000 tail: 68 index: 349
-    Stream: column 0 section ROW_INDEX start: 102599 length 10
-    Stream: column 1 section ROW_INDEX start: 102609 length 36
-    Stream: column 2 section ROW_INDEX start: 102645 length 39
-    Stream: column 3 section ROW_INDEX start: 102684 length 264
-    Stream: column 1 section DATA start: 102948 length 20029
-    Stream: column 2 section DATA start: 122977 length 40035
-    Stream: column 3 section DATA start: 163012 length 219588
-    Stream: column 3 section LENGTH start: 382600 length 5347
+  Stripe: offset: 102606 data: 284999 rows: 5000 tail: 68 index: 356
+    Stream: column 0 section ROW_INDEX start: 102606 length 10
+    Stream: column 1 section ROW_INDEX start: 102616 length 36
+    Stream: column 2 section ROW_INDEX start: 102652 length 39
+    Stream: column 3 section ROW_INDEX start: 102691 length 271
+    Stream: column 1 section DATA start: 102962 length 20029
+    Stream: column 2 section DATA start: 122991 length 40035
+    Stream: column 3 section DATA start: 163026 length 219588
+    Stream: column 3 section LENGTH start: 382614 length 5347
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
     Encoding column 3: DIRECT_V2
-  Stripe: offset: 388015 data: 491655 rows: 5000 tail: 69 index: 536
-    Stream: column 0 section ROW_INDEX start: 388015 length 10
-    Stream: column 1 section ROW_INDEX start: 388025 length 36
-    Stream: column 2 section ROW_INDEX start: 388061 length 39
-    Stream: column 3 section ROW_INDEX start: 388100 length 451
-    Stream: column 1 section DATA start: 388551 length 20029
-    Stream: column 2 section DATA start: 408580 length 40035
-    Stream: column 3 section DATA start: 448615 length 425862
-    Stream: column 3 section LENGTH start: 874477 length 5729
+  Stripe: offset: 388029 data: 491655 rows: 5000 tail: 69 index: 544
+    Stream: column 0 section ROW_INDEX start: 388029 length 10
+    Stream: column 1 section ROW_INDEX start: 388039 length 36
+    Stream: column 2 section ROW_INDEX start: 388075 length 39
+    Stream: column 3 section ROW_INDEX start: 388114 length 459
+    Stream: column 1 section DATA start: 388573 length 20029
+    Stream: column 2 section DATA start: 408602 length 40035
+    Stream: column 3 section DATA start: 448637 length 425862
+    Stream: column 3 section LENGTH start: 874499 length 5729
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
     Encoding column 3: DIRECT_V2
-  Stripe: offset: 880275 data: 707368 rows: 5000 tail: 68 index: 677
-    Stream: column 0 section ROW_INDEX start: 880275 length 10
-    Stream: column 1 section ROW_INDEX start: 880285 length 36
-    Stream: column 2 section ROW_INDEX start: 880321 length 39
-    Stream: column 3 section ROW_INDEX start: 880360 length 592
-    Stream: column 1 section DATA start: 880952 length 20029
-    Stream: column 2 section DATA start: 900981 length 40035
-    Stream: column 3 section DATA start: 941016 length 641580
-    Stream: column 3 section LENGTH start: 1582596 length 5724
+  Stripe: offset: 880297 data: 707368 rows: 5000 tail: 68 index: 691
+    Stream: column 0 section ROW_INDEX start: 880297 length 10
+    Stream: column 1 section ROW_INDEX start: 880307 length 36
+    Stream: column 2 section ROW_INDEX start: 880343 length 39
+    Stream: column 3 section ROW_INDEX start: 880382 length 606
+    Stream: column 1 section DATA start: 880988 length 20029
+    Stream: column 2 section DATA start: 901017 length 40035
+    Stream: column 3 section DATA start: 941052 length 641580
+    Stream: column 3 section LENGTH start: 1582632 length 5724
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
     Encoding column 3: DIRECT_V2
-  Stripe: offset: 1588388 data: 348697 rows: 2000 tail: 67 index: 786
-    Stream: column 0 section ROW_INDEX start: 1588388 length 10
-    Stream: column 1 section ROW_INDEX start: 1588398 length 36
-    Stream: column 2 section ROW_INDEX start: 1588434 length 39
-    Stream: column 3 section ROW_INDEX start: 1588473 length 701
-    Stream: column 1 section DATA start: 1589174 length 8011
-    Stream: column 2 section DATA start: 1597185 length 16014
-    Stream: column 3 section DATA start: 1613199 length 322259
-    Stream: column 3 section LENGTH start: 1935458 length 2413
+  Stripe: offset: 1588424 data: 348697 rows: 2000 tail: 67 index: 797
+    Stream: column 0 section ROW_INDEX start: 1588424 length 10
+    Stream: column 1 section ROW_INDEX start: 1588434 length 36
+    Stream: column 2 section ROW_INDEX start: 1588470 length 39
+    Stream: column 3 section ROW_INDEX start: 1588509 length 712
+    Stream: column 1 section DATA start: 1589221 length 8011
+    Stream: column 2 section DATA start: 1597232 length 16014
+    Stream: column 3 section DATA start: 1613246 length 322259
+    Stream: column 3 section LENGTH start: 1935505 length 2413
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
-    Encoding column 3: DIRECT_V2
\ No newline at end of file
+    Encoding column 3: DIRECT_V2
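For orientation, expected outputs like the one above (column statistics, per-stripe stream offsets and lengths, column encodings) come from Hive's ORC file dump utility. A minimal way to regenerate such a dump is sketched below; the FileDump class name and its main(String[]) entry point are assumptions based on the ql module of this era, and the file path is a placeholder.

    import org.apache.hadoop.hive.ql.io.orc.FileDump;

    // Assumption: FileDump.main prints the statistics, stripe layout and encodings
    // in the format shown in the expected output above.
    public class RegenerateOrcDump {
      public static void main(String[] args) throws Exception {
        FileDump.main(new String[] { "/tmp/orc-file-dump-dictionary-threshold.orc" });
      }
    }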


