hadoop-common-commits mailing list archives

From dhr...@apache.org
Subject svn commit: r712905 [6/38] - in /hadoop/core/trunk: ./ src/contrib/hive/ src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/ src/contrib/hive/common/src/java/org/apache/hadoop/hive/conf/ src/contrib/hive/conf/ src/contrib/hive/data/files/ src/con...
Date Tue, 11 Nov 2008 01:50:18 GMT
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Mon Nov 10 17:50:06 2008
@@ -39,6 +39,9 @@
       case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
       case HiveParser.TOK_ALTERTABLE_RENAME:
       case HiveParser.TOK_ALTERTABLE_DROPPARTS:
+      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
+      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
       case HiveParser.TOK_SHOWTABLES:
       case HiveParser.TOK_SHOWPARTITIONS:
         return new DDLSemanticAnalyzer(conf);
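
A quick sketch (not part of this patch) of what the added cases mean for a caller: parse trees rooted at TOK_ALTERTABLE_PROPERTIES, TOK_ALTERTABLE_SERIALIZER and TOK_ALTERTABLE_SERDEPROPERTIES are now handed to DDLSemanticAnalyzer. The get(conf, tree) entry point and its thrown exception are assumed from context, since only the switch body appears in this hunk.

import org.antlr.runtime.tree.CommonTree;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;

public class DdlRoutingSketch {
  // Hypothetical wrapper: for an ALTER TABLE ... SET TBLPROPERTIES / SET SERDE /
  // SET SERDEPROPERTIES statement, the factory now returns a DDLSemanticAnalyzer
  // instead of falling through to the default analyzer.
  public static BaseSemanticAnalyzer analyzerFor(HiveConf conf, CommonTree ast)
      throws Exception {
    return SemanticAnalyzerFactory.get(conf, ast);
  }
}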

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java Mon Nov 10 17:50:06 2008
@@ -19,34 +19,127 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import java.util.ArrayList;
+import org.antlr.runtime.tree.CommonTree;
 
+/**
+ * 
+ * This class stores all the information specified in the TABLESAMPLE clause. For example,
+ * for the clause "FROM t TABLESAMPLE(1 OUT OF 2 ON c1)" it will store the numerator
+ * 1, the denominator 2 and the list of expressions (in this case c1) in the appropriate
+ * fields. The aforementioned sampling clause causes the 1st bucket to be picked out of
+ * the 2 buckets created by hashing on c1.
+ *
+ */
 public class TableSample {
+	
+  /**
+   * The numerator of the TABLESAMPLE clause
+   */
   private int numerator;
+  
+  /**
+   * The denominator of the TABLESAMPLE clause
+   */
   private int denominator;
-  private ArrayList<String> cols;
   
-  public TableSample(String num, String den, ArrayList<String> cols) {
+  /**
+   * The list of expressions following the ON part of the TABLESAMPLE clause. This list is
+   * empty when there are no expressions, as in the clause
+   * "FROM t TABLESAMPLE(1 OUT OF 2)". In that case the sampling is done on
+   * the table's clustering column (as specified when the table was created). If
+   * the table does not have a clustering column, a TABLESAMPLE clause
+   * without an ON part is rejected by the compiler.
+   */
+  private ArrayList<CommonTree> exprs;
+  
+  /**
+   * Flag to indicate that input files can be pruned
+   */
+  private boolean inputPruning;
+  
+  /**
+   * Constructs the TableSample given the numerator, denominator and the list of
+   * ON clause expressions
+   * 
+   * @param num The numerator
+   * @param den The denominator
+   * @param exprs The list of expressions in the ON part of the TABLESAMPLE clause
+   */
+  public TableSample(String num, String den, ArrayList<CommonTree> exprs) {
     this.numerator = Integer.valueOf(num).intValue();
     this.denominator = Integer.valueOf(den).intValue();
-    this.cols = cols;
+    this.exprs = exprs;
   }
+  
+  /**
+   * Gets the numerator
+   * 
+   * @return int
+   */
   public int getNumerator() {
     return this.numerator;
   }
+  
+  /**
+   * Sets the numerator
+   * 
+   * @param num The numerator
+   */
   public void setNumerator(int num) {
     this.numerator = num;
   }
+  
+  /**
+   * Gets the denominator
+   * 
+   * @return int
+   */
   public int getDenominator() {
     return this.denominator;
   }
+  
+  /**
+   * Sets the denominator
+   * 
+   * @param den The denominator
+   */
   public void setDenominator(int den) {
     this.denominator = den;
   }
-  public ArrayList<String> getCols() {
-    return this.cols;
+  
+  /**
+   * Gets the ON part's expression list
+   * 
+   * @return ArrayList<CommonTree>
+   */
+  public ArrayList<CommonTree> getExprs() {
+    return this.exprs;
   }
-  public void setCols(ArrayList<String> cols) {
-    this.cols = cols;
+  
+  /**
+   * Sets the expression list
+   * 
+   * @param exprs The expression list
+   */
+  public void setExprs(ArrayList<CommonTree> exprs) {
+    this.exprs = exprs;
   }
 
+  /**
+   * Gets the flag that indicates whether input pruning is possible
+   * 
+   * @return boolean
+   */
+  public boolean getInputPruning() {
+	  return this.inputPruning;
+  }
+ 
+  /**
+   * Sets the flag that indicates whether input pruning is possible or not
+   * 
+   * @param inputPruning true if input pruning is possible
+   */
+  public void setInputPruning(boolean inputPruning) {
+	  this.inputPruning = inputPruning;
+  }
 }
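
As a usage sketch only (not part of this patch): constructing a TableSample for the clause described in the class Javadoc above, using just the API added in this hunk. The wrapper class and method names below are hypothetical, and the ON expression node would normally come from the ANTLR parse tree rather than be passed in by hand.

import java.util.ArrayList;
import org.antlr.runtime.tree.CommonTree;
import org.apache.hadoop.hive.ql.parse.TableSample;

public class TableSampleExample {
  // Equivalent of "TABLESAMPLE(1 OUT OF 2 ON c1)"; c1Expr is the parsed
  // expression node for column c1 produced by the parser.
  public static TableSample sampleOneOutOfTwo(CommonTree c1Expr) {
    ArrayList<CommonTree> onExprs = new ArrayList<CommonTree>();
    onExprs.add(c1Expr);
    TableSample ts = new TableSample("1", "2", onExprs);
    // When the sample expressions match the table's bucketing columns,
    // whole input files can be skipped.
    ts.setInputPruning(true);
    return ts;
  }
}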

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Mon Nov 10 17:50:06 2008
@@ -20,13 +20,27 @@
 
 import java.util.*;
 import java.io.*;
+
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.thrift.TBinarySortableProtocol;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 
+import com.facebook.thrift.protocol.TBinaryProtocol;
+
 public class PlanUtils {
 
   public static enum ExpressionTypes {FIELD, JEXL};
@@ -37,21 +51,45 @@
                           new LinkedHashMap<String, ArrayList<String>> (),
                           new LinkedHashMap<String, partitionDesc> (),
                           new HashMap<String, Operator<? extends Serializable>> (),
-                          new schemaDesc(),
-                          new HashMap<String, schemaDesc> (),
+                          new tableDesc(),
+                          new ArrayList<tableDesc> (),
                           null,
                           Integer.valueOf (1));
   }
   
+  /** 
+   * Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode
+   * and column names (comma separated string).
+   */
   public static tableDesc getDefaultTableDesc(String separatorCode, String columns) {
+    return getDefaultTableDesc(separatorCode, columns, false);
+  }
+
+  /** 
+   * Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode
+   * and column names (comma separated string), and whether the last column should take
+   * the rest of the line.
+   */
+  public static tableDesc getDefaultTableDesc(String separatorCode, String columns,
+      boolean lastColumnTakesRestOfTheLine) {
+    Properties properties = Utilities.makeProperties(
+        Constants.SERIALIZATION_FORMAT, separatorCode,
+        "columns", columns);
+    if (lastColumnTakesRestOfTheLine) {
+      properties.setProperty(
+          Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+          "true");
+    }
     return new tableDesc(
         MetadataTypedColumnsetSerDe.class,
         TextInputFormat.class,
         IgnoreKeyTextOutputFormat.class,
-        Utilities.makeProperties(
-            org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, separatorCode,
-            "columns", columns));    
+        properties);    
   }
+
+  /** 
+   * Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode
+   */
   public static tableDesc getDefaultTableDesc(String separatorCode) {
     return new tableDesc(
         MetadataTypedColumnsetSerDe.class,
@@ -61,26 +99,131 @@
             org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, separatorCode));    
   }
 
+  /** 
+   * Generate the table descriptor of DynamicSerDe and TBinarySortableProtocol.
+   */
+  public static tableDesc getBinarySortableTableDesc(List<FieldSchema> fieldSchemas) {
+    String structName = "binary_sortable_table";
+    return new tableDesc(
+        DynamicSerDe.class,
+        SequenceFileInputFormat.class,
+        SequenceFileOutputFormat.class,
+        Utilities.makeProperties(
+            "name", structName,        
+            org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, TBinarySortableProtocol.class.getName(),
+            org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL, 
+              MetaStoreUtils.getDDLFromFieldSchema(structName, fieldSchemas)
+        ));    
+  }
+
+  /** 
+   * Generate the table descriptor of DynamicSerDe and TBinaryProtocol.
+   */
+  public static tableDesc getBinaryTableDesc(List<FieldSchema> fieldSchemas) {
+    String structName = "binary_table";
+    return new tableDesc(
+        DynamicSerDe.class,
+        SequenceFileInputFormat.class,
+        SequenceFileOutputFormat.class,
+        Utilities.makeProperties(
+            "name", structName,
+            org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName(),
+            org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL, 
+              MetaStoreUtils.getDDLFromFieldSchema(structName, fieldSchemas)
+        ));    
+  }
+  
   
-  // We will make reduce key and reduce value TableDesc with configurable SerDes   
+  /** 
+   * Convert a list of expression descriptors (exprNodeDesc) to a list of FieldSchema.
+   */
+  public static List<FieldSchema> getFieldSchemasFromColumnList(ArrayList<exprNodeDesc> cols, 
+      String fieldPrefix) {
+    List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
+    for (int i=0; i<cols.size(); i++) {
+      schemas.add(TypeInfoUtils.getFieldSchemaFromTypeInfo(fieldPrefix + i, cols.get(i).getTypeInfo()));
+    }
+    return schemas;
+  }
+  
+  /** 
+   * Convert the RowSchema to FieldSchema list.
+   */
+  public static List<FieldSchema> getFieldSchemasFromRowSchema(RowSchema row, String fieldPrefix) {
+    Vector<ColumnInfo> c = row.getSignature();
+    return getFieldSchemasFromColumnInfo(c, fieldPrefix);
+  }
+  
+  /** 
+   * Convert a list of ColumnInfo to a list of FieldSchema.
+   */
+  public static List<FieldSchema> getFieldSchemasFromColumnInfo(Vector<ColumnInfo> cols, String fieldPrefix) {
+    List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
+    for (int i=0; i<cols.size(); i++) {
+      String name = cols.get(i).getInternalName();
+      if (name.equals(Integer.valueOf(i).toString())) {
+        name = fieldPrefix + name; 
+      }
+      schemas.add(TypeInfoUtils.getFieldSchemaFromTypeInfo(name, cols.get(i).getType()));
+    }
+    return schemas;
+  }
+  
+  /**
+   * Create the reduce sink descriptor.
+   * @param keyCols   The columns to be stored in the key
+   * @param valueCols The columns to be stored in the value
+   * @param tag       The tag for this reducesink
+   * @param partitionCols The columns for partitioning.
+   * @param numReducers  The number of reducers.
+   * @param inferNumReducers  whether we should try to infer the number of reducers.
+   * @return The reduceSinkDesc object.
+   */
   public static reduceSinkDesc getReduceSinkDesc(ArrayList<exprNodeDesc> keyCols, 
                                                  ArrayList<exprNodeDesc> valueCols, 
-                                                 int tag, int numPartitionFields, 
+                                                 int tag, 
+                                                 ArrayList<exprNodeDesc> partitionCols, 
                                                  int numReducers, boolean inferNumReducers) {
-     
-    return new reduceSinkDesc(keyCols, valueCols, tag, numPartitionFields, numReducers, inferNumReducers,
-      getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(keyCols.size())),
-      getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(valueCols.size())));
-  }
-
-  // We should read the TableDesc from gWork when it is available.   
-  public static tableDesc getReduceKeyDesc(mapredWork gWork) {
-     return getDefaultTableDesc("" + Utilities.ctrlaCode);
-  }
+    
+    return new reduceSinkDesc(keyCols, valueCols, tag, partitionCols, numReducers, inferNumReducers,
+        getBinarySortableTableDesc(getFieldSchemasFromColumnList(keyCols, "reducesinkkey")),
+        getBinaryTableDesc(getFieldSchemasFromColumnList(valueCols, "reducesinkvalue")));
+  }
+
+  /**
+   * Create the reduce sink descriptor.
+   * @param keyCols   The columns to be stored in the key
+   * @param valueCols The columns to be stored in the value
+   * @param tag       The tag for this reducesink
+   * @param numPartitionFields  The first numPartitionFields of keyCols will be partition columns.
+   *                  If numPartitionFields=-1, then partition randomly.
+   * @param numReducers  The number of reducers.
+   * @param inferNumReducers  whether we should try to infer the number of reducers.
+   * @return The reduceSinkDesc object.
+   */
+  public static reduceSinkDesc getReduceSinkDesc(ArrayList<exprNodeDesc> keyCols, 
+                                                 ArrayList<exprNodeDesc> valueCols, 
+                                                 int tag, 
+                                                 int numPartitionFields, 
+                                                 int numReducers, boolean inferNumReducers) {
+    ArrayList<exprNodeDesc> partitionCols = null;
 
-  // We should read the TableDesc from gWork when it is available.   
-  public static tableDesc getReduceValueDesc(mapredWork gWork, int tag) {
-     return getDefaultTableDesc("" + Utilities.ctrlaCode);
+    if (numPartitionFields >= keyCols.size()) {
+      partitionCols = keyCols;
+    } else if (numPartitionFields >= 0) {
+      partitionCols = new ArrayList<exprNodeDesc>(numPartitionFields);
+      for (int i=0; i<numPartitionFields; i++) {
+        partitionCols.add(keyCols.get(i));
+      }
+    } else {
+      // numPartitionFields = -1 means random partitioning
+      partitionCols = new ArrayList<exprNodeDesc>(1);
+      partitionCols.add(SemanticAnalyzer.getFuncExprNodeDesc("rand"));
+    }
+    
+    return getReduceSinkDesc(keyCols, valueCols, tag, partitionCols, numReducers, inferNumReducers);
   }
   
+  
 }
+  
\ No newline at end of file
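
The notable change here is that reduceSinkDesc now carries explicit partition columns; the old numPartitionFields overload is kept for compatibility and translated into a column list (or a rand() expression when -1 is passed). A minimal sketch of calling the compatibility overload, mirroring the TestExecDriver updates further down; the wrapper class is hypothetical.

import java.util.ArrayList;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
import org.apache.hadoop.hive.ql.plan.reduceSinkDesc;

public class ReduceSinkExample {
  public static reduceSinkDesc keyPartitionedSink() {
    ArrayList<exprNodeDesc> keyCols = new ArrayList<exprNodeDesc>();
    keyCols.add(new exprNodeColumnDesc(String.class, "key"));
    ArrayList<exprNodeDesc> valueCols = new ArrayList<exprNodeDesc>();
    valueCols.add(new exprNodeColumnDesc(String.class, "value"));

    // numPartitionFields = 1: partition on the first key column.
    // Passing -1 instead would make PlanUtils partition on rand().
    return PlanUtils.getReduceSinkDesc(keyCols, valueCols,
        /*tag*/ -1, /*numPartitionFields*/ 1,
        /*numReducers*/ 1, /*inferNumReducers*/ false);
  }
}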

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java Mon Nov 10 17:50:06 2008
@@ -20,6 +20,7 @@
 
 import java.io.Serializable;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -28,12 +29,14 @@
 public class alterTableDesc extends ddlDesc implements Serializable 
 {
   private static final long serialVersionUID = 1L;
-  public static enum alterTableTypes {RENAME, ADDCOLS, REPLACECOLS};
+  public static enum alterTableTypes {RENAME, ADDCOLS, REPLACECOLS, ADDPROPS, ADDSERDE, ADDSERDEPROPS};
     
   alterTableTypes      op;
   String               oldName;
   String               newName;
   List<FieldSchema>    newCols;
+  String               serdeName;
+  Map<String, String>  props;
   
   /**
    * @param oldName old name of the table
@@ -54,6 +57,13 @@
     this.oldName = name;
     this.newCols = newCols;
   }
+  
+  /**
+   * @param alterType type of alter op
+   */
+  public alterTableDesc(alterTableTypes alterType) {
+    this.op = alterType;
+  }
 
   /**
    * @return the old name of the table
@@ -130,4 +140,34 @@
     this.newCols = newCols;
   }
 
+  /**
+   * @return the serdeName
+   */
+  @explain(displayName="deserializer library")
+  public String getSerdeName() {
+    return serdeName;
+  }
+
+  /**
+   * @param serdeName the serdeName to set
+   */
+  public void setSerdeName(String serdeName) {
+    this.serdeName = serdeName;
+  }
+
+  /**
+   * @return the props
+   */
+  @explain(displayName="properties")
+  public Map<String, String> getProps() {
+    return props;
+  }
+
+  /**
+   * @param props the props to set
+   */
+  public void setProps(Map<String, String> props) {
+    this.props = props;
+  }
+
 }
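
A short sketch (not part of this patch) of how the new type-only constructor and the props setter fit together for the ADDPROPS case; the wrapper class is hypothetical and the target table name would be filled in through the class's existing oldName accessors, which are not shown in this hunk.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.ql.plan.alterTableDesc;

public class AlterTablePropsExample {
  public static alterTableDesc addProps() {
    // Build the descriptor for an ALTER TABLE ... SET TBLPROPERTIES operation.
    alterTableDesc desc = new alterTableDesc(alterTableDesc.alterTableTypes.ADDPROPS);
    Map<String, String> props = new HashMap<String, String>();
    props.put("comment", "nightly snapshot");
    desc.setProps(props);
    return desc;
  }
}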

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java Mon Nov 10 17:50:06 2008
@@ -19,20 +19,22 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 
 public class exprNodeColumnDesc extends exprNodeDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String column;
-  private boolean isVirtual;
   
   public exprNodeColumnDesc() {}
   public exprNodeColumnDesc(TypeInfo typeInfo, String column) {
     super(typeInfo);
     this.column = column;
-    this.isVirtual = isVirtual;
   }
   public exprNodeColumnDesc(Class<?> c, String column) {
     super(TypeInfoFactory.getPrimitiveTypeInfo(c));
@@ -54,4 +56,11 @@
   public String getExprString() {
     return getColumn();
   }
+
+  public List<String> getCols() {
+  	List<String> lst = new ArrayList<String>();
+  	lst.add(column);
+  	return lst;
+  }
+
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java Mon Nov 10 17:50:06 2008
@@ -19,9 +19,11 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.List;
 
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
 
 public class exprNodeDesc implements Serializable {  
   private static final long serialVersionUID = 1L;
@@ -51,4 +53,9 @@
   public String getTypeString() {
     return typeInfo.getTypeName();
   }
+
+  public List<String> getCols() {
+    return null;
+  }
+
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java Mon Nov 10 17:50:06 2008
@@ -19,8 +19,13 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+
 
 public class exprNodeFieldDesc extends exprNodeDesc implements Serializable {
   private static final long serialVersionUID = 1L;
@@ -67,5 +72,12 @@
   @Override
   public String getExprString() {
     return this.desc.getExprString() + "." + this.fieldName;
-  }
+  }
+
+  public List<String> getCols() {
+    List<String> colList = new ArrayList<String>();
+    if (desc != null) 
+    	colList = Utilities.mergeUniqElems(colList, desc.getCols());    
+    return colList;
+  }
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java Mon Nov 10 17:50:06 2008
@@ -21,12 +21,15 @@
 import java.io.Serializable;
 import java.lang.reflect.Method;
 import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
 
 /**
  * The reason that we have to store UDFClass as well as UDFMethod is because
@@ -133,4 +136,18 @@
     
     return sb.toString();
   }
+
+  public List<String> getCols() {
+    List<String> colList = new ArrayList<String>();
+    if (children != null) {
+      int pos = 0;
+      while (pos < children.size()) {
+        List<String> colCh = children.get(pos).getCols();
+        colList = Utilities.mergeUniqElems(colList, colCh);
+        pos++;
+      }
+    }
+
+    return colList;
+  }
 }
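
The getCols() overrides added to the expression descriptors collect the set of referenced columns bottom-up through Utilities.mergeUniqElems. A self-contained sketch of the behaviour that helper is assumed to have (the real implementation lives in ql.exec.Utilities and is not part of this hunk):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class MergeUniqSketch {

  // Assumed behaviour of Utilities.mergeUniqElems(dest, src): append the
  // elements of src that dest does not already contain and return dest.
  // A null src (e.g. the base exprNodeDesc.getCols()) is treated as empty.
  public static List<String> mergeUniqElems(List<String> dest, List<String> src) {
    if (src == null) {
      return dest;
    }
    for (String s : src) {
      if (!dest.contains(s)) {
        dest.add(s);
      }
    }
    return dest;
  }

  public static void main(String[] args) {
    List<String> cols = new ArrayList<String>();
    mergeUniqElems(cols, Arrays.asList("key", "value"));
    mergeUniqElems(cols, Arrays.asList("value"));  // duplicate is dropped
    System.out.println(cols);                      // [key, value]
  }
}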

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java Mon Nov 10 17:50:06 2008
@@ -19,8 +19,13 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+
 
 
 public class exprNodeIndexDesc extends exprNodeDesc implements Serializable {
@@ -61,5 +66,15 @@
   @Override
   public String getExprString() {
     return this.desc.getExprString() + "[" + this.index.getExprString() + "]";
-  }
-}
\ No newline at end of file
+  }
+  
+  public List<String> getCols() {
+    List<String> colList = new ArrayList<String>();
+    if (desc != null) 
+    	colList = Utilities.mergeUniqElems(colList, desc.getCols());
+    if (index != null)
+    	colList = Utilities.mergeUniqElems(colList, index.getCols());
+    
+    return colList;
+  }
+}

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java Mon Nov 10 17:50:06 2008
@@ -19,99 +19,91 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-import java.util.Properties;
+import java.util.List;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.hive.ql.plan.tableDesc;
 
 @explain(displayName="Fetch Operator")
 public class fetchWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  //  private loadFileDesc loadFileWork;
-  //  private tableDesc    tblDesc;
-  private Path srcDir;
-  private Properties schema;
-  private Class<? extends Deserializer> deserializerClass;
-  private Class<? extends InputFormat> inputFormatClass;
+  private Path tblDir;
+  private tableDesc tblDesc;
+
+  private List<Path> partDir;
+  private List<partitionDesc> partDesc;
+
   private int limit;
 
   public fetchWork() { }
 
-	/**
-	 * @param deserializer
-	 * @param deserializerClass
-	 * @param inputFormatClass
-	 * @param schema
-	 * @param srcDir
-	 */
-	public fetchWork(Path srcDir,
-			Class<? extends Deserializer> deserializerClass,
-			Class<? extends InputFormat> inputFormatClass, Properties schema,
-			int limit) {
-		this.srcDir = srcDir;
-		this.deserializerClass = deserializerClass;
-		this.inputFormatClass = inputFormatClass;
-		this.schema = schema;
+	public fetchWork(Path tblDir, tableDesc tblDesc, int limit) {
+		this.tblDir = tblDir;
+		this.tblDesc = tblDesc;
 		this.limit = limit;
 	}
 
+	public fetchWork(List<Path> partDir, List<partitionDesc> partDesc, int limit) {
+		this.partDir = partDir;
+		this.partDesc = partDesc;
+		this.limit = limit;
+	}
+	
 	/**
-	 * @return the srcDir
+	 * @return the tblDir
 	 */
-  @explain(displayName="source")
-	public Path getSrcDir() {
-		return srcDir;
+	public Path getTblDir() {
+		return tblDir;
 	}
 
 	/**
-	 * @param srcDir the srcDir to set
+	 * @param tblDir the tblDir to set
 	 */
-	public void setSrcDir(Path srcDir) {
-		this.srcDir = srcDir;
+	public void setTblDir(Path tblDir) {
+		this.tblDir = tblDir;
 	}
 
 	/**
-	 * @return the schema
+	 * @return the tblDesc
 	 */
-	public Properties getSchema() {
-		return schema;
+	public tableDesc getTblDesc() {
+		return tblDesc;
 	}
 
 	/**
-	 * @param schema the schema to set
+	 * @param tblDesc the tblDesc to set
 	 */
-	public void setSchema(Properties schema) {
-		this.schema = schema;
+	public void setTblDesc(tableDesc tblDesc) {
+		this.tblDesc = tblDesc;
 	}
 
 	/**
-	 * @return the deserializerClass
+	 * @return the partDir
 	 */
-	public Class<? extends Deserializer> getDeserializerClass() {
-		return deserializerClass;
+	public List<Path> getPartDir() {
+		return partDir;
 	}
 
 	/**
-	 * @param deserializerClass the deserializerClass to set
+	 * @param partDir the partDir to set
 	 */
-	public void setDeserializerClass(Class<? extends Deserializer> deserializerClass) {
-		this.deserializerClass = deserializerClass;
+	public void setPartDir(List<Path> partDir) {
+		this.partDir = partDir;
 	}
 
 	/**
-	 * @return the inputFormatClass
+	 * @return the partDesc
 	 */
-	public Class<? extends InputFormat> getInputFormatClass() {
-		return inputFormatClass;
+	public List<partitionDesc> getPartDesc() {
+		return partDesc;
 	}
 
 	/**
-	 * @param inputFormatClass the inputFormatClass to set
+	 * @param partDesc the partDesc to set
 	 */
-	public void setInputFormatClass(Class<? extends InputFormat> inputFormatClass) {
-		this.inputFormatClass = inputFormatClass;
+	public void setPartDesc(List<partitionDesc> partDesc) {
+		this.partDesc = partDesc;
 	}
 
 	/**
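
fetchWork now describes its input either as a single table directory plus one tableDesc, or as parallel lists of partition directories and partitionDesc objects. A hedged sketch of the two new constructors in use; the wrapper class is hypothetical and the limit value simply flows through to the existing limit field.

import java.util.List;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.plan.fetchWork;
import org.apache.hadoop.hive.ql.plan.partitionDesc;
import org.apache.hadoop.hive.ql.plan.tableDesc;

public class FetchWorkExample {
  // Unpartitioned table: one directory plus one tableDesc.
  public static fetchWork forTable(Path dir, tableDesc tbl, int limit) {
    return new fetchWork(dir, tbl, limit);
  }

  // Partitioned table: parallel lists of partition directories and descriptors.
  public static fetchWork forPartitions(List<Path> dirs, List<partitionDesc> parts, int limit) {
    return new fetchWork(dirs, parts, limit);
  }
}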

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java Mon Nov 10 17:50:06 2008
@@ -23,11 +23,12 @@
   /** Group-by Mode:
    *  COMPLETE: complete 1-phase aggregation: aggregate, evaluate
    *  PARTIAL1: partial aggregation - first phase:  aggregate, evaluatePartial
-   *  PARTIAL2: partial aggregation - second phase: aggregatePartial, evaluate
+   *  PARTIAL2: partial aggregation - second phase: aggregatePartial, evaluatePartial
+   *  FINAL: partial aggregation - final phase: aggregatePartial, evaluate
    *  HASH: the same as PARTIAL1 but use hash-table-based aggregation  
    */
   private static final long serialVersionUID = 1L;
-  public static enum Mode { COMPLETE, PARTIAL1, PARTIAL2, HASH };
+  public static enum Mode { COMPLETE, PARTIAL1, PARTIAL2, FINAL, HASH };
   private Mode mode;
   private java.util.ArrayList<exprNodeDesc> keys;
   private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators;

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java Mon Nov 10 17:50:06 2008
@@ -39,10 +39,10 @@
 
   // map<->reduce interface
   // schema of the map-reduce 'key' object - this is homogeneous
-  private schemaDesc keySchema;
+  private tableDesc keyDesc;
 
   // schema of the map-reduce 'val' object - this is heterogeneous
-  private HashMap<String,schemaDesc> aliasToSchema;
+  private List<tableDesc> tagToValueDesc;
 
   private Operator<?> reducer;
   
@@ -57,16 +57,16 @@
     final LinkedHashMap<String,ArrayList<String>> pathToAliases,
     final LinkedHashMap<String,partitionDesc> pathToPartitionInfo,
     final HashMap<String,Operator<? extends Serializable>> aliasToWork,
-    final schemaDesc keySchema,
-    HashMap<String,schemaDesc> aliasToSchema,
+    final tableDesc keyDesc,
+    List<tableDesc> tagToValueDesc,
     final Operator<?> reducer,
     final Integer numReduceTasks) {
     this.command = command;
     this.pathToAliases = pathToAliases;
     this.pathToPartitionInfo = pathToPartitionInfo;
     this.aliasToWork = aliasToWork;
-    this.keySchema = keySchema;
-    this.aliasToSchema = aliasToSchema;
+    this.keyDesc = keyDesc;
+    this.tagToValueDesc = tagToValueDesc;
     this.reducer = reducer;
     this.numReduceTasks = numReduceTasks;
   }
@@ -100,17 +100,17 @@
   public void setAliasToWork(final HashMap<String,Operator<? extends Serializable>> aliasToWork) {
     this.aliasToWork=aliasToWork;
   }
-  public schemaDesc getKeySchema() {
-    return this.keySchema;
+  public tableDesc getKeyDesc() {
+    return this.keyDesc;
   }
-  public void setKeySchema(final schemaDesc keySchema) {
-    this.keySchema = keySchema;
+  public void setKeyDesc(final tableDesc keyDesc) {
+    this.keyDesc = keyDesc;
   }
-  public HashMap<String,schemaDesc> getAliasToSchema() {
-    return this.aliasToSchema;
+  public List<tableDesc> getTagToValueDesc() {
+    return tagToValueDesc;
   }
-  public void setAliasToSchema(final HashMap<String,schemaDesc> aliasToSchema) {
-    this.aliasToSchema = aliasToSchema;
+  public void setTagToValueDesc(final List<tableDesc> tagToValueDesc) {
+    this.tagToValueDesc = tagToValueDesc;
   }
 
   @explain(displayName="Reduce Operator Tree")

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java Mon Nov 10 17:50:06 2008
@@ -23,20 +23,34 @@
 @explain(displayName="Reduce Output Operator")
 public class reduceSinkDesc implements Serializable {
   private static final long serialVersionUID = 1L;
-  // these are the expressions that go into the reduce key
+  /**
+   * Key columns are passed to reducer in the "key". 
+   */
   private java.util.ArrayList<exprNodeDesc> keyCols;
+  /**
+   * Value columns are passed to reducer in the "value". 
+   */
   private java.util.ArrayList<exprNodeDesc> valueCols;
-  // Describe how to serialize the key
+  /** 
+   * Describe how to serialize the key.
+   */
   private tableDesc keySerializeInfo;
-  // Describe how to serialize the value
+  /**
+   * Describe how to serialize the value.
+   */
   private tableDesc valueSerializeInfo;
   
+  /**
+   * The tag for this reducesink descriptor.
+   */
   private int tag;
   
-  // The partition key will be the first #numPartitionFields of keyCols
-  // If the value is 0, then all data will go to a single reducer
-  // If the value is -1, then data will go to a random reducer 
-  private int numPartitionFields;
+  /**
+   * The partition columns (CLUSTER BY or DISTRIBUTE BY in Hive language).
+   * Partition columns decide the reducer that the current row goes to.
+   * Partition columns are not passed to reducer.
+   */
+  private java.util.ArrayList<exprNodeDesc> partitionCols;
   
   private boolean inferNumReducers;
   private int numReducers;
@@ -47,7 +61,7 @@
     (java.util.ArrayList<exprNodeDesc> keyCols,
      java.util.ArrayList<exprNodeDesc> valueCols,
      int tag,
-     int numPartitionFields,
+     java.util.ArrayList<exprNodeDesc> partitionCols,
      int numReducers,
      boolean inferNumReducers,
      final tableDesc keySerializeInfo,
@@ -57,7 +71,7 @@
     this.tag = tag;
     this.numReducers = numReducers;
     this.inferNumReducers = inferNumReducers;
-    this.numPartitionFields = numPartitionFields;
+    this.partitionCols = partitionCols;
     this.keySerializeInfo = keySerializeInfo;
     this.valueSerializeInfo = valueSerializeInfo;
   }
@@ -80,12 +94,12 @@
     this.valueCols=valueCols;
   }
   
-  @explain(displayName="# partition fields")
-  public int getNumPartitionFields() {
-    return this.numPartitionFields;
+  @explain(displayName="Map-reduce partition columns")
+  public java.util.ArrayList<exprNodeDesc> getPartitionCols() {
+    return this.partitionCols;
   }
-  public void setNumPartitionFields(int numPartitionFields) {
-    this.numPartitionFields = numPartitionFields;
+  public void setPartitionCols(final java.util.ArrayList<exprNodeDesc> partitionCols) {
+    this.partitionCols = partitionCols;
   }
   
   @explain(displayName="tag")

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java Mon Nov 10 17:50:06 2008
@@ -24,11 +24,18 @@
 public class selectDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList;
+  private boolean selectStar;
   public selectDesc() { }
+  public selectDesc(final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList) {
+    this(colList, false);
+  }
+  
   public selectDesc(
-    final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList) {
+    final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList, final boolean selectStar) {
     this.colList = colList;
+    this.selectStar = selectStar;
   }
+  
   @explain(displayName="expressions")
   public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> getColList() {
     return this.colList;
@@ -36,4 +43,17 @@
   public void setColList(final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList) {
     this.colList=colList;
   }
+  
+  /**
+   * @return the selectStar
+   */
+  public boolean isSelectStar() {
+    return selectStar;
+  }
+  /**
+   * @param selectStar the selectStar to set
+   */
+  public void setSelectStar(boolean selectStar) {
+    this.selectStar = selectStar;
+  }
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Mon Nov 10 17:50:06 2008
@@ -26,14 +26,19 @@
 import org.apache.log4j.*;
 import java.net.URL;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 
 import org.apache.commons.lang.StringUtils;
 
 public class SessionState {
-  
+
+  public static Log LOG = LogFactory.getLog("SessionState");
+  public static LogHelper console = new LogHelper(LOG);
+
   /**
    * current configuration
    */ 
@@ -146,11 +151,13 @@
   public static SessionState start(HiveConf conf) {
     ss = new SessionState (conf);
     ss.getConf().setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
+    console = new LogHelper(LOG);
     return (ss);
   }
 
   public static SessionState start(SessionState startSs) {
     ss = startSs;
+    console = new LogHelper(LOG);
     ss.getConf().setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
     return ss;
   }
@@ -162,6 +169,10 @@
     return ss;
   }
 
+  public static LogHelper getConsole() {
+    return console;
+  }
+
   private static String makeSessionId() {
     GregorianCalendar gc = new GregorianCalendar();
     String userid = System.getProperty("user.name");
@@ -242,4 +253,107 @@
       LOG.error(error + StringUtils.defaultString(detail));
     }
   }
+
+  public static String validateFile(Set<String> curFiles, String newFile) {
+    SessionState ss = SessionState.get();
+    LogHelper console = SessionState.getConsole();
+    Configuration conf = (ss == null) ? new Configuration() : ss.getConf();
+
+    try {
+      if(Utilities.realFile(newFile, conf) != null)
+        return newFile;
+      else {
+        console.printError(newFile + " does not exist");
+        return null;
+      }
+    } catch (IOException e) {
+      console.printError("Unable to validate " + newFile + "\nException: " + e.getMessage(),
+                         "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      return null;
+    }
+  }
+
+  public static interface ResourceHook {
+    public String preHook(Set<String> cur, String s);
+  }
+
+  public static enum ResourceType {
+    FILE(new ResourceHook () {
+        public String preHook(Set<String> cur, String s) { return validateFile(cur, s); }
+      });
+
+    public ResourceHook hook;
+
+    ResourceType(ResourceHook hook) {
+      this.hook = hook;
+    }
+  };
+
+  public static ResourceType find_resource_type(String s) {
+    
+    s = s.trim().toUpperCase();
+    
+    try {
+      return ResourceType.valueOf(s);
+    } catch (IllegalArgumentException e) {
+    }
+    
+    // try singular
+    if(s.endsWith("S")) {
+      s = s.substring(0, s.length()-1);
+    } else {
+      return null;
+    }
+
+    try {
+      return ResourceType.valueOf(s);
+    } catch (IllegalArgumentException e) {
+    }
+    return null;
+  }
+
+  private HashMap<ResourceType, HashSet<String>> resource_map = new HashMap<ResourceType, HashSet<String>> ();
+
+  public void add_resource(ResourceType t, String value) {
+    if(resource_map.get(t) == null) {
+      resource_map.put(t, new HashSet<String> ());
+    }
+
+    String fnlVal = value;
+    if(t.hook != null) {
+      fnlVal = t.hook.preHook(resource_map.get(t), value);
+      if(fnlVal == null)
+        return;
+    }
+    resource_map.get(t).add(fnlVal);
+  }
+
+  public boolean delete_resource(ResourceType t, String value) {
+    if(resource_map.get(t) == null) {
+      return false;
+    }
+    return (resource_map.get(t).remove(value));
+  }
+
+  public Set<String> list_resource(ResourceType t, List<String> filter) {
+    if(resource_map.get(t) == null) {
+      return null;
+    }
+    Set<String> orig = resource_map.get(t);
+    if(filter == null) {
+      return orig;
+    } else {
+      Set<String> fnl = new HashSet<String> ();
+      for(String one: orig) {
+        if(filter.contains(one)) {
+          fnl.add(one);
+        }
+      }
+      return fnl;
+    }
+  }
+
+  public void delete_resource(ResourceType t) {
+    resource_map.remove (t);
+  }
 }
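
A usage sketch (not part of this patch) of the resource-management methods added to SessionState: find_resource_type accepts singular or plural names, and the FILE hook validates that the path exists before it is recorded. The wrapper class and method below are hypothetical.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ResourceExample {
  public static void registerFile(HiveConf conf, String path) {
    SessionState ss = SessionState.start(conf);
    // "FILE" or "FILES" both resolve to ResourceType.FILE.
    SessionState.ResourceType t = SessionState.find_resource_type("FILES");
    ss.add_resource(t, path);                    // FILE hook checks the path exists
    System.out.println(ss.list_resource(t, null));
    ss.delete_resource(t, path);
  }
}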

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java Mon Nov 10 17:50:06 2008
@@ -43,7 +43,7 @@
   public PrimitiveTypeInfo() {}
 
   public String getTypeName() {
-    return ObjectInspectorUtils.getClassShortName(primitiveClass.getName());
+    return ObjectInspectorUtils.getClassShortName(primitiveClass);
   }
   
   

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java Mon Nov 10 17:50:06 2008
@@ -3,11 +3,16 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -109,4 +114,67 @@
     return result;
   }
     
+  public static String getFieldSchemaTypeFromTypeInfo(TypeInfo typeInfo) {
+    switch(typeInfo.getCategory()) {
+      case PRIMITIVE: {
+        return ObjectInspectorUtils.getClassShortName(typeInfo.getPrimitiveClass());
+      }
+      case LIST: {
+        String elementType = getFieldSchemaTypeFromTypeInfo(typeInfo.getListElementTypeInfo());
+        return org.apache.hadoop.hive.serde.Constants.LIST_TYPE_NAME + "<" + elementType + ">";
+      }
+      case MAP: {
+        String keyType = getFieldSchemaTypeFromTypeInfo(typeInfo.getMapKeyTypeInfo());
+        String valueType = getFieldSchemaTypeFromTypeInfo(typeInfo.getMapValueTypeInfo());
+        return org.apache.hadoop.hive.serde.Constants.MAP_TYPE_NAME + "<" +
+          keyType + "," + valueType + ">";
+      }
+      case STRUCT: {
+        throw new RuntimeException("Complex struct type not supported!");
+      }
+      default: {
+        throw new RuntimeException("Unknown type!");
+      }
+    }
+  }
+  
+  /**
+   * Convert TypeInfo to FieldSchema. 
+   */
+  public static FieldSchema getFieldSchemaFromTypeInfo(String fieldName, TypeInfo typeInfo) {
+    return new FieldSchema(
+        fieldName, getFieldSchemaTypeFromTypeInfo(typeInfo), "generated by TypeInfoUtils.getFieldSchemaFromTypeInfo"
+    );
+  }
+
+  /**
+   * The mapping from type name in DDL to the Java class. 
+   */
+  public static final Map<String, Class<?>> TypeNameToClass = new HashMap<String, Class<?>>();
+  static {
+    TypeNameToClass.put(Constants.BOOLEAN_TYPE_NAME, Boolean.class);
+    TypeNameToClass.put(Constants.TINYINT_TYPE_NAME, Byte.class);
+    TypeNameToClass.put(Constants.SMALLINT_TYPE_NAME, Short.class);
+    TypeNameToClass.put(Constants.INT_TYPE_NAME, Integer.class);
+    TypeNameToClass.put(Constants.BIGINT_TYPE_NAME, Long.class);
+    TypeNameToClass.put(Constants.FLOAT_TYPE_NAME, Float.class);
+    TypeNameToClass.put(Constants.DOUBLE_TYPE_NAME, Double.class);
+    TypeNameToClass.put(Constants.STRING_TYPE_NAME, String.class);
+    TypeNameToClass.put(Constants.DATE_TYPE_NAME, java.sql.Date.class);
+    // These types are not supported yet. 
+    // TypeNameToClass.put(Constants.DATETIME_TYPE_NAME);
+    // TypeNameToClass.put(Constants.TIMESTAMP_TYPE_NAME);
+  }
+  
+  /**
+   * Return the primitive type corresponding to the field schema
+   * @param field The field schema
+   * @return The TypeInfo object, or null if the field is not a primitive type.
+   */
+  public static TypeInfo getPrimitiveTypeInfoFromFieldSchema(FieldSchema field) {
+    String type = field.getType();
+    
+    Class<?> c = TypeNameToClass.get(type);
+    return c == null ? null : TypeInfoFactory.getPrimitiveTypeInfo(c);
+  }
 }
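
A small round-trip sketch (not part of this patch) using only the methods added above: a primitive TypeInfo is turned into a FieldSchema, and the FieldSchema's DDL type name is mapped back to a primitive TypeInfo through TypeNameToClass. Non-primitive types return null on the way back.

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils;

public class TypeInfoRoundTrip {
  public static void main(String[] args) {
    // TypeInfo -> FieldSchema
    TypeInfo stringType = TypeInfoFactory.getPrimitiveTypeInfo(String.class);
    FieldSchema fs = TypeInfoUtils.getFieldSchemaFromTypeInfo("col0", stringType);
    System.out.println(fs.getName() + " : " + fs.getType());

    // FieldSchema -> TypeInfo (null for non-primitive types)
    TypeInfo back = TypeInfoUtils.getPrimitiveTypeInfoFromFieldSchema(fs);
    System.out.println(back == null ? "not primitive" : back.getTypeName());
  }
}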

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java Mon Nov 10 17:50:06 2008
@@ -37,9 +37,9 @@
     mCount = 0;
   }
 
-  public boolean aggregate(String o) {
-    if (o != null && !o.isEmpty()) {
-      mSum += Double.parseDouble(o);
+  public boolean aggregate(Double o) {
+    if (o != null) {
+      mSum += o;
       mCount ++;
     }
     return true;
@@ -60,9 +60,9 @@
     return true;
   }
 
-  public String evaluate() {
+  public Double evaluate() {
     // This is SQL standard - average of zero items should be null.
-    return mCount == 0 ? null : String.valueOf(mSum / mCount);
+    return mCount == 0 ? null : Double.valueOf(mSum / mCount);
   }
 
 }
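
The UDAF changes in this commit switch the aggregation interfaces from parsing Strings to working on Double (and Long for count). A sketch of driving UDAFAvg directly, only to illustrate the null handling; the init() call is assumed from the surrounding class (its body is only partly visible in the hunk above), and in practice the group-by operator drives these methods.

import org.apache.hadoop.hive.ql.udf.UDAFAvg;

public class UdafAvgSketch {
  public static void main(String[] args) {
    UDAFAvg avg = new UDAFAvg();
    avg.init();
    for (Double d : new Double[] { 1.0, 2.0, null, 3.0 }) {
      avg.aggregate(d);                  // nulls are skipped, matching SQL semantics
    }
    System.out.println(avg.evaluate());  // 2.0
  }
}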

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java Mon Nov 10 17:50:06 2008
@@ -42,17 +42,17 @@
     return true;
   }
 
-  public String evaluatePartial() {
-    return Long.valueOf(mCount).toString();
+  public Long evaluatePartial() {
+    return Long.valueOf(mCount);
   }
 
-  public boolean aggregatePartial(String count) {
-    mCount += Long.parseLong(count);
+  public boolean aggregatePartial(Long count) {
+    mCount += count;
     return true;
   }
 
-  public String evaluate() {
-    return Long.valueOf(mCount).toString();
+  public Long evaluate() {
+    return Long.valueOf(mCount);
   }
 
   

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java Mon Nov 10 17:50:06 2008
@@ -37,28 +37,28 @@
     mEmpty = true;
   }
 
-  public boolean aggregate(String o) {
-    if (o != null && !o.isEmpty()) {
+  public boolean aggregate(Double o) {
+    if (o != null) {
       if (mEmpty) {
-        mMax = Double.parseDouble(o);
+        mMax = o;
         mEmpty = false;
       } else {
-        mMax = Math.max(mMax, Double.parseDouble(o));
+        mMax = Math.max(mMax, o);
       }
     }
     return true;
   }
   
-  public String evaluatePartial() {
-    return mEmpty ? null : String.valueOf(mMax);
+  public Double evaluatePartial() {
+    return mEmpty ? null : Double.valueOf(mMax);
   }
 
-  public boolean aggregatePartial(String o) {
+  public boolean aggregatePartial(Double o) {
     return aggregate(o);
   }
 
-  public String evaluate() {
-    return mEmpty ? null : String.valueOf(mMax);
+  public Double evaluate() {
+    return mEmpty ? null : Double.valueOf(mMax);
   }
 
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java Mon Nov 10 17:50:06 2008
@@ -37,28 +37,28 @@
     mEmpty = true;
   }
 
-  public boolean aggregate(String o) {
-    if (o != null && !o.isEmpty()) {
+  public boolean aggregate(Double o) {
+    if (o != null) {
       if (mEmpty) {
-        mMin = Double.parseDouble(o);
+        mMin = o;
         mEmpty = false;
       } else {
-        mMin = Math.min(mMin, Double.parseDouble(o));
+        mMin = Math.min(mMin, o);
       }
     }
     return true;
   }
   
-  public String evaluatePartial() {
-    return mEmpty ? null : String.valueOf(mMin);
+  public Double evaluatePartial() {
+    return mEmpty ? null : Double.valueOf(mMin);
   }
 
-  public boolean aggregatePartial(String o) {
+  public boolean aggregatePartial(Double o) {
     return aggregate(o);
   }
 
-  public String evaluate() {
-    return mEmpty ? null : String.valueOf(mMin);
+  public Double evaluate() {
+    return mEmpty ? null : Double.valueOf(mMin);
   }
 
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java Mon Nov 10 17:50:06 2008
@@ -37,30 +37,30 @@
     mEmpty = true;
   }
 
-  public boolean aggregate(String o) {
-    if (o != null && !o.isEmpty()) {
-      mSum += Double.parseDouble(o);
+  public boolean aggregate(Double o) {
+    if (o != null) {
+      mSum += o;
       mEmpty = false;
     }
     return true;
   }
   
-  public String evaluatePartial() {
+  public Double evaluatePartial() {
     // This is SQL standard - sum of zero items should be null.
-    return mEmpty ? null : new Double(mSum).toString();
+    return mEmpty ? null : new Double(mSum);
   }
 
-  public boolean aggregatePartial(String o) {
-    if (o != null && !o.isEmpty()) {
-      mSum += Double.parseDouble(o);
+  public boolean aggregatePartial(Double o) {
+    if (o != null) {
+      mSum += o;
       mEmpty = false;
     }
     return true;
   }
 
-  public String evaluate() {
+  public Double evaluate() {
     // This is SQL standard - sum of zero items should be null.
-    return mEmpty ? null : new Double(mSum).toString();
+    return mEmpty ? null : new Double(mSum);
   }
 
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java Mon Nov 10 17:50:06 2008
@@ -78,12 +78,4 @@
     }
   }
   
-  public String evaluate(java.sql.Date i) {
-    if (i == null) {
-      return null;
-    } else {
-      return i.toString();
-    }
-  }
-  
 }

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Mon Nov 10 17:50:06 2008
@@ -646,6 +646,10 @@
     // Do semantic analysis and plan generation
     Context ctx = new Context(conf);
     ctx.makeScratchDir();
+    while((ast.getToken() == null) && (ast.getChildCount() > 0)) {
+      ast = (CommonTree)ast.getChild(0);
+    }
+    
     sem.analyze(ast, ctx);
     ctx.removeScratchDir();
     return sem.getRootTasks();
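The loop added above unwraps ANTLR "nil" nodes: while the tree root carries no token, it is replaced by its first child, so sem.analyze() always receives the actual statement root. A self-contained sketch of the same idiom against the ANTLR 3 runtime, using a hand-built tree and a hypothetical TOK_QUERY token rather than Hive's parser output:

  import org.antlr.runtime.CommonToken;
  import org.antlr.runtime.tree.CommonTree;

  public class UnwrapNilSketch {
    public static void main(String[] args) {
      // A "nil" wrapper (token == null) around one real node, as ANTLR often produces.
      CommonTree wrapper = new CommonTree();
      CommonTree real = new CommonTree(new CommonToken(1, "TOK_QUERY"));
      wrapper.addChild(real);

      CommonTree ast = wrapper;
      while (ast.getToken() == null && ast.getChildCount() > 0) {
        ast = (CommonTree) ast.getChild(0);      // descend past wrapper nodes
      }
      System.out.println(ast.getToken().getText());   // prints TOK_QUERY
    }
  }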

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=712905&r1=712904&r2=712905&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Mon Nov 10 17:50:06 2008
@@ -198,7 +198,7 @@
   @SuppressWarnings("unchecked")
   private void populateMapRedPlan1(Table src) {
     mr.setNumReduceTasks(Integer.valueOf(1));
-
+    
     // map-side work
     Operator<reduceSinkDesc> op1 = OperatorFactory.get
       (PlanUtils.getReduceSinkDesc
@@ -206,6 +206,8 @@
         Utilities.makeList(new exprNodeColumnDesc(String.class, "value")), -1, 1, -1, false));
 
     Utilities.addMapWork(mr, src, "a", op1);
+    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
+    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
     Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc
@@ -230,6 +232,8 @@
                            new exprNodeColumnDesc(String.class, "value")), -1, 1, -1, false));
 
     Utilities.addMapWork(mr, src, "a", op1);
+    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
+    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
     Operator<fileSinkDesc> op4 = OperatorFactory.get(new fileSinkDesc
@@ -261,6 +265,8 @@
         (new exprNodeColumnDesc(String.class, "value")), Byte.valueOf((byte)0), 1, -1, false));
 
     Utilities.addMapWork(mr, src, "a", op1);
+    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
+    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     Operator<reduceSinkDesc> op2 = OperatorFactory.get
       (PlanUtils.getReduceSinkDesc
@@ -270,10 +276,7 @@
         Integer.MAX_VALUE, -1, false));
 
     Utilities.addMapWork(mr, src2, "b", op2);
-
-    // just to satisfy the constraint that each tag must define a schema
-    mr.getAliasToSchema().put("a", new schemaDesc(""));
-    mr.getAliasToSchema().put("b", new schemaDesc(""));
+    mr.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
 
     // reduce side work
     Operator<fileSinkDesc> op4 = OperatorFactory.get(new fileSinkDesc
@@ -318,6 +321,8 @@
                                                         new exprNodeColumnDesc(String.class, "value"))), op0);
 
     Utilities.addMapWork(mr, src, "a", op4);
+    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
+    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
     Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc
@@ -348,6 +353,8 @@
                                                         new exprNodeColumnDesc(String.class, "value"))), op0);
 
     Utilities.addMapWork(mr, src, "a", op4);
+    mr.setKeyDesc(op0.getConf().getKeySerializeInfo());
+    mr.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());
 
     // reduce side work
     Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc
@@ -384,6 +391,8 @@
                                                         new exprNodeColumnDesc(String.class, "value"))), op0);
 
     Utilities.addMapWork(mr, src, "a", op4);
+    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
+    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
     Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java Mon Nov 10 17:50:06 2008
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.Serializable;
+
+/**
+ * Simple test object
+ */
+public class JavaTestObjFlatFileInputFormat implements Serializable {
+  public String s;
+  public int num;
+  public JavaTestObjFlatFileInputFormat(String s, int num) {
+    this.s = s;
+    this.num = num;
+  }
+  public JavaTestObjFlatFileInputFormat() { 
+  }
+}
+

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java Mon Nov 10 17:50:06 2008
@@ -0,0 +1,212 @@
+// File generated by hadoop record compiler. Do not edit.
+package org.apache.hadoop.hive.ql.io;
+
+public class RecordTestObj extends org.apache.hadoop.record.Record {
+  private static final org.apache.hadoop.record.meta.RecordTypeInfo _rio_recTypeInfo;
+  private static org.apache.hadoop.record.meta.RecordTypeInfo _rio_rtiFilter;
+  private static int[] _rio_rtiFilterFields;
+  static {
+    _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo("RecordTestObj");
+    _rio_recTypeInfo.addField("s", org.apache.hadoop.record.meta.TypeID.StringTypeID);
+    _rio_recTypeInfo.addField("num", org.apache.hadoop.record.meta.TypeID.LongTypeID);
+  }
+  
+  private String s;
+  private long num;
+  public RecordTestObj() { }
+  public RecordTestObj(
+    final String s,
+    final long num) {
+    this.s = s;
+    this.num = num;
+  }
+  public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
+    return _rio_recTypeInfo;
+  }
+  public static void setTypeFilter(org.apache.hadoop.record.meta.RecordTypeInfo rti) {
+    if (null == rti) return;
+    _rio_rtiFilter = rti;
+    _rio_rtiFilterFields = null;
+  }
+  private static void setupRtiFields()
+  {
+    if (null == _rio_rtiFilter) return;
+    // we may already have done this
+    if (null != _rio_rtiFilterFields) return;
+    int _rio_i, _rio_j;
+    _rio_rtiFilterFields = new int [_rio_rtiFilter.getFieldTypeInfos().size()];
+    for (_rio_i=0; _rio_i<_rio_rtiFilterFields.length; _rio_i++) {
+      _rio_rtiFilterFields[_rio_i] = 0;
+    }
+    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter.getFieldTypeInfos().iterator();
+    _rio_i=0;
+    while (_rio_itFilter.hasNext()) {
+      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter.next();
+      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo.getFieldTypeInfos().iterator();
+      _rio_j=1;
+      while (_rio_it.hasNext()) {
+        org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
+        if (_rio_tInfo.equals(_rio_tInfoFilter)) {
+          _rio_rtiFilterFields[_rio_i] = _rio_j;
+          break;
+        }
+        _rio_j++;
+      }
+      _rio_i++;
+    }
+  }
+  public String getS() {
+    return s;
+  }
+  public void setS(final String s) {
+    this.s=s;
+  }
+  public long getNum() {
+    return num;
+  }
+  public void setNum(final long num) {
+    this.num=num;
+  }
+  public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a, final String _rio_tag)
+  throws java.io.IOException {
+    _rio_a.startRecord(this,_rio_tag);
+    _rio_a.writeString(s,"s");
+    _rio_a.writeLong(num,"num");
+    _rio_a.endRecord(this,_rio_tag);
+  }
+  private void deserializeWithoutFilter(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
+  throws java.io.IOException {
+    _rio_a.startRecord(_rio_tag);
+    s=_rio_a.readString("s");
+    num=_rio_a.readLong("num");
+    _rio_a.endRecord(_rio_tag);
+  }
+  public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
+  throws java.io.IOException {
+    if (null == _rio_rtiFilter) {
+      deserializeWithoutFilter(_rio_a, _rio_tag);
+      return;
+    }
+    // if we're here, we need to read based on version info
+    _rio_a.startRecord(_rio_tag);
+    setupRtiFields();
+    for (int _rio_i=0; _rio_i<_rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
+      if (1 == _rio_rtiFilterFields[_rio_i]) {
+        s=_rio_a.readString("s");
+      }
+      else if (2 == _rio_rtiFilterFields[_rio_i]) {
+        num=_rio_a.readLong("num");
+      }
+      else {
+        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>)(_rio_rtiFilter.getFieldTypeInfos());
+        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i).getFieldID(), typeInfos.get(_rio_i).getTypeID());
+      }
+    }
+    _rio_a.endRecord(_rio_tag);
+  }
+  public int compareTo (final Object _rio_peer_) throws ClassCastException {
+    if (!(_rio_peer_ instanceof RecordTestObj)) {
+      throw new ClassCastException("Comparing different types of records.");
+    }
+    RecordTestObj _rio_peer = (RecordTestObj) _rio_peer_;
+    int _rio_ret = 0;
+    _rio_ret = s.compareTo(_rio_peer.s);
+    if (_rio_ret != 0) return _rio_ret;
+    _rio_ret = (num == _rio_peer.num)? 0 :((num<_rio_peer.num)?-1:1);
+    if (_rio_ret != 0) return _rio_ret;
+    return _rio_ret;
+  }
+  public boolean equals(final Object _rio_peer_) {
+    if (!(_rio_peer_ instanceof RecordTestObj)) {
+      return false;
+    }
+    if (_rio_peer_ == this) {
+      return true;
+    }
+    RecordTestObj _rio_peer = (RecordTestObj) _rio_peer_;
+    boolean _rio_ret = false;
+    _rio_ret = s.equals(_rio_peer.s);
+    if (!_rio_ret) return _rio_ret;
+    _rio_ret = (num==_rio_peer.num);
+    if (!_rio_ret) return _rio_ret;
+    return _rio_ret;
+  }
+  public Object clone() throws CloneNotSupportedException {
+    RecordTestObj _rio_other = new RecordTestObj();
+    _rio_other.s = this.s;
+    _rio_other.num = this.num;
+    return _rio_other;
+  }
+  public int hashCode() {
+    int _rio_result = 17;
+    int _rio_ret;
+    _rio_ret = s.hashCode();
+    _rio_result = 37*_rio_result + _rio_ret;
+    _rio_ret = (int) (num^(num>>>32));
+    _rio_result = 37*_rio_result + _rio_ret;
+    return _rio_result;
+  }
+  public static String signature() {
+    return "LRecordTestObj(sl)";
+  }
+  public static class Comparator extends org.apache.hadoop.record.RecordComparator {
+    public Comparator() {
+      super(RecordTestObj.class);
+    }
+    static public int slurpRaw(byte[] b, int s, int l) {
+      try {
+        int os = s;
+        {
+          int i = org.apache.hadoop.record.Utils.readVInt(b, s);
+          int z = org.apache.hadoop.record.Utils.getVIntSize(i);
+          s+=(z+i); l-= (z+i);
+        }
+        {
+          long i = org.apache.hadoop.record.Utils.readVLong(b, s);
+          int z = org.apache.hadoop.record.Utils.getVIntSize(i);
+          s+=z; l-=z;
+        }
+        return (os - s);
+      } catch(java.io.IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+    static public int compareRaw(byte[] b1, int s1, int l1,
+                                   byte[] b2, int s2, int l2) {
+      try {
+        int os1 = s1;
+        {
+          int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
+          int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
+          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
+          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
+          s1+=z1; s2+=z2; l1-=z1; l2-=z2;
+          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);
+          if (r1 != 0) { return (r1<0)?-1:0; }
+          s1+=i1; s2+=i2; l1-=i1; l1-=i2;
+        }
+        {
+          long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
+          long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
+          if (i1 != i2) {
+            return ((i1-i2) < 0) ? -1 : 0;
+          }
+          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
+          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
+          s1+=z1; s2+=z2; l1-=z1; l2-=z2;
+        }
+        return (os1 - s1);
+      } catch(java.io.IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+    public int compare(byte[] b1, int s1, int l1,
+                         byte[] b2, int s2, int l2) {
+      int ret = compareRaw(b1,s1,l1,b2,s2,l2);
+      return (ret == -1)? -1 : ((ret==0)? 1 : 0);}
+  }
+  
+  static {
+    org.apache.hadoop.record.RecordComparator.define(RecordTestObj.class, new Comparator());
+  }
+}

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java Mon Nov 10 17:50:06 2008
@@ -0,0 +1,281 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.*;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.record.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.io.serializer.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import com.facebook.thrift.*;
+import com.facebook.thrift.transport.*;
+import com.facebook.thrift.protocol.*;
+
+//import org.apache.hadoop.contrib.serialization.thrift.*;
+
+public class TestFlatFileInputFormat extends TestCase  {
+
+  public void testFlatFileInputJava() throws Exception {
+    Configuration conf;
+    JobConf job ;
+    FileSystem fs;
+    Path dir ;
+    Path file;
+    Reporter reporter;
+    FSDataOutputStream ds;
+
+    try {
+      //
+      // create job and filesystem and reporter and such.
+      //
+      conf = new Configuration();
+      job = new JobConf(conf);
+      fs = FileSystem.getLocal(conf);
+      dir = new Path(System.getProperty("test.build.data",".") + "/mapred");
+      file = new Path(dir, "test.txt");
+      reporter = Reporter.NULL;
+      fs.delete(dir, true);
+
+      job.setClass(FlatFileInputFormat.SerializationImplKey,
+                   org.apache.hadoop.io.serializer.JavaSerialization.class,
+                   org.apache.hadoop.io.serializer.Serialization.class);
+      
+      job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
+                   JavaTestObjFlatFileInputFormat.class, java.io.Serializable.class);
+      
+      //
+      // Write some data out to a flat file
+      //
+      FileInputFormat.setInputPaths(job, dir);
+      ds = fs.create(file);
+      Serializer serializer = new JavaSerialization().getSerializer(null);
+
+      // construct some data and write it
+      serializer.open(ds);
+      for (int i = 0; i < 10; i++) {
+        serializer.serialize(new JavaTestObjFlatFileInputFormat("Hello World! " + String.valueOf(i), i));
+      }
+      serializer.close();
+
+      //
+      // Construct the reader
+      //
+      FileInputFormat<Void, FlatFileInputFormat.RowContainer<Serializable>> format =
+        new FlatFileInputFormat<Serializable>();
+      InputSplit[] splits = format.getSplits(job, 1);
+
+      // construct the record reader
+      RecordReader<Void, FlatFileInputFormat.RowContainer<Serializable>> reader =
+        format.getRecordReader(splits[0], job, reporter);
+
+      // create key/value
+      Void key = reader.createKey();
+      FlatFileInputFormat.RowContainer<Serializable> value = reader.createValue();
+      
+      //
+      // read back the data using the FlatFileRecordReader
+      //
+      int count = 0;
+      while (reader.next(key, value)) {
+        assertTrue(key == null);
+        assertTrue(((JavaTestObjFlatFileInputFormat)value.row).s.equals("Hello World! " +String.valueOf(count)));
+        assertTrue(((JavaTestObjFlatFileInputFormat)value.row).num == count);
+        count++;
+      }
+      reader.close();
+
+    } catch(Exception e) {
+      System.err.println("caught: " + e);
+      e.printStackTrace();
+    } finally {
+    }
+
+  }
+
+  public void testFlatFileInputRecord() throws Exception {
+    Configuration conf;
+    JobConf job ;
+    FileSystem fs;
+    Path dir ;
+    Path file;
+    Reporter reporter;
+    FSDataOutputStream ds;
+
+    try {
+      //
+      // create job and filesystem and reporter and such.
+      //
+      conf = new Configuration();
+      job = new JobConf(conf);
+      fs = FileSystem.getLocal(conf);
+      dir = new Path(System.getProperty("test.build.data",".") + "/mapred");
+      file = new Path(dir, "test.txt");
+      reporter = Reporter.NULL;
+      fs.delete(dir, true);
+
+      job.setClass(FlatFileInputFormat.SerializationImplKey,
+                   org.apache.hadoop.io.serializer.WritableSerialization.class,
+                   org.apache.hadoop.io.serializer.Serialization.class);
+      
+      job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
+                   RecordTestObj.class, Writable.class);
+      
+      //
+      // Write some data out to a flat file
+      //
+      FileInputFormat.setInputPaths(job, dir);
+      ds = fs.create(file);
+      Serializer serializer = new WritableSerialization().getSerializer(Writable.class);
+
+      // construct some data and write it
+      serializer.open(ds);
+      for (int i = 0; i < 10; i++) {
+        serializer.serialize(new RecordTestObj("Hello World! " + String.valueOf(i), i));
+      }
+      serializer.close();
+
+      //
+      // Construct the reader
+      //
+      FileInputFormat<Void, FlatFileInputFormat.RowContainer<Writable>> format =
+        new FlatFileInputFormat<Writable>();
+      InputSplit[] splits = format.getSplits(job, 1);
+
+      // construct the record reader
+      RecordReader<Void, FlatFileInputFormat.RowContainer<Writable>> reader =
+        format.getRecordReader(splits[0], job, reporter);
+
+      // create key/value
+      Void key = reader.createKey();
+      FlatFileInputFormat.RowContainer<Writable> value = reader.createValue();
+      
+      //
+      // read back the data using the FlatFileRecordReader
+      //
+      int count = 0;
+      while (reader.next(key, value)) {
+        assertTrue(key == null);
+        assertTrue(((RecordTestObj)value.row).getS().equals("Hello World! " +String.valueOf(count)));
+        assertTrue(((RecordTestObj)value.row).getNum() == count);
+        count++;
+      }
+      reader.close();
+
+    } catch(Exception e) {
+      System.err.println("caught: " + e);
+      e.printStackTrace();
+    } finally {
+    }
+
+  }
+  /*
+  public void testFlatFileInputThrift() throws Exception {
+    Configuration conf;
+    JobConf job ;
+    FileSystem fs;
+    Path dir ;
+    Path file;
+    Reporter reporter;
+    FSDataOutputStream ds;
+
+    try {
+      //
+      // create job and filesystem and reporter and such.
+      //
+      conf = new Configuration();
+      job = new JobConf(conf);
+      fs = FileSystem.getLocal(conf);
+      dir = new Path(System.getProperty("test.build.data",".") + "/mapred");
+      file = new Path(dir, "test.txt");
+      reporter = Reporter.NULL;
+      fs.delete(dir, true);
+
+      job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationImplKey,
+                   org.apache.hadoop.contrib.serialization.thrift.ThriftSerialization.class,
+                   org.apache.hadoop.io.serializer.Serialization.class);
+      
+      job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
+                   FlatFileThriftTestObj.class, TBase.class);
+      
+      //
+      // Write some data out to a flat file
+      //
+      FileInputFormat.setInputPaths(job, dir);
+      ds = fs.create(file);
+      Serializer serializer = new ThriftSerialization().getSerializer(TBase.class);
+
+      // construct some data and write it
+      serializer.open(ds);
+      for (int i = 0; i < 10; i++) {
+        serializer.serialize(new FlatFileThriftTestObj("Hello World! " + String.valueOf(i), i));
+      }
+      serializer.close();
+
+      //
+      // Construct the reader
+      //
+      FileInputFormat<Void, FlatFileInputFormat.RowContainer<TBase>> format =
+        new FlatFileInputFormat<TBase>();
+      InputSplit[] splits = format.getSplits(job, 1);
+
+      // construct the record reader
+      RecordReader<Void, FlatFileInputFormat.RowContainer<TBase>> reader =
+        format.getRecordReader(splits[0], job, reporter);
+
+      // create key/value
+      Void key = reader.createKey();
+      FlatFileInputFormat.RowContainer<TBase> value = reader.createValue();
+      
+      //
+      // read back the data using the FlatFileRecordReader
+      //
+      int count = 0;
+      while (reader.next(key, value)) {
+        assertTrue(key == null);
+        assertTrue(((FlatFileThriftTestObj)value.row).s.equals("Hello World! " +String.valueOf(count)));
+        assertTrue(((FlatFileThriftTestObj)value.row).num == count);
+        count++;
+      }
+      reader.close();
+
+    } catch(Exception e) {
+      System.err.println("caught: " + e);
+      e.printStackTrace();
+    } finally {
+    }
+
+  }
+  */
+
+
+  public static void main(String[] args) throws Exception {
+    new TestFlatFileInputFormat().testFlatFileInputJava();
+    new TestFlatFileInputFormat().testFlatFileInputRecord();
+    //    new TestFlatFileInputFormat().testFlatFileInputThrift();
+  }
+}

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/bad_sample_clause.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/bad_sample_clause.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/bad_sample_clause.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/bad_sample_clause.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1,6 @@
+CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE;
+
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 2) s
+WHERE s.ds='2008-04-08' and s.hr='11';
+

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input1.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input1.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input1.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+SELECT a.* FROM src; 

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input2.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input2.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input2.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+SELECT a.key FROM src; 

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input_testxpath4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input_testxpath4.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input_testxpath4.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/input_testxpath4.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1,6 @@
+EXPLAIN
+FROM src_thrift
+SELECT src_thrift.mstringstring['key_9'], lintstring.myint;
+
+FROM src_thrift
+SELECT src_thrift.mstringstring['key_9'], lintstring.myint;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/invalid_create_tbl1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/invalid_create_tbl1.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/invalid_create_tbl1.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/invalid_create_tbl1.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1,7 @@
+DROP TABLE inv_valid_tbl1;
+CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table' 
+    PARTITIONED BY(aint DATETIME, country STRING) 
+    CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
+    ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.ThriftDeserializer' WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex', 'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol')
+    STORED AS SEQUENCEFILE;
+DESCRIBE EXTENDED inv_valid_tbl1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/invalid_tbl_name.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/invalid_tbl_name.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/invalid_tbl_name.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/invalid_tbl_name.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1 @@
+create table invalid-name(a int, b string);

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/joinneg.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/joinneg.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/joinneg.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/joinneg.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1,6 @@
+EXPLAIN FROM 
+(SELECT src.* FROM src) x
+JOIN 
+(SELECT src.* FROM src) Y
+ON (x.key = b.key)
+SELECT Y.*;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/load_wrong_fileformat.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/load_wrong_fileformat.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/load_wrong_fileformat.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/load_wrong_fileformat.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1,6 @@
+-- test for loading into tables with the correct file format
+-- test for loading into partitions with the correct file format
+
+DROP TABLE T1;
+CREATE TABLE T1(name STRING) STORED AS SEQUENCEFILE;
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias3.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias3.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias3.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias3.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1,4 @@
+CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE;
+
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, sum(src.value) WHERE src.key < 100 group by key;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias4.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias4.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/notable_alias4.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1,4 @@
+EXPLAIN
+SELECT key from src JOIN src1 on src1.key=src.key;
+
+SELECT key from src JOIN src1 on src1.key=src.key;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/alter1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/alter1.q?rev=712905&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/alter1.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/alter1.q Mon Nov 10 17:50:06 2008
@@ -0,0 +1,20 @@
+drop table alter1;
+create table alter1(a int, b int);
+describe extended alter1;
+alter table alter1 set tblproperties ('a'='1', 'c'='3');
+describe extended alter1;
+alter table alter1 set tblproperties ('a'='1', 'c'='4', 'd'='3');
+describe extended alter1;
+
+alter table alter1 set serdeproperties('s1'='9');
+describe extended alter1;
+alter table alter1 set serdeproperties('s1'='10', 's2' ='20');
+describe extended alter1;
+
+alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9');
+describe extended alter1;
+
+alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe';
+describe extended alter1;
+
+drop table alter1;


