hadoop-hive-commits mailing list archives

From zs...@apache.org
Subject svn commit: r901644 [23/37] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ ql/src/java/org/apache/hadoop/hive/ql/history/ ql/src/jav...
Date Thu, 21 Jan 2010 10:38:15 GMT
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java Thu Jan 21 10:37:58 2010
@@ -16,93 +16,106 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
-
-public class exprNodeFieldDesc extends exprNodeDesc implements Serializable {
-  private static final long serialVersionUID = 1L;
-  exprNodeDesc desc;
+public class exprNodeFieldDesc extends exprNodeDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+  exprNodeDesc desc;
   String fieldName;
-  
-  // Used to support a.b where a is a list of struct that contains a field called b.
-  // a.b will return an array that contains field b of all elements of array a. 
+
+  // Used to support a.b where a is a list of struct that contains a field
+  // called b.
+  // a.b will return an array that contains field b of all elements of array a.
   Boolean isList;
-  
-  public exprNodeFieldDesc() {}
-  public exprNodeFieldDesc(TypeInfo typeInfo, exprNodeDesc desc, String fieldName, Boolean isList) {
-    super(typeInfo);
-    this.desc = desc;
+
+  public exprNodeFieldDesc() {
+  }
+
+  public exprNodeFieldDesc(TypeInfo typeInfo, exprNodeDesc desc,
+      String fieldName, Boolean isList) {
+    super(typeInfo);
+    this.desc = desc;
     this.fieldName = fieldName;
-    this.isList = isList;
-  }
-  
+    this.isList = isList;
+  }
+
   @Override
   public List<exprNodeDesc> getChildren() {
     List<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
     children.add(desc);
     return children;
   }
-  
-  public exprNodeDesc getDesc() {
-    return this.desc;
-  }
-  public void setDesc(exprNodeDesc desc) {
-    this.desc = desc;
-  }
-  public String getFieldName() {
-    return this.fieldName;
-  }
-  public void setFieldName(String fieldName) {
-    this.fieldName = fieldName;
+
+  public exprNodeDesc getDesc() {
+    return desc;
   }
+
+  public void setDesc(exprNodeDesc desc) {
+    this.desc = desc;
+  }
+
+  public String getFieldName() {
+    return fieldName;
+  }
+
+  public void setFieldName(String fieldName) {
+    this.fieldName = fieldName;
+  }
+
   public Boolean getIsList() {
     return isList;
   }
+
   public void setIsList(Boolean isList) {
     this.isList = isList;
   }
-  
-  @Override
-  public String toString() {
-    return this.desc.toString() + "." + this.fieldName;
-  }
-  
-  @explain(displayName="expr")
+
+  @Override
+  public String toString() {
+    return desc.toString() + "." + fieldName;
+  }
+
+  @explain(displayName = "expr")
   @Override
   public String getExprString() {
-    return this.desc.getExprString() + "." + this.fieldName;
+    return desc.getExprString() + "." + fieldName;
   }
 
+  @Override
   public List<String> getCols() {
     List<String> colList = new ArrayList<String>();
-    if (desc != null) 
-    	colList = Utilities.mergeUniqElems(colList, desc.getCols());    
+    if (desc != null) {
+      colList = Utilities.mergeUniqElems(colList, desc.getCols());
+    }
     return colList;
   }
+
   @Override
   public exprNodeDesc clone() {
-    return new exprNodeFieldDesc(this.typeInfo, this.desc, this.fieldName, this.isList);
+    return new exprNodeFieldDesc(typeInfo, desc, fieldName, isList);
   }
 
   @Override
   public boolean isSame(Object o) {
-    if (!(o instanceof exprNodeFieldDesc))
+    if (!(o instanceof exprNodeFieldDesc)) {
       return false;
-    exprNodeFieldDesc dest = (exprNodeFieldDesc)o;
-    if (!typeInfo.equals(dest.getTypeInfo()))
+    }
+    exprNodeFieldDesc dest = (exprNodeFieldDesc) o;
+    if (!typeInfo.equals(dest.getTypeInfo())) {
       return false;
-    if (!fieldName.equals(dest.getFieldName()) ||
-        !isList.equals(dest.getIsList()) ||
-        !desc.isSame(dest.getDesc()))
+    }
+    if (!fieldName.equals(dest.getFieldName())
+        || !isList.equals(dest.getIsList()) || !desc.isSame(dest.getDesc())) {
       return false;
-      
-    return true; 
+    }
+
+    return true;
   }
 }
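
For context, the isList flag above encodes HiveQL's a.b access on an array of structs: the result is an array holding field b of every element. A minimal sketch of building such a node follows; it is not part of this patch, and the exprNodeColumnDesc constructor shape, table alias "t", and the TypeInfoFactory calls are assumptions for illustration.

    import java.util.Arrays;

    import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class FieldDescSketch {
      public static void main(String[] args) {
        // column a of type array<struct<b:int>>
        TypeInfo structType = TypeInfoFactory.getStructTypeInfo(
            Arrays.asList("b"),
            Arrays.asList((TypeInfo) TypeInfoFactory.intTypeInfo));
        TypeInfo aType = TypeInfoFactory.getListTypeInfo(structType);
        exprNodeDesc a = new exprNodeColumnDesc(aType, "a", "t", false);
        // a.b is typed array<int>: field b of every element of a
        exprNodeFieldDesc aDotB = new exprNodeFieldDesc(
            TypeInfoFactory.getListTypeInfo(TypeInfoFactory.intTypeInfo),
            a, "b", Boolean.TRUE);
        // getExprString() is the child's expr string + "." + fieldName
        System.out.println(aDotB.getExprString());
      }
    }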

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java Thu Jan 21 10:37:58 2010
@@ -22,84 +22,94 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 /**
  * Describes a GenericFunc node.
  */
-public class exprNodeGenericFuncDesc extends exprNodeDesc implements Serializable {
+public class exprNodeGenericFuncDesc extends exprNodeDesc implements
+    Serializable {
 
   private static final long serialVersionUID = 1L;
-  
+
   /**
    * In case genericUDF is Serializable, we will serialize the object.
    * 
    * In case genericUDF does not implement Serializable, Java will remember the
-   * class of genericUDF and creates a new instance when deserialized.  This is
+   * class of genericUDF and creates a new instance when deserialized. This is
    * exactly what we want.
    */
   private GenericUDF genericUDF;
-  private List<exprNodeDesc> childExprs; 
-  
-  public exprNodeGenericFuncDesc() {}
-  public exprNodeGenericFuncDesc(TypeInfo typeInfo, GenericUDF genericUDF, 
-                          List<exprNodeDesc> children) {
+  private List<exprNodeDesc> childExprs;
+
+  public exprNodeGenericFuncDesc() {
+  }
+
+  public exprNodeGenericFuncDesc(TypeInfo typeInfo, GenericUDF genericUDF,
+      List<exprNodeDesc> children) {
     super(typeInfo);
-    assert(genericUDF != null);
+    assert (genericUDF != null);
     this.genericUDF = genericUDF;
-    this.childExprs = children;
+    childExprs = children;
   }
-  
+
   public GenericUDF getGenericUDF() {
     return genericUDF;
   }
-  
+
   public void setGenericUDF(GenericUDF genericUDF) {
     this.genericUDF = genericUDF;
   }
-  
+
   public List<exprNodeDesc> getChildExprs() {
-    return this.childExprs;
+    return childExprs;
   }
+
   public void setChildExprs(List<exprNodeDesc> children) {
-    this.childExprs = children;
+    childExprs = children;
   }
+
   @Override
   public List<exprNodeDesc> getChildren() {
     return childExprs;
   }
+
+  @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append(genericUDF.getClass().toString());
     sb.append("(");
-    for(int i=0; i<childExprs.size(); i++) {
-      if (i>0) sb.append(", ");
+    for (int i = 0; i < childExprs.size(); i++) {
+      if (i > 0) {
+        sb.append(", ");
+      }
       sb.append(childExprs.get(i).toString());
     }
     sb.append("(");
     sb.append(")");
     return sb.toString();
   }
-  
-  @explain(displayName="expr")
+
+  @explain(displayName = "expr")
   @Override
   public String getExprString() {
     // Get the children expr strings
     String[] childrenExprStrings = new String[childExprs.size()];
-    for (int i=0; i<childrenExprStrings.length; i++) {
+    for (int i = 0; i < childrenExprStrings.length; i++) {
       childrenExprStrings[i] = childExprs.get(i).getExprString();
     }
-    
+
     return genericUDF.getDisplayString(childrenExprStrings);
   }
 
+  @Override
   public List<String> getCols() {
     List<String> colList = new ArrayList<String>();
     if (childExprs != null) {
@@ -113,63 +123,70 @@
 
     return colList;
   }
-  
+
   @Override
   public exprNodeDesc clone() {
     List<exprNodeDesc> cloneCh = new ArrayList<exprNodeDesc>(childExprs.size());
-    for(exprNodeDesc ch :  childExprs) {
+    for (exprNodeDesc ch : childExprs) {
       cloneCh.add(ch.clone());
     }
-    exprNodeGenericFuncDesc clone = new exprNodeGenericFuncDesc(this.typeInfo,
+    exprNodeGenericFuncDesc clone = new exprNodeGenericFuncDesc(typeInfo,
         FunctionRegistry.cloneGenericUDF(genericUDF), cloneCh);
     return clone;
   }
-  
+
   /**
-   * Create a exprNodeGenericFuncDesc based on the genericUDFClass and the children
-   * parameters.
+   * Create a exprNodeGenericFuncDesc based on the genericUDFClass and the
+   * children parameters.
+   * 
    * @throws UDFArgumentException
    */
-  public static exprNodeGenericFuncDesc newInstance(GenericUDF genericUDF, 
+  public static exprNodeGenericFuncDesc newInstance(GenericUDF genericUDF,
       List<exprNodeDesc> children) throws UDFArgumentException {
     ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
-    for(int i=0; i<childrenOIs.length; i++) {
-      childrenOIs[i] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
-          children.get(i).getTypeInfo());
+    for (int i = 0; i < childrenOIs.length; i++) {
+      childrenOIs[i] = TypeInfoUtils
+          .getStandardWritableObjectInspectorFromTypeInfo(children.get(i)
+              .getTypeInfo());
     }
-    
+
     ObjectInspector oi = genericUDF.initialize(childrenOIs);
-    return new exprNodeGenericFuncDesc(TypeInfoUtils.getTypeInfoFromObjectInspector(oi),
-        genericUDF, children);
+    return new exprNodeGenericFuncDesc(TypeInfoUtils
+        .getTypeInfoFromObjectInspector(oi), genericUDF, children);
   }
-  
+
   @Override
   public boolean isSame(Object o) {
-    if (!(o instanceof exprNodeGenericFuncDesc))
+    if (!(o instanceof exprNodeGenericFuncDesc)) {
       return false;
-    exprNodeGenericFuncDesc dest = (exprNodeGenericFuncDesc)o;
-    if (!typeInfo.equals(dest.getTypeInfo()) ||
-        !genericUDF.getClass().equals(dest.getGenericUDF().getClass()))
+    }
+    exprNodeGenericFuncDesc dest = (exprNodeGenericFuncDesc) o;
+    if (!typeInfo.equals(dest.getTypeInfo())
+        || !genericUDF.getClass().equals(dest.getGenericUDF().getClass())) {
       return false;
-    
+    }
+
     if (genericUDF instanceof GenericUDFBridge) {
       GenericUDFBridge bridge = (GenericUDFBridge) genericUDF;
       GenericUDFBridge bridge2 = (GenericUDFBridge) dest.getGenericUDF();
       if (!bridge.getUdfClass().equals(bridge2.getUdfClass())
           || !bridge.getUdfName().equals(bridge2.getUdfName())
-          || bridge.isOperator() != bridge2.isOperator())
+          || bridge.isOperator() != bridge2.isOperator()) {
         return false;
+      }
     }
-    
-    if (childExprs.size() != dest.getChildExprs().size())
+
+    if (childExprs.size() != dest.getChildExprs().size()) {
       return false;
-    
+    }
+
     for (int pos = 0; pos < childExprs.size(); pos++) {
-      if (!childExprs.get(pos).isSame(dest.getChildExprs().get(pos)))
+      if (!childExprs.get(pos).isSame(dest.getChildExprs().get(pos))) {
         return false;
+      }
     }
-    
-    return true; 
+
+    return true;
   }
-  
+
 }
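
The newInstance factory above wires the children's standard writable ObjectInspectors into GenericUDF.initialize and derives the node's TypeInfo from the returned inspector. A minimal sketch of calling it, assuming GenericUDFOPAnd and boolean constant children purely for illustration:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
    import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class GenericFuncSketch {
      public static void main(String[] args) throws UDFArgumentException {
        List<exprNodeDesc> children = new ArrayList<exprNodeDesc>();
        children.add(new exprNodeConstantDesc(
            TypeInfoFactory.booleanTypeInfo, Boolean.TRUE));
        children.add(new exprNodeConstantDesc(
            TypeInfoFactory.booleanTypeInfo, Boolean.FALSE));
        // newInstance initializes the UDF against the children's writable
        // ObjectInspectors and takes the result TypeInfo (boolean here)
        exprNodeGenericFuncDesc andExpr =
            exprNodeGenericFuncDesc.newInstance(new GenericUDFOPAnd(), children);
        System.out.println(andExpr.getExprString());
      }
    }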

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java Thu Jan 21 10:37:58 2010
@@ -24,18 +24,19 @@
 import org.apache.hadoop.io.NullWritable;
 
 public class exprNodeNullDesc extends exprNodeDesc implements Serializable {
-  
+
   private static final long serialVersionUID = 1L;
 
   public exprNodeNullDesc() {
-    super(TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class));
+    super(TypeInfoFactory
+        .getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class));
   }
 
   public Object getValue() {
     return null;
   }
 
-  @explain(displayName="expr")
+  @explain(displayName = "expr")
   @Override
   public String getExprString() {
     return "null";
@@ -45,14 +46,16 @@
   public exprNodeDesc clone() {
     return new exprNodeNullDesc();
   }
-  
+
   @Override
   public boolean isSame(Object o) {
-    if (!(o instanceof exprNodeNullDesc))
+    if (!(o instanceof exprNodeNullDesc)) {
       return false;
-    if (!typeInfo.equals(((exprNodeNullDesc)o).getTypeInfo()))
+    }
+    if (!typeInfo.equals(((exprNodeNullDesc) o).getTypeInfo())) {
       return false;
-    
-    return true; 
+    }
+
+    return true;
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java Thu Jan 21 10:37:58 2010
@@ -20,18 +20,23 @@
 
 import java.io.Serializable;
 
-@explain(displayName="Extract")
+@explain(displayName = "Extract")
 public class extractDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private exprNodeDesc col;
-  public extractDesc() { }
+
+  public extractDesc() {
+  }
+
   public extractDesc(final exprNodeDesc col) {
     this.col = col;
   }
+
   public exprNodeDesc getCol() {
-    return this.col;
+    return col;
   }
+
   public void setCol(final exprNodeDesc col) {
-    this.col=col;
+    this.col = col;
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java Thu Jan 21 10:37:58 2010
@@ -23,16 +23,15 @@
 import java.util.List;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
 
-@explain(displayName="Fetch Operator")
+@explain(displayName = "Fetch Operator")
 public class fetchWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private String    tblDir;
+  private String tblDir;
   private tableDesc tblDesc;
 
-  private List<String>        partDir;
+  private List<String> partDir;
   private List<partitionDesc> partDesc;
 
   private int limit;
@@ -42,27 +41,28 @@
    */
   private String serializationNullFormat = "NULL";
 
-  public fetchWork() { }
+  public fetchWork() {
+  }
 
   public fetchWork(String tblDir, tableDesc tblDesc) {
     this(tblDir, tblDesc, -1);
   }
 
-	public fetchWork(String tblDir, tableDesc tblDesc, int limit) {
-		this.tblDir = tblDir;
-		this.tblDesc = tblDesc;
-		this.limit = limit;
-	}
-
-	public fetchWork(List<String> partDir, List<partitionDesc> partDesc) {
-	  this(partDir, partDesc, -1);
-	}
-	 
-	public fetchWork(List<String> partDir, List<partitionDesc> partDesc, int limit) {
-		this.partDir = partDir;
-		this.partDesc = partDesc;
-		this.limit = limit;
-	}
+  public fetchWork(String tblDir, tableDesc tblDesc, int limit) {
+    this.tblDir = tblDir;
+    this.tblDesc = tblDesc;
+    this.limit = limit;
+  }
+
+  public fetchWork(List<String> partDir, List<partitionDesc> partDesc) {
+    this(partDir, partDesc, -1);
+  }
+
+  public fetchWork(List<String> partDir, List<partitionDesc> partDesc, int limit) {
+    this.partDir = partDir;
+    this.partDesc = partDesc;
+    this.limit = limit;
+  }
 
   public String getSerializationNullFormat() {
     return serializationNullFormat;
@@ -71,123 +71,135 @@
   public void setSerializationNullFormat(String format) {
     serializationNullFormat = format;
   }
-	
-	/**
-	 * @return the tblDir
-	 */
-	public String getTblDir() {
-		return tblDir;
-	}
 
-	 /**
+  /**
+   * @return the tblDir
+   */
+  public String getTblDir() {
+    return tblDir;
+  }
+
+  /**
    * @return the tblDir
    */
   public Path getTblDirPath() {
     return new Path(tblDir);
   }
 
-	/**
-	 * @param tblDir the tblDir to set
-	 */
-	public void setTblDir(String tblDir) {
-		this.tblDir = tblDir;
-	}
-
-	/**
-	 * @return the tblDesc
-	 */
-	public tableDesc getTblDesc() {
-		return tblDesc;
-	}
-
-	/**
-	 * @param tblDesc the tblDesc to set
-	 */
-	public void setTblDesc(tableDesc tblDesc) {
-		this.tblDesc = tblDesc;
-	}
-
-	/**
-	 * @return the partDir
-	 */
-	public List<String> getPartDir() {
-		return partDir;
-	}
-
-
-	public List<Path> getPartDirPath() {
-	  return fetchWork.convertStringToPathArray(partDir);
-	}
-	
-	public static List<String> convertPathToStringArray(List<Path> paths) {
-	   if (paths == null)
-	      return null;
-	    
-	   List<String> pathsStr = new ArrayList<String>();
-	   for (Path path : paths)
-	     pathsStr.add(path.toString());
-	    
-	   return pathsStr;
-	}
-	
-	 public static List<Path> convertStringToPathArray(List<String> paths) {
-     if (paths == null)
-        return null;
-      
-     List<Path> pathsStr = new ArrayList<Path>();
-     for (String path : paths)
-       pathsStr.add(new Path(path));
-      
-     return pathsStr;
-  }
-
-	/**
-	 * @param partDir the partDir to set
-	 */
-	public void setPartDir(List<String> partDir) {
-		this.partDir = partDir;
-	}
-
-	/**
-	 * @return the partDesc
-	 */
-	public List<partitionDesc> getPartDesc() {
-		return partDesc;
-	}
-
-	/**
-	 * @param partDesc the partDesc to set
-	 */
-	public void setPartDesc(List<partitionDesc> partDesc) {
-		this.partDesc = partDesc;
-	}
-
-	/**
-	 * @return the limit
-	 */
-  @explain(displayName="limit")
-	public int getLimit() {
-		return limit;
-	}
-
-	/**
-	 * @param limit the limit to set
-	 */
-	public void setLimit(int limit) {
-		this.limit = limit;
-	}
-	
-	public String toString() {
-    if (tblDir != null)
-	    return new String ("table = " + tblDir);
-	  
-	  if (partDir == null) 
-	    return "null fetchwork";
-	  	  
-	  String ret = new String("partition = ");
-    for (String part : partDir)
-     ret = ret.concat(part);
-	  
+  /**
+   * @param tblDir
+   *          the tblDir to set
+   */
+  public void setTblDir(String tblDir) {
+    this.tblDir = tblDir;
+  }
+
+  /**
+   * @return the tblDesc
+   */
+  public tableDesc getTblDesc() {
+    return tblDesc;
+  }
+
+  /**
+   * @param tblDesc
+   *          the tblDesc to set
+   */
+  public void setTblDesc(tableDesc tblDesc) {
+    this.tblDesc = tblDesc;
+  }
+
+  /**
+   * @return the partDir
+   */
+  public List<String> getPartDir() {
+    return partDir;
+  }
+
+  public List<Path> getPartDirPath() {
+    return fetchWork.convertStringToPathArray(partDir);
+  }
+
+  public static List<String> convertPathToStringArray(List<Path> paths) {
+    if (paths == null) {
+      return null;
+    }
+
+    List<String> pathsStr = new ArrayList<String>();
+    for (Path path : paths) {
+      pathsStr.add(path.toString());
+    }
+
+    return pathsStr;
+  }
+
+  public static List<Path> convertStringToPathArray(List<String> paths) {
+    if (paths == null) {
+      return null;
+    }
+
+    List<Path> pathsStr = new ArrayList<Path>();
+    for (String path : paths) {
+      pathsStr.add(new Path(path));
+    }
+
+    return pathsStr;
+  }
+
+  /**
+   * @param partDir
+   *          the partDir to set
+   */
+  public void setPartDir(List<String> partDir) {
+    this.partDir = partDir;
+  }
+
+  /**
+   * @return the partDesc
+   */
+  public List<partitionDesc> getPartDesc() {
+    return partDesc;
+  }
+
+  /**
+   * @param partDesc
+   *          the partDesc to set
+   */
+  public void setPartDesc(List<partitionDesc> partDesc) {
+    this.partDesc = partDesc;
+  }
+
+  /**
+   * @return the limit
+   */
+  @explain(displayName = "limit")
+  public int getLimit() {
+    return limit;
+  }
+
+  /**
+   * @param limit
+   *          the limit to set
+   */
+  public void setLimit(int limit) {
+    this.limit = limit;
+  }
+
+  @Override
+  public String toString() {
+    if (tblDir != null) {
+      return new String("table = " + tblDir);
+    }
+
+    if (partDir == null) {
+      return "null fetchwork";
+    }
+
+    String ret = new String("partition = ");
+    for (String part : partDir) {
+      ret = ret.concat(part);
+    }
+
     return ret;
   }
 }
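
The convertPathToStringArray/convertStringToPathArray helpers above are symmetric, null-propagating conversions. A quick usage sketch (the directory names are made up):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.plan.fetchWork;

    public class FetchWorkPathsSketch {
      public static void main(String[] args) {
        List<Path> dirs = fetchWork.convertStringToPathArray(
            Arrays.asList("/warehouse/t/ds=1", "/warehouse/t/ds=2"));
        // round-trips back to the same strings; a null input returns null
        // in both directions
        List<String> back = fetchWork.convertPathToStringArray(dirs);
        System.out.println(back);
      }
    }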

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java Thu Jan 21 10:37:58 2010
@@ -20,7 +20,7 @@
 
 import java.io.Serializable;
 
-@explain(displayName="File Output Operator")
+@explain(displayName = "File Output Operator")
 public class fileSinkDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String dirName;
@@ -30,47 +30,46 @@
   private String compressCodec;
   private String compressType;
 
+  public fileSinkDesc() {
+  }
 
-  public fileSinkDesc() { }
-  public fileSinkDesc(
-    final String dirName,
-    final tableDesc tableInfo,
-    final boolean compressed, int destTableId) {
+  public fileSinkDesc(final String dirName, final tableDesc tableInfo,
+      final boolean compressed, int destTableId) {
 
     this.dirName = dirName;
     this.tableInfo = tableInfo;
     this.compressed = compressed;
     this.destTableId = destTableId;
   }
-  
-  public fileSinkDesc(
-      final String dirName,
-      final tableDesc tableInfo,
+
+  public fileSinkDesc(final String dirName, final tableDesc tableInfo,
       final boolean compressed) {
 
-      this.dirName = dirName;
-      this.tableInfo = tableInfo;
-      this.compressed = compressed;
-      this.destTableId = 0;
-    }
-  
-  @explain(displayName="directory", normalExplain=false)
+    this.dirName = dirName;
+    this.tableInfo = tableInfo;
+    this.compressed = compressed;
+    destTableId = 0;
+  }
+
+  @explain(displayName = "directory", normalExplain = false)
   public String getDirName() {
-    return this.dirName;
+    return dirName;
   }
+
   public void setDirName(final String dirName) {
     this.dirName = dirName;
   }
-  
-  @explain(displayName="table")
+
+  @explain(displayName = "table")
   public tableDesc getTableInfo() {
-    return this.tableInfo;
+    return tableInfo;
   }
+
   public void setTableInfo(final tableDesc tableInfo) {
     this.tableInfo = tableInfo;
   }
 
-  @explain(displayName="compressed")
+  @explain(displayName = "compressed")
   public boolean getCompressed() {
     return compressed;
   }
@@ -78,25 +77,29 @@
   public void setCompressed(boolean compressed) {
     this.compressed = compressed;
   }
-  
-  @explain(displayName="GlobalTableId")
+
+  @explain(displayName = "GlobalTableId")
   public int getDestTableId() {
     return destTableId;
   }
-  
+
   public void setDestTableId(int destTableId) {
     this.destTableId = destTableId;
   }
+
   public String getCompressCodec() {
     return compressCodec;
   }
+
   public void setCompressCodec(String intermediateCompressorCodec) {
-    this.compressCodec = intermediateCompressorCodec;
+    compressCodec = intermediateCompressorCodec;
   }
+
   public String getCompressType() {
     return compressType;
   }
+
   public void setCompressType(String intermediateCompressType) {
-    this.compressType = intermediateCompressType;
+    compressType = intermediateCompressType;
   }
 }
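
A small sketch of configuring the sink's compression through the setters above. It is not from the patch: the directory, codec, and type strings are illustrative values, and the null tableDesc is a stand-in for what the planner would supply.

    import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
    import org.apache.hadoop.hive.ql.plan.tableDesc;

    public class FileSinkSketch {
      public static void main(String[] args) {
        tableDesc tblInfo = null; // stand-in; real plans supply serde/IO info
        fileSinkDesc sink = new fileSinkDesc("/tmp/hive-out", tblInfo, true, 1);
        // illustrative codec/type values, not requirements
        sink.setCompressCodec("org.apache.hadoop.io.compress.GzipCodec");
        sink.setCompressType("BLOCK");
        System.out.println(sink.getDirName()
            + " compressed=" + sink.getCompressed());
      }
    }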

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java Thu Jan 21 10:37:58 2010
@@ -21,7 +21,7 @@
 import java.io.Serializable;
 import java.util.List;
 
-@explain(displayName="Filter Operator")
+@explain(displayName = "Filter Operator")
 public class filterDesc implements Serializable {
 
   /**
@@ -34,28 +34,25 @@
     // The denominator of the TABLESAMPLE clause
     private int denominator;
 
-    // buckets columns for the table
-    private List<String> tabBucketCols;
-
     // Input files can be pruned
     private boolean inputPruning;
 
     public sampleDesc() {
     }
 
-    public sampleDesc(int numerator, int denominator, List<String> tabBucketCols, boolean inputPruning) {
+    public sampleDesc(int numerator, int denominator,
+        List<String> tabBucketCols, boolean inputPruning) {
       this.numerator = numerator;
       this.denominator = denominator;
-      this.tabBucketCols = tabBucketCols;
       this.inputPruning = inputPruning;
     }
 
     public int getNumerator() {
-      return this.numerator;
+      return numerator;
     }
 
     public int getDenominator() {
-      return this.denominator;
+      return denominator;
     }
 
     public boolean getInputPruning() {
@@ -68,41 +65,49 @@
   private boolean isSamplingPred;
   private transient sampleDesc sampleDescr;
 
-  public filterDesc() { }
+  public filterDesc() {
+  }
+
   public filterDesc(
-    final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate, boolean isSamplingPred) {
+      final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate,
+      boolean isSamplingPred) {
     this.predicate = predicate;
     this.isSamplingPred = isSamplingPred;
-    this.sampleDescr = null;
+    sampleDescr = null;
   }
 
   public filterDesc(
-    final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate, boolean isSamplingPred, final sampleDesc sampleDescr) {
+      final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate,
+      boolean isSamplingPred, final sampleDesc sampleDescr) {
     this.predicate = predicate;
     this.isSamplingPred = isSamplingPred;
     this.sampleDescr = sampleDescr;
   }
 
-  @explain(displayName="predicate")
+  @explain(displayName = "predicate")
   public org.apache.hadoop.hive.ql.plan.exprNodeDesc getPredicate() {
-    return this.predicate;
+    return predicate;
   }
-  public void setPredicate(final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) {
+
+  public void setPredicate(
+      final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) {
     this.predicate = predicate;
   }
 
-  @explain(displayName="isSamplingPred", normalExplain=false)
+  @explain(displayName = "isSamplingPred", normalExplain = false)
   public boolean getIsSamplingPred() {
-    return this.isSamplingPred;
+    return isSamplingPred;
   }
+
   public void setIsSamplingPred(final boolean isSamplingPred) {
     this.isSamplingPred = isSamplingPred;
   }
 
-  @explain(displayName="sampleDesc", normalExplain=false)
+  @explain(displayName = "sampleDesc", normalExplain = false)
   public sampleDesc getSampleDescr() {
-    return this.sampleDescr;
+    return sampleDescr;
   }
+
   public void setSampleDescr(final sampleDesc sampleDescr) {
     this.sampleDescr = sampleDescr;
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java Thu Jan 21 10:37:58 2010
@@ -20,11 +20,13 @@
 
 import java.io.Serializable;
 
-@explain(displayName="Forward")
+@explain(displayName = "Forward")
 public class forwardDesc implements Serializable {
   private static final long serialVersionUID = 1L;
+
   @SuppressWarnings("nls")
   public forwardDesc() {
-    // throw new RuntimeException("This class does not need to be instantiated"); 
+    // throw new
+    // RuntimeException("This class does not need to be instantiated");
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java Thu Jan 21 10:37:58 2010
@@ -18,19 +18,24 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-@explain(displayName="Group By Operator")
+@explain(displayName = "Group By Operator")
 public class groupByDesc implements java.io.Serializable {
-  /** Group-by Mode:
-   *  COMPLETE: complete 1-phase aggregation: iterate, terminate
-   *  PARTIAL1: partial aggregation - first phase:  iterate, terminatePartial
-   *  PARTIAL2: partial aggregation - second phase: merge, terminatePartial
-   *  PARTIALS: For non-distinct the same as PARTIAL2, for distinct the same as PARTIAL1
-   *  FINAL: partial aggregation - final phase: merge, terminate
-   *  HASH: For non-distinct the same as PARTIAL1 but use hash-table-based aggregation
-   *  MERGEPARTIAL: FINAL for non-distinct aggregations, COMPLETE for distinct aggregations  
+  /**
+   * Group-by Mode: COMPLETE: complete 1-phase aggregation: iterate, terminate
+   * PARTIAL1: partial aggregation - first phase: iterate, terminatePartial
+   * PARTIAL2: partial aggregation - second phase: merge, terminatePartial
+   * PARTIALS: For non-distinct the same as PARTIAL2, for distinct the same as
+   * PARTIAL1 FINAL: partial aggregation - final phase: merge, terminate HASH:
+   * For non-distinct the same as PARTIAL1 but use hash-table-based aggregation
+   * MERGEPARTIAL: FINAL for non-distinct aggregations, COMPLETE for distinct
+   * aggregations
    */
   private static final long serialVersionUID = 1L;
-  public static enum Mode { COMPLETE, PARTIAL1, PARTIAL2, PARTIALS, FINAL, HASH, MERGEPARTIAL };
+
+  public static enum Mode {
+    COMPLETE, PARTIAL1, PARTIAL2, PARTIALS, FINAL, HASH, MERGEPARTIAL
+  };
+
   private Mode mode;
   private boolean groupKeyNotReductionKey;
   private boolean bucketGroup;
@@ -38,36 +43,41 @@
   private java.util.ArrayList<exprNodeDesc> keys;
   private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators;
   private java.util.ArrayList<java.lang.String> outputColumnNames;
-  public groupByDesc() { }
+
+  public groupByDesc() {
+  }
+
   public groupByDesc(
-    final Mode mode,
-    final java.util.ArrayList<java.lang.String> outputColumnNames,
-    final java.util.ArrayList<exprNodeDesc> keys,
-    final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators,
-    final boolean groupKeyNotReductionKey) {
-  	this(mode, outputColumnNames, keys, aggregators, groupKeyNotReductionKey, false);
+      final Mode mode,
+      final java.util.ArrayList<java.lang.String> outputColumnNames,
+      final java.util.ArrayList<exprNodeDesc> keys,
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators,
+      final boolean groupKeyNotReductionKey) {
+    this(mode, outputColumnNames, keys, aggregators, groupKeyNotReductionKey,
+        false);
   }
-  
+
   public groupByDesc(
       final Mode mode,
       final java.util.ArrayList<java.lang.String> outputColumnNames,
       final java.util.ArrayList<exprNodeDesc> keys,
       final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators,
-	    final boolean groupKeyNotReductionKey, final boolean bucketGroup) {
-		this.mode = mode;
-		this.outputColumnNames = outputColumnNames;
-		this.keys = keys;
-		this.aggregators = aggregators;
-		this.groupKeyNotReductionKey = groupKeyNotReductionKey;
-		this.bucketGroup = bucketGroup;
-	}
-  
+      final boolean groupKeyNotReductionKey, final boolean bucketGroup) {
+    this.mode = mode;
+    this.outputColumnNames = outputColumnNames;
+    this.keys = keys;
+    this.aggregators = aggregators;
+    this.groupKeyNotReductionKey = groupKeyNotReductionKey;
+    this.bucketGroup = bucketGroup;
+  }
+
   public Mode getMode() {
-    return this.mode;
+    return mode;
   }
-  @explain(displayName="mode")
+
+  @explain(displayName = "mode")
   public String getModeString() {
-    switch(mode) {
+    switch (mode) {
     case COMPLETE:
       return "complete";
     case PARTIAL1:
@@ -83,48 +93,57 @@
     case MERGEPARTIAL:
       return "mergepartial";
     }
-  
+
     return "unknown";
   }
+
   public void setMode(final Mode mode) {
     this.mode = mode;
   }
-  @explain(displayName="keys")
+
+  @explain(displayName = "keys")
   public java.util.ArrayList<exprNodeDesc> getKeys() {
-    return this.keys;
+    return keys;
   }
+
   public void setKeys(final java.util.ArrayList<exprNodeDesc> keys) {
     this.keys = keys;
   }
-  
-  @explain(displayName="outputColumnNames")
+
+  @explain(displayName = "outputColumnNames")
   public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
     return outputColumnNames;
   }
+
   public void setOutputColumnNames(
       java.util.ArrayList<java.lang.String> outputColumnNames) {
     this.outputColumnNames = outputColumnNames;
   }
-  
-  @explain(displayName="aggregations")
+
+  @explain(displayName = "aggregations")
   public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> getAggregators() {
-    return this.aggregators;
+    return aggregators;
   }
-  public void setAggregators(final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators) {
+
+  public void setAggregators(
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators) {
     this.aggregators = aggregators;
   }
 
   public boolean getGroupKeyNotReductionKey() {
-    return this.groupKeyNotReductionKey;
+    return groupKeyNotReductionKey;
   }
+
   public void setGroupKeyNotReductionKey(final boolean groupKeyNotReductionKey) {
     this.groupKeyNotReductionKey = groupKeyNotReductionKey;
   }
-  @explain(displayName="bucketGroup")
-	public boolean getBucketGroup() {
-  	return bucketGroup;
+
+  @explain(displayName = "bucketGroup")
+  public boolean getBucketGroup() {
+    return bucketGroup;
   }
-	public void setBucketGroup(boolean dataSorted) {
-  	this.bucketGroup = dataSorted;
+
+  public void setBucketGroup(boolean dataSorted) {
+    bucketGroup = dataSorted;
   }
 }
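
The reflowed javadoc above runs the mode descriptions together; per the original comment, the phases are: COMPLETE = complete 1-phase aggregation (iterate, terminate); PARTIAL1 = first phase (iterate, terminatePartial); PARTIAL2 = second phase (merge, terminatePartial); PARTIALS = PARTIAL2 for non-distinct, PARTIAL1 for distinct; FINAL = final phase (merge, terminate); HASH = PARTIAL1 with hash-table-based aggregation; MERGEPARTIAL = FINAL for non-distinct, COMPLETE for distinct. A tiny sketch of the mode-to-string mapping in getModeString:

    import org.apache.hadoop.hive.ql.plan.groupByDesc;

    public class GroupByModeSketch {
      public static void main(String[] args) {
        groupByDesc g = new groupByDesc();
        g.setMode(groupByDesc.Mode.MERGEPARTIAL);
        System.out.println(g.getModeString()); // prints "mergepartial"
      }
    }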

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java Thu Jan 21 10:37:58 2010
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-import java.util.Vector;
-import org.apache.hadoop.hive.ql.parse.joinType;
 
 /**
  * Join conditions Descriptor implementation.
@@ -33,58 +31,60 @@
   private int type;
   private boolean preserved;
 
-  public joinCond() {}
+  public joinCond() {
+  }
 
   public joinCond(int left, int right, int type) {
-    this.left  = left;
+    this.left = left;
     this.right = right;
-    this.type  = type;
+    this.type = type;
   }
 
   public joinCond(org.apache.hadoop.hive.ql.parse.joinCond condn) {
-    this.left       = condn.getLeft();
-    this.right      = condn.getRight();
-    this.preserved  = condn.getPreserved();
-    switch ( condn.getJoinType() ) {
+    left = condn.getLeft();
+    right = condn.getRight();
+    preserved = condn.getPreserved();
+    switch (condn.getJoinType()) {
     case INNER:
-      this.type = joinDesc.INNER_JOIN;
+      type = joinDesc.INNER_JOIN;
       break;
     case LEFTOUTER:
-      this.type = joinDesc.LEFT_OUTER_JOIN;
+      type = joinDesc.LEFT_OUTER_JOIN;
       break;
     case RIGHTOUTER:
-      this.type = joinDesc.RIGHT_OUTER_JOIN;
+      type = joinDesc.RIGHT_OUTER_JOIN;
       break;
     case FULLOUTER:
-      this.type = joinDesc.FULL_OUTER_JOIN;
+      type = joinDesc.FULL_OUTER_JOIN;
       break;
     case UNIQUE:
-      this.type = joinDesc.UNIQUE_JOIN;
+      type = joinDesc.UNIQUE_JOIN;
       break;
     case LEFTSEMI:
-      this.type = joinDesc.LEFT_SEMI_JOIN;
+      type = joinDesc.LEFT_SEMI_JOIN;
       break;
     default:
       assert false;
     }
   }
-  
+
   /**
    * @return true if table is preserved, false otherwise
    */
   public boolean getPreserved() {
-    return this.preserved;
+    return preserved;
   }
-  
+
   /**
-   * @param preserved if table is preserved, false otherwise
+   * @param preserved
+   *          if table is preserved, false otherwise
    */
   public void setPreserved(final boolean preserved) {
     this.preserved = preserved;
   }
-  
+
   public int getLeft() {
-    return this.left;
+    return left;
   }
 
   public void setLeft(final int left) {
@@ -92,7 +92,7 @@
   }
 
   public int getRight() {
-    return this.right;
+    return right;
   }
 
   public void setRight(final int right) {
@@ -100,18 +100,18 @@
   }
 
   public int getType() {
-    return this.type;
+    return type;
   }
 
   public void setType(final int type) {
     this.type = type;
   }
-  
+
   @explain
   public String getJoinCondString() {
     StringBuilder sb = new StringBuilder();
-    
-    switch(type) {
+
+    switch (type) {
     case joinDesc.INNER_JOIN:
       sb.append("Inner Join ");
       break;
@@ -134,11 +134,11 @@
       sb.append("Unknow Join ");
       break;
     }
-    
+
     sb.append(left);
     sb.append(" to ");
     sb.append(right);
-    
+
     return sb.toString();
   }
 }
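
getJoinCondString renders a condition for EXPLAIN output from the type constants defined in joinDesc. A minimal sketch, with the expected output taken from the switch above:

    import org.apache.hadoop.hive.ql.plan.joinCond;
    import org.apache.hadoop.hive.ql.plan.joinDesc;

    public class JoinCondSketch {
      public static void main(String[] args) {
        // plan-level condition: join table 0 to table 1
        joinCond c = new joinCond(0, 1, joinDesc.INNER_JOIN);
        System.out.println(c.getJoinCondString()); // "Inner Join 0 to 1"
      }
    }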

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java Thu Jan 21 10:37:58 2010
@@ -19,16 +19,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.mapred.SequenceFileInputFormat;
-
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -37,92 +28,96 @@
  * Join operator Descriptor implementation.
  * 
  */
-@explain(displayName="Join Operator")
+@explain(displayName = "Join Operator")
 public class joinDesc implements Serializable {
   private static final long serialVersionUID = 1L;
-  public static final int INNER_JOIN       = 0;
-  public static final int LEFT_OUTER_JOIN  = 1;
+  public static final int INNER_JOIN = 0;
+  public static final int LEFT_OUTER_JOIN = 1;
   public static final int RIGHT_OUTER_JOIN = 2;
-  public static final int FULL_OUTER_JOIN  = 3;
-  public static final int UNIQUE_JOIN      = 4;
-  public static final int LEFT_SEMI_JOIN   = 5;
+  public static final int FULL_OUTER_JOIN = 3;
+  public static final int UNIQUE_JOIN = 4;
+  public static final int LEFT_SEMI_JOIN = 5;
 
-  //used to handle skew join
+  // used to handle skew join
   private boolean handleSkewJoin = false;
   private int skewKeyDefinition = -1;
   private Map<Byte, String> bigKeysDirMap;
   private Map<Byte, Map<Byte, String>> smallKeysDirMap;
   private Map<Byte, tableDesc> skewKeysValuesTables;
-  
+
   // alias to key mapping
   private Map<Byte, List<exprNodeDesc>> exprs;
-  
-  //used for create joinOutputObjectInspector
+
+  // used for create joinOutputObjectInspector
   protected java.util.ArrayList<java.lang.String> outputColumnNames;
-  
+
   // key:column output name, value:tag
   transient private Map<String, Byte> reversedExprs;
-  
+
   // No outer join involved
   protected boolean noOuterJoin;
 
   protected joinCond[] conds;
-  
+
   protected Byte[] tagOrder;
   private tableDesc keyTableDesc;
-  
-  public joinDesc() { }
-  
-  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, ArrayList<String> outputColumnNames, final boolean noOuterJoin, final joinCond[] conds) {
+
+  public joinDesc() {
+  }
+
+  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs,
+      ArrayList<String> outputColumnNames, final boolean noOuterJoin,
+      final joinCond[] conds) {
     this.exprs = exprs;
     this.outputColumnNames = outputColumnNames;
     this.noOuterJoin = noOuterJoin;
     this.conds = conds;
-    
+
     tagOrder = new Byte[exprs.size()];
-    for(int i = 0; i<tagOrder.length; i++)
-    {
-      tagOrder[i] = (byte)i;
+    for (int i = 0; i < tagOrder.length; i++) {
+      tagOrder[i] = (byte) i;
     }
   }
-  
-  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, ArrayList<String> outputColumnNames) {
+
+  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs,
+      ArrayList<String> outputColumnNames) {
     this(exprs, outputColumnNames, true, null);
   }
 
-  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, ArrayList<String> outputColumnNames, final joinCond[] conds) {
+  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs,
+      ArrayList<String> outputColumnNames, final joinCond[] conds) {
     this(exprs, outputColumnNames, false, conds);
   }
-  
+
   public Map<Byte, List<exprNodeDesc>> getExprs() {
-    return this.exprs;
+    return exprs;
   }
-  
+
   public Map<String, Byte> getReversedExprs() {
     return reversedExprs;
   }
 
   public void setReversedExprs(Map<String, Byte> reversed_Exprs) {
-    this.reversedExprs = reversed_Exprs;
+    reversedExprs = reversed_Exprs;
   }
-  
-  @explain(displayName="condition expressions")
+
+  @explain(displayName = "condition expressions")
   public Map<Byte, String> getExprsStringMap() {
     if (getExprs() == null) {
       return null;
     }
-    
+
     LinkedHashMap<Byte, String> ret = new LinkedHashMap<Byte, String>();
-    
-    for(Map.Entry<Byte, List<exprNodeDesc>> ent: getExprs().entrySet()) {
+
+    for (Map.Entry<Byte, List<exprNodeDesc>> ent : getExprs().entrySet()) {
       StringBuilder sb = new StringBuilder();
       boolean first = true;
       if (ent.getValue() != null) {
-        for(exprNodeDesc expr: ent.getValue()) {
+        for (exprNodeDesc expr : ent.getValue()) {
           if (!first) {
             sb.append(" ");
           }
-          
+
           first = false;
           sb.append("{");
           sb.append(expr.getExprString());
@@ -131,15 +126,15 @@
       }
       ret.put(ent.getKey(), sb.toString());
     }
-    
+
     return ret;
   }
-  
+
   public void setExprs(final Map<Byte, List<exprNodeDesc>> exprs) {
     this.exprs = exprs;
   }
-  
-  @explain(displayName="outputColumnNames")
+
+  @explain(displayName = "outputColumnNames")
   public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
     return outputColumnNames;
   }
@@ -150,21 +145,21 @@
   }
 
   public boolean getNoOuterJoin() {
-    return this.noOuterJoin;
+    return noOuterJoin;
   }
 
   public void setNoOuterJoin(final boolean noOuterJoin) {
     this.noOuterJoin = noOuterJoin;
   }
 
-  @explain(displayName="condition map")
+  @explain(displayName = "condition map")
   public List<joinCond> getCondsList() {
     if (conds == null) {
       return null;
     }
 
     ArrayList<joinCond> l = new ArrayList<joinCond>();
-    for(joinCond cond: conds) {
+    for (joinCond cond : conds) {
       l.add(cond);
     }
 
@@ -172,7 +167,7 @@
   }
 
   public joinCond[] getConds() {
-    return this.conds;
+    return conds;
   }
 
   public void setConds(final joinCond[] conds) {
@@ -191,19 +186,21 @@
   /**
    * The order in which tables should be processed when joining
    * 
-   * @param tagOrder Array of tags
+   * @param tagOrder
+   *          Array of tags
    */
   public void setTagOrder(Byte[] tagOrder) {
     this.tagOrder = tagOrder;
   }
 
-  @explain(displayName="handleSkewJoin")
+  @explain(displayName = "handleSkewJoin")
   public boolean getHandleSkewJoin() {
     return handleSkewJoin;
   }
 
   /**
    * set to handle skew join in this join op
+   * 
    * @param handleSkewJoin
    */
   public void setHandleSkewJoin(boolean handleSkewJoin) {
@@ -219,6 +216,7 @@
 
   /**
    * set the mapping from tbl to dir for big keys
+   * 
    * @param bigKeysDirMap
    */
   public void setBigKeysDirMap(Map<Byte, String> bigKeysDirMap) {
@@ -234,6 +232,7 @@
 
   /**
    * set the mapping from tbl to dir for small keys
+   * 
    * @param bigKeysDirMap
    */
   public void setSmallKeysDirMap(Map<Byte, Map<Byte, String>> smallKeysDirMap) {
@@ -250,6 +249,7 @@
 
   /**
    * set skew key definition
+   * 
    * @param skewKeyDefinition
    */
   public void setSkewKeyDefinition(int skewKeyDefinition) {
@@ -264,24 +264,27 @@
   }
 
   /**
-   * @param skewKeysValuesTable set the table desc for storing skew keys and their corresponding value;
+   * @param skewKeysValuesTable
+   *          set the table desc for storing skew keys and their corresponding
+   *          value;
    */
   public void setSkewKeysValuesTables(Map<Byte, tableDesc> skewKeysValuesTables) {
     this.skewKeysValuesTables = skewKeysValuesTables;
   }
-  
+
   public boolean isNoOuterJoin() {
     for (org.apache.hadoop.hive.ql.plan.joinCond cond : conds) {
       if (cond.getType() == joinDesc.FULL_OUTER_JOIN
           || (cond.getType() == joinDesc.LEFT_OUTER_JOIN)
-          || cond.getType() == joinDesc.RIGHT_OUTER_JOIN)
+          || cond.getType() == joinDesc.RIGHT_OUTER_JOIN) {
         return false;
+      }
     }
     return true;
   }
 
   public void setKeyTableDesc(tableDesc keyTblDesc) {
-    this.keyTableDesc = keyTblDesc;    
+    keyTableDesc = keyTblDesc;
   }
 
   public tableDesc getKeyTableDesc() {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/lateralViewJoinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/lateralViewJoinDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/lateralViewJoinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/lateralViewJoinDesc.java Thu Jan 21 10:37:58 2010
@@ -21,22 +21,25 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 
-@explain(displayName="Lateral View Join Operator")
+@explain(displayName = "Lateral View Join Operator")
 public class lateralViewJoinDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private ArrayList<String> outputInternalColNames;
-  public lateralViewJoinDesc() { 
+
+  public lateralViewJoinDesc() {
   }
+
   public lateralViewJoinDesc(ArrayList<String> outputInternalColNames) {
     this.outputInternalColNames = outputInternalColNames;
   }
-  public void setOutputInternalColNames(
-      ArrayList<String> outputInternalColNames) {
+
+  public void setOutputInternalColNames(ArrayList<String> outputInternalColNames) {
     this.outputInternalColNames = outputInternalColNames;
   }
-  @explain(displayName="outputColumnNames")
+
+  @explain(displayName = "outputColumnNames")
   public ArrayList<String> getOutputInternalColNames() {
-    return this.outputInternalColNames;
+    return outputInternalColNames;
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java Thu Jan 21 10:37:58 2010
@@ -20,20 +20,24 @@
 
 import java.io.Serializable;
 
-@explain(displayName="Limit")
+@explain(displayName = "Limit")
 public class limitDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private int limit;
-  public limitDesc() { }
+
+  public limitDesc() {
+  }
+
   public limitDesc(final int limit) {
     this.limit = limit;
   }
 
   public int getLimit() {
-    return this.limit;
+    return limit;
   }
+
   public void setLimit(final int limit) {
-    this.limit=limit;
+    this.limit = limit;
   }
 
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java Thu Jan 21 10:37:58 2010
@@ -24,18 +24,20 @@
   private static final long serialVersionUID = 1L;
   private String sourceDir;
 
-  public loadDesc() { }
-  public loadDesc(
-    final String sourceDir) {
+  public loadDesc() {
+  }
+
+  public loadDesc(final String sourceDir) {
 
     this.sourceDir = sourceDir;
   }
-  
-  @explain(displayName="source", normalExplain=false)
+
+  @explain(displayName = "source", normalExplain = false)
   public String getSourceDir() {
-    return this.sourceDir;
+    return sourceDir;
   }
+
   public void setSourceDir(final String source) {
-    this.sourceDir = source;
+    sourceDir = source;
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java Thu Jan 21 10:37:58 2010
@@ -20,23 +20,19 @@
 
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.ql.plan.loadDesc;
-
 public class loadFileDesc extends loadDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String targetDir;
   private boolean isDfsDir;
   // list of columns, comma separated
-  private String  columns;
-  private String  columnTypes;
+  private String columns;
+  private String columnTypes;
+
+  public loadFileDesc() {
+  }
 
-  public loadFileDesc() { }
-  public loadFileDesc(
-    final String sourceDir,
-    final String targetDir,
-    final boolean isDfsDir, 
-    final String  columns,
-    final String  columnTypes) {
+  public loadFileDesc(final String sourceDir, final String targetDir,
+      final boolean isDfsDir, final String columns, final String columnTypes) {
 
     super(sourceDir);
     this.targetDir = targetDir;
@@ -44,44 +40,50 @@
     this.columns = columns;
     this.columnTypes = columnTypes;
   }
-  
-  @explain(displayName="destination")
+
+  @explain(displayName = "destination")
   public String getTargetDir() {
-    return this.targetDir;
+    return targetDir;
   }
+
   public void setTargetDir(final String targetDir) {
-    this.targetDir=targetDir;
+    this.targetDir = targetDir;
   }
-  
-  @explain(displayName="hdfs directory")
+
+  @explain(displayName = "hdfs directory")
   public boolean getIsDfsDir() {
-    return this.isDfsDir;
+    return isDfsDir;
   }
+
   public void setIsDfsDir(final boolean isDfsDir) {
     this.isDfsDir = isDfsDir;
   }
-  
-	/**
-	 * @return the columns
-	 */
-	public String getColumns() {
-		return columns;
-	}
-	
-	/**
-	 * @param columns the columns to set
-	 */
-	public void setColumns(String columns) {
-		this.columns = columns;
-	}
+
+  /**
+   * @return the columns
+   */
+  public String getColumns() {
+    return columns;
+  }
+
+  /**
+   * @param columns
+   *          the columns to set
+   */
+  public void setColumns(String columns) {
+    this.columns = columns;
+  }
+
   /**
    * @return the columnTypes
    */
   public String getColumnTypes() {
     return columnTypes;
   }
+
   /**
-   * @param columnTypes the columnTypes to set
+   * @param columnTypes
+   *          the columnTypes to set
    */
   public void setColumnTypes(String columnTypes) {
     this.columnTypes = columnTypes;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java Thu Jan 21 10:37:58 2010
@@ -21,7 +21,8 @@
 import java.io.Serializable;
 import java.util.HashMap;
 
-public class loadTableDesc extends org.apache.hadoop.hive.ql.plan.loadDesc implements Serializable {
+public class loadTableDesc extends org.apache.hadoop.hive.ql.plan.loadDesc
+    implements Serializable {
   private static final long serialVersionUID = 1L;
   private boolean replace;
   private String tmpDir;
@@ -30,13 +31,12 @@
   private org.apache.hadoop.hive.ql.plan.tableDesc table;
   private HashMap<String, String> partitionSpec;
 
-  public loadTableDesc() { }
-  public loadTableDesc(
-    final String sourceDir,
-    final String tmpDir,
-    final org.apache.hadoop.hive.ql.plan.tableDesc table,
-    final HashMap<String, String> partitionSpec,
-    final boolean replace) {
+  public loadTableDesc() {
+  }
+
+  public loadTableDesc(final String sourceDir, final String tmpDir,
+      final org.apache.hadoop.hive.ql.plan.tableDesc table,
+      final HashMap<String, String> partitionSpec, final boolean replace) {
 
     super(sourceDir);
     this.tmpDir = tmpDir;
@@ -44,42 +44,45 @@
     this.partitionSpec = partitionSpec;
     this.replace = replace;
   }
-  public loadTableDesc(
-    final String sourceDir,
-    final String tmpDir,
-    final org.apache.hadoop.hive.ql.plan.tableDesc table,
-    final HashMap<String, String> partitionSpec) {
+
+  public loadTableDesc(final String sourceDir, final String tmpDir,
+      final org.apache.hadoop.hive.ql.plan.tableDesc table,
+      final HashMap<String, String> partitionSpec) {
     this(sourceDir, tmpDir, table, partitionSpec, true);
   }
 
-  @explain(displayName="tmp directory", normalExplain=false)
+  @explain(displayName = "tmp directory", normalExplain = false)
   public String getTmpDir() {
-    return this.tmpDir;
+    return tmpDir;
   }
+
   public void setTmpDir(final String tmp) {
-    this.tmpDir = tmp;
+    tmpDir = tmp;
   }
 
-  @explain(displayName="table")
+  @explain(displayName = "table")
   public tableDesc getTable() {
-    return this.table;
+    return table;
   }
+
   public void setTable(final org.apache.hadoop.hive.ql.plan.tableDesc table) {
     this.table = table;
   }
-  
-  @explain(displayName="partition")
+
+  @explain(displayName = "partition")
   public HashMap<String, String> getPartitionSpec() {
-    return this.partitionSpec;
+    return partitionSpec;
   }
+
   public void setPartitionSpec(final HashMap<String, String> partitionSpec) {
     this.partitionSpec = partitionSpec;
   }
 
-  @explain(displayName="replace")
+  @explain(displayName = "replace")
   public boolean getReplace() {
     return replace;
   }
+
   public void setReplace(boolean replace) {
     this.replace = replace;
   }

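A sketch of the constructor delegation visible above (not part of this commit): the four-argument form supplies replace == true, i.e. INSERT OVERWRITE semantics. The tableDesc is assumed to be built elsewhere, and the partition spec is hypothetical.

    import java.util.HashMap;
    import org.apache.hadoop.hive.ql.plan.loadTableDesc;
    import org.apache.hadoop.hive.ql.plan.tableDesc;

    public class LoadTableDescSketch {
      public static loadTableDesc overwritePartition(tableDesc table) {
        HashMap<String, String> partSpec = new HashMap<String, String>();
        partSpec.put("ds", "2010-01-21");  // hypothetical partition column -> value
        // no replace flag passed, so the delegating constructor supplies true
        return new loadTableDesc("/tmp/src", "/tmp/scratch", table, partSpec);
      }
    }
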
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java Thu Jan 21 10:37:58 2010
@@ -19,9 +19,6 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -34,30 +31,28 @@
  * Map Join operator Descriptor implementation.
  * 
  */
-@explain(displayName="Common Join Operator")
+@explain(displayName = "Common Join Operator")
 public class mapJoinDesc extends joinDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private Map<Byte, List<exprNodeDesc>> keys;
   private tableDesc keyTblDesc;
   private List<tableDesc> valueTblDescs;
-  
+
   private int posBigTable;
-  
+
   private Map<Byte, List<Integer>> retainList;
-  
-  public mapJoinDesc() { }
 
-  public mapJoinDesc(final Map<Byte, List<exprNodeDesc>> keys, 
-                     final tableDesc keyTblDesc, 
-                     final Map<Byte, List<exprNodeDesc>> values,
-                     final List<tableDesc> valueTblDescs,
-                     ArrayList<String> outputColumnNames,
-                     final int posBigTable,
-                     final joinCond[] conds) {
+  public mapJoinDesc() {
+  }
+
+  public mapJoinDesc(final Map<Byte, List<exprNodeDesc>> keys,
+      final tableDesc keyTblDesc, final Map<Byte, List<exprNodeDesc>> values,
+      final List<tableDesc> valueTblDescs, ArrayList<String> outputColumnNames,
+      final int posBigTable, final joinCond[] conds) {
     super(values, outputColumnNames, conds);
-    this.keys        = keys;
-    this.keyTblDesc  = keyTblDesc;
+    this.keys = keys;
+    this.keyTblDesc = keyTblDesc;
     this.valueTblDescs = valueTblDescs;
     this.posBigTable = posBigTable;
     initRetainExprList();
@@ -76,7 +71,7 @@
       retainList.put(current.getKey(), list);
     }
   }
-  
+
   public Map<Byte, List<Integer>> getRetainList() {
     return retainList;
   }
@@ -84,17 +79,18 @@
   public void setRetainList(Map<Byte, List<Integer>> retainList) {
     this.retainList = retainList;
   }
-  
+
   /**
    * @return the keys
    */
-  @explain(displayName="keys")
+  @explain(displayName = "keys")
   public Map<Byte, List<exprNodeDesc>> getKeys() {
     return keys;
   }
 
   /**
-   * @param keys the keys to set
+   * @param keys
+   *          the keys to set
    */
   public void setKeys(Map<Byte, List<exprNodeDesc>> keys) {
     this.keys = keys;
@@ -103,13 +99,14 @@
   /**
    * @return the position of the big table not in memory
    */
-  @explain(displayName="Position of Big Table")
+  @explain(displayName = "Position of Big Table")
   public int getPosBigTable() {
     return posBigTable;
   }
 
   /**
-   * @param posBigTable the position of the big table not in memory
+   * @param posBigTable
+   *          the position of the big table not in memory
    */
   public void setPosBigTable(int posBigTable) {
     this.posBigTable = posBigTable;
@@ -123,7 +120,8 @@
   }
 
   /**
-   * @param keyTblDesc the keyTblDesc to set
+   * @param keyTblDesc
+   *          the keyTblDesc to set
    */
   public void setKeyTblDesc(tableDesc keyTblDesc) {
     this.keyTblDesc = keyTblDesc;
@@ -137,7 +135,8 @@
   }
 
   /**
-   * @param valueTblDescs the valueTblDescs to set
+   * @param valueTblDescs
+   *          the valueTblDescs to set
    */
   public void setValueTblDescs(List<tableDesc> valueTblDescs) {
     this.valueTblDescs = valueTblDescs;

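A sketch of the tag-keyed maps the mapJoinDesc constructor above expects (not part of this commit; all arguments are assumed to come from the planner, and the byte tags simply number the tables on each side of the join):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.joinCond;
    import org.apache.hadoop.hive.ql.plan.mapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.tableDesc;

    public class MapJoinDescSketch {
      public static mapJoinDesc twoWay(List<exprNodeDesc> bigKeys,
          List<exprNodeDesc> smallKeys, tableDesc keyTblDesc,
          Map<Byte, List<exprNodeDesc>> values, List<tableDesc> valueTblDescs,
          ArrayList<String> outputColumnNames, joinCond[] conds) {
        Map<Byte, List<exprNodeDesc>> keys = new HashMap<Byte, List<exprNodeDesc>>();
        keys.put((byte) 0, bigKeys);    // tag 0: the streamed (big) table
        keys.put((byte) 1, smallKeys);  // tag 1: the table held in memory
        // posBigTable == 0 marks tag 0 as the table not kept in memory
        return new mapJoinDesc(keys, keyTblDesc, values, valueTblDescs,
            outputColumnNames, 0, conds);
      }
    }
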
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredLocalWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredLocalWork.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredLocalWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredLocalWork.java Thu Jan 21 10:37:58 2010
@@ -18,47 +18,52 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.util.*;
-import java.io.*;
+import java.io.Serializable;
+import java.util.LinkedHashMap;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 
-@explain(displayName="Map Reduce Local Work")
+@explain(displayName = "Map Reduce Local Work")
 public class mapredLocalWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork;
   private LinkedHashMap<String, fetchWork> aliasToFetchWork;
 
-  public mapredLocalWork() { }
+  public mapredLocalWork() {
+  }
 
-  public mapredLocalWork(final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
-                         final LinkedHashMap<String, fetchWork> aliasToFetchWork) {
+  public mapredLocalWork(
+      final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
+      final LinkedHashMap<String, fetchWork> aliasToFetchWork) {
     this.aliasToWork = aliasToWork;
     this.aliasToFetchWork = aliasToFetchWork;
   }
 
-  @explain(displayName="Alias -> Map Local Operator Tree")
+  @explain(displayName = "Alias -> Map Local Operator Tree")
   public LinkedHashMap<String, Operator<? extends Serializable>> getAliasToWork() {
     return aliasToWork;
   }
 
-  public void setAliasToWork(final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork) {
+  public void setAliasToWork(
+      final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork) {
     this.aliasToWork = aliasToWork;
   }
 
   /**
    * @return the aliasToFetchWork
    */
-  @explain(displayName="Alias -> Map Local Tables")
+  @explain(displayName = "Alias -> Map Local Tables")
   public LinkedHashMap<String, fetchWork> getAliasToFetchWork() {
     return aliasToFetchWork;
   }
 
   /**
-   * @param aliasToFetchWork the aliasToFetchWork to set
+   * @param aliasToFetchWork
+   *          the aliasToFetchWork to set
    */
-  public void setAliasToFetchWork(final LinkedHashMap<String, fetchWork> aliasToFetchWork) {
+  public void setAliasToFetchWork(
+      final LinkedHashMap<String, fetchWork> aliasToFetchWork) {
     this.aliasToFetchWork = aliasToFetchWork;
   }
 }

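A sketch of wiring one map-local alias into the class above (not part of this commit; the operator tree and fetchWork are assumed to be built elsewhere, and the alias "b" is a hypothetical small-table alias):

    import java.io.Serializable;
    import java.util.LinkedHashMap;
    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.plan.fetchWork;
    import org.apache.hadoop.hive.ql.plan.mapredLocalWork;

    public class MapredLocalWorkSketch {
      public static mapredLocalWork oneAlias(
          Operator<? extends Serializable> localTree, fetchWork fetch) {
        LinkedHashMap<String, Operator<? extends Serializable>> work =
            new LinkedHashMap<String, Operator<? extends Serializable>>();
        work.put("b", localTree);  // alias -> map-local operator tree
        LinkedHashMap<String, fetchWork> fetches =
            new LinkedHashMap<String, fetchWork>();
        fetches.put("b", fetch);   // same alias -> how to read its rows locally
        return new mapredLocalWork(work, fetches);
      }
    }
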
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java Thu Jan 21 10:37:58 2010
@@ -18,25 +18,29 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.util.*;
-import java.io.*;
+import java.io.ByteArrayOutputStream;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 
-@explain(displayName="Map Reduce")
+@explain(displayName = "Map Reduce")
 public class mapredWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private String command;
   // map side work
-  //   use LinkedHashMap to make sure the iteration order is
-  //   deterministic, to ease testing
-  private LinkedHashMap<String,ArrayList<String>> pathToAliases;
-  
-  private LinkedHashMap<String,partitionDesc> pathToPartitionInfo;
-  
-  private LinkedHashMap<String,Operator<? extends Serializable>> aliasToWork;
-  
+  // use LinkedHashMap to make sure the iteration order is
+  // deterministic, to ease testing
+  private LinkedHashMap<String, ArrayList<String>> pathToAliases;
+
+  private LinkedHashMap<String, partitionDesc> pathToPartitionInfo;
+
+  private LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork;
+
   private LinkedHashMap<String, partitionDesc> aliasToPartnInfo;
 
   // map<->reduce interface
@@ -47,117 +51,125 @@
   private List<tableDesc> tagToValueDesc;
 
   private Operator<?> reducer;
-  
+
   private Integer numReduceTasks;
-  
+
   private boolean needsTagging;
   private mapredLocalWork mapLocalWork;
 
-  public mapredWork() { 
-    this.aliasToPartnInfo = new LinkedHashMap<String, partitionDesc>();
+  public mapredWork() {
+    aliasToPartnInfo = new LinkedHashMap<String, partitionDesc>();
   }
 
   public mapredWork(
-    final String command,
-    final LinkedHashMap<String,ArrayList<String>> pathToAliases,
-    final LinkedHashMap<String,partitionDesc> pathToPartitionInfo,
-    final LinkedHashMap<String,Operator<? extends Serializable>> aliasToWork,
-    final tableDesc keyDesc,
-    List<tableDesc> tagToValueDesc,
-    final Operator<?> reducer,
-    final Integer numReduceTasks,
-    final mapredLocalWork mapLocalWork) {
-      this.command = command;
-      this.pathToAliases = pathToAliases;
-      this.pathToPartitionInfo = pathToPartitionInfo;
-      this.aliasToWork = aliasToWork;
-      this.keyDesc = keyDesc;
-      this.tagToValueDesc = tagToValueDesc;
-      this.reducer = reducer;
-      this.numReduceTasks = numReduceTasks;
-      this.mapLocalWork = mapLocalWork;
-      this.aliasToPartnInfo = new LinkedHashMap<String, partitionDesc>();
+      final String command,
+      final LinkedHashMap<String, ArrayList<String>> pathToAliases,
+      final LinkedHashMap<String, partitionDesc> pathToPartitionInfo,
+      final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
+      final tableDesc keyDesc, List<tableDesc> tagToValueDesc,
+      final Operator<?> reducer, final Integer numReduceTasks,
+      final mapredLocalWork mapLocalWork) {
+    this.command = command;
+    this.pathToAliases = pathToAliases;
+    this.pathToPartitionInfo = pathToPartitionInfo;
+    this.aliasToWork = aliasToWork;
+    this.keyDesc = keyDesc;
+    this.tagToValueDesc = tagToValueDesc;
+    this.reducer = reducer;
+    this.numReduceTasks = numReduceTasks;
+    this.mapLocalWork = mapLocalWork;
+    aliasToPartnInfo = new LinkedHashMap<String, partitionDesc>();
   }
 
   public String getCommand() {
-    return this.command;
+    return command;
   }
+
   public void setCommand(final String command) {
     this.command = command;
   }
 
-  @explain(displayName="Path -> Alias", normalExplain=false)
-  public LinkedHashMap<String,ArrayList<String>> getPathToAliases() {
-    return this.pathToAliases;
+  @explain(displayName = "Path -> Alias", normalExplain = false)
+  public LinkedHashMap<String, ArrayList<String>> getPathToAliases() {
+    return pathToAliases;
   }
-  public void setPathToAliases(final LinkedHashMap<String,ArrayList<String>> pathToAliases) {
+
+  public void setPathToAliases(
+      final LinkedHashMap<String, ArrayList<String>> pathToAliases) {
     this.pathToAliases = pathToAliases;
   }
 
-  @explain(displayName="Path -> Partition", normalExplain=false)
-  public LinkedHashMap<String,partitionDesc> getPathToPartitionInfo() {
-    return this.pathToPartitionInfo;
+  @explain(displayName = "Path -> Partition", normalExplain = false)
+  public LinkedHashMap<String, partitionDesc> getPathToPartitionInfo() {
+    return pathToPartitionInfo;
   }
 
-  public void setPathToPartitionInfo(final LinkedHashMap<String,partitionDesc> pathToPartitionInfo) {
+  public void setPathToPartitionInfo(
+      final LinkedHashMap<String, partitionDesc> pathToPartitionInfo) {
     this.pathToPartitionInfo = pathToPartitionInfo;
   }
-  
+
   /**
    * @return the aliasToPartnInfo
    */
   public LinkedHashMap<String, partitionDesc> getAliasToPartnInfo() {
     return aliasToPartnInfo;
   }
-  
+
   /**
-   * @param aliasToPartnInfo the aliasToPartnInfo to set
+   * @param aliasToPartnInfo
+   *          the aliasToPartnInfo to set
    */
   public void setAliasToPartnInfo(
       LinkedHashMap<String, partitionDesc> aliasToPartnInfo) {
     this.aliasToPartnInfo = aliasToPartnInfo;
   }
-  
-  @explain(displayName="Alias -> Map Operator Tree")
+
+  @explain(displayName = "Alias -> Map Operator Tree")
   public LinkedHashMap<String, Operator<? extends Serializable>> getAliasToWork() {
-    return this.aliasToWork;
-  }
-  public void setAliasToWork(final LinkedHashMap<String,Operator<? extends Serializable>> aliasToWork) {
-    this.aliasToWork=aliasToWork;
+    return aliasToWork;
   }
 
+  public void setAliasToWork(
+      final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork) {
+    this.aliasToWork = aliasToWork;
+  }
 
   /**
    * @return the mapredLocalWork
    */
-  @explain(displayName="Local Work")
+  @explain(displayName = "Local Work")
   public mapredLocalWork getMapLocalWork() {
     return mapLocalWork;
   }
 
   /**
-   * @param mapLocalWork the mapredLocalWork to set
+   * @param mapLocalWork
+   *          the mapredLocalWork to set
    */
   public void setMapLocalWork(final mapredLocalWork mapLocalWork) {
     this.mapLocalWork = mapLocalWork;
   }
 
   public tableDesc getKeyDesc() {
-    return this.keyDesc;
+    return keyDesc;
   }
+
   public void setKeyDesc(final tableDesc keyDesc) {
     this.keyDesc = keyDesc;
   }
+
   public List<tableDesc> getTagToValueDesc() {
     return tagToValueDesc;
   }
+
   public void setTagToValueDesc(final List<tableDesc> tagToValueDesc) {
     this.tagToValueDesc = tagToValueDesc;
   }
 
-  @explain(displayName="Reduce Operator Tree")
+  @explain(displayName = "Reduce Operator Tree")
   public Operator<?> getReducer() {
-    return this.reducer;
+    return reducer;
   }
 
   public void setReducer(final Operator<?> reducer) {
@@ -165,59 +177,62 @@
   }
 
   /**
-   * If the number of reducers is -1, the runtime will automatically 
-   * figure it out by input data size.
+   * If the number of reducers is -1, the runtime will automatically figure it
+   * out by input data size.
    * 
-   * The number of reducers will be a positive number only in case the
-   * target table is bucketed into N buckets (through CREATE TABLE).
-   * This feature is not supported yet, so the number of reducers will 
-   * always be -1 for now.
+   * The number of reducers will be a positive number only in case the target
+   * table is bucketed into N buckets (through CREATE TABLE). This feature is
+   * not supported yet, so the number of reducers will always be -1 for now.
    */
   public Integer getNumReduceTasks() {
-    return this.numReduceTasks;
+    return numReduceTasks;
   }
+
   public void setNumReduceTasks(final Integer numReduceTasks) {
     this.numReduceTasks = numReduceTasks;
   }
+
   @SuppressWarnings("nls")
-  public void  addMapWork(String path, String alias, Operator<?> work, partitionDesc pd) {
-    ArrayList<String> curAliases = this.pathToAliases.get(path);
-    if(curAliases == null) {
-      assert(this.pathToPartitionInfo.get(path) == null);
-      curAliases = new ArrayList<String> ();
-      this.pathToAliases.put(path, curAliases);
-      this.pathToPartitionInfo.put(path, pd);
+  public void addMapWork(String path, String alias, Operator<?> work,
+      partitionDesc pd) {
+    ArrayList<String> curAliases = pathToAliases.get(path);
+    if (curAliases == null) {
+      assert (pathToPartitionInfo.get(path) == null);
+      curAliases = new ArrayList<String>();
+      pathToAliases.put(path, curAliases);
+      pathToPartitionInfo.put(path, pd);
     } else {
-      assert(this.pathToPartitionInfo.get(path) != null);
+      assert (pathToPartitionInfo.get(path) != null);
     }
 
-    for(String oneAlias: curAliases) {
-      if(oneAlias.equals(alias)) {
-        throw new RuntimeException ("Multiple aliases named: " + alias + " for path: " + path);
+    for (String oneAlias : curAliases) {
+      if (oneAlias.equals(alias)) {
+        throw new RuntimeException("Multiple aliases named: " + alias
+            + " for path: " + path);
       }
     }
     curAliases.add(alias);
 
-    if(this.aliasToWork.get(alias) != null) {
-      throw new RuntimeException ("Existing work for alias: " + alias);
+    if (aliasToWork.get(alias) != null) {
+      throw new RuntimeException("Existing work for alias: " + alias);
     }
-    this.aliasToWork.put(alias, work);
+    aliasToWork.put(alias, work);
   }
 
   @SuppressWarnings("nls")
-  public String isInvalid () {
-    if((getNumReduceTasks() >= 1) && (getReducer() == null)) {
+  public String isInvalid() {
+    if ((getNumReduceTasks() >= 1) && (getReducer() == null)) {
       return "Reducers > 0 but no reduce operator";
     }
 
-    if((getNumReduceTasks() == 0) && (getReducer() != null)) {
+    if ((getNumReduceTasks() == 0) && (getReducer() != null)) {
       return "Reducers == 0 but reduce operator specified";
     }
 
     return null;
   }
 
-  public String toXML () {
+  public String toXML() {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     Utilities.serializeMapRedWork(this, baos);
     return (baos.toString());
@@ -226,13 +241,13 @@
   // non bean
 
   /**
-   * For each map side operator - stores the alias the operator is working on behalf
-   * of in the operator runtime state. This is used by reducesink operator - but could
-   * be useful for debugging as well.
-   */
-  private void setAliases () {
-    for(String oneAlias: this.aliasToWork.keySet()) {
-      this.aliasToWork.get(oneAlias).setAlias(oneAlias);
+   * For each map side operator - stores the alias the operator is working on
+   * behalf of in the operator runtime state. This is used by reducesink
+   * operator - but could be useful for debugging as well.
+   */
+  private void setAliases() {
+    for (String oneAlias : aliasToWork.keySet()) {
+      aliasToWork.get(oneAlias).setAlias(oneAlias);
     }
   }
 
@@ -240,24 +255,24 @@
    * Derive additional attributes to be rendered by EXPLAIN.
    */
   public void deriveExplainAttributes() {
-    if (this.pathToPartitionInfo == null) {
+    if (pathToPartitionInfo == null) {
       return;
     }
-    for (Map.Entry<String,partitionDesc> entry
-           : this.pathToPartitionInfo.entrySet()) {
+    for (Map.Entry<String, partitionDesc> entry : pathToPartitionInfo
+        .entrySet()) {
       entry.getValue().deriveBaseFileName(entry.getKey());
     }
   }
 
-  public void initialize () {
+  public void initialize() {
     setAliases();
   }
 
-  @explain(displayName="Needs Tagging", normalExplain=false)
+  @explain(displayName = "Needs Tagging", normalExplain = false)
   public boolean getNeedsTagging() {
-    return this.needsTagging;
+    return needsTagging;
   }
-  
+
   public void setNeedsTagging(boolean needsTagging) {
     this.needsTagging = needsTagging;
   }

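A sketch of the consistency rule isInvalid() enforces above (not part of this commit; the plan is otherwise empty and would not run, it only demonstrates the reducer-count rules described in the javadoc):

    import org.apache.hadoop.hive.ql.plan.mapredWork;

    public class MapredWorkSketch {
      public static void main(String[] args) {
        mapredWork w = new mapredWork();
        w.setNumReduceTasks(Integer.valueOf(2));  // reducers requested...
        System.out.println(w.isInvalid());        // ...but no reduce operator: error string
        w.setNumReduceTasks(Integer.valueOf(-1)); // -1: runtime sizes the reduce phase itself
        System.out.println(w.isInvalid());        // null: plan is consistent
      }
    }
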
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java Thu Jan 21 10:37:58 2010
@@ -18,14 +18,13 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.*;
+import java.io.Serializable;
+import java.util.Set;
 
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import java.util.Set;
 
-
-@explain(displayName="Move Operator")
+@explain(displayName = "Move Operator")
 public class moveWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private loadTableDesc loadTableWork;
@@ -50,36 +49,37 @@
     this.outputs = outputs;
   }
 
-  public moveWork(
-    Set<ReadEntity> inputs,
-    Set<WriteEntity> outputs,
-    final loadTableDesc loadTableWork,
-    final loadFileDesc loadFileWork,
-    boolean checkFileFormat) {
+  public moveWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
+      final loadTableDesc loadTableWork, final loadFileDesc loadFileWork,
+      boolean checkFileFormat) {
     this(inputs, outputs);
     this.loadTableWork = loadTableWork;
     this.loadFileWork = loadFileWork;
     this.checkFileFormat = checkFileFormat;
   }
-  @explain(displayName="tables")
+
+  @explain(displayName = "tables")
   public loadTableDesc getLoadTableWork() {
-    return this.loadTableWork;
+    return loadTableWork;
   }
+
   public void setLoadTableWork(final loadTableDesc loadTableWork) {
     this.loadTableWork = loadTableWork;
   }
 
-  @explain(displayName="files")
+  @explain(displayName = "files")
   public loadFileDesc getLoadFileWork() {
-    return this.loadFileWork;
+    return loadFileWork;
   }
+
   public void setLoadFileWork(final loadFileDesc loadFileWork) {
-    this.loadFileWork=loadFileWork;
+    this.loadFileWork = loadFileWork;
   }
 
   public boolean getCheckFileFormat() {
     return checkFileFormat;
   }
+
   public void setCheckFileFormat(boolean checkFileFormat) {
     this.checkFileFormat = checkFileFormat;
   }

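A sketch of a table-load move using the constructors above (not part of this commit; the loadTableDesc is assumed built as in the earlier loadTableDesc sketch, and passing null for the file-load half reflects the assumption that a move carries one kind of load at a time):

    import java.util.HashSet;
    import java.util.Set;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.plan.loadTableDesc;
    import org.apache.hadoop.hive.ql.plan.moveWork;

    public class MoveWorkSketch {
      public static moveWork tableMove(loadTableDesc ltd) {
        Set<ReadEntity> inputs = new HashSet<ReadEntity>();
        Set<WriteEntity> outputs = new HashSet<WriteEntity>();
        // checkFileFormat == true: verify the files match the table's format first
        return new moveWork(inputs, outputs, ltd, null, true);
      }
    }
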

