hadoop-hive-commits mailing list archives

From: zs...@apache.org
Subject: svn commit: r901644 [36/37] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ ql/src/java/org/apache/hadoop/hive/ql/history/ ql/src/jav...
Date: Thu, 21 Jan 2010 10:38:15 GMT
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Thu Jan 21 10:37:58 2010
@@ -53,16 +53,17 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.TextInputFormat;
 
-
 /**
- * Mimics the actual query compiler in generating end to end plans and testing them out
- *
+ * Mimics the actual query compiler in generating end to end plans and testing
+ * them out
+ * 
  */
 public class TestExecDriver extends TestCase {
 
   static HiveConf conf;
 
-  static private String tmpdir = "/tmp/"+System.getProperty("user.name")+"/";
+  static private String tmpdir = "/tmp/" + System.getProperty("user.name")
+      + "/";
   static private Path tmppath = new Path(tmpdir);
   static private Hive db;
   static private FileSystem fs;
@@ -72,31 +73,33 @@
       conf = new HiveConf(ExecDriver.class);
 
       fs = FileSystem.get(conf);
-      if(fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
-        throw new RuntimeException (tmpdir + " exists but is not a directory");
+      if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
+        throw new RuntimeException(tmpdir + " exists but is not a directory");
       }
 
-      if(!fs.exists(tmppath)) {
-        if(!fs.mkdirs(tmppath)) {
-          throw new RuntimeException ("Could not make scratch directory " + tmpdir);
+      if (!fs.exists(tmppath)) {
+        if (!fs.mkdirs(tmppath)) {
+          throw new RuntimeException("Could not make scratch directory "
+              + tmpdir);
         }
       }
 
-      for(Object one: Utilities.makeList("mapplan1.out", "mapplan2.out",
-                                         "mapredplan1.out", "mapredplan2.out", "mapredplan3.out", "mapredplan4.out",
-                                         "mapredplan5.out", "mapredplan6.out")) {
-        Path onedir = new Path(tmppath, (String)one);
-        if(fs.exists(onedir)) {
+      for (Object one : Utilities.makeList("mapplan1.out", "mapplan2.out",
+          "mapredplan1.out", "mapredplan2.out", "mapredplan3.out",
+          "mapredplan4.out", "mapredplan5.out", "mapredplan6.out")) {
+        Path onedir = new Path(tmppath, (String) one);
+        if (fs.exists(onedir)) {
           fs.delete(onedir, true);
         }
       }
 
       // copy the test files into hadoop if required.
       int i = 0;
-      Path [] hadoopDataFile = new Path [2];
-      String [] testFiles = {"kv1.txt", "kv2.txt"};
-      String testFileDir = "file://" + conf.get("test.data.files").replace('\\', '/').replace("c:", "");
-      for(String oneFile: testFiles) {
+      Path[] hadoopDataFile = new Path[2];
+      String[] testFiles = { "kv1.txt", "kv2.txt" };
+      String testFileDir = "file://"
+          + conf.get("test.data.files").replace('\\', '/').replace("c:", "");
+      for (String oneFile : testFiles) {
         Path localDataFile = new Path(testFileDir, oneFile);
         hadoopDataFile[i] = new Path(tmppath, oneFile);
         fs.copyFromLocalFile(false, true, localDataFile, hadoopDataFile[i]);
@@ -106,78 +109,77 @@
       // load the test files into tables
       i = 0;
       db = Hive.get(conf);
-      String [] srctables = {"src", "src2"};
+      String[] srctables = { "src", "src2" };
       LinkedList<String> cols = new LinkedList<String>();
       cols.add("key");
       cols.add("value");
-      for(String src: srctables) {
+      for (String src : srctables) {
         db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
-        db.createTable(src, cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
+        db.createTable(src, cols, null, TextInputFormat.class,
+            IgnoreKeyTextOutputFormat.class);
         db.loadTable(hadoopDataFile[i], src, false, null);
         i++;
       }
 
     } catch (Throwable e) {
       e.printStackTrace();
-      throw new RuntimeException ("Encountered throwable");
+      throw new RuntimeException("Encountered throwable");
     }
   }
 
-
   mapredWork mr;
 
   protected void setUp() {
-    mr =  PlanUtils.getMapRedWork();
+    mr = PlanUtils.getMapRedWork();
   }
 
-  private static void fileDiff(String datafile, String testdir) throws Exception {
+  private static void fileDiff(String datafile, String testdir)
+      throws Exception {
     String testFileDir = conf.get("test.data.files");
     System.out.println(testFileDir);
     FileInputStream fi_gold = new FileInputStream(new File(testFileDir,
-                                                           datafile));
+        datafile));
 
     // inbuilt assumption that the testdir has only one output file.
-    Path di_test = new Path (tmppath, testdir);
-    if(!fs.exists(di_test)) {
-      throw new RuntimeException (tmpdir + testdir + " does not exist");
+    Path di_test = new Path(tmppath, testdir);
+    if (!fs.exists(di_test)) {
+      throw new RuntimeException(tmpdir + testdir + " does not exist");
     }
-    if(!fs.getFileStatus(di_test).isDir()) {
-      throw new RuntimeException (tmpdir + testdir + " is not a directory");
+    if (!fs.getFileStatus(di_test).isDir()) {
+      throw new RuntimeException(tmpdir + testdir + " is not a directory");
     }
 
-    FSDataInputStream fi_test = fs.open( (fs.listStatus(di_test))[0].getPath() );
+    FSDataInputStream fi_test = fs.open((fs.listStatus(di_test))[0].getPath());
 
-    if(!Utilities.contentsEqual(fi_gold, fi_test, false)) {
+    if (!Utilities.contentsEqual(fi_gold, fi_test, false)) {
       System.out.println(di_test.toString() + " does not match " + datafile);
       assertEquals(false, true);
     }
   }
 
-
   private filterDesc getTestFilterDesc(String column) {
     ArrayList<exprNodeDesc> children1 = new ArrayList<exprNodeDesc>();
-    children1.add(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column, "", false));
+    children1.add(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
+        column, "", false));
     exprNodeDesc lhs = new exprNodeGenericFuncDesc(
-        TypeInfoFactory.doubleTypeInfo,
-        FunctionRegistry.getFunctionInfo(Constants.DOUBLE_TYPE_NAME).getGenericUDF(),
-        children1);
-    
+        TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
+            Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);
+
     ArrayList<exprNodeDesc> children2 = new ArrayList<exprNodeDesc>();
-    children2.add(new exprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long.valueOf(100)));
+    children2.add(new exprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long
+        .valueOf(100)));
     exprNodeDesc rhs = new exprNodeGenericFuncDesc(
-        TypeInfoFactory.doubleTypeInfo,
-        FunctionRegistry.getFunctionInfo(Constants.DOUBLE_TYPE_NAME).getGenericUDF(),
-        children2);
-    
+        TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
+            Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children2);
+
     ArrayList<exprNodeDesc> children3 = new ArrayList<exprNodeDesc>();
     children3.add(lhs);
     children3.add(rhs);
-    
+
     exprNodeDesc desc = new exprNodeGenericFuncDesc(
-        TypeInfoFactory.booleanTypeInfo,
-        FunctionRegistry.getFunctionInfo("<").getGenericUDF(),
-        children3);
-    
+        TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getFunctionInfo("<")
+            .getGenericUDF(), children3);
+
     return new filterDesc(desc, false);
   }
 
@@ -185,12 +187,10 @@
   private void populateMapPlan1(Table src) {
     mr.setNumReduceTasks(Integer.valueOf(0));
 
-    Operator<fileSinkDesc> op2 = OperatorFactory.get(new fileSinkDesc
-                                                     (tmpdir + "mapplan1.out",
-                                                      Utilities.defaultTd, true));
-    Operator<filterDesc> op1 =
-      OperatorFactory.get(getTestFilterDesc("key"), op2);
-
+    Operator<fileSinkDesc> op2 = OperatorFactory.get(new fileSinkDesc(tmpdir
+        + "mapplan1.out", Utilities.defaultTd, true));
+    Operator<filterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
+        op2);
 
     Utilities.addMapWork(mr, src, "a", op1);
   }
@@ -199,22 +199,16 @@
   private void populateMapPlan2(Table src) {
     mr.setNumReduceTasks(Integer.valueOf(0));
 
-    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc
-                                                     (tmpdir + "mapplan2.out",
-                                                      Utilities.defaultTd, false));
-
-    Operator<scriptDesc> op2 = OperatorFactory.get
-      (new scriptDesc("/bin/cat",
-          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
-                      TextRecordWriter.class,
-          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), 
-          TextRecordReader.class),
-       op3);
-
+    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+        + "mapplan2.out", Utilities.defaultTd, false));
 
-    Operator<filterDesc> op1 =
-      OperatorFactory.get(getTestFilterDesc("key"), op2);
+    Operator<scriptDesc> op2 = OperatorFactory.get(new scriptDesc("/bin/cat",
+        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
+        TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
+            + Utilities.tabCode, "key,value"), TextRecordReader.class), op3);
 
+    Operator<filterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
+        op2);
 
     Utilities.addMapWork(mr, src, "a", op1);
   }
@@ -222,27 +216,27 @@
   @SuppressWarnings("unchecked")
   private void populateMapRedPlan1(Table src) {
     mr.setNumReduceTasks(Integer.valueOf(1));
-    
+
     ArrayList<String> outputColumns = new ArrayList<String>();
-    for (int i = 0; i < 2; i++)
+    for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
+    }
     // map-side work
-    Operator<reduceSinkDesc> op1 = OperatorFactory.get
-      (PlanUtils.getReduceSinkDesc
-       (Utilities.makeList(getStringColumn("key")),
-        Utilities.makeList(getStringColumn("value")), outputColumns, true, -1, 1, -1));
+    Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+        .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
+            Utilities.makeList(getStringColumn("value")), outputColumns, true,
+            -1, 1, -1));
 
     Utilities.addMapWork(mr, src, "a", op1);
     mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
     mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc
-                                                     (tmpdir + "mapredplan1.out",
-                                                      Utilities.defaultTd, false));
+    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+        + "mapredplan1.out", Utilities.defaultTd, false));
 
-    Operator<extractDesc> op2 =  OperatorFactory.get
-      (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
+    Operator<extractDesc> op2 = OperatorFactory.get(new extractDesc(
+        getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
 
     mr.setReducer(op2);
   }
@@ -251,29 +245,28 @@
   private void populateMapRedPlan2(Table src) {
     mr.setNumReduceTasks(Integer.valueOf(1));
     ArrayList<String> outputColumns = new ArrayList<String>();
-    for (int i = 0; i < 2; i++)
+    for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
+    }
     // map-side work
-    Operator<reduceSinkDesc> op1 = OperatorFactory.get
-      (PlanUtils.getReduceSinkDesc
-       (Utilities.makeList(getStringColumn("key")),
-        Utilities.makeList(getStringColumn("key"),
-                           getStringColumn("value")), outputColumns, false, -1, 1, -1));
+    Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+        .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
+            Utilities
+                .makeList(getStringColumn("key"), getStringColumn("value")),
+            outputColumns, false, -1, 1, -1));
 
     Utilities.addMapWork(mr, src, "a", op1);
     mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
     mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<fileSinkDesc> op4 = OperatorFactory.get(new fileSinkDesc
-                                                     (tmpdir + "mapredplan2.out",
-                                                      Utilities.defaultTd, false));
+    Operator<fileSinkDesc> op4 = OperatorFactory.get(new fileSinkDesc(tmpdir
+        + "mapredplan2.out", Utilities.defaultTd, false));
 
-    Operator<filterDesc> op3 =
-      OperatorFactory.get(getTestFilterDesc("0"), op4);
+    Operator<filterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
 
-    Operator<extractDesc> op2 =  OperatorFactory.get
-      (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
+    Operator<extractDesc> op2 = OperatorFactory.get(new extractDesc(
+        getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
 
     mr.setReducer(op2);
   }
@@ -282,49 +275,42 @@
    * test reduce with multiple tagged inputs
    */
   @SuppressWarnings("unchecked")
-    private void populateMapRedPlan3(Table src, Table src2) {
+  private void populateMapRedPlan3(Table src, Table src2) {
     mr.setNumReduceTasks(Integer.valueOf(5));
     mr.setNeedsTagging(true);
     ArrayList<String> outputColumns = new ArrayList<String>();
-    for (int i = 0; i < 2; i++)
+    for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
+    }
     // map-side work
-    Operator<reduceSinkDesc> op1 = OperatorFactory.get
-      (PlanUtils.getReduceSinkDesc
-       (Utilities.makeList(getStringColumn("key")),
-        Utilities.makeList
-        (getStringColumn("value")), outputColumns, true, Byte.valueOf((byte)0), 1, -1));
+    Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+        .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
+            Utilities.makeList(getStringColumn("value")), outputColumns, true,
+            Byte.valueOf((byte) 0), 1, -1));
 
     Utilities.addMapWork(mr, src, "a", op1);
     mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
     mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
-    Operator<reduceSinkDesc> op2 = OperatorFactory.get
-      (PlanUtils.getReduceSinkDesc
-       (Utilities.makeList(getStringColumn("key")),
-        Utilities.makeList(getStringColumn("key")),
-        outputColumns, true,
-        Byte.valueOf((byte)1),
-        Integer.MAX_VALUE, -1));
+    Operator<reduceSinkDesc> op2 = OperatorFactory.get(PlanUtils
+        .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
+            Utilities.makeList(getStringColumn("key")), outputColumns, true,
+            Byte.valueOf((byte) 1), Integer.MAX_VALUE, -1));
 
     Utilities.addMapWork(mr, src2, "b", op2);
     mr.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<fileSinkDesc> op4 = OperatorFactory.get(new fileSinkDesc
-                                                     (tmpdir + "mapredplan3.out",
-                                                      Utilities.defaultTd, false));
-
-    Operator<selectDesc> op5 =  OperatorFactory.get
-      (new selectDesc
-       (Utilities.makeList
-        (getStringColumn(Utilities.ReduceField.ALIAS.toString()),
-         new exprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
-             new exprNodeColumnDesc(TypeInfoFactory.getListTypeInfo(
-                 TypeInfoFactory.stringTypeInfo),
-                 Utilities.ReduceField.VALUE.toString(), "", false),
-             "0",
-             false)), outputColumns), op4);
+    Operator<fileSinkDesc> op4 = OperatorFactory.get(new fileSinkDesc(tmpdir
+        + "mapredplan3.out", Utilities.defaultTd, false));
+
+    Operator<selectDesc> op5 = OperatorFactory.get(new selectDesc(Utilities
+        .makeList(getStringColumn(Utilities.ReduceField.ALIAS.toString()),
+            new exprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
+                new exprNodeColumnDesc(TypeInfoFactory
+                    .getListTypeInfo(TypeInfoFactory.stringTypeInfo),
+                    Utilities.ReduceField.VALUE.toString(), "", false), "0",
+                false)), outputColumns), op4);
 
     mr.setReducer(op5);
   }
@@ -335,78 +321,70 @@
 
     // map-side work
     ArrayList<String> outputColumns = new ArrayList<String>();
-    for (int i = 0; i < 2; i++)
+    for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
-    Operator<reduceSinkDesc> op1 = OperatorFactory.get
-      (PlanUtils.getReduceSinkDesc
-       (Utilities.makeList(getStringColumn("tkey")),
-        Utilities.makeList(getStringColumn("tkey"),
-                           getStringColumn("tvalue")),
-                           outputColumns, false,
-        -1, 1, -1));
+    }
+    Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+        .getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")),
+            Utilities.makeList(getStringColumn("tkey"),
+                getStringColumn("tvalue")), outputColumns, false, -1, 1, -1));
 
-    Operator<scriptDesc> op0 = OperatorFactory.get
-    (new scriptDesc("/bin/cat",
+    Operator<scriptDesc> op0 = OperatorFactory.get(new scriptDesc("/bin/cat",
         PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
-                    TextRecordWriter.class,
-        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
-        TextRecordReader.class),
-     op1);
-
-    Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
-                                     Utilities.makeList(getStringColumn("key"),
-                                                        getStringColumn("value")), outputColumns), op0);
+        TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
+            + Utilities.tabCode, "tkey,tvalue"), TextRecordReader.class), op1);
+
+    Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(Utilities
+        .makeList(getStringColumn("key"), getStringColumn("value")),
+        outputColumns), op0);
 
     Utilities.addMapWork(mr, src, "a", op4);
     mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
     mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc
-                                                     (tmpdir + "mapredplan4.out",
-                                                      Utilities.defaultTd, false));
+    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+        + "mapredplan4.out", Utilities.defaultTd, false));
 
-    Operator<extractDesc> op2 =  OperatorFactory.get
-      (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
+    Operator<extractDesc> op2 = OperatorFactory.get(new extractDesc(
+        getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
 
     mr.setReducer(op2);
   }
 
   public static exprNodeColumnDesc getStringColumn(String columnName) {
-    return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName, "", false);
+    return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName,
+        "", false);
   }
-  
+
   @SuppressWarnings("unchecked")
   private void populateMapRedPlan5(Table src) {
     mr.setNumReduceTasks(Integer.valueOf(1));
 
     // map-side work
     ArrayList<String> outputColumns = new ArrayList<String>();
-    for (int i = 0; i < 2; i++)
+    for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
-    Operator<reduceSinkDesc> op0 = OperatorFactory.get
-      (PlanUtils.getReduceSinkDesc
-       (Utilities.makeList(getStringColumn("0")),
-        Utilities.makeList(getStringColumn("0"),
-                           getStringColumn("1")),
-                           outputColumns, false,
-        -1, 1, -1));
-
-    Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
-                                     Utilities.makeList(getStringColumn("key"),
-                                                        getStringColumn("value")), outputColumns), op0);
+    }
+    Operator<reduceSinkDesc> op0 = OperatorFactory.get(PlanUtils
+        .getReduceSinkDesc(Utilities.makeList(getStringColumn("0")), Utilities
+            .makeList(getStringColumn("0"), getStringColumn("1")),
+            outputColumns, false, -1, 1, -1));
+
+    Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(Utilities
+        .makeList(getStringColumn("key"), getStringColumn("value")),
+        outputColumns), op0);
 
     Utilities.addMapWork(mr, src, "a", op4);
     mr.setKeyDesc(op0.getConf().getKeySerializeInfo());
     mr.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc
-                                                     (tmpdir + "mapredplan5.out",
-                                                      Utilities.defaultTd, false));
+    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+        + "mapredplan5.out", Utilities.defaultTd, false));
 
-    Operator<extractDesc> op2 =  OperatorFactory.get
-      (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
+    Operator<extractDesc> op2 = OperatorFactory.get(new extractDesc(
+        getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
 
     mr.setReducer(op2);
   }
@@ -417,49 +395,43 @@
 
     // map-side work
     ArrayList<String> outputColumns = new ArrayList<String>();
-    for (int i = 0; i < 2; i++)
+    for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
-    Operator<reduceSinkDesc> op1 = OperatorFactory.get
-      (PlanUtils.getReduceSinkDesc(
-        Utilities.makeList(getStringColumn("tkey")),
-        Utilities.makeList(getStringColumn("tkey"),
-                           getStringColumn("tvalue")),
-                           outputColumns, false,
-        -1, 1, -1));
-
-    Operator<scriptDesc> op0 = OperatorFactory.get
-      (new scriptDesc("\'/bin/cat\'",
-          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
-                      TextRecordWriter.class,
-          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
-          TextRecordReader.class),
-       op1);
-
-    Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
-                                     Utilities.makeList(getStringColumn("key"),
-                                                        getStringColumn("value")), outputColumns), op0);
+    }
+    Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+        .getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")),
+            Utilities.makeList(getStringColumn("tkey"),
+                getStringColumn("tvalue")), outputColumns, false, -1, 1, -1));
+
+    Operator<scriptDesc> op0 = OperatorFactory.get(new scriptDesc(
+        "\'/bin/cat\'", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode,
+            "tkey,tvalue"), TextRecordWriter.class, PlanUtils
+            .getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
+        TextRecordReader.class), op1);
+
+    Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(Utilities
+        .makeList(getStringColumn("key"), getStringColumn("value")),
+        outputColumns), op0);
 
     Utilities.addMapWork(mr, src, "a", op4);
     mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
     mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc
-                                                     (tmpdir + "mapredplan6.out",
-                                                      Utilities.defaultTd, false));
+    Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+        + "mapredplan6.out", Utilities.defaultTd, false));
 
-    Operator<filterDesc> op2 =
-      OperatorFactory.get(getTestFilterDesc("0"), op3);
+    Operator<filterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);
 
-    Operator<extractDesc> op5 =  OperatorFactory.get
-      (new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op2);
+    Operator<extractDesc> op5 = OperatorFactory.get(new extractDesc(
+        getStringColumn(Utilities.ReduceField.VALUE.toString())), op2);
 
     mr.setReducer(op5);
   }
 
   private File generatePlanFile() throws Exception {
-    File scratchDir = new File(
-        (new HiveConf(TestExecDriver.class)).getVar(ConfVars.SCRATCHDIR));
+    File scratchDir = new File((new HiveConf(TestExecDriver.class))
+        .getVar(ConfVars.SCRATCHDIR));
     File planFile = File.createTempFile("plan", ".xml", scratchDir);
     System.out.println("Generating plan file " + planFile.toString());
     FileOutputStream out = new FileOutputStream(planFile);
@@ -469,31 +441,34 @@
 
   private void executePlan(File planFile) throws Exception {
     String testName = new Exception().getStackTrace()[1].getMethodName();
-    String cmdLine = conf.getVar(HiveConf.ConfVars.HADOOPBIN) + " jar " + conf.getJar() +
-      " org.apache.hadoop.hive.ql.exec.ExecDriver -plan " +
-      planFile.toString() + " " + ExecDriver.generateCmdLine(conf);
+    String cmdLine = conf.getVar(HiveConf.ConfVars.HADOOPBIN) + " jar "
+        + conf.getJar() + " org.apache.hadoop.hive.ql.exec.ExecDriver -plan "
+        + planFile.toString() + " " + ExecDriver.generateCmdLine(conf);
     System.out.println("Executing: " + cmdLine);
     Process executor = Runtime.getRuntime().exec(cmdLine);
 
-    StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, System.out);
-    StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, System.err);
+    StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(),
+        null, System.out);
+    StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(),
+        null, System.err);
 
     outPrinter.start();
     errPrinter.start();
 
     int exitVal = executor.waitFor();
 
-    if(exitVal != 0) {
-      System.out.println(testName + " execution failed with exit status: " + exitVal);
+    if (exitVal != 0) {
+      System.out.println(testName + " execution failed with exit status: "
+          + exitVal);
       assertEquals(true, false);
     }
     System.out.println(testName + " execution completed successfully");
   }
 
   public void testMapPlan1() throws Exception {
-    
+
     System.out.println("Beginning testMapPlan1");
-    
+
     try {
       populateMapPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
       File planFile = generatePlanFile();
@@ -525,7 +500,8 @@
     System.out.println("Beginning testMapRedPlan1");
 
     try {
-      populateMapRedPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+      populateMapRedPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+          "src"));
       File planFile = generatePlanFile();
       executePlan(planFile);
       fileDiff("kv1.val.sorted.txt", "mapredplan1.out");
@@ -540,7 +516,8 @@
     System.out.println("Beginning testMapPlan2");
 
     try {
-      populateMapRedPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+      populateMapRedPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+          "src"));
       File planFile = generatePlanFile();
       executePlan(planFile);
       fileDiff("lt100.sorted.txt", "mapredplan2.out");
@@ -555,8 +532,8 @@
     System.out.println("Beginning testMapPlan3");
 
     try {
-      populateMapRedPlan3(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"),
-          db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2"));
+      populateMapRedPlan3(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+          "src"), db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2"));
       File planFile = generatePlanFile();
       executePlan(planFile);
       fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out");
@@ -566,13 +543,13 @@
     }
   }
 
-
   public void testMapRedPlan4() throws Exception {
 
     System.out.println("Beginning testMapPlan4");
 
     try {
-      populateMapRedPlan4(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+      populateMapRedPlan4(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+          "src"));
       File planFile = generatePlanFile();
       executePlan(planFile);
       fileDiff("kv1.string-sorted.txt", "mapredplan4.out");
@@ -587,7 +564,8 @@
     System.out.println("Beginning testMapPlan5");
 
     try {
-      populateMapRedPlan5(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+      populateMapRedPlan5(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+          "src"));
       File planFile = generatePlanFile();
       executePlan(planFile);
       fileDiff("kv1.string-sorted.txt", "mapredplan5.out");
@@ -602,7 +580,8 @@
     System.out.println("Beginning testMapPlan6");
 
     try {
-      populateMapRedPlan6(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+      populateMapRedPlan6(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+          "src"));
       File planFile = generatePlanFile();
       executePlan(planFile);
       fileDiff("lt100.sorted.txt", "mapredplan6.out");

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Thu Jan 21 10:37:58 2010
@@ -18,29 +18,25 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import java.util.ArrayList;
+
 import junit.framework.TestCase;
-import java.io.*;
-import java.util.*;
 
-import org.apache.hadoop.hive.serde.Constants;
-import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.io.Text;
 
 public class TestExpressionEvaluator extends TestCase {
@@ -56,21 +52,19 @@
   ArrayList<String> names;
   ArrayList<TypeInfo> typeInfos;
   TypeInfo dataType;
-  
+
   public TestExpressionEvaluator() {
-    col1 = new ArrayList<Text> ();
+    col1 = new ArrayList<Text>();
     col1.add(new Text("0"));
     col1.add(new Text("1"));
     col1.add(new Text("2"));
     col1.add(new Text("3"));
-    col1Type = TypeInfoFactory.getListTypeInfo(
-        TypeInfoFactory.stringTypeInfo);
-    cola = new ArrayList<Text> ();
+    col1Type = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
+    cola = new ArrayList<Text>();
     cola.add(new Text("a"));
     cola.add(new Text("b"));
     cola.add(new Text("c"));
-    colaType = TypeInfoFactory.getListTypeInfo(
-        TypeInfoFactory.stringTypeInfo);
+    colaType = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
     try {
       data = new ArrayList<Object>();
       data.add(col1);
@@ -82,30 +76,34 @@
       typeInfos.add(col1Type);
       typeInfos.add(colaType);
       dataType = TypeInfoFactory.getStructTypeInfo(names, typeInfos);
-      
+
       r = new InspectableObject();
       r.o = data;
-      r.oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(dataType);
+      r.oi = TypeInfoUtils
+          .getStandardWritableObjectInspectorFromTypeInfo(dataType);
     } catch (Throwable e) {
       e.printStackTrace();
-      throw new RuntimeException (e);
+      throw new RuntimeException(e);
     }
   }
 
+  @Override
   protected void setUp() {
   }
 
   public void testExprNodeColumnEvaluator() throws Throwable {
     try {
       // get a evaluator for a simple field expression
-      exprNodeDesc exprDesc = new exprNodeColumnDesc(colaType, "cola", "", false);
+      exprNodeDesc exprDesc = new exprNodeColumnDesc(colaType, "cola", "",
+          false);
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
 
       // evaluate on row
       ObjectInspector resultOI = eval.initialize(r.oi);
       Object resultO = eval.evaluate(r.o);
-      
-      Object standardResult = ObjectInspectorUtils.copyToStandardObject(resultO, resultOI, ObjectInspectorCopyOption.WRITABLE);   
+
+      Object standardResult = ObjectInspectorUtils.copyToStandardObject(
+          resultO, resultOI, ObjectInspectorCopyOption.WRITABLE);
       assertEquals(cola, standardResult);
       System.out.println("ExprNodeColumnEvaluator ok");
     } catch (Throwable e) {
@@ -117,32 +115,35 @@
   private static exprNodeDesc getListIndexNode(exprNodeDesc node, int index) {
     return getListIndexNode(node, new exprNodeConstantDesc(index));
   }
-  
-  private static exprNodeDesc getListIndexNode(exprNodeDesc node, exprNodeDesc index) {
+
+  private static exprNodeDesc getListIndexNode(exprNodeDesc node,
+      exprNodeDesc index) {
     ArrayList<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
     children.add(node);
     children.add(index);
-    return new exprNodeGenericFuncDesc(
-          ((ListTypeInfo)node.getTypeInfo()).getListElementTypeInfo(),
-          FunctionRegistry.getGenericUDFForIndex(),
-          children);
+    return new exprNodeGenericFuncDesc(((ListTypeInfo) node.getTypeInfo())
+        .getListElementTypeInfo(), FunctionRegistry.getGenericUDFForIndex(),
+        children);
   }
-  
+
   public void testExprNodeFuncEvaluator() throws Throwable {
     try {
       // get a evaluator for a string concatenation expression
-      exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "", false);
-      exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola", "", false);
+      exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "",
+          false);
+      exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola", "",
+          false);
       exprNodeDesc col11desc = getListIndexNode(col1desc, 1);
       exprNodeDesc cola0desc = getListIndexNode(coladesc, 0);
-      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", col11desc, cola0desc);
+      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
+          .getFuncExprNodeDesc("concat", col11desc, cola0desc);
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
       // evaluate on row
       ObjectInspector resultOI = eval.initialize(r.oi);
       Object resultO = eval.evaluate(r.o);
-      assertEquals("1a",
-          ObjectInspectorUtils.copyToStandardObject(resultO, resultOI, ObjectInspectorCopyOption.JAVA));
+      assertEquals("1a", ObjectInspectorUtils.copyToStandardObject(resultO,
+          resultOI, ObjectInspectorCopyOption.JAVA));
       System.out.println("ExprNodeFuncEvaluator ok");
     } catch (Throwable e) {
       e.printStackTrace();
@@ -153,16 +154,19 @@
   public void testExprNodeConversionEvaluator() throws Throwable {
     try {
       // get a evaluator for a string concatenation expression
-      exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "", false);
+      exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "",
+          false);
       exprNodeDesc col11desc = getListIndexNode(col1desc, 1);
-      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(Constants.DOUBLE_TYPE_NAME, col11desc);
+      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
+          .getFuncExprNodeDesc(Constants.DOUBLE_TYPE_NAME, col11desc);
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
       // evaluate on row
       ObjectInspector resultOI = eval.initialize(r.oi);
       Object resultO = eval.evaluate(r.o);
-      assertEquals(Double.valueOf("1"),
-          ObjectInspectorUtils.copyToStandardObject(resultO, resultOI, ObjectInspectorCopyOption.JAVA));
+      assertEquals(Double.valueOf("1"), ObjectInspectorUtils
+          .copyToStandardObject(resultO, resultOI,
+              ObjectInspectorCopyOption.JAVA));
       System.out.println("testExprNodeConversionEvaluator ok");
     } catch (Throwable e) {
       e.printStackTrace();
@@ -170,119 +174,118 @@
     }
   }
 
-  private static void measureSpeed(String expr, int times, ExprNodeEvaluator eval, InspectableObject input, Object standardJavaOutput) throws HiveException {
+  private static void measureSpeed(String expr, int times,
+      ExprNodeEvaluator eval, InspectableObject input, Object standardJavaOutput)
+      throws HiveException {
     System.out.println("Evaluating " + expr + " for " + times + " times");
-    // evaluate on row
-    InspectableObject output = new InspectableObject(); 
+    new InspectableObject();
     ObjectInspector resultOI = eval.initialize(input.oi);
     Object resultO = null;
     long start = System.currentTimeMillis();
-    for (int i=0; i<times; i++) {
+    for (int i = 0; i < times; i++) {
       resultO = eval.evaluate(input.o);
     }
     long end = System.currentTimeMillis();
-    assertEquals(standardJavaOutput,
-        ObjectInspectorUtils.copyToStandardObject(resultO, resultOI, ObjectInspectorCopyOption.JAVA));
-    System.out.println("Evaluation finished: " + String.format("%2.3f", (end - start)*0.001) + " seconds, " 
-        + String.format("%2.3f", (end - start)*1000.0/times) + " seconds/million call.");
+    assertEquals(standardJavaOutput, ObjectInspectorUtils.copyToStandardObject(
+        resultO, resultOI, ObjectInspectorCopyOption.JAVA));
+    System.out.println("Evaluation finished: "
+        + String.format("%2.3f", (end - start) * 0.001) + " seconds, "
+        + String.format("%2.3f", (end - start) * 1000.0 / times)
+        + " seconds/million call.");
   }
-  
+
   public void testExprNodeSpeed() throws Throwable {
     try {
       int basetimes = 100000;
-      measureSpeed("1 + 2",
-          basetimes * 100,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+", 
-                  new exprNodeConstantDesc(1), 
-                  new exprNodeConstantDesc(2))),
-          r,
-          Integer.valueOf(1 + 2));
-      measureSpeed("1 + 2 - 3",
-          basetimes * 100,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("-", 
-                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+",
-                      new exprNodeConstantDesc(1), 
-                      new exprNodeConstantDesc(2)),
-                  new exprNodeConstantDesc(3))),
-          r,
-          Integer.valueOf(1 + 2 - 3));
-      measureSpeed("1 + 2 - 3 + 4",
-          basetimes * 100,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+",
-                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("-", 
-                      TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+",
-                          new exprNodeConstantDesc(1), 
+      measureSpeed("1 + 2", basetimes * 100, ExprNodeEvaluatorFactory
+          .get(TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(
+              "+", new exprNodeConstantDesc(1), new exprNodeConstantDesc(2))),
+          r, Integer.valueOf(1 + 2));
+      measureSpeed("1 + 2 - 3", basetimes * 100, ExprNodeEvaluatorFactory
+          .get(TypeCheckProcFactory.DefaultExprProcessor
+              .getFuncExprNodeDesc("-",
+                  TypeCheckProcFactory.DefaultExprProcessor
+                      .getFuncExprNodeDesc("+", new exprNodeConstantDesc(1),
                           new exprNodeConstantDesc(2)),
-                      new exprNodeConstantDesc(3)),
-                  new exprNodeConstantDesc(4))),                      
-          r,
-          Integer.valueOf(1 + 2 - 3 + 4));
-      measureSpeed("concat(\"1\", \"2\")",
-          basetimes * 100,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                  new exprNodeConstantDesc("1"), 
-                  new exprNodeConstantDesc("2"))),
-          r,
-          "12");
-      measureSpeed("concat(concat(\"1\", \"2\"), \"3\")",
-          basetimes * 100,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
-                      new exprNodeConstantDesc("1"), 
-                      new exprNodeConstantDesc("2")),
-                  new exprNodeConstantDesc("3"))),
-          r,
-          "123");
+                  new exprNodeConstantDesc(3))), r, Integer.valueOf(1 + 2 - 3));
+      measureSpeed("1 + 2 - 3 + 4", basetimes * 100, ExprNodeEvaluatorFactory
+          .get(TypeCheckProcFactory.DefaultExprProcessor
+              .getFuncExprNodeDesc("+",
+                  TypeCheckProcFactory.DefaultExprProcessor
+                      .getFuncExprNodeDesc("-",
+                          TypeCheckProcFactory.DefaultExprProcessor
+                              .getFuncExprNodeDesc("+",
+                                  new exprNodeConstantDesc(1),
+                                  new exprNodeConstantDesc(2)),
+                          new exprNodeConstantDesc(3)),
+                  new exprNodeConstantDesc(4))), r, Integer
+          .valueOf(1 + 2 - 3 + 4));
+      measureSpeed("concat(\"1\", \"2\")", basetimes * 100,
+          ExprNodeEvaluatorFactory
+              .get(TypeCheckProcFactory.DefaultExprProcessor
+                  .getFuncExprNodeDesc("concat", new exprNodeConstantDesc("1"),
+                      new exprNodeConstantDesc("2"))), r, "12");
+      measureSpeed("concat(concat(\"1\", \"2\"), \"3\")", basetimes * 100,
+          ExprNodeEvaluatorFactory
+              .get(TypeCheckProcFactory.DefaultExprProcessor
+                  .getFuncExprNodeDesc("concat",
+                      TypeCheckProcFactory.DefaultExprProcessor
+                          .getFuncExprNodeDesc("concat",
+                              new exprNodeConstantDesc("1"),
+                              new exprNodeConstantDesc("2")),
+                      new exprNodeConstantDesc("3"))), r, "123");
       measureSpeed("concat(concat(concat(\"1\", \"2\"), \"3\"), \"4\")",
-          basetimes * 100,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                    TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
-                        new exprNodeConstantDesc("1"), 
-                        new exprNodeConstantDesc("2")),
-                    new exprNodeConstantDesc("3")),
-                new exprNodeConstantDesc("4"))),
-          r,
-          "1234");
+          basetimes * 100, ExprNodeEvaluatorFactory
+              .get(TypeCheckProcFactory.DefaultExprProcessor
+                  .getFuncExprNodeDesc("concat",
+                      TypeCheckProcFactory.DefaultExprProcessor
+                          .getFuncExprNodeDesc("concat",
+                              TypeCheckProcFactory.DefaultExprProcessor
+                                  .getFuncExprNodeDesc("concat",
+                                      new exprNodeConstantDesc("1"),
+                                      new exprNodeConstantDesc("2")),
+                              new exprNodeConstantDesc("3")),
+                      new exprNodeConstantDesc("4"))), r, "1234");
       exprNodeDesc constant1 = new exprNodeConstantDesc(1);
       exprNodeDesc constant2 = new exprNodeConstantDesc(2);
-      measureSpeed("concat(col1[1], cola[1])", 
-          basetimes * 10,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
-                  getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1), 
-                  getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1))),
-          r,
-          "1b");
-      measureSpeed("concat(concat(col1[1], cola[1]), col1[2])", 
-          basetimes * 10,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                      getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1), 
-                      getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1)),
-                  getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant2))),
-          r,
-          "1b2");
-      measureSpeed("concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])", 
-          basetimes * 10,
-          ExprNodeEvaluatorFactory.get(
-              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                      TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
-                          getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1), 
-                          getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1)),
-                      getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant2)),
-                  getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant2))),
-          r,
-          "1b2c");
-      
+      measureSpeed("concat(col1[1], cola[1])", basetimes * 10,
+          ExprNodeEvaluatorFactory
+              .get(TypeCheckProcFactory.DefaultExprProcessor
+                  .getFuncExprNodeDesc("concat", getListIndexNode(
+                      new exprNodeColumnDesc(col1Type, "col1", "", false),
+                      constant1), getListIndexNode(new exprNodeColumnDesc(
+                      colaType, "cola", "", false), constant1))), r, "1b");
+      measureSpeed("concat(concat(col1[1], cola[1]), col1[2])", basetimes * 10,
+          ExprNodeEvaluatorFactory
+              .get(TypeCheckProcFactory.DefaultExprProcessor
+                  .getFuncExprNodeDesc("concat",
+                      TypeCheckProcFactory.DefaultExprProcessor
+                          .getFuncExprNodeDesc("concat", getListIndexNode(
+                              new exprNodeColumnDesc(col1Type, "col1", "",
+                                  false), constant1), getListIndexNode(
+                              new exprNodeColumnDesc(colaType, "cola", "",
+                                  false), constant1)), getListIndexNode(
+                          new exprNodeColumnDesc(col1Type, "col1", "", false),
+                          constant2))), r, "1b2");
+      measureSpeed(
+          "concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])",
+          basetimes * 10, ExprNodeEvaluatorFactory
+              .get(TypeCheckProcFactory.DefaultExprProcessor
+                  .getFuncExprNodeDesc("concat",
+                      TypeCheckProcFactory.DefaultExprProcessor
+                          .getFuncExprNodeDesc("concat",
+                              TypeCheckProcFactory.DefaultExprProcessor
+                                  .getFuncExprNodeDesc("concat",
+                                      getListIndexNode(new exprNodeColumnDesc(
+                                          col1Type, "col1", "", false),
+                                          constant1), getListIndexNode(
+                                          new exprNodeColumnDesc(colaType,
+                                              "cola", "", false), constant1)),
+                              getListIndexNode(new exprNodeColumnDesc(col1Type,
+                                  "col1", "", false), constant2)),
+                      getListIndexNode(new exprNodeColumnDesc(colaType, "cola",
+                          "", false), constant2))), r, "1b2c");
+
     } catch (Throwable e) {
       e.printStackTrace();
       throw e;

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestHashMapWrapper.java Thu Jan 21 10:37:58 2010
@@ -18,12 +18,13 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import junit.framework.TestCase;
 import java.util.HashMap;
 import java.util.Random;
 
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import junit.framework.TestCase;
+
 import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 public class TestHashMapWrapper extends TestCase {
 
@@ -34,39 +35,39 @@
     mem_map.put("k2", "v2");
     mem_map.put("k3", "v3");
     mem_map.put("k4", "v4");
-    
+
     try {
       // NO cache
-      HashMapWrapper<String, String> wrapper = 
-     	  new HashMapWrapper<String, String>(0);
+      HashMapWrapper<String, String> wrapper = new HashMapWrapper<String, String>(
+          0);
       insertAll(wrapper, mem_map);
       checkAll(wrapper, mem_map);
-      wrapper.close();  // clean up temporary files
-      
+      wrapper.close(); // clean up temporary files
+
       // cache size = 1
       wrapper = new HashMapWrapper<String, String>(1);
       insertAll(wrapper, mem_map);
       checkAll(wrapper, mem_map);
-      wrapper.close();  // clean up temporary files
-      
+      wrapper.close(); // clean up temporary files
+
       // cache size = 2
       wrapper = new HashMapWrapper<String, String>(2);
       insertAll(wrapper, mem_map);
       checkAll(wrapper, mem_map);
-      wrapper.close();  // clean up temporary files
-      
+      wrapper.close(); // clean up temporary files
+
       // cache size = 4
       wrapper = new HashMapWrapper<String, String>(4);
       insertAll(wrapper, mem_map);
       checkAll(wrapper, mem_map);
-      wrapper.close();  // clean up temporary files
-      
+      wrapper.close(); // clean up temporary files
+
       // default cache size (25000)
       wrapper = new HashMapWrapper<String, String>();
       insertAll(wrapper, mem_map);
       checkAll(wrapper, mem_map);
-      wrapper.close();  // clean up temporary files
-      
+      wrapper.close(); // clean up temporary files
+
       // check mixed put/remove/get functions
       wrapper = new HashMapWrapper<String, String>(2);
       insertAll(wrapper, mem_map);
@@ -74,35 +75,35 @@
       mem_map.remove("k3");
       assertTrue(mem_map.size() == 3);
       checkAll(wrapper, mem_map);
-      
+
       wrapper.remove("k1");
       mem_map.remove("k1");
       checkAll(wrapper, mem_map);
-      
+
       String v4 = wrapper.get("k4");
       assertTrue(v4 != null);
-      assert(v4.equals("v4"));
-      
+      assert (v4.equals("v4"));
+
       wrapper.remove("k4");
       mem_map.remove("k4");
       checkAll(wrapper, mem_map);
-      
-      wrapper.put("k5", "v5"); 
-      mem_map.put("k5", "v5"); 
-      checkAll(wrapper, mem_map);
-      
-      wrapper.put("k6", "v6"); 
-      mem_map.put("k6", "v6"); 
-      checkAll(wrapper, mem_map);
-      
-      wrapper.put("k6", "v61"); 
-      mem_map.put("k6", "v61"); 
+
+      wrapper.put("k5", "v5");
+      mem_map.put("k5", "v5");
+      checkAll(wrapper, mem_map);
+
+      wrapper.put("k6", "v6");
+      mem_map.put("k6", "v6");
+      checkAll(wrapper, mem_map);
+
+      wrapper.put("k6", "v61");
+      mem_map.put("k6", "v61");
       checkAll(wrapper, mem_map);
-      
+
       wrapper.remove("k6");
       mem_map.remove("k6");
       checkAll(wrapper, mem_map);
-      
+
       // get k1, k2 to main memory
       wrapper.get("k1");
       wrapper.get("k2");
@@ -113,16 +114,16 @@
       wrapper.put("k6", "v7");
       mem_map.put("k6", "v7");
       checkAll(wrapper, mem_map);
-      
+
       // test clear
       wrapper.clear();
       mem_map.clear();
       checkAll(wrapper, mem_map);
-      wrapper.close();  // clean up temporary files
-      
+      wrapper.close(); // clean up temporary files
+
       // insert 3,000 pairs random testing
       wrapper = new HashMapWrapper<String, String>(1000);
-      for ( int i = 0; i < 3000; ++i ) {
+      for (int i = 0; i < 3000; ++i) {
         String k = "k" + i;
         String v = "v" + i;
         wrapper.put(k, v);
@@ -130,14 +131,14 @@
       }
       checkAll(wrapper, mem_map);
       System.out.println("Finished inserting 3000 pairs.");
-      
+
       // do 10,000 random get/remove operations
       Random rand = new Random(12345678);
-      for ( int i = 0; i < 10000; ++i ) {
+      for (int i = 0; i < 10000; ++i) {
         int j = rand.nextInt(3000);
         String k = "k" + j;
         String v;
-        
+
         int command = rand.nextInt(3);
         switch (command) {
         case 0: // remove
@@ -147,14 +148,16 @@
           break;
         case 1: // get
           // System.out.println("getting " + k);// uncomment this for debugging
-          v =  wrapper.get(k);
+          v = wrapper.get(k);
           String v2 = mem_map.get(k);
-          assertTrue("one of them doesn't exists or different values from two hash tables", 
-                     v == null && v2 == null || v.equals(v2));
+          assertTrue(
+              "one of them doesn't exists or different values from two hash tables",
+              v == null && v2 == null || v.equals(v2));
           break;
         case 2: // put
           v = "v" + rand.nextInt(3000);
-          // System.out.println("putting (" + k + ", " + v);// uncomment this for debugging
+          // System.out.println("putting (" + k + ", " + v);// uncomment this
+          // for debugging
           wrapper.put(k, v);
           mem_map.put(k, v);
           break;
@@ -162,7 +165,7 @@
         // checkAll(wrapper, mem_map); // uncomment this for debugging
       }
       checkAll(wrapper, mem_map);
-      wrapper.close();  // clean up temporary files
+      wrapper.close(); // clean up temporary files
     } catch (Exception e) {
       e.printStackTrace();
       System.out.println(e.toString());
@@ -170,39 +173,39 @@
     }
     System.out.println("TestHashMapWrapper successful");
   }
-  
-  private void insertAll(HashMapWrapper<String, String> hashTable, 
-                         HashMap<String, String> map) 
-    throws HiveException {
-    
-    for (String k: map.keySet()) {
+
+  private void insertAll(HashMapWrapper<String, String> hashTable,
+      HashMap<String, String> map) throws HiveException {
+
+    for (String k : map.keySet()) {
       String v = map.get(k);
       hashTable.put(k, v);
     }
   }
-  
-  private void checkAll(HashMapWrapper<String, String> hashTable, 
-                        HashMap<String, String> map) 
-    throws HiveException {
-    
+
+  private void checkAll(HashMapWrapper<String, String> hashTable,
+      HashMap<String, String> map) throws HiveException {
+
     // check each item in the HashMapWrapper was actually inserted
-    for ( String k: hashTable.keySet() ) {
+    for (String k : hashTable.keySet()) {
       String map_val = hashTable.get(k);
       String val = map.get(k);
-      assertTrue("some HashMapWrapper value is not in main memory HashMap: map_val = " + map_val + "; val = " + val, 
-                 map_val != null && val != null);
-      assertTrue("value in HashMapWrapper is not the same as MM HashMap: map_val = " + map_val + "; val = " + val, 
-                 val.equals(map_val));
+      assertTrue(
+          "some HashMapWrapper value is not in main memory HashMap: map_val = "
+              + map_val + "; val = " + val, map_val != null && val != null);
+      assertTrue(
+          "value in HashMapWrapper is not the same as MM HashMap: map_val = "
+              + map_val + "; val = " + val, val.equals(map_val));
     }
-    
+
     // check all inserted elements are in HashMapWrapper
-    for ( String k: map.keySet() ) {
+    for (String k : map.keySet()) {
       String map_val = hashTable.get(k);
       String val = map.get(k);
-      assertTrue("Some MM HashMap key is not in HashMapWrapper: map_val = " + map_val + "; val = " + val, 
-                 map_val != null && val != null);
-      assertTrue("Value in MM HashMap is not in HashMapWrapper: map_val = " + map_val + "; val = " + val, 
-                 val.equals(map_val));
+      assertTrue("Some MM HashMap key is not in HashMapWrapper: map_val = "
+          + map_val + "; val = " + val, map_val != null && val != null);
+      assertTrue("Value in MM HashMap is not in HashMapWrapper: map_val = "
+          + map_val + "; val = " + val, val.equals(map_val));
     }
   }
 }

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Thu Jan 21 10:37:58 2010
@@ -46,35 +46,40 @@
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 
 public class TestOperators extends TestCase {
 
   // this is our row to test expressions on
-  protected InspectableObject [] r;
+  protected InspectableObject[] r;
 
+  @Override
   protected void setUp() {
-    r = new InspectableObject [5];
+    r = new InspectableObject[5];
     ArrayList<String> names = new ArrayList<String>(3);
     names.add("col0");
     names.add("col1");
     names.add("col2");
-    ArrayList<ObjectInspector> objectInspectors = new ArrayList<ObjectInspector>(3);
-    objectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
-    objectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
-    objectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
-    for(int i=0; i<5; i++) {
-      ArrayList<String> data = new ArrayList<String> ();
-      data.add(""+i);
-      data.add(""+(i+1));
-      data.add(""+(i+2));
+    ArrayList<ObjectInspector> objectInspectors = new ArrayList<ObjectInspector>(
+        3);
+    objectInspectors
+        .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+    objectInspectors
+        .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+    objectInspectors
+        .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
+    for (int i = 0; i < 5; i++) {
+      ArrayList<String> data = new ArrayList<String>();
+      data.add("" + i);
+      data.add("" + (i + 1));
+      data.add("" + (i + 2));
       try {
         r[i] = new InspectableObject();
         r[i].o = data;
-        r[i].oi = ObjectInspectorFactory.getStandardStructObjectInspector(names, objectInspectors);
+        r[i].oi = ObjectInspectorFactory.getStandardStructObjectInspector(
+            names, objectInspectors);
       } catch (Throwable e) {
-        throw new RuntimeException (e);
+        throw new RuntimeException(e);
       }
     }
   }
@@ -86,10 +91,13 @@
       exprNodeDesc col1 = TestExecDriver.getStringColumn("col1");
       exprNodeDesc col2 = TestExecDriver.getStringColumn("col2");
       exprNodeDesc zero = new exprNodeConstantDesc("0");
-      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(">", col2, col1);
-      exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero);
-      exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("and", func1, func2);
-      assert(func3 != null);
+      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
+          .getFuncExprNodeDesc(">", col2, col1);
+      exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor
+          .getFuncExprNodeDesc("==", col0, zero);
+      exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor
+          .getFuncExprNodeDesc("and", func1, func2);
+      assert (func3 != null);
       filterDesc filterCtx = new filterDesc(func3, false);
 
       // Configuration
@@ -97,23 +105,26 @@
       op.setConf(filterCtx);
 
       // runtime initialization
-      op.initialize(new JobConf(TestOperators.class), new ObjectInspector[]{r[0].oi});
+      op.initialize(new JobConf(TestOperators.class),
+          new ObjectInspector[] { r[0].oi });
 
-      for(InspectableObject oner: r) {
+      for (InspectableObject oner : r) {
         op.process(oner.o, 0);
       }
 
       Map<Enum<?>, Long> results = op.getStats();
-      System.out.println("filtered = " + results.get(FilterOperator.Counter.FILTERED));
-      assertEquals(Long.valueOf(4), results.get(FilterOperator.Counter.FILTERED));
-      System.out.println("passed = " + results.get(FilterOperator.Counter.PASSED));
+      System.out.println("filtered = "
+          + results.get(FilterOperator.Counter.FILTERED));
+      assertEquals(Long.valueOf(4), results
+          .get(FilterOperator.Counter.FILTERED));
+      System.out.println("passed = "
+          + results.get(FilterOperator.Counter.PASSED));
       assertEquals(Long.valueOf(1), results.get(FilterOperator.Counter.PASSED));
 
       /*
-      for(Enum e: results.keySet()) {
-        System.out.println(e.toString() + ":" + results.get(e));
-      }
-      */
+       * for(Enum e: results.keySet()) { System.out.println(e.toString() + ":" +
+       * results.get(e)); }
+       */
       System.out.println("Filter Operator ok");
 
     } catch (Throwable e) {
@@ -131,28 +142,33 @@
       // col2
       exprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
       exprNodeDesc expr2 = new exprNodeConstantDesc("1");
-      exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
+      exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor
+          .getFuncExprNodeDesc("concat", expr1, expr2);
 
       // select operator to project these two columns
-      ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc> ();
+      ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc>();
       earr.add(exprDesc1);
       earr.add(exprDesc2);
       ArrayList<String> outputCols = new ArrayList<String>();
-      for (int i = 0; i < earr.size(); i++)
-        outputCols.add("_col"+i);
+      for (int i = 0; i < earr.size(); i++) {
+        outputCols.add("_col" + i);
+      }
       selectDesc selectCtx = new selectDesc(earr, outputCols);
       Operator<selectDesc> op = OperatorFactory.get(selectDesc.class);
       op.setConf(selectCtx);
 
       // fileSinkOperator to dump the output of the select
-      //fileSinkDesc fsd = new fileSinkDesc ("file:///tmp" + File.separator + System.getProperty("user.name") + File.separator + "TestFileSinkOperator",
-      //                                     Utilities.defaultTd, false);
-      //Operator<fileSinkDesc> flop = OperatorFactory.getAndMakeChild(fsd, op);
+      // fileSinkDesc fsd = new fileSinkDesc ("file:///tmp" + File.separator +
+      // System.getProperty("user.name") + File.separator +
+      // "TestFileSinkOperator",
+      // Utilities.defaultTd, false);
+      // Operator<fileSinkDesc> flop = OperatorFactory.getAndMakeChild(fsd, op);
 
-      op.initialize(new JobConf(TestOperators.class), new ObjectInspector[]{r[0].oi});
+      op.initialize(new JobConf(TestOperators.class),
+          new ObjectInspector[] { r[0].oi });
 
       // evaluate on row
-      for(int i=0; i<5; i++) {
+      for (int i = 0; i < 5; i++) {
         op.process(r[i].o, 0);
       }
       op.close(false);
@@ -165,7 +181,6 @@
     }
   }
 
-
   public void testScriptOperator() throws Throwable {
     try {
       System.out.println("Testing Script Operator");
@@ -175,50 +190,59 @@
       // col2
       exprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
       exprNodeDesc expr2 = new exprNodeConstantDesc("1");
-      exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
+      exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor
+          .getFuncExprNodeDesc("concat", expr1, expr2);
 
       // select operator to project these two columns
-      ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc> ();
+      ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc>();
       earr.add(exprDesc1);
       earr.add(exprDesc2);
       ArrayList<String> outputCols = new ArrayList<String>();
-      for (int i = 0; i < earr.size(); i++)
-        outputCols.add("_col"+i);
+      for (int i = 0; i < earr.size(); i++) {
+        outputCols.add("_col" + i);
+      }
       selectDesc selectCtx = new selectDesc(earr, outputCols);
       Operator<selectDesc> op = OperatorFactory.get(selectDesc.class);
       op.setConf(selectCtx);
 
       // scriptOperator to echo the output of the select
-      tableDesc scriptOutput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
-      tableDesc scriptInput  = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
-      scriptDesc sd = new scriptDesc("cat", scriptOutput, TextRecordWriter.class, scriptInput, TextRecordReader.class);
+      tableDesc scriptOutput = PlanUtils.getDefaultTableDesc(""
+          + Utilities.tabCode, "a,b");
+      tableDesc scriptInput = PlanUtils.getDefaultTableDesc(""
+          + Utilities.tabCode, "a,b");
+      scriptDesc sd = new scriptDesc("cat", scriptOutput,
+          TextRecordWriter.class, scriptInput, TextRecordReader.class);
       Operator<scriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
 
       // Collect operator to observe the output of the script
-      collectDesc cd = new collectDesc (Integer.valueOf(10));
-      CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, sop);
+      collectDesc cd = new collectDesc(Integer.valueOf(10));
+      CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(
+          cd, sop);
 
-      op.initialize(new JobConf(TestOperators.class), new ObjectInspector[]{r[0].oi});
+      op.initialize(new JobConf(TestOperators.class),
+          new ObjectInspector[] { r[0].oi });
 
       // evaluate on row
-      for(int i=0; i<5; i++) {
+      for (int i = 0; i < 5; i++) {
         op.process(r[i].o, 0);
       }
       op.close(false);
 
       InspectableObject io = new InspectableObject();
-      for(int i=0; i<5; i++) {
+      for (int i = 0; i < 5; i++) {
         cdop.retrieve(io);
         System.out.println("[" + i + "] io.o=" + io.o);
         System.out.println("[" + i + "] io.oi=" + io.oi);
-        StructObjectInspector soi = (StructObjectInspector)io.oi;
-        assert(soi != null);
+        StructObjectInspector soi = (StructObjectInspector) io.oi;
+        assert (soi != null);
         StructField a = soi.getStructFieldRef("a");
         StructField b = soi.getStructFieldRef("b");
-        assertEquals(""+(i+1), ((PrimitiveObjectInspector)a.getFieldObjectInspector())
-            .getPrimitiveJavaObject(soi.getStructFieldData(io.o, a)));
-        assertEquals((i) + "1", ((PrimitiveObjectInspector)b.getFieldObjectInspector())
-            .getPrimitiveJavaObject(soi.getStructFieldData(io.o, b)));
+        assertEquals("" + (i + 1), ((PrimitiveObjectInspector) a
+            .getFieldObjectInspector()).getPrimitiveJavaObject(soi
+            .getStructFieldData(io.o, a)));
+        assertEquals((i) + "1", ((PrimitiveObjectInspector) b
+            .getFieldObjectInspector()).getPrimitiveJavaObject(soi
+            .getStructFieldData(io.o, b)));
       }
 
       System.out.println("Script Operator ok");
@@ -234,35 +258,37 @@
       System.out.println("Testing Map Operator");
       // initialize configuration
       Configuration hconf = new JobConf(TestOperators.class);
-      HiveConf.setVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME, "hdfs:///testDir/testFile");
+      HiveConf.setVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME,
+          "hdfs:///testDir/testFile");
 
       // initialize pathToAliases
-      ArrayList<String> aliases = new ArrayList<String> ();
+      ArrayList<String> aliases = new ArrayList<String>();
       aliases.add("a");
       aliases.add("b");
-      LinkedHashMap<String, ArrayList<String>> pathToAliases = new LinkedHashMap<String, ArrayList<String>> ();
+      LinkedHashMap<String, ArrayList<String>> pathToAliases = new LinkedHashMap<String, ArrayList<String>>();
       pathToAliases.put("/testDir", aliases);
 
       // initialize pathToTableInfo
       // Default: treat the table as a single column "col"
       tableDesc td = Utilities.defaultTd;
       partitionDesc pd = new partitionDesc(td, null);
-      LinkedHashMap<String,org.apache.hadoop.hive.ql.plan.partitionDesc> pathToPartitionInfo = new
-        LinkedHashMap<String,org.apache.hadoop.hive.ql.plan.partitionDesc> ();
+      LinkedHashMap<String, org.apache.hadoop.hive.ql.plan.partitionDesc> pathToPartitionInfo = new LinkedHashMap<String, org.apache.hadoop.hive.ql.plan.partitionDesc>();
       pathToPartitionInfo.put("/testDir", pd);
 
       // initialize aliasToWork
-      collectDesc cd = new collectDesc (Integer.valueOf(1));
-      CollectOperator cdop1 = (CollectOperator) OperatorFactory.get(collectDesc.class);
+      collectDesc cd = new collectDesc(Integer.valueOf(1));
+      CollectOperator cdop1 = (CollectOperator) OperatorFactory
+          .get(collectDesc.class);
       cdop1.setConf(cd);
-      CollectOperator cdop2 = (CollectOperator) OperatorFactory.get(collectDesc.class);
+      CollectOperator cdop2 = (CollectOperator) OperatorFactory
+          .get(collectDesc.class);
       cdop2.setConf(cd);
-      LinkedHashMap<String,Operator<? extends Serializable>> aliasToWork = new LinkedHashMap<String,Operator<? extends Serializable>> ();
+      LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork = new LinkedHashMap<String, Operator<? extends Serializable>>();
       aliasToWork.put("a", cdop1);
       aliasToWork.put("b", cdop2);
 
       // initialize mapredWork
-      mapredWork mrwork = new mapredWork ();
+      mapredWork mrwork = new mapredWork();
       mrwork.setPathToAliases(pathToAliases);
       mrwork.setPathToPartitionInfo(pathToPartitionInfo);
       mrwork.setAliasToWork(aliasToWork);
@@ -274,11 +300,11 @@
       Text tw = new Text();
       InspectableObject io1 = new InspectableObject();
       InspectableObject io2 = new InspectableObject();
-      for(int i=0; i<5; i++) {
-        String answer = "[[" + i + ", " + (i+1) + ", " + (i+2) + "]]";
+      for (int i = 0; i < 5; i++) {
+        String answer = "[[" + i + ", " + (i + 1) + ", " + (i + 2) + "]]";
 
-        tw.set("" + i + "\u0001" + (i+1) + "\u0001"+ (i+2));
-        mo.process((Writable)tw);
+        tw.set("" + i + "\u0001" + (i + 1) + "\u0001" + (i + 2));
+        mo.process(tw);
         cdop1.retrieve(io1);
         cdop2.retrieve(io2);
         System.out.println("io1.o.toString() = " + io1.o.toString());

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Thu Jan 21 10:37:58 2010
@@ -18,21 +18,22 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import junit.framework.TestCase;
-import java.io.*;
-import java.util.*;
+import java.io.ByteArrayOutputStream;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
 
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.TextInputFormat;
+import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
-import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.filterDesc;
+import org.apache.hadoop.hive.ql.plan.mapredWork;
+import org.apache.hadoop.hive.ql.plan.partitionDesc;
+import org.apache.hadoop.hive.ql.plan.tableDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
-
+import org.apache.hadoop.mapred.JobConf;
 
 public class TestPlan extends TestCase {
 
@@ -43,25 +44,28 @@
 
     try {
       // initialize a complete map reduce configuration
-      exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F1, "", false);
-      exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F2, "", false);
-      exprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", expr1, expr2);
+      exprNodeDesc expr1 = new exprNodeColumnDesc(
+          TypeInfoFactory.stringTypeInfo, F1, "", false);
+      exprNodeDesc expr2 = new exprNodeColumnDesc(
+          TypeInfoFactory.stringTypeInfo, F2, "", false);
+      exprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor
+          .getFuncExprNodeDesc("==", expr1, expr2);
 
       filterDesc filterCtx = new filterDesc(filterExpr, false);
       Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
       op.setConf(filterCtx);
 
-      ArrayList<String> aliasList = new ArrayList<String> ();
+      ArrayList<String> aliasList = new ArrayList<String>();
       aliasList.add("a");
-      LinkedHashMap<String, ArrayList<String>> pa = new LinkedHashMap<String, ArrayList<String>> ();
+      LinkedHashMap<String, ArrayList<String>> pa = new LinkedHashMap<String, ArrayList<String>>();
       pa.put("/tmp/testfolder", aliasList);
 
       tableDesc tblDesc = Utilities.defaultTd;
       partitionDesc partDesc = new partitionDesc(tblDesc, null);
-      LinkedHashMap<String, partitionDesc> pt = new LinkedHashMap<String, partitionDesc> ();
+      LinkedHashMap<String, partitionDesc> pt = new LinkedHashMap<String, partitionDesc>();
       pt.put("/tmp/testfolder", partDesc);
 
-      LinkedHashMap<String, Operator<? extends Serializable>> ao = new LinkedHashMap<String, Operator<? extends Serializable>> ();
+      LinkedHashMap<String, Operator<? extends Serializable>> ao = new LinkedHashMap<String, Operator<? extends Serializable>>();
       ao.put("a", op);
 
       mapredWork mrwork = new mapredWork();
@@ -70,7 +74,7 @@
       mrwork.setAliasToWork(ao);
 
       // serialize the configuration once ..
-      ByteArrayOutputStream baos = new ByteArrayOutputStream ();
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
       Utilities.serializeMapRedWork(mrwork, baos);
       baos.close();
       String v1 = baos.toString();
@@ -82,8 +86,9 @@
       mapredWork mrwork2 = Utilities.getMapRedWork(job);
       Utilities.clearMapRedWork(job);
 
-      // over here we should have some checks of the deserialized object against the orginal object
-      //System.out.println(v1);
+      // over here we should have some checks of the deserialized object against
+      // the orginal object
+      // System.out.println(v1);
 
       // serialize again
       baos.reset();

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java Thu Jan 21 10:37:58 2010
@@ -54,7 +54,8 @@
    * intialize the tables
    */
 
-  protected void setUp(){
+  @Override
+  protected void setUp() {
     try {
       conf = new HiveConf(HiveHistory.class);
 
@@ -102,13 +103,13 @@
       e.printStackTrace();
       throw new RuntimeException("Encountered throwable");
     }
-}
+  }
 
   /**
    * check history file output for this query.als
    */
   public void testSimpleQuery() {
-    LineageInfo lep = new LineageInfo();
+    new LineageInfo();
     try {
 
       // NOTE: It is critical to do this here so that log4j is reinitialized
@@ -145,8 +146,7 @@
         fail("jobInfo Map size not 1");
       }
 
-
-      cmd = (String)jobInfoMap.keySet().toArray()[0];
+      cmd = (String) jobInfoMap.keySet().toArray()[0];
       QueryInfo ji = jobInfoMap.get(cmd);
 
       if (!ji.hm.get(Keys.QUERY_NUM_TASKS.name()).equals("1")) {

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java Thu Jan 21 10:37:58 2010
@@ -19,35 +19,36 @@
 package org.apache.hadoop.hive.ql.hooks;
 
 import java.util.Set;
-import org.apache.hadoop.security.UserGroupInformation;
+
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.security.UserGroupInformation;
 
 /**
- * Implementation of a post execute hook that simply prints out its
- * parameters to standard output.
+ * Implementation of a post execute hook that simply prints out its parameters
+ * to standard output.
  */
 public class PostExecutePrinter implements PostExecute {
 
   @Override
   public void run(SessionState sess, Set<ReadEntity> inputs,
-      Set<WriteEntity> outputs, UserGroupInformation ugi)
-    throws Exception {
+      Set<WriteEntity> outputs, UserGroupInformation ugi) throws Exception {
 
     LogHelper console = SessionState.getConsole();
 
-    if (console == null)
+    if (console == null) {
       return;
+    }
 
     if (sess != null) {
       console.printError("POSTHOOK: query: " + sess.getCmd().trim());
       console.printError("POSTHOOK: type: " + sess.getCommandType());
     }
 
-    for(ReadEntity re: inputs) {
+    for (ReadEntity re : inputs) {
       console.printError("POSTHOOK: Input: " + re.toString());
     }
-    for(WriteEntity we: outputs) {
+    for (WriteEntity we : outputs) {
       console.printError("POSTHOOK: Output: " + we.toString());
     }
   }

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java Thu Jan 21 10:37:58 2010
@@ -19,35 +19,36 @@
 package org.apache.hadoop.hive.ql.hooks;
 
 import java.util.Set;
-import org.apache.hadoop.security.UserGroupInformation;
+
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.security.UserGroupInformation;
 
 /**
- * Implementation of a pre execute hook that simply prints out its
- * parameters to standard output.
+ * Implementation of a pre execute hook that simply prints out its parameters to
+ * standard output.
  */
 public class PreExecutePrinter implements PreExecute {
 
   @Override
   public void run(SessionState sess, Set<ReadEntity> inputs,
-      Set<WriteEntity> outputs, UserGroupInformation ugi)
-    throws Exception {
+      Set<WriteEntity> outputs, UserGroupInformation ugi) throws Exception {
 
     LogHelper console = SessionState.getConsole();
 
-    if (console == null)
+    if (console == null) {
       return;
+    }
 
     if (sess != null) {
       console.printError("PREHOOK: query: " + sess.getCmd().trim());
       console.printError("PREHOOK: type: " + sess.getCommandType());
     }
 
-    for(ReadEntity re: inputs) {
+    for (ReadEntity re : inputs) {
       console.printError("PREHOOK: Input: " + re.toString());
     }
-    for(WriteEntity we: outputs) {
+    for (WriteEntity we : outputs) {
       console.printError("PREHOOK: Output: " + we.toString());
     }
   }

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/JavaTestObjFlatFileInputFormat.java Thu Jan 21 10:37:58 2010
@@ -26,11 +26,12 @@
 public class JavaTestObjFlatFileInputFormat implements Serializable {
   public String s;
   public int num;
+
   public JavaTestObjFlatFileInputFormat(String s, int num) {
     this.s = s;
     this.num = num;
   }
-  public JavaTestObjFlatFileInputFormat() { 
+
+  public JavaTestObjFlatFileInputFormat() {
   }
 }
-


