incubator-hcatalog-commits mailing list archives

From: ga...@apache.org
Subject: svn commit: r1325183 [3/3] - in /incubator/hcatalog/branches/branch-0.4: ./ bin/ hive/ ivy/ scripts/ src/docs/src/documentation/content/xdocs/ src/java/org/apache/hcatalog/cli/SemanticAnalysis/ src/test/e2e/hcatalog/ src/test/e2e/hcatalog/conf/ src/tes...
Date: Thu, 12 Apr 2012 10:08:47 GMT
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorer.java?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorer.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorer.java Thu Apr 12 10:08:45 2012
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
-import java.util.Properties;
 
 import junit.framework.TestCase;
 
@@ -30,8 +29,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hcatalog.MiniCluster;
-import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.HcatTestUtils;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigException;
 import org.apache.pig.PigServer;
@@ -39,32 +37,28 @@ import org.apache.pig.data.DataByteArray
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.util.LogUtils;
-import org.apache.pig.impl.util.UDFContext;
 
 public class TestHCatStorer extends TestCase {
+  private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
+      "/build/test/data/" + TestHCatStorer.class.getCanonicalName();
+  private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
+  private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
-  MiniCluster cluster = MiniCluster.buildCluster();
   private Driver driver;
-  Properties props;
 
   @Override
   protected void setUp() throws Exception {
-
-    HiveConf hiveConf = new HiveConf(this.getClass());
-    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-    driver = new Driver(hiveConf);
-    SessionState.start(new CliSessionState(hiveConf));
-    props = new Properties();
-    props.setProperty("fs.default.name", cluster.getProperties().getProperty("fs.default.name"));
-    fullFileName = cluster.getProperties().getProperty("fs.default.name") + fileName;
+    if (driver == null) {
+      HiveConf hiveConf = new HiveConf(this.getClass());
+      hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+      hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+      hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+      hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
+      driver = new Driver(hiveConf);
+      SessionState.start(new CliSessionState(hiveConf));
+    }
   }
 
-  String fileName = "/tmp/input.data";
-  String fullFileName;
-
-
 //  public void testStoreFuncMap() throws IOException{
 //
 //    driver.run("drop table junit_unparted");
@@ -79,8 +73,7 @@ public class TestHCatStorer extends Test
 //    MiniCluster.deleteFile(cluster, fileName);
 //    MiniCluster.createInputFile(cluster, fileName, new String[]{"test\t{([a#haddop,b#pig])}","data\t{([b#hive,a#hcat])}"});
 //
-//    PigServer server = new PigServer(ExecType.LOCAL, props);
-//    UDFContext.getUDFContext().setClientSystemProps();
+//    PigServer server = new PigServer(ExecType.LOCAL);
 //    server.setBatchOn();
//    server.registerQuery("A = load '"+ fullFileName +"' as (b:chararray,arr_of_maps:bag{mytup:tuple( mymap:map[ ])});");
//    server.registerQuery("store A into 'default.junit_unparted' using org.apache.hadoop.hive.hCatalog.pig.HCatStorer('','b:chararray,arr_of_maps:bag{mytup:tuple( mymap:map[ ])}');");
@@ -109,16 +102,14 @@ public class TestHCatStorer extends Test
     if(retCode != 0) {
       throw new IOException("Failed to create table.");
     }
-    MiniCluster.deleteFile(cluster, fileName);
     int LOOP_SIZE = 11;
     String[] input = new String[LOOP_SIZE];
     for(int i = 0; i < LOOP_SIZE; i++) {
         input[i] = i + "\t1";
     }
-    MiniCluster.createInputFile(cluster, fileName, input);
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
-    server.registerQuery("A = load '"+fullFileName+"' as (a:int, b:chararray);");
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    PigServer server = new PigServer(ExecType.LOCAL);
+    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
     server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('b=1');");
     server.registerQuery("B = load 'default.junit_unparted' using "+HCatLoader.class.getName()+"();");
     Iterator<Tuple> itr= server.openIterator("B");
@@ -135,7 +126,6 @@ public class TestHCatStorer extends Test
 
     assertFalse(itr.hasNext());
     assertEquals(11, i);
-    MiniCluster.deleteFile(cluster, fileName);
   }
 
   public void testMultiPartColsInData() throws IOException, CommandNeedRetryException{
@@ -149,17 +139,15 @@ public class TestHCatStorer extends Test
       throw new IOException("Failed to create table.");
     }
 
-    MiniCluster.deleteFile(cluster, fullFileName);
     String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
                           "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
                           "111239\tSatya\t01/01/2001\tM\tIN\tKL",
                           "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
 
-    MiniCluster.createInputFile(cluster, fullFileName, inputData);
-    PigServer pig = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+    PigServer pig = new PigServer(ExecType.LOCAL);
     pig.setBatchOn();
-    pig.registerQuery("A = LOAD '"+fullFileName+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
+    pig.registerQuery("A = LOAD '"+INPUT_FILE_NAME+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
     		"emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
     pig.registerQuery("TN = FILTER A BY emp_state == 'TN';");
     pig.registerQuery("KA = FILTER A BY emp_state == 'KA';");
@@ -179,7 +167,6 @@ public class TestHCatStorer extends Test
     assertEquals(inputData[1], results.get(1));
     assertEquals(inputData[2], results.get(2));
     assertEquals(inputData[3], results.get(3));
-    MiniCluster.deleteFile(cluster, fullFileName);
     driver.run("drop table employee");
   }
 
@@ -191,16 +178,14 @@ public class TestHCatStorer extends Test
     if(retCode != 0) {
       throw new IOException("Failed to create table.");
     }
-    MiniCluster.deleteFile(cluster, fileName);
     int LOOP_SIZE = 11;
     String[] input = new String[LOOP_SIZE];
     for(int i = 0; i < LOOP_SIZE; i++) {
         input[i] = i+"";
     }
-    MiniCluster.createInputFile(cluster, fileName, input);
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
-    server.registerQuery("A = load '"+fullFileName+"' as (a:int);");
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    PigServer server = new PigServer(ExecType.LOCAL);
+    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int);");
     server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('b=1');");
     server.registerQuery("B = load 'default.junit_unparted' using "+HCatLoader.class.getName()+"();");
     Iterator<Tuple> itr= server.openIterator("B");
@@ -217,7 +202,6 @@ public class TestHCatStorer extends Test
 
     assertFalse(itr.hasNext());
     assertEquals(11, i);
-    MiniCluster.deleteFile(cluster, fileName);
   }
 
   public void testNoAlias() throws IOException, CommandNeedRetryException{
@@ -227,12 +211,11 @@ public class TestHCatStorer extends Test
     if(retCode != 0) {
       throw new IOException("Failed to create table.");
     }
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    PigServer server = new PigServer(ExecType.LOCAL);
     boolean errCaught = false;
     try{
       server.setBatchOn();
-      server.registerQuery("A = load '"+ fullFileName +"' as (a:int, b:chararray);");
+      server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, b:chararray);");
       server.registerQuery("B = foreach A generate a+10, b;");
       server.registerQuery("store B into 'junit_parted' using "+HCatStorer.class.getName()+"('ds=20100101');");
       server.executeBatch();
@@ -248,7 +231,7 @@ public class TestHCatStorer extends Test
     errCaught = false;
     try{
       server.setBatchOn();
-      server.registerQuery("A = load '"+ fullFileName +"' as (a:int, B:chararray);");
+      server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, B:chararray);");
       server.registerQuery("B = foreach A generate a, B;");
       server.registerQuery("store B into 'junit_parted' using "+HCatStorer.class.getName()+"('ds=20100101');");
       server.executeBatch();
@@ -279,7 +262,6 @@ public class TestHCatStorer extends Test
       throw new IOException("Failed to create table.");
     }
 
-    MiniCluster.deleteFile(cluster, fileName);
     int LOOP_SIZE = 3;
     String[] input = new String[LOOP_SIZE*LOOP_SIZE];
     int k = 0;
@@ -289,17 +271,15 @@ public class TestHCatStorer extends Test
         input[k++] = si + "\t"+j;
       }
     }
-    MiniCluster.createInputFile(cluster, fileName, input);
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+ fullFileName +"' as (a:int, b:chararray);");
+    server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, b:chararray);");
     server.registerQuery("B = filter A by a < 2;");
     server.registerQuery("store B into 'junit_unparted' using "+HCatStorer.class.getName()+"();");
     server.registerQuery("C = filter A by a >= 2;");
     server.registerQuery("store C into 'junit_unparted2' using "+HCatStorer.class.getName()+"();");
     server.executeBatch();
-    MiniCluster.deleteFile(cluster, fileName);
 
     driver.run("select * from junit_unparted");
     ArrayList<String> res = new ArrayList<String>();
@@ -329,7 +309,7 @@ public class TestHCatStorer extends Test
     if(retCode != 0) {
       throw new IOException("Failed to create table.");
     }
-    MiniCluster.deleteFile(cluster, fileName);
+
     int LOOP_SIZE = 3;
     String[] input = new String[LOOP_SIZE*LOOP_SIZE];
     int k = 0;
@@ -339,14 +319,12 @@ public class TestHCatStorer extends Test
         input[k++] = si + "\t"+j;
       }
     }
-    MiniCluster.createInputFile(cluster, fileName, input);
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+ fullFileName +"' as (a:int, b:chararray);");
+    server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, b:chararray);");
     server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('');");
     server.executeBatch();
-    MiniCluster.deleteFile(cluster, fileName);
 
     driver.run("select * from junit_unparted");
     ArrayList<String> res = new ArrayList<String>();
@@ -369,7 +347,7 @@ public class TestHCatStorer extends Test
     if(retCode != 0) {
       throw new IOException("Failed to create table.");
     }
-    MiniCluster.deleteFile(cluster, fileName);
+
     int LOOP_SIZE = 3;
     String[] input = new String[LOOP_SIZE*LOOP_SIZE];
     int k = 0;
@@ -379,14 +357,12 @@ public class TestHCatStorer extends Test
         input[k++] = si + "\t"+j;
       }
     }
-    MiniCluster.createInputFile(cluster, fileName, input);
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+ fullFileName +"' as (a:int, b:chararray);");
+    server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, b:chararray);");
     server.registerQuery("store A into 'junit_unparted' using "+HCatStorer.class.getName()+"();");
     server.executeBatch();
-    MiniCluster.deleteFile(cluster, fileName);
 
     driver.run("select * from junit_unparted");
     ArrayList<String> res = new ArrayList<String>();
@@ -409,7 +385,7 @@ public class TestHCatStorer extends Test
     if(retCode != 0) {
       throw new IOException("Failed to create table.");
     }
-    MiniCluster.deleteFile(cluster, fileName);
+
     int LOOP_SIZE = 3;
     String[] input = new String[LOOP_SIZE*LOOP_SIZE];
     int k = 0;
@@ -419,15 +395,13 @@ public class TestHCatStorer extends Test
         input[k++] = si + "\t"+j;
       }
     }
-    MiniCluster.createInputFile(cluster, fileName, input);
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+fullFileName+"' as (a:int, b:chararray);");
+    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
     server.registerQuery("B = filter A by a > 100;");
     server.registerQuery("store B into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('','a:int,b:chararray');");
     server.executeBatch();
-    MiniCluster.deleteFile(cluster, fileName);
 
     driver.run("select * from junit_unparted");
     ArrayList<String> res = new ArrayList<String>();
@@ -447,22 +421,18 @@ public class TestHCatStorer extends Test
     throw new IOException("Failed to create table.");
   }
 
-  MiniCluster.deleteFile(cluster, fileName);
-  MiniCluster.createInputFile(cluster, fileName, new String[]{"zookeeper\t(2)\t{(pig)}\t{(pnuts,hdfs)}\t{(hadoop),(hcat)}",
-      "chubby\t(2)\t{(sawzall)}\t{(bigtable,gfs)}\t{(mapreduce),(hcat)}"});
+  String[] inputData = new String[]{"zookeeper\t(2)\t{(pig)}\t{(pnuts,hdfs)}\t{(hadoop),(hcat)}",
+      "chubby\t(2)\t{(sawzall)}\t{(bigtable,gfs)}\t{(mapreduce),(hcat)}"};
 
-  PigServer server = new PigServer(ExecType.LOCAL, props);
-  UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+    
+  PigServer server = new PigServer(ExecType.LOCAL);
   server.setBatchOn();
-  server.registerQuery("A = load '"+fullFileName+"' as (b:chararray, a:tuple(a1:int), arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)});");
+  server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (b:chararray, a:tuple(a1:int), arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)});");
  server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('','b:chararray, a:tuple(a1:int)," +
  		" arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)}');");
   server.executeBatch();
 
-
-
-  MiniCluster.deleteFile(cluster, fileName);
-
   driver.run("select * from junit_unparted");
   ArrayList<String> res = new ArrayList<String>();
   driver.getResults(res);
@@ -482,18 +452,17 @@ public class TestHCatStorer extends Test
     if(retCode != 0) {
       throw new IOException("Failed to create table.");
     }
-    MiniCluster.deleteFile(cluster, fileName);
+
     int LOOP_SIZE = 3;
     String[] input = new String[LOOP_SIZE*LOOP_SIZE];
     for(int i = 0; i < LOOP_SIZE*LOOP_SIZE; i++) {
       input[i] = i + "\t" + i * 2.1f +"\t"+ i*1.1d + "\t" + i * 2L +"\t"+"lets hcat"+"\tbinary-data";
     }
 
-    MiniCluster.createInputFile(cluster, fileName, input);
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+fullFileName+"' as (a:int, b:float, c:double, d:long, e:chararray, f:bytearray);");
+    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:float, c:double, d:long, e:chararray, f:bytearray);");
     server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('','a:int, b:float, c:double, d:long, e:chararray,f:bytearray');");
     server.executeBatch();
 
@@ -517,14 +486,12 @@ public class TestHCatStorer extends Test
     	count++;
     }
     assertEquals(LOOP_SIZE  * LOOP_SIZE, count);
-    MiniCluster.deleteFile(cluster, fileName);
     driver.run("drop table junit_unparted");
   }
   
   @Override
   protected void tearDown() throws Exception {
     super.tearDown();
-    MiniCluster.deleteFile(cluster, fileName);
   }
 
 
@@ -538,24 +505,22 @@ public class TestHCatStorer extends Test
     if(retCode != 0) {
       throw new IOException("Failed to create table.");
     }
-    MiniCluster.deleteFile(cluster, fileName);
+
     int LOOP_SIZE = 3;
-    String[] input = new String[LOOP_SIZE*LOOP_SIZE];
+    String[] inputData = new String[LOOP_SIZE*LOOP_SIZE];
     int k = 0;
     for(int i = 1; i <= LOOP_SIZE; i++) {
       String si = i + "";
       for(int j=1;j<=LOOP_SIZE;j++) {
-        input[k++] = si + "\t"+j;
+        inputData[k++] = si + "\t"+j;
       }
     }
-    MiniCluster.createInputFile(cluster, fileName, input);
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+fullFileName+"' as (a:int, b:chararray);");
+    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
     server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('','a:int,b:chararray');");
     server.executeBatch();
-    MiniCluster.deleteFile(cluster, fileName);
 
     driver.run("select * from junit_unparted");
     ArrayList<String> res = new ArrayList<String>();
@@ -584,17 +549,15 @@ public class TestHCatStorer extends Test
       throw new IOException("Failed to create table.");
     }
 
-    MiniCluster.deleteFile(cluster, fullFileName);
     String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
                           "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
                           "111239\tSatya\t01/01/2001\tM\tIN\tKL",
                           "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
 
-    MiniCluster.createInputFile(cluster, fullFileName, inputData);
-    PigServer pig = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+    PigServer pig = new PigServer(ExecType.LOCAL);
     pig.setBatchOn();
-    pig.registerQuery("A = LOAD '"+fullFileName+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
+    pig.registerQuery("A = LOAD '"+INPUT_FILE_NAME+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
         "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
     pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
     pig.registerQuery("STORE IN INTO 'employee' USING "+HCatStorer.class.getName()+"('emp_country=IN');");
@@ -608,7 +571,6 @@ public class TestHCatStorer extends Test
     assertEquals(inputData[1], results.get(1));
     assertEquals(inputData[2], results.get(2));
     assertEquals(inputData[3], results.get(3));
-    MiniCluster.deleteFile(cluster, fullFileName);
     driver.run("drop table employee");
   }
 
@@ -623,17 +585,15 @@ public class TestHCatStorer extends Test
       throw new IOException("Failed to create table.");
     }
 
-    MiniCluster.deleteFile(cluster, fullFileName);
     String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
                           "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
                           "111239\tSatya\t01/01/2001\tM\tIN\tKL",
                           "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
 
-    MiniCluster.createInputFile(cluster, fullFileName, inputData);
-    PigServer pig = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+    PigServer pig = new PigServer(ExecType.LOCAL);
     pig.setBatchOn();
-    pig.registerQuery("A = LOAD '"+fullFileName+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
+    pig.registerQuery("A = LOAD '"+INPUT_FILE_NAME+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
         "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
     pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
     pig.registerQuery("STORE IN INTO 'employee' USING "+HCatStorer.class.getName()+"();");
@@ -647,7 +607,6 @@ public class TestHCatStorer extends Test
     assertEquals(inputData[1], results.get(1));
     assertEquals(inputData[2], results.get(2));
     assertEquals(inputData[3], results.get(3));
-    MiniCluster.deleteFile(cluster, fullFileName);
     driver.run("drop table employee");
   }
 
@@ -662,14 +621,12 @@ public class TestHCatStorer extends Test
         throw new IOException("Failed to create table.");
       }
 
-      MiniCluster.deleteFile(cluster, fullFileName);
       String[] inputData = {};
+      HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
 
-      MiniCluster.createInputFile(cluster, fullFileName, inputData);
-      PigServer pig = new PigServer(ExecType.LOCAL, props);
-      UDFContext.getUDFContext().setClientSystemProps();
+      PigServer pig = new PigServer(ExecType.LOCAL);
       pig.setBatchOn();
-      pig.registerQuery("A = LOAD '"+fullFileName+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
+      pig.registerQuery("A = LOAD '"+INPUT_FILE_NAME+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
           "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
       pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
       pig.registerQuery("STORE IN INTO 'employee' USING "+HCatStorer.class.getName()+"();");
@@ -678,9 +635,6 @@ public class TestHCatStorer extends Test
       ArrayList<String> results = new ArrayList<String>();
       driver.getResults(results);
       assertEquals(0, results.size());
-      MiniCluster.deleteFile(cluster, fullFileName);
       driver.run("drop table employee");
     }
-
-
 }
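
The TestHCatStorer changes above replace the MiniCluster file helpers with HcatTestUtils.createTestDataFile(String, String[]), which writes test input straight to the local filesystem. The helper's body is not part of this diff; a minimal sketch of what it presumably does, consistent with how the tests call it and with the inline FileWriter logic added to TestHCatStorerMulti below:

    import java.io.File;
    import java.io.FileWriter;
    import java.io.IOException;

    public class HcatTestUtils {
        // Sketch only -- the real helper lives in org.apache.hcatalog.HcatTestUtils.
        // Writes one input record per line to a local file under the test data dir.
        public static void createTestDataFile(String filename, String[] lines)
                throws IOException {
            File file = new File(filename);
            file.getParentFile().mkdirs(); // assumption: the per-test data dir may not exist yet
            file.deleteOnExit();           // mirrors the deleteOnExit() in populateBasicFile below
            FileWriter writer = new FileWriter(file);
            try {
                for (String line : lines) {
                    writer.write(line + "\n");
                }
            } finally {
                writer.close();
            }
        }
    }

With input on the local filesystem, PigServer can be constructed as plain ExecType.LOCAL, which is why the fs.default.name Properties and the UDFContext.getUDFContext().setClientSystemProps() call are no longer needed.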

Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorerMulti.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorerMulti.java?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorerMulti.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorerMulti.java Thu Apr 12 10:08:45 2012
@@ -17,36 +17,35 @@
  */
 package org.apache.hcatalog.pig;
 
+import java.io.File;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Properties;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hcatalog.MiniCluster;
 import org.apache.hcatalog.data.Pair;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
-import org.apache.pig.impl.util.UDFContext;
 
 public class TestHCatStorerMulti extends TestCase {
+  private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
+      "/build/test/data/" + TestHCatStorerMulti.class.getCanonicalName();
+  private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
+  private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
   private static final String BASIC_TABLE = "junit_unparted_basic";
   private static final String PARTITIONED_TABLE = "junit_parted_basic";
-  private static MiniCluster cluster = MiniCluster.buildCluster();
   private static Driver driver;
 
-  private static final String basicFile = "/tmp/basic.input.data";
-  private static String basicFileFullName;
-  private static Properties props;
-
   private static Map<Integer,Pair<Integer,String>> basicInputData;
 
   private void dropTable(String tablename) throws IOException, CommandNeedRetryException{
@@ -77,14 +76,11 @@ public class TestHCatStorerMulti extends
       hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
       hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
       hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+      hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
       driver = new Driver(hiveConf);
       SessionState.start(new CliSessionState(hiveConf));
     }
 
-    props = new Properties();
-    props.setProperty("fs.default.name", cluster.getProperties().getProperty("fs.default.name"));
-    basicFileFullName = cluster.getProperties().getProperty("fs.default.name") + basicFile;
-
     cleanup();
   }
 
@@ -100,10 +96,9 @@ public class TestHCatStorerMulti extends
 
     populateBasicFile();
 
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+basicFileFullName+"' as (a:int, b:chararray);");
+    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
     server.registerQuery("store A into '"+BASIC_TABLE+"' using org.apache.hcatalog.pig.HCatStorer();");
 
     server.executeBatch();
@@ -119,10 +114,9 @@ public class TestHCatStorerMulti extends
 
     populateBasicFile();
 
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+basicFileFullName+"' as (a:int, b:chararray);");
+    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
 
     server.registerQuery("B2 = filter A by a < 2;");
     server.registerQuery("store B2 into '"+PARTITIONED_TABLE+"' using org.apache.hcatalog.pig.HCatStorer('bkt=0');");
@@ -145,10 +139,9 @@ public class TestHCatStorerMulti extends
 
     populateBasicFile();
 
-    PigServer server = new PigServer(ExecType.LOCAL, props);
-    UDFContext.getUDFContext().setClientSystemProps();
+    PigServer server = new PigServer(ExecType.LOCAL);
     server.setBatchOn();
-    server.registerQuery("A = load '"+basicFileFullName+"' as (a:int, b:chararray);");
+    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
     server.registerQuery("store A into '"+BASIC_TABLE+"' using org.apache.hcatalog.pig.HCatStorer();");
 
     server.registerQuery("B2 = filter A by a < 2;");
@@ -173,20 +166,29 @@ public class TestHCatStorerMulti extends
     String[] input = new String[LOOP_SIZE*LOOP_SIZE];
     basicInputData = new HashMap<Integer,Pair<Integer,String>>();
     int k = 0;
+    File file = new File(INPUT_FILE_NAME);
+    file.deleteOnExit();
+    FileWriter writer = new FileWriter(file);
     for(int i = 1; i <= LOOP_SIZE; i++) {
       String si = i + "";
       for(int j=1;j<=LOOP_SIZE;j++) {
         String sj = "S"+j+"S";
         input[k] = si + "\t" + sj;
         basicInputData.put(k, new Pair<Integer,String>(i,sj));
+        writer.write(input[k] + "\n");
         k++;
       }
     }
-    MiniCluster.createInputFile(cluster, basicFile, input);
+    writer.close();
   }
 
   private void cleanup() throws IOException, CommandNeedRetryException {
-    MiniCluster.deleteFile(cluster, basicFile);
+    File f = new File(TEST_WAREHOUSE_DIR);
+    if (f.exists()) {
+      FileUtil.fullyDelete(f);
+    }
+    new File(TEST_WAREHOUSE_DIR).mkdirs();
+
     dropTable(BASIC_TABLE);
     dropTable(PARTITIONED_TABLE);
   }
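
Both test classes now follow the same local-mode recipe: lazily create one Hive Driver with METASTOREWAREHOUSE pointed at a throwaway directory under build/test/data, write input with a local file writer, and run Pig in ExecType.LOCAL. A condensed, hypothetical test showing the shape (the junit_demo table and its data are invented for illustration; driver, INPUT_FILE_NAME, and HcatTestUtils are as in the diffs above):

    public void testLocalModeRoundTrip() throws Exception {
        driver.run("drop table junit_demo");
        int retCode = driver.run("create table junit_demo(a int, b string) stored as RCFILE")
                .getResponseCode();
        if (retCode != 0) {
            throw new IOException("Failed to create table.");
        }

        // Input is a plain local file now, not a MiniCluster HDFS path.
        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, new String[]{"1\tone", "2\ttwo"});

        PigServer server = new PigServer(ExecType.LOCAL); // no fs.default.name props required
        server.setBatchOn();
        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
        server.registerQuery("store A into 'junit_demo' using " + HCatStorer.class.getName() + "();");
        server.executeBatch();

        // Verify through the Hive Driver, as the tests above do.
        driver.run("select * from junit_demo");
        ArrayList<String> results = new ArrayList<String>();
        driver.getResults(results);
        assertEquals(2, results.size());
        driver.run("drop table junit_demo");
    }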

Modified: incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/build.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/build.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/build.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/build.xml Thu Apr 12 10:08:45 2012
@@ -75,7 +75,7 @@
 
     <!-- ivy properteis set here -->
     <property name="ivy.repo.dir" value="${user.home}/ivyrepo" />
-    <property name="ivy.dir" location="ivy" />
+    <property name="ivy.dir" location="../../ivy" />
     <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
     <property name="asfrepo" value="https://repository.apache.org"/>
     <property name="asfsnapshotrepo" value="${asfrepo}/content/repositories/snapshots"/>
@@ -103,29 +103,14 @@
         <fileset dir="${ivy.lib.dir}" includes="*.jar"/>
         <fileset dir="${hcatalog.dir}/build/hcatalog" includes=" *.jar"/>
         <path refid="common.classpath"/>
-        <fileset dir="${hive.root}/build/hbase-handler" includes="*.jar"/>
     </path>
 
     <path id="test.classpath">
         <pathelement location="${test.build.classes}" />
         <pathelement location="${build.classes}" />
         <pathelement location="conf"/>
-        <pathelement location="${hive.conf.dir}"/>
-        <!-- jars Hadoop depends on -->
-        <fileset dir="${hive.root}/build/hadoopcore/hadoop-${hadoop.version}/" >
-            <include name="**/lib/*.jar" />
-            <include name="hadoop-test-*.jar" />
-        </fileset>
-        <path refid="classpath"/>
-        <pathelement location="${handler.jar}"/>
-        <!-- jars Hive depends on -->
-        <fileset dir="${hive.root}/build/ivy/lib/default/">
-            <include name="**/*.jar" />
-            <exclude name="*hbase*.jar" />
-            <exclude name="zookeeper*.jar" />
-            <exclude name="guava*.jar" />
-            <exclude name="slf4j*.jar" />
-        </fileset>
+        <fileset dir="${ivy.lib.dir}" includes="*.jar"/>
+        <fileset dir="${hcatalog.dir}/build/hcatalog" includes=" *.jar"/>
     </path>
 
     <!--
@@ -184,6 +169,11 @@
         <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
                       pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="common"/>
         <ivy:cachepath pathid="compile.classpath" conf="common"/>
+        <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="default"/>
+        <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+                      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+                      conf="default"/>
+        <ivy:cachepath pathid="compile.classpath" conf="default"/>
     </target>
 
    <target name="ivy-releaseaudit" depends="ivy-init" description="Resolve, Retrieve Ivy-managed artifacts for releaseaudit configuration">

Modified: incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/ivy.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/ivy.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/ivy.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/ivy.xml Thu Apr 12 10:08:45 2012
@@ -47,10 +47,81 @@
             <artifact name="zookeeper" type="test-jar" ext="jar" m:classifier="tests"/>
         </dependency>
 
-    <!-- hbase test dependencies -->
-    <dependency org="com.github.stephenc.high-scale-lib" name="high-scale-lib" rev="${high-scale-lib.version}" conf="common->master" />
-    <dependency org="com.google.guava" name="guava" rev="${guava.version}" conf="common->master" />
-    <dependency org="org.mortbay.jetty" name="jetty" rev="${jetty.version}" conf="common->master" />
+        <dependency org="org.apache.hadoop" name="hadoop-core"
+          rev="${hadoop-core.version}" conf="common->master" />
+        <dependency org="org.apache.hadoop" name="hadoop-test"
+          rev="${hadoop-test.version}" conf="common->master" />
+        <dependency org="org.apache.hive" name="hive-metastore"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-common"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-cli"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-exec"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="junit" name="junit" rev="${junit.version}"
+          conf="common->master"/>
+        <dependency org="commons-lang" name="commons-lang" rev="${commons-lang.version}"
+          conf="common->master"/>
+        <dependency org="commons-logging" name="commons-logging"
+          rev="${commons-logging.version}" conf="common->master"/>
+        <dependency org="commons-logging" name="commons-logging-api"
+          rev="${commons-logging.version}" conf="common->master"/>
 
+        <!-- hbase test dependencies -->
+        <dependency org="com.github.stephenc.high-scale-lib" name="high-scale-lib" rev="${high-scale-lib.version}" conf="common->master" />
+        <dependency org="com.google.guava" name="guava" rev="${guava.version}" conf="common->master" />
+        <dependency org="org.mortbay.jetty" name="jetty" rev="${jetty.version}" conf="common->master" />
+
+        <!-- needed to run tests -->
+        <dependency org="commons-configuration" name="commons-configuration"
+          rev="${commons-configuration.version}" conf="default"/>
+        <dependency org="org.codehaus.jackson" name="jackson-mapper-asl"
+          rev="${jackson.version}" conf="common->master"/>
+        <dependency org="org.codehaus.jackson" name="jackson-core-asl"
+          rev="${jackson.version}" conf="common->master"/>
+        <dependency org="org.slf4j" name="slf4j-api" rev="${slf4j.version}"
+          conf="common->master"/>
+        <dependency org="org.slf4j" name="slf4j-log4j12" rev="${slf4j.version}"
+          conf="common->master"/>
+        <dependency org="log4j" name="log4j" rev="${log4j.version}"
+          conf="common->master"/>
+        <dependency org="javax.jdo" name="jdo2-api" rev="${jdo.version}"
+          conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-core"
+          rev="${datanucleus-core.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-connectionpool"
+          rev="${datanucleus-connectionpool.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-enhancer"
+          rev="${datanucleus-enhancer.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-rdbms"
+          rev="${datanucleus-rdbms.version}" conf="default"/>
+        <dependency org="commons-dbcp" name="commons-dbcp" rev="${commons-dbcp.version}"
+            conf="common->master">
+          <exclude module="commons-pool" />
+          <exclude org="org.apache.geronimo.specs" module="geronimo-jta_1.1_spec"/>
+        </dependency>
+        <dependency org="commons-pool" name="commons-pool" rev="${commons-pool.version}"
+          conf="default"/>
+        <dependency org="org.apache.derby" name="derby" rev="${derby.version}"
+          conf="default"/>
+        <dependency org="commons-configuration" name="commons-configuration"
+          rev="${commons-configuration.version}" conf="default"/>
+        <dependency org="commons-httpclient" name="commons-httpclient"
+          rev="${commons-httpclient.version}" conf="default"/>
+        <dependency org="org.apache.hive" name="hive-builtins"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.mortbay.jetty" name="jetty"
+          rev="${jetty.version}" conf="default"/>
+        <dependency org="org.mortbay.jetty" name="jetty-util"
+          rev="${jetty.version}" conf="default"/>
+        <dependency org="org.apache.thrift" name="libfb303" rev="${fb303.version}"
+          conf="common->master"/>
+        <dependency org="org.antlr" name="antlr-runtime" rev="${antlr.version}"
+          conf="common->master" />
+        <dependency org="org.apache.hive" name="hive-hbase-handler"
+          rev="${hive.version}" conf="common->master"/>
+
+ 
     </dependencies>
 </ivy-module>


