incubator-hcatalog-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tra...@apache.org
Subject svn commit: r1383152 [5/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ s...
Date Mon, 10 Sep 2012 23:29:03 GMT
Modified: incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorer.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorer.java (original)
+++ incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorer.java Mon Sep 10 23:28:55 2012
@@ -37,562 +37,560 @@ import org.junit.Test;
 
 public class TestHCatStorer extends HCatBaseTest {
 
-  private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
+    private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
-  @Test
-  public void testPartColsInData() throws IOException, CommandNeedRetryException{
+    @Test
+    public void testPartColsInData() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+        int LOOP_SIZE = 11;
+        String[] input = new String[LOOP_SIZE];
+        for (int i = 0; i < LOOP_SIZE; i++) {
+            input[i] = i + "\t1";
+        }
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+        server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('b=1');");
+        server.registerQuery("B = load 'default.junit_unparted' using " + HCatLoader.class.getName() + "();");
+        Iterator<Tuple> itr = server.openIterator("B");
+
+        int i = 0;
+
+        while (itr.hasNext()) {
+            Tuple t = itr.next();
+            Assert.assertEquals(2, t.size());
+            Assert.assertEquals(t.get(0), i);
+            Assert.assertEquals(t.get(1), "1");
+            i++;
+        }
+
+        Assert.assertFalse(itr.hasNext());
+        Assert.assertEquals(11, i);
+    }
+
+    @Test
+    public void testMultiPartColsInData() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table employee");
+        String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
+            " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS RCFILE";
+
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
+            "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
+            "111239\tSatya\t01/01/2001\tM\tIN\tKL",
+            "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
+
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+        PigServer pig = new PigServer(ExecType.LOCAL);
+        pig.setBatchOn();
+        pig.registerQuery("A = LOAD '" + INPUT_FILE_NAME + "' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
+            "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
+        pig.registerQuery("TN = FILTER A BY emp_state == 'TN';");
+        pig.registerQuery("KA = FILTER A BY emp_state == 'KA';");
+        pig.registerQuery("KL = FILTER A BY emp_state == 'KL';");
+        pig.registerQuery("AP = FILTER A BY emp_state == 'AP';");
+        pig.registerQuery("STORE TN INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN,emp_state=TN');");
+        pig.registerQuery("STORE KA INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN,emp_state=KA');");
+        pig.registerQuery("STORE KL INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN,emp_state=KL');");
+        pig.registerQuery("STORE AP INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN,emp_state=AP');");
+        pig.executeBatch();
+        driver.run("select * from employee");
+        ArrayList<String> results = new ArrayList<String>();
+        driver.getResults(results);
+        Assert.assertEquals(4, results.size());
+        Collections.sort(results);
+        Assert.assertEquals(inputData[0], results.get(0));
+        Assert.assertEquals(inputData[1], results.get(1));
+        Assert.assertEquals(inputData[2], results.get(2));
+        Assert.assertEquals(inputData[3], results.get(3));
+        driver.run("drop table employee");
+    }
+
+    @Test
+    public void testStoreInPartiitonedTbl() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+        int LOOP_SIZE = 11;
+        String[] input = new String[LOOP_SIZE];
+        for (int i = 0; i < LOOP_SIZE; i++) {
+            input[i] = i + "";
+        }
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int);");
+        server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('b=1');");
+        server.registerQuery("B = load 'default.junit_unparted' using " + HCatLoader.class.getName() + "();");
+        Iterator<Tuple> itr = server.openIterator("B");
+
+        int i = 0;
+
+        while (itr.hasNext()) {
+            Tuple t = itr.next();
+            Assert.assertEquals(2, t.size());
+            Assert.assertEquals(t.get(0), i);
+            Assert.assertEquals(t.get(1), "1");
+            i++;
+        }
+
+        Assert.assertFalse(itr.hasNext());
+        Assert.assertEquals(11, i);
+    }
+
+    @Test
+    public void testNoAlias() throws IOException, CommandNeedRetryException {
+        driver.run("drop table junit_parted");
+        String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+        PigServer server = new PigServer(ExecType.LOCAL);
+        boolean errCaught = false;
+        try {
+            server.setBatchOn();
+            server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+            server.registerQuery("B = foreach A generate a+10, b;");
+            server.registerQuery("store B into 'junit_parted' using " + HCatStorer.class.getName() + "('ds=20100101');");
+            server.executeBatch();
+        } catch (PigException fe) {
+            PigException pe = LogUtils.getPigException(fe);
+            Assert.assertTrue(pe instanceof FrontendException);
+            Assert.assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
+            Assert.assertTrue(pe.getMessage().contains("Column name for a field is not specified. Please provide the full schema as an argument to HCatStorer."));
+            errCaught = true;
+        }
+        Assert.assertTrue(errCaught);
+        errCaught = false;
+        try {
+            server.setBatchOn();
+            server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, B:chararray);");
+            server.registerQuery("B = foreach A generate a, B;");
+            server.registerQuery("store B into 'junit_parted' using " + HCatStorer.class.getName() + "('ds=20100101');");
+            server.executeBatch();
+        } catch (PigException fe) {
+            PigException pe = LogUtils.getPigException(fe);
+            Assert.assertTrue(pe instanceof FrontendException);
+            Assert.assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
+            Assert.assertTrue(pe.getMessage().contains("Column names should all be in lowercase. Invalid name found: B"));
+            errCaught = true;
+        }
+        driver.run("drop table junit_parted");
+        Assert.assertTrue(errCaught);
+    }
+
+    @Test
+    public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+        driver.run("drop table junit_unparted2");
+        createTable = "create table junit_unparted2(a int, b string) stored as RCFILE";
+        retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        int LOOP_SIZE = 3;
+        String[] input = new String[LOOP_SIZE * LOOP_SIZE];
+        int k = 0;
+        for (int i = 1; i <= LOOP_SIZE; i++) {
+            String si = i + "";
+            for (int j = 1; j <= LOOP_SIZE; j++) {
+                input[k++] = si + "\t" + j;
+            }
+        }
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+        server.registerQuery("B = filter A by a < 2;");
+        server.registerQuery("store B into 'junit_unparted' using " + HCatStorer.class.getName() + "();");
+        server.registerQuery("C = filter A by a >= 2;");
+        server.registerQuery("store C into 'junit_unparted2' using " + HCatStorer.class.getName() + "();");
+        server.executeBatch();
+
+        driver.run("select * from junit_unparted");
+        ArrayList<String> res = new ArrayList<String>();
+        driver.getResults(res);
+        driver.run("select * from junit_unparted2");
+        ArrayList<String> res2 = new ArrayList<String>();
+        driver.getResults(res2);
+
+        res.addAll(res2);
+        driver.run("drop table junit_unparted");
+        driver.run("drop table junit_unparted2");
+
+        Iterator<String> itr = res.iterator();
+        for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
+            Assert.assertEquals(input[i], itr.next());
+        }
+
+        Assert.assertFalse(itr.hasNext());
+
+    }
+
+    @Test
+    public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        int LOOP_SIZE = 3;
+        String[] input = new String[LOOP_SIZE * LOOP_SIZE];
+        int k = 0;
+        for (int i = 1; i <= LOOP_SIZE; i++) {
+            String si = i + "";
+            for (int j = 1; j <= LOOP_SIZE; j++) {
+                input[k++] = si + "\t" + j;
+            }
+        }
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+        server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('');");
+        server.executeBatch();
+
+        driver.run("select * from junit_unparted");
+        ArrayList<String> res = new ArrayList<String>();
+        driver.getResults(res);
+        driver.run("drop table junit_unparted");
+        Iterator<String> itr = res.iterator();
+        for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
+            Assert.assertEquals(input[i], itr.next());
+        }
+
+        Assert.assertFalse(itr.hasNext());
+
+    }
+
+    @Test
+    public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        int LOOP_SIZE = 3;
+        String[] input = new String[LOOP_SIZE * LOOP_SIZE];
+        int k = 0;
+        for (int i = 1; i <= LOOP_SIZE; i++) {
+            String si = i + "";
+            for (int j = 1; j <= LOOP_SIZE; j++) {
+                input[k++] = si + "\t" + j;
+            }
+        }
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+        server.registerQuery("store A into 'junit_unparted' using " + HCatStorer.class.getName() + "();");
+        server.executeBatch();
+
+        driver.run("select * from junit_unparted");
+        ArrayList<String> res = new ArrayList<String>();
+        driver.getResults(res);
+        driver.run("drop table junit_unparted");
+        Iterator<String> itr = res.iterator();
+        for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
+            Assert.assertEquals(input[i], itr.next());
+        }
+
+        Assert.assertFalse(itr.hasNext());
+
+    }
+
+    @Test
+    public void testEmptyStore() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        int LOOP_SIZE = 3;
+        String[] input = new String[LOOP_SIZE * LOOP_SIZE];
+        int k = 0;
+        for (int i = 1; i <= LOOP_SIZE; i++) {
+            String si = i + "";
+            for (int j = 1; j <= LOOP_SIZE; j++) {
+                input[k++] = si + "\t" + j;
+            }
+        }
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+        server.registerQuery("B = filter A by a > 100;");
+        server.registerQuery("store B into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('','a:int,b:chararray');");
+        server.executeBatch();
+
+        driver.run("select * from junit_unparted");
+        ArrayList<String> res = new ArrayList<String>();
+        driver.getResults(res);
+        driver.run("drop table junit_unparted");
+        Iterator<String> itr = res.iterator();
+        Assert.assertFalse(itr.hasNext());
+
+    }
+
+    @Test
+    public void testBagNStruct() throws IOException, CommandNeedRetryException {
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(b string,a struct<a1:int>,  arr_of_struct array<string>, " +
+            "arr_of_struct2 array<struct<s1:string,s2:string>>,  arr_of_struct3 array<struct<s3:string>>) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        String[] inputData = new String[]{"zookeeper\t(2)\t{(pig)}\t{(pnuts,hdfs)}\t{(hadoop),(hcat)}",
+            "chubby\t(2)\t{(sawzall)}\t{(bigtable,gfs)}\t{(mapreduce),(hcat)}"};
+
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (b:chararray, a:tuple(a1:int), arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)});");
+        server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('','b:chararray, a:tuple(a1:int)," +
+            " arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)}');");
+        server.executeBatch();
+
+        driver.run("select * from junit_unparted");
+        ArrayList<String> res = new ArrayList<String>();
+        driver.getResults(res);
+        driver.run("drop table junit_unparted");
+        Iterator<String> itr = res.iterator();
+        Assert.assertEquals("zookeeper\t{\"a1\":2}\t[\"pig\"]\t[{\"s1\":\"pnuts\",\"s2\":\"hdfs\"}]\t[{\"s3\":\"hadoop\"},{\"s3\":\"hcat\"}]", itr.next());
+        Assert.assertEquals("chubby\t{\"a1\":2}\t[\"sawzall\"]\t[{\"s1\":\"bigtable\",\"s2\":\"gfs\"}]\t[{\"s3\":\"mapreduce\"},{\"s3\":\"hcat\"}]", itr.next());
+        Assert.assertFalse(itr.hasNext());
+
+    }
+
+    @Test
+    public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string, f binary, g binary) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        int i = 0;
+        String[] input = new String[3];
+        input[i++] = "0\t\t\t\t\t\t"; //Empty values except first column
+        input[i++] = "\t" + i * 2.1f + "\t" + i * 1.1d + "\t" + i * 2L + "\t" + "lets hcat" + "\tbinary-data"; //First column empty
+        input[i++] = i + "\t" + i * 2.1f + "\t" + i * 1.1d + "\t" + i * 2L + "\t" + "lets hcat" + "\tbinary-data";
+
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:float, c:double, d:long, e:chararray, f:bytearray);");
+        //null gets stored into column g which is a binary field.
+        server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('','a:int, b:float, c:double, d:long, e:chararray,f:bytearray');");
+        server.executeBatch();
+
+
+        driver.run("select * from junit_unparted");
+        ArrayList<String> res = new ArrayList<String>();
+        driver.getResults(res);
+
+        Iterator<String> itr = res.iterator();
+        Assert.assertEquals("0\tNULL\tNULL\tNULL\tNULL\tnull\tnull", itr.next());
+        Assert.assertEquals("NULL\t4.2\t2.2\t4\tlets hcat\tbinary-data\tnull", itr.next());
+        Assert.assertEquals("3\t6.2999997\t3.3000000000000003\t6\tlets hcat\tbinary-data\tnull", itr.next());
+        Assert.assertFalse(itr.hasNext());
+
+        server.registerQuery("B = load 'junit_unparted' using " + HCatLoader.class.getName() + ";");
+        Iterator<Tuple> iter = server.openIterator("B");
+        int count = 0;
+        int num5nulls = 0;
+        while (iter.hasNext()) {
+            Tuple t = iter.next();
+            if (t.get(5) == null) {
+                num5nulls++;
+            } else {
+                Assert.assertTrue(t.get(5) instanceof DataByteArray);
+            }
+            Assert.assertNull(t.get(6));
+            count++;
+        }
+        Assert.assertEquals(3, count);
+        Assert.assertEquals(1, num5nulls);
+        driver.run("drop table junit_unparted");
+    }
+
+    @Test
+    public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_unparted");
+        String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        int LOOP_SIZE = 3;
+        String[] inputData = new String[LOOP_SIZE * LOOP_SIZE];
+        int k = 0;
+        for (int i = 1; i <= LOOP_SIZE; i++) {
+            String si = i + "";
+            for (int j = 1; j <= LOOP_SIZE; j++) {
+                inputData[k++] = si + "\t" + j;
+            }
+        }
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+        server.registerQuery("store A into 'default.junit_unparted' using " + HCatStorer.class.getName() + "('','a:int,b:chararray');");
+        server.executeBatch();
+
+        driver.run("select * from junit_unparted");
+        ArrayList<String> res = new ArrayList<String>();
+        driver.getResults(res);
+        driver.run("drop table junit_unparted");
+        Iterator<String> itr = res.iterator();
+        for (int i = 1; i <= LOOP_SIZE; i++) {
+            String si = i + "";
+            for (int j = 1; j <= LOOP_SIZE; j++) {
+                Assert.assertEquals(si + "\t" + j, itr.next());
+            }
+        }
+        Assert.assertFalse(itr.hasNext());
+
+    }
+
+    @Test
+    public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table if exists employee");
+        String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
+            " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS RCFILE";
+
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
+            "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
+            "111239\tSatya\t01/01/2001\tM\tIN\tKL",
+            "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
+
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+        PigServer pig = new PigServer(ExecType.LOCAL);
+        pig.setBatchOn();
+        pig.registerQuery("A = LOAD '" + INPUT_FILE_NAME + "' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
+            "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
+        pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
+        pig.registerQuery("STORE IN INTO 'employee' USING " + HCatStorer.class.getName() + "('emp_country=IN');");
+        pig.executeBatch();
+        driver.run("select * from employee");
+        ArrayList<String> results = new ArrayList<String>();
+        driver.getResults(results);
+        Assert.assertEquals(4, results.size());
+        Collections.sort(results);
+        Assert.assertEquals(inputData[0], results.get(0));
+        Assert.assertEquals(inputData[1], results.get(1));
+        Assert.assertEquals(inputData[2], results.get(2));
+        Assert.assertEquals(inputData[3], results.get(3));
+        driver.run("drop table employee");
+    }
+
+    @Test
+    public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table if exists employee");
+        String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
+            " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS RCFILE";
+
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
+            "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
+            "111239\tSatya\t01/01/2001\tM\tIN\tKL",
+            "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
+
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
+        PigServer pig = new PigServer(ExecType.LOCAL);
+        pig.setBatchOn();
+        pig.registerQuery("A = LOAD '" + INPUT_FILE_NAME + "' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
+            "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
+        pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
+        pig.registerQuery("STORE IN INTO 'employee' USING " + HCatStorer.class.getName() + "();");
+        pig.executeBatch();
+        driver.run("select * from employee");
+        ArrayList<String> results = new ArrayList<String>();
+        driver.getResults(results);
+        Assert.assertEquals(4, results.size());
+        Collections.sort(results);
+        Assert.assertEquals(inputData[0], results.get(0));
+        Assert.assertEquals(inputData[1], results.get(1));
+        Assert.assertEquals(inputData[2], results.get(2));
+        Assert.assertEquals(inputData[3], results.get(3));
+        driver.run("drop table employee");
+    }
+
+    @Test
+    public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table if exists employee");
+        String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
+            " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS RCFILE";
+
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table.");
+        }
+
+        String[] inputData = {};
+        HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
 
-    driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-    int LOOP_SIZE = 11;
-    String[] input = new String[LOOP_SIZE];
-    for(int i = 0; i < LOOP_SIZE; i++) {
-        input[i] = i + "\t1";
-    }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
-    server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('b=1');");
-    server.registerQuery("B = load 'default.junit_unparted' using "+HCatLoader.class.getName()+"();");
-    Iterator<Tuple> itr= server.openIterator("B");
-
-    int i = 0;
-
-    while(itr.hasNext()){
-      Tuple t = itr.next();
-      Assert.assertEquals(2, t.size());
-      Assert.assertEquals(t.get(0), i);
-      Assert.assertEquals(t.get(1), "1");
-      i++;
-    }
-
-    Assert.assertFalse(itr.hasNext());
-    Assert.assertEquals(11, i);
-  }
-
-  @Test
-  public void testMultiPartColsInData() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table employee");
-    String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-        " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS RCFILE";
-
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
-                          "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
-                          "111239\tSatya\t01/01/2001\tM\tIN\tKL",
-                          "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
-
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
-    PigServer pig = new PigServer(ExecType.LOCAL);
-    pig.setBatchOn();
-    pig.registerQuery("A = LOAD '"+INPUT_FILE_NAME+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
-        "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
-    pig.registerQuery("TN = FILTER A BY emp_state == 'TN';");
-    pig.registerQuery("KA = FILTER A BY emp_state == 'KA';");
-    pig.registerQuery("KL = FILTER A BY emp_state == 'KL';");
-    pig.registerQuery("AP = FILTER A BY emp_state == 'AP';");
-    pig.registerQuery("STORE TN INTO 'employee' USING "+HCatStorer.class.getName()+"('emp_country=IN,emp_state=TN');");
-    pig.registerQuery("STORE KA INTO 'employee' USING "+HCatStorer.class.getName()+"('emp_country=IN,emp_state=KA');");
-    pig.registerQuery("STORE KL INTO 'employee' USING "+HCatStorer.class.getName()+"('emp_country=IN,emp_state=KL');");
-    pig.registerQuery("STORE AP INTO 'employee' USING "+HCatStorer.class.getName()+"('emp_country=IN,emp_state=AP');");
-    pig.executeBatch();
-    driver.run("select * from employee");
-    ArrayList<String> results = new ArrayList<String>();
-    driver.getResults(results);
-    Assert.assertEquals(4, results.size());
-    Collections.sort(results);
-    Assert.assertEquals(inputData[0], results.get(0));
-    Assert.assertEquals(inputData[1], results.get(1));
-    Assert.assertEquals(inputData[2], results.get(2));
-    Assert.assertEquals(inputData[3], results.get(3));
-    driver.run("drop table employee");
-  }
-
-  @Test
-  public void testStoreInPartiitonedTbl() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-    int LOOP_SIZE = 11;
-    String[] input = new String[LOOP_SIZE];
-    for(int i = 0; i < LOOP_SIZE; i++) {
-        input[i] = i+"";
-    }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int);");
-    server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('b=1');");
-    server.registerQuery("B = load 'default.junit_unparted' using "+HCatLoader.class.getName()+"();");
-    Iterator<Tuple> itr= server.openIterator("B");
-
-    int i = 0;
-
-    while(itr.hasNext()){
-      Tuple t = itr.next();
-      Assert.assertEquals(2, t.size());
-      Assert.assertEquals(t.get(0), i);
-      Assert.assertEquals(t.get(1), "1");
-      i++;
-    }
-
-    Assert.assertFalse(itr.hasNext());
-    Assert.assertEquals(11, i);
-  }
-
-  @Test
-  public void testNoAlias() throws IOException, CommandNeedRetryException{
-    driver.run("drop table junit_parted");
-    String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-    PigServer server = new PigServer(ExecType.LOCAL);
-    boolean errCaught = false;
-    try{
-      server.setBatchOn();
-      server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, b:chararray);");
-      server.registerQuery("B = foreach A generate a+10, b;");
-      server.registerQuery("store B into 'junit_parted' using "+HCatStorer.class.getName()+"('ds=20100101');");
-      server.executeBatch();
-    }
-    catch(PigException fe){
-      PigException pe = LogUtils.getPigException(fe);
-      Assert.assertTrue(pe instanceof FrontendException);
-      Assert.assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
-      Assert.assertTrue(pe.getMessage().contains("Column name for a field is not specified. Please provide the full schema as an argument to HCatStorer."));
-      errCaught = true;
-    }
-    Assert.assertTrue(errCaught);
-    errCaught = false;
-    try{
-      server.setBatchOn();
-      server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, B:chararray);");
-      server.registerQuery("B = foreach A generate a, B;");
-      server.registerQuery("store B into 'junit_parted' using "+HCatStorer.class.getName()+"('ds=20100101');");
-      server.executeBatch();
-    }
-    catch(PigException fe){
-      PigException pe = LogUtils.getPigException(fe);
-      Assert.assertTrue(pe instanceof FrontendException);
-      Assert.assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
-      Assert.assertTrue(pe.getMessage().contains("Column names should all be in lowercase. Invalid name found: B"));
-      errCaught = true;
-    }
-    driver.run("drop table junit_parted");
-    Assert.assertTrue(errCaught);
-  }
-
-  @Test
-  public void testStoreMultiTables() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-    driver.run("drop table junit_unparted2");
-    createTable = "create table junit_unparted2(a int, b string) stored as RCFILE";
-    retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    int LOOP_SIZE = 3;
-    String[] input = new String[LOOP_SIZE*LOOP_SIZE];
-    int k = 0;
-    for(int i = 1; i <= LOOP_SIZE; i++) {
-      String si = i + "";
-      for(int j=1;j<=LOOP_SIZE;j++) {
-        input[k++] = si + "\t"+j;
-      }
-    }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, b:chararray);");
-    server.registerQuery("B = filter A by a < 2;");
-    server.registerQuery("store B into 'junit_unparted' using "+HCatStorer.class.getName()+"();");
-    server.registerQuery("C = filter A by a >= 2;");
-    server.registerQuery("store C into 'junit_unparted2' using "+HCatStorer.class.getName()+"();");
-    server.executeBatch();
-
-    driver.run("select * from junit_unparted");
-    ArrayList<String> res = new ArrayList<String>();
-    driver.getResults(res);
-    driver.run("select * from junit_unparted2");
-    ArrayList<String> res2 = new ArrayList<String>();
-    driver.getResults(res2);
-
-    res.addAll(res2);
-    driver.run("drop table junit_unparted");
-    driver.run("drop table junit_unparted2");
-
-    Iterator<String> itr = res.iterator();
-    for(int i = 0; i < LOOP_SIZE*LOOP_SIZE; i++) {
-      Assert.assertEquals( input[i] ,itr.next());
-    }
-
-    Assert.assertFalse(itr.hasNext());
-
-  }
-
-  @Test
-  public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    int LOOP_SIZE = 3;
-    String[] input = new String[LOOP_SIZE*LOOP_SIZE];
-    int k = 0;
-    for(int i = 1; i <= LOOP_SIZE; i++) {
-      String si = i + "";
-      for(int j=1;j<=LOOP_SIZE;j++) {
-        input[k++] = si + "\t"+j;
-      }
-    }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, b:chararray);");
-    server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('');");
-    server.executeBatch();
-
-    driver.run("select * from junit_unparted");
-    ArrayList<String> res = new ArrayList<String>();
-    driver.getResults(res);
-    driver.run("drop table junit_unparted");
-    Iterator<String> itr = res.iterator();
-    for(int i = 0; i < LOOP_SIZE*LOOP_SIZE; i++) {
-      Assert.assertEquals( input[i] ,itr.next());
-    }
-
-    Assert.assertFalse(itr.hasNext());
-
-  }
-
-  @Test
-  public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    int LOOP_SIZE = 3;
-    String[] input = new String[LOOP_SIZE*LOOP_SIZE];
-    int k = 0;
-    for(int i = 1; i <= LOOP_SIZE; i++) {
-      String si = i + "";
-      for(int j=1;j<=LOOP_SIZE;j++) {
-        input[k++] = si + "\t"+j;
-      }
-    }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+ INPUT_FILE_NAME +"' as (a:int, b:chararray);");
-    server.registerQuery("store A into 'junit_unparted' using "+HCatStorer.class.getName()+"();");
-    server.executeBatch();
-
-    driver.run("select * from junit_unparted");
-    ArrayList<String> res = new ArrayList<String>();
-    driver.getResults(res);
-    driver.run("drop table junit_unparted");
-    Iterator<String> itr = res.iterator();
-    for(int i = 0; i < LOOP_SIZE*LOOP_SIZE; i++) {
-      Assert.assertEquals( input[i] ,itr.next());
-    }
-
-    Assert.assertFalse(itr.hasNext());
-
-  }
-
-  @Test
-  public void testEmptyStore() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    int LOOP_SIZE = 3;
-    String[] input = new String[LOOP_SIZE*LOOP_SIZE];
-    int k = 0;
-    for(int i = 1; i <= LOOP_SIZE; i++) {
-      String si = i + "";
-      for(int j=1;j<=LOOP_SIZE;j++) {
-        input[k++] = si + "\t"+j;
-      }
-    }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
-    server.registerQuery("B = filter A by a > 100;");
-    server.registerQuery("store B into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('','a:int,b:chararray');");
-    server.executeBatch();
-
-    driver.run("select * from junit_unparted");
-    ArrayList<String> res = new ArrayList<String>();
-    driver.getResults(res);
-    driver.run("drop table junit_unparted");
-    Iterator<String> itr = res.iterator();
-    Assert.assertFalse(itr.hasNext());
-
-  }
-
-  @Test
-  public void testBagNStruct() throws IOException, CommandNeedRetryException{
-  driver.run("drop table junit_unparted");
-  String createTable = "create table junit_unparted(b string,a struct<a1:int>,  arr_of_struct array<string>, " +
-      "arr_of_struct2 array<struct<s1:string,s2:string>>,  arr_of_struct3 array<struct<s3:string>>) stored as RCFILE";
-  int retCode = driver.run(createTable).getResponseCode();
-  if(retCode != 0) {
-    throw new IOException("Failed to create table.");
-  }
-
-  String[] inputData = new String[]{"zookeeper\t(2)\t{(pig)}\t{(pnuts,hdfs)}\t{(hadoop),(hcat)}",
-      "chubby\t(2)\t{(sawzall)}\t{(bigtable,gfs)}\t{(mapreduce),(hcat)}"};
-
-  HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
-
-  PigServer server = new PigServer(ExecType.LOCAL);
-  server.setBatchOn();
-  server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (b:chararray, a:tuple(a1:int), arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)});");
-  server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('','b:chararray, a:tuple(a1:int)," +
-      " arr_of_struct:bag{mytup:tuple(s1:chararray)}, arr_of_struct2:bag{mytup:tuple(s1:chararray,s2:chararray)}, arr_of_struct3:bag{t3:tuple(s3:chararray)}');");
-  server.executeBatch();
-
-  driver.run("select * from junit_unparted");
-  ArrayList<String> res = new ArrayList<String>();
-  driver.getResults(res);
-  driver.run("drop table junit_unparted");
-  Iterator<String> itr = res.iterator();
-  Assert.assertEquals("zookeeper\t{\"a1\":2}\t[\"pig\"]\t[{\"s1\":\"pnuts\",\"s2\":\"hdfs\"}]\t[{\"s3\":\"hadoop\"},{\"s3\":\"hcat\"}]", itr.next());
-  Assert.assertEquals("chubby\t{\"a1\":2}\t[\"sawzall\"]\t[{\"s1\":\"bigtable\",\"s2\":\"gfs\"}]\t[{\"s3\":\"mapreduce\"},{\"s3\":\"hcat\"}]",itr.next());
- Assert.assertFalse(itr.hasNext());
-
-  }
-
-  @Test
-  public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string, f binary, g binary) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    int i =0;
-    String[] input = new String[3];
-    input[i++]= "0\t\t\t\t\t\t"; //Empty values except first column
-    input[i++]= "\t" + i * 2.1f +"\t"+ i*1.1d + "\t" + i * 2L +"\t"+"lets hcat"+"\tbinary-data"; //First column empty
-    input[i++]= i + "\t" + i * 2.1f +"\t"+ i*1.1d + "\t" + i * 2L +"\t"+"lets hcat"+"\tbinary-data";
-
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:float, c:double, d:long, e:chararray, f:bytearray);");
-    //null gets stored into column g which is a binary field.
-    server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('','a:int, b:float, c:double, d:long, e:chararray,f:bytearray');");
-    server.executeBatch();
-
-
-    driver.run("select * from junit_unparted");
-    ArrayList<String> res = new ArrayList<String>();
-    driver.getResults(res);
-
-    Iterator<String> itr = res.iterator();
-    Assert.assertEquals( "0\tNULL\tNULL\tNULL\tNULL\tnull\tnull" ,itr.next());
-    Assert.assertEquals( "NULL\t4.2\t2.2\t4\tlets hcat\tbinary-data\tnull" ,itr.next());
-    Assert.assertEquals( "3\t6.2999997\t3.3000000000000003\t6\tlets hcat\tbinary-data\tnull",itr.next());
-    Assert.assertFalse(itr.hasNext());
-
-    server.registerQuery("B = load 'junit_unparted' using "+HCatLoader.class.getName()+";");
-    Iterator<Tuple> iter = server.openIterator("B");
-    int count = 0;
-    int num5nulls = 0;
-    while(iter.hasNext()){
-        Tuple t = iter.next();
-        if(t.get(5) == null){
-            num5nulls++;
-        }else {
-          Assert.assertTrue(t.get(5) instanceof DataByteArray);
-        }
-      Assert.assertNull(t.get(6));
-        count++;
-    }
-    Assert.assertEquals(3, count);
-    Assert.assertEquals(1, num5nulls);
-    driver.run("drop table junit_unparted");
-  }
-
-  @Test
-  public void testStoreFuncSimple() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as RCFILE";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    int LOOP_SIZE = 3;
-    String[] inputData = new String[LOOP_SIZE*LOOP_SIZE];
-    int k = 0;
-    for(int i = 1; i <= LOOP_SIZE; i++) {
-      String si = i + "";
-      for(int j=1;j<=LOOP_SIZE;j++) {
-        inputData[k++] = si + "\t"+j;
-      }
-    }
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
-    server.registerQuery("store A into 'default.junit_unparted' using "+HCatStorer.class.getName()+"('','a:int,b:chararray');");
-    server.executeBatch();
-
-    driver.run("select * from junit_unparted");
-    ArrayList<String> res = new ArrayList<String>();
-    driver.getResults(res);
-    driver.run("drop table junit_unparted");
-    Iterator<String> itr = res.iterator();
-    for(int i = 1; i <= LOOP_SIZE; i++) {
-      String si = i + "";
-      for(int j=1;j<=LOOP_SIZE;j++) {
-        Assert.assertEquals( si + "\t"+j,itr.next());
-      }
-    }
-   Assert.assertFalse(itr.hasNext());
-
-  }
-
-  @Test
-  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table if exists employee");
-    String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-        " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS RCFILE";
-
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
-                          "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
-                          "111239\tSatya\t01/01/2001\tM\tIN\tKL",
-                          "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
-
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
-    PigServer pig = new PigServer(ExecType.LOCAL);
-    pig.setBatchOn();
-    pig.registerQuery("A = LOAD '"+INPUT_FILE_NAME+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
-        "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
-    pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
-    pig.registerQuery("STORE IN INTO 'employee' USING "+HCatStorer.class.getName()+"('emp_country=IN');");
-    pig.executeBatch();
-    driver.run("select * from employee");
-    ArrayList<String> results = new ArrayList<String>();
-    driver.getResults(results);
-    Assert.assertEquals(4, results.size());
-    Collections.sort(results);
-    Assert.assertEquals(inputData[0], results.get(0));
-    Assert.assertEquals(inputData[1], results.get(1));
-    Assert.assertEquals(inputData[2], results.get(2));
-    Assert.assertEquals(inputData[3], results.get(3));
-    driver.run("drop table employee");
-  }
-
-  @Test
-  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table if exists employee");
-    String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-        " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS RCFILE";
-
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table.");
-    }
-
-    String[] inputData = {"111237\tKrishna\t01/01/1990\tM\tIN\tTN",
-                          "111238\tKalpana\t01/01/2000\tF\tIN\tKA",
-                          "111239\tSatya\t01/01/2001\tM\tIN\tKL",
-                          "111240\tKavya\t01/01/2002\tF\tIN\tAP"};
-
-    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
-    PigServer pig = new PigServer(ExecType.LOCAL);
-    pig.setBatchOn();
-    pig.registerQuery("A = LOAD '"+INPUT_FILE_NAME+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
-        "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
-    pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
-    pig.registerQuery("STORE IN INTO 'employee' USING "+HCatStorer.class.getName()+"();");
-    pig.executeBatch();
-    driver.run("select * from employee");
-    ArrayList<String> results = new ArrayList<String>();
-    driver.getResults(results);
-    Assert.assertEquals(4, results.size());
-    Collections.sort(results);
-    Assert.assertEquals(inputData[0], results.get(0));
-    Assert.assertEquals(inputData[1], results.get(1));
-    Assert.assertEquals(inputData[2], results.get(2));
-    Assert.assertEquals(inputData[3], results.get(3));
-    driver.run("drop table employee");
-  }
-
-    @Test
-    public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException, CommandNeedRetryException{
-
-      driver.run("drop table if exists employee");
-      String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-          " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS RCFILE";
-
-      int retCode = driver.run(createTable).getResponseCode();
-      if(retCode != 0) {
-        throw new IOException("Failed to create table.");
-      }
-
-      String[] inputData = {};
-      HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, inputData);
-
-      PigServer pig = new PigServer(ExecType.LOCAL);
-      pig.setBatchOn();
-      pig.registerQuery("A = LOAD '"+INPUT_FILE_NAME+"' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
-          "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
-      pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
-      pig.registerQuery("STORE IN INTO 'employee' USING "+HCatStorer.class.getName()+"();");
-      pig.executeBatch();
-      driver.run("select * from employee");
-      ArrayList<String> results = new ArrayList<String>();
-      driver.getResults(results);
-      Assert.assertEquals(0, results.size());
-      driver.run("drop table employee");
+        PigServer pig = new PigServer(ExecType.LOCAL);
+        pig.setBatchOn();
+        pig.registerQuery("A = LOAD '" + INPUT_FILE_NAME + "' USING PigStorage() AS (emp_id:int,emp_name:chararray,emp_start_date:chararray," +
+            "emp_gender:chararray,emp_country:chararray,emp_state:chararray);");
+        pig.registerQuery("IN = FILTER A BY emp_country == 'IN';");
+        pig.registerQuery("STORE IN INTO 'employee' USING " + HCatStorer.class.getName() + "();");
+        pig.executeBatch();
+        driver.run("select * from employee");
+        ArrayList<String> results = new ArrayList<String>();
+        driver.getResults(results);
+        Assert.assertEquals(0, results.size());
+        driver.run("drop table employee");
     }
 }

Modified: incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorerMulti.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorerMulti.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorerMulti.java (original)
+++ incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorerMulti.java Mon Sep 10 23:28:55 2012
@@ -37,159 +37,160 @@ import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 
 public class TestHCatStorerMulti extends TestCase {
-  private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
-      "/build/test/data/" + TestHCatStorerMulti.class.getCanonicalName();
-  private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
-  private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
-
-  private static final String BASIC_TABLE = "junit_unparted_basic";
-  private static final String PARTITIONED_TABLE = "junit_parted_basic";
-  private static Driver driver;
-
-  private static Map<Integer,Pair<Integer,String>> basicInputData;
-
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException{
-    driver.run("drop table "+tablename);
-  }
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException{
-    String createTable;
-    createTable = "create table "+tablename+"("+schema+") ";
-    if ((partitionedBy != null)&&(!partitionedBy.trim().isEmpty())){
-      createTable = createTable + "partitioned by ("+partitionedBy+") ";
-    }
-    createTable = createTable + "stored as RCFILE tblproperties('hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver'," +
-    "'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver') ";
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table. ["+createTable+"], return code from hive driver : ["+retCode+"]");
-    }
-  }
-
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException{
-    createTable(tablename,schema,null);
-  }
-
-  @Override
-  protected void setUp() throws Exception {
-    if (driver == null){
-      HiveConf hiveConf = new HiveConf(this.getClass());
-      hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
-      hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-      hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-      hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
-      driver = new Driver(hiveConf);
-      SessionState.start(new CliSessionState(hiveConf));
-    }
-
-    cleanup();
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-    cleanup();
-  }
-
-  public void testStoreBasicTable() throws Exception {
-
-
-    createTable(BASIC_TABLE,"a int, b string");
-
-    populateBasicFile();
-
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
-    server.registerQuery("store A into '"+BASIC_TABLE+"' using org.apache.hcatalog.pig.HCatStorer();");
-
-    server.executeBatch();
-
-    driver.run("select * from "+BASIC_TABLE);
-    ArrayList<String> unpartitionedTableValuesReadFromHiveDriver = new ArrayList<String>();
-    driver.getResults(unpartitionedTableValuesReadFromHiveDriver);
-    assertEquals(basicInputData.size(),unpartitionedTableValuesReadFromHiveDriver.size());
-  }
-
-  public void testStorePartitionedTable() throws Exception {
-    createTable(PARTITIONED_TABLE,"a int, b string","bkt string");
-
-    populateBasicFile();
-
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
-
-    server.registerQuery("B2 = filter A by a < 2;");
-    server.registerQuery("store B2 into '"+PARTITIONED_TABLE+"' using org.apache.hcatalog.pig.HCatStorer('bkt=0');");
-    server.registerQuery("C2 = filter A by a >= 2;");
-    server.registerQuery("store C2 into '"+PARTITIONED_TABLE+"' using org.apache.hcatalog.pig.HCatStorer('bkt=1');");
-
-    server.executeBatch();
-
-    driver.run("select * from "+PARTITIONED_TABLE);
-    ArrayList<String> partitionedTableValuesReadFromHiveDriver = new ArrayList<String>();
-    driver.getResults(partitionedTableValuesReadFromHiveDriver);
-    assertEquals(basicInputData.size(),partitionedTableValuesReadFromHiveDriver.size());
-  }
-
-  public void testStoreTableMulti() throws Exception {
-
-
-    createTable(BASIC_TABLE,"a int, b string");
-    createTable(PARTITIONED_TABLE,"a int, b string","bkt string");
-
-    populateBasicFile();
-
-    PigServer server = new PigServer(ExecType.LOCAL);
-    server.setBatchOn();
-    server.registerQuery("A = load '"+INPUT_FILE_NAME+"' as (a:int, b:chararray);");
-    server.registerQuery("store A into '"+BASIC_TABLE+"' using org.apache.hcatalog.pig.HCatStorer();");
-
-    server.registerQuery("B2 = filter A by a < 2;");
-    server.registerQuery("store B2 into '"+PARTITIONED_TABLE+"' using org.apache.hcatalog.pig.HCatStorer('bkt=0');");
-    server.registerQuery("C2 = filter A by a >= 2;");
-    server.registerQuery("store C2 into '"+PARTITIONED_TABLE+"' using org.apache.hcatalog.pig.HCatStorer('bkt=1');");
-
-    server.executeBatch();
-
-    driver.run("select * from "+BASIC_TABLE);
-    ArrayList<String> unpartitionedTableValuesReadFromHiveDriver = new ArrayList<String>();
-    driver.getResults(unpartitionedTableValuesReadFromHiveDriver);
-    driver.run("select * from "+PARTITIONED_TABLE);
-    ArrayList<String> partitionedTableValuesReadFromHiveDriver = new ArrayList<String>();
-    driver.getResults(partitionedTableValuesReadFromHiveDriver);
-    assertEquals(basicInputData.size(),unpartitionedTableValuesReadFromHiveDriver.size());
-    assertEquals(basicInputData.size(),partitionedTableValuesReadFromHiveDriver.size());
-  }
-
-  private void populateBasicFile() throws IOException {
-    int LOOP_SIZE = 3;
-    String[] input = new String[LOOP_SIZE*LOOP_SIZE];
-    basicInputData = new HashMap<Integer,Pair<Integer,String>>();
-    int k = 0;
-    File file = new File(INPUT_FILE_NAME);
-    file.deleteOnExit();
-    FileWriter writer = new FileWriter(file);
-    for(int i = 1; i <= LOOP_SIZE; i++) {
-      String si = i + "";
-      for(int j=1;j<=LOOP_SIZE;j++) {
-        String sj = "S"+j+"S";
-        input[k] = si + "\t" + sj;
-        basicInputData.put(k, new Pair<Integer,String>(i,sj));
-        writer.write(input[k] + "\n");
-        k++;
-      }
-    }
-    writer.close();
-  }
-
-  private void cleanup() throws IOException, CommandNeedRetryException {
-    File f = new File(TEST_WAREHOUSE_DIR);
-    if (f.exists()) {
-      FileUtil.fullyDelete(f);
-    }
-    new File(TEST_WAREHOUSE_DIR).mkdirs();
-
-    dropTable(BASIC_TABLE);
-    dropTable(PARTITIONED_TABLE);
-  }
+    private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
+        "/build/test/data/" + TestHCatStorerMulti.class.getCanonicalName();
+    private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
+    private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
+
+    private static final String BASIC_TABLE = "junit_unparted_basic";
+    private static final String PARTITIONED_TABLE = "junit_parted_basic";
+    private static Driver driver;
+
+    private static Map<Integer, Pair<Integer, String>> basicInputData;
+
+    private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+        driver.run("drop table " + tablename);
+    }
+
+    private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+        String createTable;
+        createTable = "create table " + tablename + "(" + schema + ") ";
+        if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
+            createTable = createTable + "partitioned by (" + partitionedBy + ") ";
+        }
+        createTable = createTable + "stored as RCFILE tblproperties('hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver'," +
+            "'hcat.osd'='org.apache.hcatalog.rcfile.RCFileOutputDriver') ";
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table. [" + createTable + "], return code from hive driver : [" + retCode + "]");
+        }
+    }
+
+    private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+        createTable(tablename, schema, null);
+    }
+
+    @Override
+    protected void setUp() throws Exception {
+        if (driver == null) {
+            HiveConf hiveConf = new HiveConf(this.getClass());
+            hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+            hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+            hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+            hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
+            driver = new Driver(hiveConf);
+            SessionState.start(new CliSessionState(hiveConf));
+        }
+
+        cleanup();
+    }
+
+    @Override
+    protected void tearDown() throws Exception {
+        cleanup();
+    }
+
+    public void testStoreBasicTable() throws Exception {
+
+
+        createTable(BASIC_TABLE, "a int, b string");
+
+        populateBasicFile();
+
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+        server.registerQuery("store A into '" + BASIC_TABLE + "' using org.apache.hcatalog.pig.HCatStorer();");
+
+        server.executeBatch();
+
+        driver.run("select * from " + BASIC_TABLE);
+        ArrayList<String> unpartitionedTableValuesReadFromHiveDriver = new ArrayList<String>();
+        driver.getResults(unpartitionedTableValuesReadFromHiveDriver);
+        assertEquals(basicInputData.size(), unpartitionedTableValuesReadFromHiveDriver.size());
+    }
+
+    public void testStorePartitionedTable() throws Exception {
+        createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
+
+        populateBasicFile();
+
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+
+        server.registerQuery("B2 = filter A by a < 2;");
+        server.registerQuery("store B2 into '" + PARTITIONED_TABLE + "' using org.apache.hcatalog.pig.HCatStorer('bkt=0');");
+        server.registerQuery("C2 = filter A by a >= 2;");
+        server.registerQuery("store C2 into '" + PARTITIONED_TABLE + "' using org.apache.hcatalog.pig.HCatStorer('bkt=1');");
+
+        server.executeBatch();
+
+        driver.run("select * from " + PARTITIONED_TABLE);
+        ArrayList<String> partitionedTableValuesReadFromHiveDriver = new ArrayList<String>();
+        driver.getResults(partitionedTableValuesReadFromHiveDriver);
+        assertEquals(basicInputData.size(), partitionedTableValuesReadFromHiveDriver.size());
+    }
+
+    public void testStoreTableMulti() throws Exception {
+
+
+        createTable(BASIC_TABLE, "a int, b string");
+        createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
+
+        populateBasicFile();
+
+        PigServer server = new PigServer(ExecType.LOCAL);
+        server.setBatchOn();
+        server.registerQuery("A = load '" + INPUT_FILE_NAME + "' as (a:int, b:chararray);");
+        server.registerQuery("store A into '" + BASIC_TABLE + "' using org.apache.hcatalog.pig.HCatStorer();");
+
+        server.registerQuery("B2 = filter A by a < 2;");
+        server.registerQuery("store B2 into '" + PARTITIONED_TABLE + "' using org.apache.hcatalog.pig.HCatStorer('bkt=0');");
+        server.registerQuery("C2 = filter A by a >= 2;");
+        server.registerQuery("store C2 into '" + PARTITIONED_TABLE + "' using org.apache.hcatalog.pig.HCatStorer('bkt=1');");
+
+        server.executeBatch();
+
+        driver.run("select * from " + BASIC_TABLE);
+        ArrayList<String> unpartitionedTableValuesReadFromHiveDriver = new ArrayList<String>();
+        driver.getResults(unpartitionedTableValuesReadFromHiveDriver);
+        driver.run("select * from " + PARTITIONED_TABLE);
+        ArrayList<String> partitionedTableValuesReadFromHiveDriver = new ArrayList<String>();
+        driver.getResults(partitionedTableValuesReadFromHiveDriver);
+        assertEquals(basicInputData.size(), unpartitionedTableValuesReadFromHiveDriver.size());
+        assertEquals(basicInputData.size(), partitionedTableValuesReadFromHiveDriver.size());
+    }
+
+    private void populateBasicFile() throws IOException {
+        int LOOP_SIZE = 3;
+        String[] input = new String[LOOP_SIZE * LOOP_SIZE];
+        basicInputData = new HashMap<Integer, Pair<Integer, String>>();
+        int k = 0;
+        File file = new File(INPUT_FILE_NAME);
+        file.deleteOnExit();
+        FileWriter writer = new FileWriter(file);
+        for (int i = 1; i <= LOOP_SIZE; i++) {
+            String si = i + "";
+            for (int j = 1; j <= LOOP_SIZE; j++) {
+                String sj = "S" + j + "S";
+                input[k] = si + "\t" + sj;
+                basicInputData.put(k, new Pair<Integer, String>(i, sj));
+                writer.write(input[k] + "\n");
+                k++;
+            }
+        }
+        writer.close();
+    }
+
+    private void cleanup() throws IOException, CommandNeedRetryException {
+        File f = new File(TEST_WAREHOUSE_DIR);
+        if (f.exists()) {
+            FileUtil.fullyDelete(f);
+        }
+        new File(TEST_WAREHOUSE_DIR).mkdirs();
+
+        dropTable(BASIC_TABLE);
+        dropTable(PARTITIONED_TABLE);
+    }
 }

Modified: incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestPigHCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestPigHCatUtil.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestPigHCatUtil.java (original)
+++ incubator/hcatalog/trunk/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestPigHCatUtil.java Mon Sep 10 23:28:55 2012
@@ -31,60 +31,60 @@ import org.junit.Test;
 
 public class TestPigHCatUtil {
 
-  @Test
-  public void testGetBagSubSchema() throws Exception {
+    @Test
+    public void testGetBagSubSchema() throws Exception {
 
-    // Define the expected schema.
-    ResourceFieldSchema[] bagSubFieldSchemas = new ResourceFieldSchema[1];
-    bagSubFieldSchemas[0] = new ResourceFieldSchema().setName("innertuple")
-        .setDescription("The tuple in the bag").setType(DataType.TUPLE);
-
-    ResourceFieldSchema[] innerTupleFieldSchemas = new ResourceFieldSchema[1];
-    innerTupleFieldSchemas[0] =
-        new ResourceFieldSchema().setName("innerfield").setType(DataType.CHARARRAY);
-
-    bagSubFieldSchemas[0].setSchema(new ResourceSchema().setFields(innerTupleFieldSchemas));
-    ResourceSchema expected = new ResourceSchema().setFields(bagSubFieldSchemas);
-
-    // Get the actual converted schema.
-    HCatSchema hCatSchema = new HCatSchema(Lists.newArrayList(
-        new HCatFieldSchema("innerLlama", HCatFieldSchema.Type.STRING, null)));
-    HCatFieldSchema hCatFieldSchema =
-        new HCatFieldSchema("llama", HCatFieldSchema.Type.ARRAY, hCatSchema, null);
-    ResourceSchema actual = PigHCatUtil.getBagSubSchema(hCatFieldSchema);
-
-    Assert.assertEquals(expected.toString(), actual.toString());
-  }
-
-  @Test
-  public void testGetBagSubSchemaConfigured() throws Exception {
-
-    // NOTE: pig-0.8 sets client system properties by actually getting the client
-    // system properties. Starting in pig-0.9 you must pass the properties in.
-    // When updating our pig dependency this will need updated.
-    System.setProperty(HCatConstants.HCAT_PIG_INNER_TUPLE_NAME, "t");
-    System.setProperty(HCatConstants.HCAT_PIG_INNER_FIELD_NAME, "FIELDNAME_tuple");
-    UDFContext.getUDFContext().setClientSystemProps();
-
-    // Define the expected schema.
-    ResourceFieldSchema[] bagSubFieldSchemas = new ResourceFieldSchema[1];
-    bagSubFieldSchemas[0] = new ResourceFieldSchema().setName("t")
-        .setDescription("The tuple in the bag").setType(DataType.TUPLE);
-
-    ResourceFieldSchema[] innerTupleFieldSchemas = new ResourceFieldSchema[1];
-    innerTupleFieldSchemas[0] =
-        new ResourceFieldSchema().setName("llama_tuple").setType(DataType.CHARARRAY);
-
-    bagSubFieldSchemas[0].setSchema(new ResourceSchema().setFields(innerTupleFieldSchemas));
-    ResourceSchema expected = new ResourceSchema().setFields(bagSubFieldSchemas);
-
-    // Get the actual converted schema.
-    HCatSchema actualHCatSchema = new HCatSchema(Lists.newArrayList(
-        new HCatFieldSchema("innerLlama", HCatFieldSchema.Type.STRING, null)));
-    HCatFieldSchema actualHCatFieldSchema =
-        new HCatFieldSchema("llama", HCatFieldSchema.Type.ARRAY, actualHCatSchema, null);
-    ResourceSchema actual = PigHCatUtil.getBagSubSchema(actualHCatFieldSchema);
+        // Define the expected schema.
+        ResourceFieldSchema[] bagSubFieldSchemas = new ResourceFieldSchema[1];
+        bagSubFieldSchemas[0] = new ResourceFieldSchema().setName("innertuple")
+            .setDescription("The tuple in the bag").setType(DataType.TUPLE);
+
+        ResourceFieldSchema[] innerTupleFieldSchemas = new ResourceFieldSchema[1];
+        innerTupleFieldSchemas[0] =
+            new ResourceFieldSchema().setName("innerfield").setType(DataType.CHARARRAY);
+
+        bagSubFieldSchemas[0].setSchema(new ResourceSchema().setFields(innerTupleFieldSchemas));
+        ResourceSchema expected = new ResourceSchema().setFields(bagSubFieldSchemas);
+
+        // Get the actual converted schema.
+        HCatSchema hCatSchema = new HCatSchema(Lists.newArrayList(
+            new HCatFieldSchema("innerLlama", HCatFieldSchema.Type.STRING, null)));
+        HCatFieldSchema hCatFieldSchema =
+            new HCatFieldSchema("llama", HCatFieldSchema.Type.ARRAY, hCatSchema, null);
+        ResourceSchema actual = PigHCatUtil.getBagSubSchema(hCatFieldSchema);
+
+        Assert.assertEquals(expected.toString(), actual.toString());
+    }
+
+    @Test
+    public void testGetBagSubSchemaConfigured() throws Exception {
+
+        // NOTE: pig-0.8 sets client system properties by actually getting the client
+        // system properties. Starting in pig-0.9 you must pass the properties in.
+        // When updating our pig dependency this will need to be updated.
+        System.setProperty(HCatConstants.HCAT_PIG_INNER_TUPLE_NAME, "t");
+        System.setProperty(HCatConstants.HCAT_PIG_INNER_FIELD_NAME, "FIELDNAME_tuple");
+        UDFContext.getUDFContext().setClientSystemProps();
+
+        // Define the expected schema.
+        ResourceFieldSchema[] bagSubFieldSchemas = new ResourceFieldSchema[1];
+        bagSubFieldSchemas[0] = new ResourceFieldSchema().setName("t")
+            .setDescription("The tuple in the bag").setType(DataType.TUPLE);
+
+        ResourceFieldSchema[] innerTupleFieldSchemas = new ResourceFieldSchema[1];
+        innerTupleFieldSchemas[0] =
+            new ResourceFieldSchema().setName("llama_tuple").setType(DataType.CHARARRAY);
+
+        bagSubFieldSchemas[0].setSchema(new ResourceSchema().setFields(innerTupleFieldSchemas));
+        ResourceSchema expected = new ResourceSchema().setFields(bagSubFieldSchemas);
+
+        // Get the actual converted schema.
+        HCatSchema actualHCatSchema = new HCatSchema(Lists.newArrayList(
+            new HCatFieldSchema("innerLlama", HCatFieldSchema.Type.STRING, null)));
+        HCatFieldSchema actualHCatFieldSchema =
+            new HCatFieldSchema("llama", HCatFieldSchema.Type.ARRAY, actualHCatSchema, null);
+        ResourceSchema actual = PigHCatUtil.getBagSubSchema(actualHCatFieldSchema);
 
-    Assert.assertEquals(expected.toString(), actual.toString());
-  }
+        Assert.assertEquals(expected.toString(), actual.toString());
+    }
 }

Modified: incubator/hcatalog/trunk/shims/src/20/java/org/apache/hcatalog/shims/HCatHadoopShims20S.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/shims/src/20/java/org/apache/hcatalog/shims/HCatHadoopShims20S.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/shims/src/20/java/org/apache/hcatalog/shims/HCatHadoopShims20S.java (original)
+++ incubator/hcatalog/trunk/shims/src/20/java/org/apache/hcatalog/shims/HCatHadoopShims20S.java Mon Sep 10 23:28:55 2012
@@ -46,9 +46,8 @@ public class HCatHadoopShims20S implemen
         return new TaskAttemptID();
     }
 
-	@Override
-	public TaskAttemptContext createTaskAttemptContext(Configuration conf,
-			TaskAttemptID taskId) {
+    @Override
+    public TaskAttemptContext createTaskAttemptContext(Configuration conf, TaskAttemptID taskId) {
         return new TaskAttemptContext(conf, taskId);
     }
 
@@ -133,12 +132,12 @@ public class HCatHadoopShims20S implemen
     @Override
     public String getPropertyName(PropertyName name) {
         switch (name) {
-            case CACHE_ARCHIVES:
-                return DistributedCache.CACHE_ARCHIVES;
-            case CACHE_FILES:
-                return DistributedCache.CACHE_FILES;
-            case CACHE_SYMLINK:
-                return DistributedCache.CACHE_SYMLINK;
+        case CACHE_ARCHIVES:
+            return DistributedCache.CACHE_ARCHIVES;
+        case CACHE_FILES:
+            return DistributedCache.CACHE_FILES;
+        case CACHE_SYMLINK:
+            return DistributedCache.CACHE_SYMLINK;
         }
 
         return "";

Modified: incubator/hcatalog/trunk/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java (original)
+++ incubator/hcatalog/trunk/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java Mon Sep 10 23:28:55 2012
@@ -49,20 +49,20 @@ public class HCatHadoopShims23 implement
 
     @Override
     public org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf,
-            org.apache.hadoop.mapreduce.TaskAttemptID taskId) {
+                                                                                   org.apache.hadoop.mapreduce.TaskAttemptID taskId) {
         return new org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl(conf, taskId);
     }
 
     @Override
     public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapred.JobConf conf,
-            org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
+                                                                                org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
         org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
         try {
             java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContextImpl.class.getDeclaredConstructor(
-                    org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
-                    Reporter.class);
+                org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
+                Reporter.class);
             construct.setAccessible(true);
-            newContext = (org.apache.hadoop.mapred.TaskAttemptContext)construct.newInstance(conf, taskId, (Reporter)progressable);
+            newContext = (org.apache.hadoop.mapred.TaskAttemptContext) construct.newInstance(conf, taskId, (Reporter) progressable);
         } catch (Exception e) {
             throw new RuntimeException(e);
         }
@@ -71,7 +71,7 @@ public class HCatHadoopShims23 implement
 
     @Override
     public JobContext createJobContext(Configuration conf,
-            JobID jobId) {
+                                       JobID jobId) {
         JobContext ctxt = new JobContextImpl(conf, jobId);
 
         return ctxt;
@@ -79,15 +79,15 @@ public class HCatHadoopShims23 implement
 
     @Override
     public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
-            org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
-        org.apache.hadoop.mapred.JobContext newContext = 
-            new org.apache.hadoop.mapred.JobContextImpl(conf, jobId, (org.apache.hadoop.mapred.Reporter)progressable);
+                                                                org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
+        org.apache.hadoop.mapred.JobContext newContext =
+            new org.apache.hadoop.mapred.JobContextImpl(conf, jobId, (org.apache.hadoop.mapred.Reporter) progressable);
         return newContext;
     }
 
     @Override
     public void commitJob(OutputFormat outputFormat, ResourceSchema schema,
-            String arg1, Job job) throws IOException {
+                          String arg1, Job job) throws IOException {
         // Do nothing as this was fixed by MAPREDUCE-1447.
     }
 
@@ -106,12 +106,12 @@ public class HCatHadoopShims23 implement
     @Override
     public String getPropertyName(PropertyName name) {
         switch (name) {
-            case CACHE_ARCHIVES:
-                return MRJobConfig.CACHE_ARCHIVES;
-            case CACHE_FILES:
-                return MRJobConfig.CACHE_FILES;
-            case CACHE_SYMLINK:
-                return MRJobConfig.CACHE_SYMLINK;
+        case CACHE_ARCHIVES:
+            return MRJobConfig.CACHE_ARCHIVES;
+        case CACHE_FILES:
+            return MRJobConfig.CACHE_FILES;
+        case CACHE_SYMLINK:
+            return MRJobConfig.CACHE_SYMLINK;
         }
 
         return "";



Mime
View raw message