incubator-hcatalog-commits mailing list archives

From tra...@apache.org
Subject svn commit: r1383152 [23/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ ...
Date Mon, 10 Sep 2012 23:29:03 GMT
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/WriteLock.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/WriteLock.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/WriteLock.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/WriteLock.java Mon Sep 10 23:28:55 2012
@@ -21,7 +21,9 @@ package org.apache.hcatalog.hbase.snapsh
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
+
 import static org.apache.zookeeper.CreateMode.EPHEMERAL_SEQUENTIAL;
+
 import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Stat;
@@ -78,7 +80,7 @@ public class WriteLock extends ProtocolS
      * @param callback the call back instance
      */
     public WriteLock(ZooKeeper zookeeper, String dir, List<ACL> acl,
-            LockListener callback) {
+                     LockListener callback) {
         this(zookeeper, dir, acl);
         this.callback = callback;
     }
@@ -126,15 +128,14 @@ public class WriteLock extends ProtocolS
             } catch (InterruptedException e) {
                 LOG.warn("Caught: " + e, e);
                 //set that we have been interrupted.
-               Thread.currentThread().interrupt();
+                Thread.currentThread().interrupt();
             } catch (KeeperException.NoNodeException e) {
                 // do nothing
             } catch (KeeperException e) {
                 LOG.warn("Caught: " + e, e);
                 throw (RuntimeException) new RuntimeException(e.getMessage()).
                     initCause(e);
-            }
-            finally {
+            } finally {
                 if (callback != null) {
                     callback.lockReleased();
                 }
@@ -152,7 +153,7 @@ public class WriteLock extends ProtocolS
         public void process(WatchedEvent event) {
             // lets either become the leader or watch the new/updated node
             LOG.debug("Watcher fired on path: " + event.getPath() + " state: " +
-                    event.getState() + " type " + event.getType());
+                event.getState() + " type " + event.getType());
             try {
                 lock();
             } catch (Exception e) {
@@ -165,7 +166,7 @@ public class WriteLock extends ProtocolS
      * a ZooKeeper operation that is mainly responsible
      * for all the magic required for locking.
      */
-    private  class LockZooKeeperOperation implements ZooKeeperOperation {
+    private class LockZooKeeperOperation implements ZooKeeperOperation {
 
         /** find if we have been created earlier, if not create our node
          *
@@ -189,7 +190,7 @@ public class WriteLock extends ProtocolS
             }
             if (id == null) {
                 id = zookeeper.create(dir + "/" + prefix, data,
-                        getAcl(), EPHEMERAL_SEQUENTIAL);
+                    getAcl(), EPHEMERAL_SEQUENTIAL);
 
                 if (LOG.isDebugEnabled()) {
                     LOG.debug("Created id: " + id);
@@ -217,7 +218,7 @@ public class WriteLock extends ProtocolS
                     List<String> names = zookeeper.getChildren(dir, false);
                     if (names.isEmpty()) {
                         LOG.warn("No children in: " + dir + " when we've just " +
-                        "created one! Lets recreate it...");
+                            "created one! Lets recreate it...");
                         // lets force the recreation of the id
                         id = null;
                     } else {
@@ -240,7 +241,7 @@ public class WriteLock extends ProtocolS
                                 return Boolean.FALSE;
                             } else {
                                 LOG.warn("Could not find the" +
-                                		" stats for less than me: " + lastChildName.getName());
+                                    " stats for less than me: " + lastChildName.getName());
                             }
                         } else {
                             if (isOwner()) {
@@ -256,7 +257,9 @@ public class WriteLock extends ProtocolS
             while (id == null);
             return Boolean.FALSE;
         }
-    };
+    }
+
+    ;
 
     /**
      * Attempts to acquire the exclusive write lock returning whether or not it was
@@ -293,7 +296,7 @@ public class WriteLock extends ProtocolS
      * @return the id for this lock
      */
     public String getId() {
-       return this.id;
+        return this.id;
     }
 }
 

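For context: WriteLock here is the standard ZooKeeper write-lock recipe. Each contender creates an EPHEMERAL_SEQUENTIAL child under the lock directory; the contender whose child carries the lowest sequence number owns the lock, and everyone else watches the node just below their own. A minimal usage sketch follows, assuming the WriteLock/LockListener API visible in the diff plus the recipe's usual lock()/unlock() pair; the connect string and lock path are illustrative.

    import java.util.concurrent.CountDownLatch;

    import org.apache.hcatalog.hbase.snapshot.lock.LockListener;
    import org.apache.hcatalog.hbase.snapshot.lock.WriteLock;
    import org.apache.zookeeper.ZooDefs;
    import org.apache.zookeeper.ZooKeeper;

    public class WriteLockUsageSketch {
        public static void main(String[] args) throws Exception {
            final CountDownLatch held = new CountDownLatch(1);
            // Connect string and session timeout are illustrative.
            ZooKeeper zk = new ZooKeeper("localhost:2181", 30000, null);
            WriteLock lock = new WriteLock(zk, "/locks/demo",
                    ZooDefs.Ids.OPEN_ACL_UNSAFE, new LockListener() {
                public void lockAcquired() {
                    held.countDown(); // our node is now the lowest sequence
                }
                public void lockReleased() {
                }
            });
            // lock() creates our EPHEMERAL_SEQUENTIAL node and returns true if
            // we already own the lock; otherwise the listener fires once the
            // watcher in the diff above sees our predecessor go away.
            if (!lock.lock()) {
                held.await();
            }
            try {
                // ... critical section ...
            } finally {
                lock.unlock(); // deletes our node, admitting the next waiter
                zk.close();
            }
        }
    }
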
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/ZNodeName.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/ZNodeName.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/ZNodeName.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/ZNodeName.java Mon Sep 10 23:28:55 2012
@@ -49,7 +49,7 @@ public class ZNodeName implements Compar
             } catch (NumberFormatException e) {
                 LOG.info("Number format exception for " + idx, e);
             } catch (ArrayIndexOutOfBoundsException e) {
-               LOG.info("Array out of bounds for " + idx, e);
+                LOG.info("Array out of bounds for " + idx, e);
             }
         }
     }

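ZNodeName wraps one of those sequential names (for example "x-<sessionId>-0000000001") and implements Comparable so contenders sort by the numeric suffix ZooKeeper appended; the catch blocks touched above guard that suffix parse. A hedged, self-contained sketch of the ordering (names are illustrative, and the real class also falls back to comparing name prefixes):

    import java.util.Comparator;
    import java.util.SortedSet;
    import java.util.TreeSet;

    public class SequenceOrderSketch {
        // Parse the suffix ZooKeeper appends, e.g. "x-42-0000000003" -> 3.
        static int sequenceOf(String name) {
            return Integer.parseInt(name.substring(name.lastIndexOf('-') + 1));
        }

        public static void main(String[] args) {
            SortedSet<String> contenders =
                    new TreeSet<String>(new Comparator<String>() {
                        public int compare(String a, String b) {
                            return sequenceOf(a) - sequenceOf(b);
                        }
                    });
            contenders.add("x-42-0000000002");
            contenders.add("x-42-0000000001");
            contenders.add("x-42-0000000003");
            // first() owns the lock; each other contender watches the name
            // immediately below its own, as WriteLock does above.
            System.out.println("owner: " + contenders.first());
        }
    }
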
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java Mon Sep 10 23:28:55 2012
@@ -95,30 +95,30 @@ public class ManyMiniCluster {
         miniZookeeperClusterEnabled = b.miniZookeeperClusterEnabled;
     }
 
-    protected synchronized  void start() {
+    protected synchronized void start() {
         try {
             if (!started) {
                 FileUtil.fullyDelete(workDir);
-                if(miniMRClusterEnabled) {
+                if (miniMRClusterEnabled) {
                     setupMRCluster();
                 }
-                if(miniZookeeperClusterEnabled || miniHBaseClusterEnabled) {
+                if (miniZookeeperClusterEnabled || miniHBaseClusterEnabled) {
                     miniZookeeperClusterEnabled = true;
                     setupZookeeper();
                 }
-                if(miniHBaseClusterEnabled) {
+                if (miniHBaseClusterEnabled) {
                     setupHBaseCluster();
                 }
-                if(miniHiveMetastoreEnabled) {
+                if (miniHiveMetastoreEnabled) {
                     setUpMetastore();
                 }
             }
-        } catch(Exception e) {
-            throw new IllegalStateException("Failed to setup cluster",e);
+        } catch (Exception e) {
+            throw new IllegalStateException("Failed to setup cluster", e);
         }
     }
 
-    protected synchronized  void stop() {
+    protected synchronized void stop() {
         if (hbaseCluster != null) {
             HConnectionManager.deleteAllConnections(true);
             try {
@@ -136,19 +136,19 @@ public class ManyMiniCluster {
             }
             zookeeperCluster = null;
         }
-        if(mrCluster != null) {
+        if (mrCluster != null) {
             try {
                 mrCluster.shutdown();
-            } catch(Exception e) {
+            } catch (Exception e) {
                 e.printStackTrace();
             }
             mrCluster = null;
         }
-        if(dfsCluster != null) {
+        if (dfsCluster != null) {
             try {
                 dfsCluster.getFileSystem().close();
                 dfsCluster.shutdown();
-            } catch(Exception e) {
+            } catch (Exception e) {
                 e.printStackTrace();
             }
             dfsCluster = null;
@@ -189,7 +189,7 @@ public class ManyMiniCluster {
         try {
             return FileSystem.get(jobConf);
         } catch (IOException e) {
-            throw new IllegalStateException("Failed to get FileSystem",e);
+            throw new IllegalStateException("Failed to get FileSystem", e);
         }
     }
 
@@ -205,38 +205,38 @@ public class ManyMiniCluster {
             final int jobTrackerPort = findFreePort();
             final int taskTrackerPort = findFreePort();
 
-            if(jobConf == null)
+            if (jobConf == null)
                 jobConf = new JobConf();
 
             jobConf.setInt("mapred.submit.replication", 1);
             //conf.set("hadoop.job.history.location",new File(workDir).getAbsolutePath()+"/history");
-            System.setProperty("hadoop.log.dir",new File(workDir,"/logs").getAbsolutePath());
+            System.setProperty("hadoop.log.dir", new File(workDir, "/logs").getAbsolutePath());
 
             mrCluster = new MiniMRCluster(jobTrackerPort,
-                                          taskTrackerPort,
-                                          numTaskTrackers,
-                                          getFileSystem().getUri().toString(),
-                                          numTaskTrackers,
-                                          null,
-                                          null,
-                                          null,
-                                          jobConf);
+                taskTrackerPort,
+                numTaskTrackers,
+                getFileSystem().getUri().toString(),
+                numTaskTrackers,
+                null,
+                null,
+                null,
+                jobConf);
 
             jobConf = mrCluster.createJobConf();
         } catch (IOException e) {
-            throw new IllegalStateException("Failed to Setup MR Cluster",e);
+            throw new IllegalStateException("Failed to Setup MR Cluster", e);
         }
     }
 
     private void setupZookeeper() {
         try {
-            zookeeperDir = new File(workDir,"zk").getAbsolutePath();
+            zookeeperDir = new File(workDir, "zk").getAbsolutePath();
             zookeeperPort = findFreePort();
             zookeeperCluster = new MiniZooKeeperCluster();
             zookeeperCluster.setDefaultClientPort(zookeeperPort);
             zookeeperCluster.startup(new File(zookeeperDir));
-        } catch(Exception e) {
-            throw new IllegalStateException("Failed to Setup Zookeeper Cluster",e);
+        } catch (Exception e) {
+            throw new IllegalStateException("Failed to Setup Zookeeper Cluster", e);
         }
     }
 
@@ -244,10 +244,10 @@ public class ManyMiniCluster {
         final int numRegionServers = 1;
 
         try {
-            hbaseDir = new File(workDir,"hbase").getAbsolutePath();
+            hbaseDir = new File(workDir, "hbase").getAbsolutePath();
             hbaseRoot = "file://" + hbaseDir;
 
-            if(hbaseConf == null)
+            if (hbaseConf == null)
                 hbaseConf = HBaseConfiguration.create();
 
             hbaseConf.set("hbase.rootdir", hbaseRoot);
@@ -264,12 +264,12 @@ public class ManyMiniCluster {
             //opening the META table ensures that cluster is running
             new HTable(hbaseConf, HConstants.META_TABLE_NAME);
         } catch (Exception e) {
-            throw new IllegalStateException("Failed to setup HBase Cluster",e);
+            throw new IllegalStateException("Failed to setup HBase Cluster", e);
         }
     }
 
     private void setUpMetastore() throws Exception {
-        if(hiveConf == null)
+        if (hiveConf == null)
             hiveConf = new HiveConf(this.getClass());
 
         //The default org.apache.hadoop.hive.ql.hooks.PreExecutePrinter hook
@@ -278,13 +278,13 @@ public class ManyMiniCluster {
         hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
         hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
         hiveConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
-                     "jdbc:derby:"+new File(workDir+"/metastore_db")+";create=true");
+            "jdbc:derby:" + new File(workDir + "/metastore_db") + ";create=true");
         hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.toString(),
-                     new File(workDir,"warehouse").toString());
+            new File(workDir, "warehouse").toString());
         //set where derby logs
-        File derbyLogFile = new File(workDir+"/derby.log");
+        File derbyLogFile = new File(workDir + "/derby.log");
         derbyLogFile.createNewFile();
-        System.setProperty("derby.stream.error.file",derbyLogFile.getPath());
+        System.setProperty("derby.stream.error.file", derbyLogFile.getPath());
 
 
 //    Driver driver = new Driver(hiveConf);

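ManyMiniCluster bundles the mini MR, ZooKeeper, HBase, and metastore clusters behind a single builder. A minimal sketch of driving it, following the pattern SkeletonHBaseTest uses below; note start() and stop() are protected, so the caller sits in the same package, and the work directory here is illustrative:

    package org.apache.hcatalog.hbase;

    import java.io.File;

    public class ManyMiniClusterSketch {
        public static void main(String[] args) {
            // Builder flags decide which of MR/ZooKeeper/HBase/metastore come
            // up; this sketch relies on whatever defaults the Builder ships with.
            ManyMiniCluster cluster =
                    ManyMiniCluster.create(new File("/tmp/mmc_sketch")).build();
            cluster.start();
            try {
                // ... run HBase/Hive work against the mini clusters ...
            } finally {
                cluster.stop(); // shuts everything down and clears the fields
            }
        }
    }
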
Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java Mon Sep 10 23:28:55 2012
@@ -46,7 +46,7 @@ public abstract class SkeletonHBaseTest 
 
     protected final static String DEFAULT_CONTEXT_HANDLE = "default";
 
-    protected static Map<String,Context> contextMap = new HashMap<String,Context>();
+    protected static Map<String, Context> contextMap = new HashMap<String, Context>();
     protected static Set<String> tableNames = new HashSet<String>();
 
     /**
@@ -59,7 +59,7 @@ public abstract class SkeletonHBaseTest 
         try {
             HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
             HTableDescriptor tableDesc = new HTableDescriptor(tableName);
-            for(String family: families) {
+            for (String family : families) {
                 HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
                 tableDesc.addFamily(columnDescriptor);
             }
@@ -72,13 +72,13 @@ public abstract class SkeletonHBaseTest 
     }
 
     protected String newTableName(String prefix) {
-        String name =null;
+        String name = null;
         int tries = 100;
         do {
-            name = prefix+"_"+Math.abs(new Random().nextLong());
-        } while(tableNames.contains(name) && --tries > 0);
-        if(tableNames.contains(name))
-            throw new IllegalStateException("Couldn't find a unique table name, tableNames size: "+tableNames.size());
+            name = prefix + "_" + Math.abs(new Random().nextLong());
+        } while (tableNames.contains(name) && --tries > 0);
+        if (tableNames.contains(name))
+            throw new IllegalStateException("Couldn't find a unique table name, tableNames size: " + tableNames.size());
         tableNames.add(name);
         return name;
     }
@@ -89,8 +89,8 @@ public abstract class SkeletonHBaseTest 
      */
     @BeforeClass
     public static void setup() {
-        if(!contextMap.containsKey(getContextHandle()))
-            contextMap.put(getContextHandle(),new Context(getContextHandle()));
+        if (!contextMap.containsKey(getContextHandle()))
+            contextMap.put(getContextHandle(), new Context(getContextHandle()));
 
         contextMap.get(getContextHandle()).start();
     }
@@ -172,18 +172,18 @@ public abstract class SkeletonHBaseTest 
 
         public Context(String handle) {
             try {
-                testDir = new File(TEST_DIR+"/test_"+handle+"_"+Math.abs(new Random().nextLong())+"/").getCanonicalPath();
+                testDir = new File(TEST_DIR + "/test_" + handle + "_" + Math.abs(new Random().nextLong()) + "/").getCanonicalPath();
             } catch (IOException e) {
-                throw new IllegalStateException("Failed to generate testDir",e);
+                throw new IllegalStateException("Failed to generate testDir", e);
             }
-            System.out.println("Cluster work directory: "+testDir);
+            System.out.println("Cluster work directory: " + testDir);
         }
 
         public void start() {
-            if(usageCount++ == 0) {
-            	ManyMiniCluster.Builder b = ManyMiniCluster.create(new File(testDir));
+            if (usageCount++ == 0) {
+                ManyMiniCluster.Builder b = ManyMiniCluster.create(new File(testDir));
                 if (testConf != null) {
-                   b.hbaseConf(HBaseConfiguration.create(testConf));
+                    b.hbaseConf(HBaseConfiguration.create(testConf));
                 }
                 cluster = b.build();
                 cluster.start();
@@ -195,16 +195,16 @@ public abstract class SkeletonHBaseTest 
         }
 
         public void stop() {
-            if( --usageCount == 0)  {
+            if (--usageCount == 0) {
                 try {
                     cluster.stop();
                     cluster = null;
                 } finally {
-                    System.out.println("Trying to cleanup: "+testDir);
+                    System.out.println("Trying to cleanup: " + testDir);
                     try {
                         FileUtil.fullyDelete(new File(testDir));
                     } catch (IOException e) {
-                        throw new IllegalStateException("Failed to cleanup test dir",e);
+                        throw new IllegalStateException("Failed to cleanup test dir", e);
                     }
                 }
             }

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java Mon Sep 10 23:28:55 2012
@@ -92,9 +92,9 @@ public class TestHBaseBulkOutputFormat e
     public TestHBaseBulkOutputFormat() {
         allConf = getHiveConf();
         allConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
-                HCatSemanticAnalyzer.class.getName());
+            HCatSemanticAnalyzer.class.getName());
         allConf.set(HiveConf.ConfVars.HADOOPFS.varname, getFileSystem().getUri().toString());
-        allConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, new Path(getTestDir(),"warehouse").toString());
+        allConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, new Path(getTestDir(), "warehouse").toString());
 
         //Add hbase properties
         for (Map.Entry<String, String> el : getHbaseConf())
@@ -103,8 +103,8 @@ public class TestHBaseBulkOutputFormat e
             allConf.set(el.getKey(), el.getValue());
 
         HBaseConfiguration.merge(
-                allConf,
-                RevisionManagerConfiguration.create());
+            allConf,
+            RevisionManagerConfiguration.create());
         SessionState.start(new CliSessionState(allConf));
         hcatDriver = new HCatDriver();
     }
@@ -121,17 +121,17 @@ public class TestHBaseBulkOutputFormat e
 
         @Override
         public void map(LongWritable key, Text value,
-                OutputCollector<ImmutableBytesWritable, Put> output,
-                Reporter reporter) throws IOException {
+                        OutputCollector<ImmutableBytesWritable, Put> output,
+                        Reporter reporter) throws IOException {
             String vals[] = value.toString().split(",");
             Put put = new Put(Bytes.toBytes(vals[0]));
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
                 put.add(Bytes.toBytes("my_family"),
-                        Bytes.toBytes(pair[0]),
-                        Bytes.toBytes(pair[1]));
+                    Bytes.toBytes(pair[0]),
+                    Bytes.toBytes(pair[1]));
             }
-            output.collect(new ImmutableBytesWritable(Bytes.toBytes(vals[0])),put);
+            output.collect(new ImmutableBytesWritable(Bytes.toBytes(vals[0])), put);
         }
 
     }
@@ -142,37 +142,37 @@ public class TestHBaseBulkOutputFormat e
         public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
             String vals[] = value.toString().split(",");
             Put put = new Put(Bytes.toBytes(vals[0]));
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
                 put.add(Bytes.toBytes("my_family"),
-                        Bytes.toBytes(pair[0]),
-                        Bytes.toBytes(pair[1]));
+                    Bytes.toBytes(pair[0]),
+                    Bytes.toBytes(pair[1]));
             }
-            context.write(new ImmutableBytesWritable(Bytes.toBytes(vals[0])),put);
+            context.write(new ImmutableBytesWritable(Bytes.toBytes(vals[0])), put);
         }
     }
 
     public static class MapHCatWrite extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {
         @Override
         public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
-            OutputJobInfo jobInfo = (OutputJobInfo)HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
+            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
             HCatRecord record = new DefaultHCatRecord(3);
             HCatSchema schema = jobInfo.getOutputSchema();
             String vals[] = value.toString().split(",");
-            record.setInteger("key",schema,Integer.parseInt(vals[0]));
-            for(int i=1;i<vals.length;i++) {
+            record.setInteger("key", schema, Integer.parseInt(vals[0]));
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
-                record.set(pair[0],schema,pair[1]);
+                record.set(pair[0], schema, pair[1]);
             }
-            context.write(null,record);
+            context.write(null, record);
         }
     }
 
     @Test
     public void hbaseBulkOutputFormatTest() throws IOException, ClassNotFoundException, InterruptedException {
         String testName = "hbaseBulkOutputFormatTest";
-        Path methodTestDir = new Path(getTestDir(),testName);
-        LOG.info("starting: "+testName);
+        Path methodTestDir = new Path(getTestDir(), testName);
+        LOG.info("starting: " + testName);
 
         String tableName = newTableName(testName).toLowerCase();
         String familyName = "my_family";
@@ -186,21 +186,20 @@ public class TestHBaseBulkOutputFormat e
         createTable(tableName, new String[]{familyName});
 
         String data[] = {"1,english:one,spanish:uno",
-                               "2,english:two,spanish:dos",
-                               "3,english:three,spanish:tres"};
-
+            "2,english:two,spanish:dos",
+            "3,english:three,spanish:tres"};
 
 
         // input/output settings
-        Path inputPath = new Path(methodTestDir,"mr_input");
-        FSDataOutputStream os = getFileSystem().create(new Path(inputPath,"inputFile.txt"));
-        for(String line: data)
+        Path inputPath = new Path(methodTestDir, "mr_input");
+        FSDataOutputStream os = getFileSystem().create(new Path(inputPath, "inputFile.txt"));
+        for (String line : data)
             os.write(Bytes.toBytes(line + "\n"));
         os.close();
-        Path interPath = new Path(methodTestDir,"inter");
+        Path interPath = new Path(methodTestDir, "inter");
         //create job
         JobConf job = new JobConf(conf);
-        job.setWorkingDirectory(new Path(methodTestDir,"mr_work"));
+        job.setWorkingDirectory(new Path(methodTestDir, "mr_work"));
         job.setJarByClass(this.getClass());
         job.setMapperClass(MapWriteOldMapper.class);
 
@@ -217,9 +216,9 @@ public class TestHBaseBulkOutputFormat e
             OutputJobInfo outputJobInfo = OutputJobInfo.create("default", tableName, null);
             Transaction txn = rm.beginWriteTransaction(tableName, Arrays.asList(familyName));
             outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY,
-                                                      HCatUtil.serialize(txn));
+                HCatUtil.serialize(txn));
             job.set(HCatConstants.HCAT_KEY_OUTPUT_INFO,
-                                       HCatUtil.serialize(outputJobInfo));
+                HCatUtil.serialize(outputJobInfo));
         } finally {
             rm.close();
         }
@@ -241,18 +240,18 @@ public class TestHBaseBulkOutputFormat e
         Scan scan = new Scan();
         scan.addFamily(familyNameBytes);
         ResultScanner scanner = table.getScanner(scan);
-        int index=0;
-        for(Result result: scanner) {
+        int index = 0;
+        for (Result result : scanner) {
             String vals[] = data[index].toString().split(",");
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
-                assertTrue(result.containsColumn(familyNameBytes,Bytes.toBytes(pair[0])));
-                assertEquals(pair[1],Bytes.toString(result.getValue(familyNameBytes,Bytes.toBytes(pair[0]))));
+                assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes(pair[0])));
+                assertEquals(pair[1], Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes(pair[0]))));
             }
             index++;
         }
         //test if load count is the same
-        assertEquals(data.length,index);
+        assertEquals(data.length, index);
         //test if scratch directory was erased
         assertFalse(FileSystem.get(job).exists(interPath));
     }
@@ -260,8 +259,8 @@ public class TestHBaseBulkOutputFormat e
     @Test
     public void importSequenceFileTest() throws IOException, ClassNotFoundException, InterruptedException {
         String testName = "importSequenceFileTest";
-        Path methodTestDir = new Path(getTestDir(),testName);
-        LOG.info("starting: "+testName);
+        Path methodTestDir = new Path(getTestDir(), testName);
+        LOG.info("starting: " + testName);
 
         String tableName = newTableName(testName).toLowerCase();
         String familyName = "my_family";
@@ -271,28 +270,27 @@ public class TestHBaseBulkOutputFormat e
         Configuration conf = new Configuration(allConf);
 
         //create table
-        createTable(tableName,new String[]{familyName});
+        createTable(tableName, new String[]{familyName});
 
         String data[] = {"1,english:one,spanish:uno",
-                               "2,english:two,spanish:dos",
-                               "3,english:three,spanish:tres"};
-
+            "2,english:two,spanish:dos",
+            "3,english:three,spanish:tres"};
 
 
         // input/output settings
-        Path inputPath = new Path(methodTestDir,"mr_input");
+        Path inputPath = new Path(methodTestDir, "mr_input");
         getFileSystem().mkdirs(inputPath);
-        FSDataOutputStream os = getFileSystem().create(new Path(inputPath,"inputFile.txt"));
-        for(String line: data)
+        FSDataOutputStream os = getFileSystem().create(new Path(inputPath, "inputFile.txt"));
+        for (String line : data)
             os.write(Bytes.toBytes(line + "\n"));
         os.close();
-        Path interPath = new Path(methodTestDir,"inter");
-        Path scratchPath = new Path(methodTestDir,"scratch");
+        Path interPath = new Path(methodTestDir, "inter");
+        Path scratchPath = new Path(methodTestDir, "scratch");
 
 
         //create job
         Job job = new Job(conf, testName);
-        job.setWorkingDirectory(new Path(methodTestDir,"mr_work"));
+        job.setWorkingDirectory(new Path(methodTestDir, "mr_work"));
         job.setJarByClass(this.getClass());
         job.setMapperClass(MapWrite.class);
 
@@ -300,7 +298,7 @@ public class TestHBaseBulkOutputFormat e
         TextInputFormat.setInputPaths(job, inputPath);
 
         job.setOutputFormatClass(SequenceFileOutputFormat.class);
-        SequenceFileOutputFormat.setOutputPath(job,interPath);
+        SequenceFileOutputFormat.setOutputPath(job, interPath);
 
         job.setMapOutputKeyClass(ImmutableBytesWritable.class);
         job.setMapOutputValueClass(Put.class);
@@ -311,7 +309,7 @@ public class TestHBaseBulkOutputFormat e
         job.setNumReduceTasks(0);
         assertTrue(job.waitForCompletion(true));
 
-        job = new Job(new Configuration(allConf),testName+"_importer");
+        job = new Job(new Configuration(allConf), testName + "_importer");
         assertTrue(ImportSequenceFile.runJob(job, tableName, interPath, scratchPath));
 
         //verify
@@ -319,18 +317,18 @@ public class TestHBaseBulkOutputFormat e
         Scan scan = new Scan();
         scan.addFamily(familyNameBytes);
         ResultScanner scanner = table.getScanner(scan);
-        int index=0;
-        for(Result result: scanner) {
+        int index = 0;
+        for (Result result : scanner) {
             String vals[] = data[index].toString().split(",");
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
-                assertTrue(result.containsColumn(familyNameBytes,Bytes.toBytes(pair[0])));
-                assertEquals(pair[1],Bytes.toString(result.getValue(familyNameBytes,Bytes.toBytes(pair[0]))));
+                assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes(pair[0])));
+                assertEquals(pair[1], Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes(pair[0]))));
             }
             index++;
         }
         //test if load count is the same
-        assertEquals(data.length,index);
+        assertEquals(data.length, index);
         //test if scratch directory was erased
         assertFalse(FileSystem.get(job.getConfiguration()).exists(scratchPath));
     }
@@ -338,11 +336,11 @@ public class TestHBaseBulkOutputFormat e
     @Test
     public void bulkModeHCatOutputFormatTest() throws Exception {
         String testName = "bulkModeHCatOutputFormatTest";
-        Path methodTestDir = new Path(getTestDir(),testName);
-        LOG.info("starting: "+testName);
+        Path methodTestDir = new Path(getTestDir(), testName);
+        LOG.info("starting: " + testName);
 
         String databaseName = testName.toLowerCase();
-        String dbDir = new Path(methodTestDir,"DB_"+testName).toString();
+        String dbDir = new Path(methodTestDir, "DB_" + testName).toString();
         String tableName = newTableName(testName).toLowerCase();
         String familyName = "my_family";
         byte[] familyNameBytes = Bytes.toBytes(familyName);
@@ -355,31 +353,31 @@ public class TestHBaseBulkOutputFormat e
 
         String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + dbDir + "'";
         String tableQuery = "CREATE TABLE " + databaseName + "." + tableName +
-                              "(key int, english string, spanish string) STORED BY " +
-                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
-                              "TBLPROPERTIES ('"+HBaseConstants.PROPERTY_BULK_OUTPUT_MODE_KEY+"'='true',"+
-                              "'hbase.columns.mapping'=':key,"+familyName+":english,"+familyName+":spanish')" ;
+            "(key int, english string, spanish string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+            "TBLPROPERTIES ('" + HBaseConstants.PROPERTY_BULK_OUTPUT_MODE_KEY + "'='true'," +
+            "'hbase.columns.mapping'=':key," + familyName + ":english," + familyName + ":spanish')";
 
         assertEquals(0, hcatDriver.run(dbquery).getResponseCode());
         assertEquals(0, hcatDriver.run(tableQuery).getResponseCode());
 
         String data[] = {"1,english:ONE,spanish:UNO",
-                               "2,english:TWO,spanish:DOS",
-                               "3,english:THREE,spanish:TRES"};
+            "2,english:TWO,spanish:DOS",
+            "3,english:THREE,spanish:TRES"};
 
         // input/output settings
-        Path inputPath = new Path(methodTestDir,"mr_input");
+        Path inputPath = new Path(methodTestDir, "mr_input");
         getFileSystem().mkdirs(inputPath);
         //create multiple files so we can test with multiple mappers
-        for(int i=0;i<data.length;i++) {
-            FSDataOutputStream os = getFileSystem().create(new Path(inputPath,"inputFile"+i+".txt"));
+        for (int i = 0; i < data.length; i++) {
+            FSDataOutputStream os = getFileSystem().create(new Path(inputPath, "inputFile" + i + ".txt"));
             os.write(Bytes.toBytes(data[i] + "\n"));
             os.close();
         }
 
         //create job
-        Job job = new Job(conf,testName);
-        job.setWorkingDirectory(new Path(methodTestDir,"mr_work"));
+        Job job = new Job(conf, testName);
+        job.setWorkingDirectory(new Path(methodTestDir, "mr_work"));
         job.setJarByClass(this.getClass());
         job.setMapperClass(MapHCatWrite.class);
 
@@ -388,8 +386,8 @@ public class TestHBaseBulkOutputFormat e
 
 
         job.setOutputFormatClass(HCatOutputFormat.class);
-        OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName,tableName,null);
-        HCatOutputFormat.setOutput(job,outputJobInfo);
+        OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName, tableName, null);
+        HCatOutputFormat.setOutput(job, outputJobInfo);
 
         job.setMapOutputKeyClass(BytesWritable.class);
         job.setMapOutputValueClass(HCatRecord.class);
@@ -402,41 +400,41 @@ public class TestHBaseBulkOutputFormat e
         assertTrue(job.waitForCompletion(true));
         RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(conf);
         try {
-            TableSnapshot snapshot = rm.createSnapshot(databaseName+"."+tableName);
-            for(String el: snapshot.getColumnFamilies()) {
-                assertEquals(1,snapshot.getRevision(el));
+            TableSnapshot snapshot = rm.createSnapshot(databaseName + "." + tableName);
+            for (String el : snapshot.getColumnFamilies()) {
+                assertEquals(1, snapshot.getRevision(el));
             }
         } finally {
             rm.close();
         }
 
         //verify
-        HTable table = new HTable(conf, databaseName+"."+tableName);
+        HTable table = new HTable(conf, databaseName + "." + tableName);
         Scan scan = new Scan();
         scan.addFamily(familyNameBytes);
         ResultScanner scanner = table.getScanner(scan);
-        int index=0;
-        for(Result result: scanner) {
+        int index = 0;
+        for (Result result : scanner) {
             String vals[] = data[index].toString().split(",");
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
-                assertTrue(result.containsColumn(familyNameBytes,Bytes.toBytes(pair[0])));
-                assertEquals(pair[1],Bytes.toString(result.getValue(familyNameBytes,Bytes.toBytes(pair[0]))));
-                assertEquals(1l,result.getColumn(familyNameBytes,Bytes.toBytes(pair[0])).get(0).getTimestamp());
+                assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes(pair[0])));
+                assertEquals(pair[1], Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes(pair[0]))));
+                assertEquals(1l, result.getColumn(familyNameBytes, Bytes.toBytes(pair[0])).get(0).getTimestamp());
             }
             index++;
         }
         //test if load count is the same
-        assertEquals(data.length,index);
+        assertEquals(data.length, index);
     }
 
     @Test
     public void bulkModeHCatOutputFormatTestWithDefaultDB() throws Exception {
         String testName = "bulkModeHCatOutputFormatTestWithDefaultDB";
-        Path methodTestDir = new Path(getTestDir(),testName);
+        Path methodTestDir = new Path(getTestDir(), testName);
 
         String databaseName = "default";
-        String dbDir = new Path(methodTestDir,"DB_"+testName).toString();
+        String dbDir = new Path(methodTestDir, "DB_" + testName).toString();
         String tableName = newTableName(testName).toLowerCase();
         String familyName = "my_family";
         byte[] familyNameBytes = Bytes.toBytes(familyName);
@@ -449,29 +447,29 @@ public class TestHBaseBulkOutputFormat e
 
         String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + dbDir + "'";
         String tableQuery = "CREATE TABLE " + databaseName + "." + tableName +
-                              "(key int, english string, spanish string) STORED BY " +
-                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
-                              "TBLPROPERTIES ('"+HBaseConstants.PROPERTY_BULK_OUTPUT_MODE_KEY+"'='true',"+
-                              "'hbase.columns.mapping'=':key,"+familyName+":english,"+familyName+":spanish')" ;
+            "(key int, english string, spanish string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+            "TBLPROPERTIES ('" + HBaseConstants.PROPERTY_BULK_OUTPUT_MODE_KEY + "'='true'," +
+            "'hbase.columns.mapping'=':key," + familyName + ":english," + familyName + ":spanish')";
 
         assertEquals(0, hcatDriver.run(dbquery).getResponseCode());
         assertEquals(0, hcatDriver.run(tableQuery).getResponseCode());
 
         String data[] = {"1,english:ONE,spanish:UNO",
-                               "2,english:TWO,spanish:DOS",
-                               "3,english:THREE,spanish:TRES"};
+            "2,english:TWO,spanish:DOS",
+            "3,english:THREE,spanish:TRES"};
 
         // input/output settings
-        Path inputPath = new Path(methodTestDir,"mr_input");
+        Path inputPath = new Path(methodTestDir, "mr_input");
         getFileSystem().mkdirs(inputPath);
-        FSDataOutputStream os = getFileSystem().create(new Path(inputPath,"inputFile.txt"));
-        for(String line: data)
+        FSDataOutputStream os = getFileSystem().create(new Path(inputPath, "inputFile.txt"));
+        for (String line : data)
             os.write(Bytes.toBytes(line + "\n"));
         os.close();
 
         //create job
-        Job job = new Job(conf,testName);
-        job.setWorkingDirectory(new Path(methodTestDir,"mr_work"));
+        Job job = new Job(conf, testName);
+        job.setWorkingDirectory(new Path(methodTestDir, "mr_work"));
         job.setJarByClass(this.getClass());
         job.setMapperClass(MapHCatWrite.class);
 
@@ -480,8 +478,8 @@ public class TestHBaseBulkOutputFormat e
 
 
         job.setOutputFormatClass(HCatOutputFormat.class);
-        OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName,tableName,null);
-        HCatOutputFormat.setOutput(job,outputJobInfo);
+        OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName, tableName, null);
+        HCatOutputFormat.setOutput(job, outputJobInfo);
 
         job.setMapOutputKeyClass(BytesWritable.class);
         job.setMapOutputValueClass(HCatRecord.class);
@@ -498,18 +496,18 @@ public class TestHBaseBulkOutputFormat e
         Scan scan = new Scan();
         scan.addFamily(familyNameBytes);
         ResultScanner scanner = table.getScanner(scan);
-        int index=0;
-        for(Result result: scanner) {
+        int index = 0;
+        for (Result result : scanner) {
             String vals[] = data[index].toString().split(",");
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
-                assertTrue(result.containsColumn(familyNameBytes,Bytes.toBytes(pair[0])));
-                assertEquals(pair[1],Bytes.toString(result.getValue(familyNameBytes,Bytes.toBytes(pair[0]))));
+                assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes(pair[0])));
+                assertEquals(pair[1], Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes(pair[0]))));
             }
             index++;
         }
         //test if load count is the same
-        assertEquals(data.length,index);
+        assertEquals(data.length, index);
     }
 
     @Test
@@ -526,37 +524,37 @@ public class TestHBaseBulkOutputFormat e
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, HCatUtil.serialize(allConf.getAllProperties()));
 
         String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + dbDir
-                + "'";
+            + "'";
         String tableQuery = "CREATE TABLE " + databaseName + "." + tableName +
-                "(key int, english string, spanish string) STORED BY " +
-                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
-                "TBLPROPERTIES ('" + HBaseConstants.PROPERTY_BULK_OUTPUT_MODE_KEY + "'='true'," +
-                "'hbase.columns.mapping'=':key," + familyName + ":english," + familyName
-                + ":spanish')";
+            "(key int, english string, spanish string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+            "TBLPROPERTIES ('" + HBaseConstants.PROPERTY_BULK_OUTPUT_MODE_KEY + "'='true'," +
+            "'hbase.columns.mapping'=':key," + familyName + ":english," + familyName
+            + ":spanish')";
 
         assertEquals(0, hcatDriver.run(dbquery).getResponseCode());
         assertEquals(0, hcatDriver.run(tableQuery).getResponseCode());
 
         String data[] = {"1,english:ONE,spanish:UNO",
-                "2,english:TWO,spanish:DOS",
-                "3,english:THREE,spanish:TRES"};
+            "2,english:TWO,spanish:DOS",
+            "3,english:THREE,spanish:TRES"};
 
         Path inputPath = new Path(methodTestDir, "mr_input");
         getFileSystem().mkdirs(inputPath);
         // create multiple files so we can test with multiple mappers
         for (int i = 0; i < data.length; i++) {
             FSDataOutputStream os = getFileSystem().create(
-                    new Path(inputPath, "inputFile" + i + ".txt"));
+                new Path(inputPath, "inputFile" + i + ".txt"));
             os.write(Bytes.toBytes(data[i] + "\n"));
             os.close();
         }
 
         Path workingDir = new Path(methodTestDir, "mr_abort");
         OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName,
-                tableName, null);
+            tableName, null);
         Job job = configureJob(testName,
-                conf, workingDir, MapWriteAbortTransaction.class,
-                outputJobInfo, inputPath);
+            conf, workingDir, MapWriteAbortTransaction.class,
+            outputJobInfo, inputPath);
         assertFalse(job.waitForCompletion(true));
 
         // verify that revision manager has it as aborted transaction
@@ -566,7 +564,7 @@ public class TestHBaseBulkOutputFormat e
             for (String family : snapshot.getColumnFamilies()) {
                 assertEquals(1, snapshot.getRevision(family));
                 List<FamilyRevision> abortedWriteTransactions = rm.getAbortedWriteTransactions(
-                        databaseName + "." + tableName, family);
+                    databaseName + "." + tableName, family);
                 assertEquals(1, abortedWriteTransactions.size());
                 assertEquals(1, abortedWriteTransactions.get(0).getRevision());
             }
@@ -585,7 +583,7 @@ public class TestHBaseBulkOutputFormat e
 
         // verify that the storage handler input format returns empty results.
         Path outputDir = new Path(getTestDir(),
-                "mapred/testHBaseTableBulkIgnoreAbortedTransactions");
+            "mapred/testHBaseTableBulkIgnoreAbortedTransactions");
         FileSystem fs = getFileSystem();
         if (fs.exists(outputDir)) {
             fs.delete(outputDir, true);
@@ -595,7 +593,7 @@ public class TestHBaseBulkOutputFormat e
         job.setMapperClass(MapReadAbortedTransaction.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName,
-                tableName, null);
+            tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -608,8 +606,8 @@ public class TestHBaseBulkOutputFormat e
     }
 
     private Job configureJob(String jobName, Configuration conf,
-            Path workingDir, Class<? extends Mapper> mapperClass,
-            OutputJobInfo outputJobInfo, Path inputPath) throws IOException {
+                             Path workingDir, Class<? extends Mapper> mapperClass,
+                             OutputJobInfo outputJobInfo, Path inputPath) throws IOException {
         Job job = new Job(conf, jobName);
         job.setWorkingDirectory(workingDir);
         job.setJarByClass(this.getClass());

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java Mon Sep 10 23:28:55 2012
@@ -86,9 +86,9 @@ public class TestHBaseDirectOutputFormat
     public TestHBaseDirectOutputFormat() {
         allConf = getHiveConf();
         allConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
-                HCatSemanticAnalyzer.class.getName());
+            HCatSemanticAnalyzer.class.getName());
         allConf.set(HiveConf.ConfVars.HADOOPFS.varname, getFileSystem().getUri().toString());
-        allConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, new Path(getTestDir(),"warehouse").toString());
+        allConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, new Path(getTestDir(), "warehouse").toString());
 
         //Add hbase properties
         for (Map.Entry<String, String> el : getHbaseConf())
@@ -96,8 +96,8 @@ public class TestHBaseDirectOutputFormat
         for (Map.Entry<String, String> el : getJobConf())
             allConf.set(el.getKey(), el.getValue());
         HBaseConfiguration.merge(
-                allConf,
-                RevisionManagerConfiguration.create());
+            allConf,
+            RevisionManagerConfiguration.create());
         SessionState.start(new CliSessionState(allConf));
         hcatDriver = new HCatDriver();
     }
@@ -105,7 +105,7 @@ public class TestHBaseDirectOutputFormat
     @Test
     public void directOutputFormatTest() throws IOException, ClassNotFoundException, InterruptedException {
         String testName = "directOutputFormatTest";
-        Path methodTestDir = new Path(getTestDir(),testName);
+        Path methodTestDir = new Path(getTestDir(), testName);
 
         String tableName = newTableName(testName).toLowerCase();
         String familyName = "my_family";
@@ -116,26 +116,25 @@ public class TestHBaseDirectOutputFormat
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, HCatUtil.serialize(allConf.getAllProperties()));
 
         //create table
-        createTable(tableName,new String[]{familyName});
+        createTable(tableName, new String[]{familyName});
 
         String data[] = {"1,english:ONE,spanish:UNO",
-                "2,english:ONE,spanish:DOS",
-                "3,english:ONE,spanish:TRES"};
-
+            "2,english:ONE,spanish:DOS",
+            "3,english:ONE,spanish:TRES"};
 
 
         // input/output settings
-        Path inputPath = new Path(methodTestDir,"mr_input");
+        Path inputPath = new Path(methodTestDir, "mr_input");
         getFileSystem().mkdirs(inputPath);
-        FSDataOutputStream os = getFileSystem().create(new Path(inputPath,"inputFile.txt"));
-        for(String line: data)
+        FSDataOutputStream os = getFileSystem().create(new Path(inputPath, "inputFile.txt"));
+        for (String line : data)
             os.write(Bytes.toBytes(line + "\n"));
         os.close();
 
         //create job
         JobConf job = new JobConf(conf);
         job.setJobName(testName);
-        job.setWorkingDirectory(new Path(methodTestDir,"mr_work"));
+        job.setWorkingDirectory(new Path(methodTestDir, "mr_work"));
         job.setJarByClass(this.getClass());
         job.setMapperClass(MapWrite.class);
 
@@ -152,9 +151,9 @@ public class TestHBaseDirectOutputFormat
             OutputJobInfo outputJobInfo = OutputJobInfo.create("default", tableName, null);
             Transaction txn = rm.beginWriteTransaction(tableName, Arrays.asList(familyName));
             outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY,
-                                                      HCatUtil.serialize(txn));
+                HCatUtil.serialize(txn));
             job.set(HCatConstants.HCAT_KEY_OUTPUT_INFO,
-                                       HCatUtil.serialize(outputJobInfo));
+                HCatUtil.serialize(outputJobInfo));
         } finally {
             rm.close();
         }
@@ -174,26 +173,26 @@ public class TestHBaseDirectOutputFormat
         Scan scan = new Scan();
         scan.addFamily(familyNameBytes);
         ResultScanner scanner = table.getScanner(scan);
-        int index=0;
-        for(Result result: scanner) {
+        int index = 0;
+        for (Result result : scanner) {
             String vals[] = data[index].toString().split(",");
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
-                assertTrue(result.containsColumn(familyNameBytes,Bytes.toBytes(pair[0])));
-                assertEquals(pair[1],Bytes.toString(result.getValue(familyNameBytes,Bytes.toBytes(pair[0]))));
+                assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes(pair[0])));
+                assertEquals(pair[1], Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes(pair[0]))));
             }
             index++;
         }
-        assertEquals(data.length,index);
+        assertEquals(data.length, index);
     }
 
     @Test
     public void directHCatOutputFormatTest() throws Exception {
         String testName = "directHCatOutputFormatTest";
-        Path methodTestDir = new Path(getTestDir(),testName);
+        Path methodTestDir = new Path(getTestDir(), testName);
 
         String databaseName = testName;
-        String dbDir = new Path(methodTestDir,"DB_"+testName).toString();
+        String dbDir = new Path(methodTestDir, "DB_" + testName).toString();
         String tableName = newTableName(testName);
         String familyName = "my_family";
         byte[] familyNameBytes = Bytes.toBytes(familyName);
@@ -207,24 +206,24 @@ public class TestHBaseDirectOutputFormat
 
         String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + dbDir + "'";
         String tableQuery = "CREATE TABLE " + databaseName + "." + tableName +
-                              "(key int, english string, spanish string) STORED BY " +
-                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
-                              "TBLPROPERTIES (" +
-                              "'hbase.columns.mapping'=':key,"+familyName+":english,"+familyName+":spanish')" ;
+            "(key int, english string, spanish string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+            "TBLPROPERTIES (" +
+            "'hbase.columns.mapping'=':key," + familyName + ":english," + familyName + ":spanish')";
 
         assertEquals(0, hcatDriver.run(dbquery).getResponseCode());
         assertEquals(0, hcatDriver.run(tableQuery).getResponseCode());
 
         String data[] = {"1,english:ONE,spanish:UNO",
-                               "2,english:ONE,spanish:DOS",
-                               "3,english:ONE,spanish:TRES"};
+            "2,english:ONE,spanish:DOS",
+            "3,english:ONE,spanish:TRES"};
 
         // input/output settings
-        Path inputPath = new Path(methodTestDir,"mr_input");
+        Path inputPath = new Path(methodTestDir, "mr_input");
         getFileSystem().mkdirs(inputPath);
         //create multiple files so we can test with multiple mappers
-        for(int i=0;i<data.length;i++) {
-            FSDataOutputStream os = getFileSystem().create(new Path(inputPath,"inputFile"+i+".txt"));
+        for (int i = 0; i < data.length; i++) {
+            FSDataOutputStream os = getFileSystem().create(new Path(inputPath, "inputFile" + i + ".txt"));
             os.write(Bytes.toBytes(data[i] + "\n"));
             os.close();
         }
@@ -232,16 +231,16 @@ public class TestHBaseDirectOutputFormat
         //create job
         Path workingDir = new Path(methodTestDir, "mr_work");
         OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName,
-                tableName, null);
+            tableName, null);
         Job job = configureJob(testName, conf, workingDir, MapHCatWrite.class,
-                outputJobInfo, inputPath);
+            outputJobInfo, inputPath);
         assertTrue(job.waitForCompletion(true));
 
         RevisionManager rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(conf);
         try {
             TableSnapshot snapshot = rm.createSnapshot(hbaseTableName);
-            for(String el: snapshot.getColumnFamilies()) {
-                assertEquals(1,snapshot.getRevision(el));
+            for (String el : snapshot.getColumnFamilies()) {
+                assertEquals(1, snapshot.getRevision(el));
             }
         } finally {
             rm.close();
@@ -252,18 +251,18 @@ public class TestHBaseDirectOutputFormat
         Scan scan = new Scan();
         scan.addFamily(familyNameBytes);
         ResultScanner scanner = table.getScanner(scan);
-        int index=0;
-        for(Result result: scanner) {
+        int index = 0;
+        for (Result result : scanner) {
             String vals[] = data[index].toString().split(",");
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
-                assertTrue(result.containsColumn(familyNameBytes,Bytes.toBytes(pair[0])));
-                assertEquals(pair[1],Bytes.toString(result.getValue(familyNameBytes,Bytes.toBytes(pair[0]))));
-                assertEquals(1l,result.getColumn(familyNameBytes,Bytes.toBytes(pair[0])).get(0).getTimestamp());
+                assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes(pair[0])));
+                assertEquals(pair[1], Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes(pair[0]))));
+                assertEquals(1l, result.getColumn(familyNameBytes, Bytes.toBytes(pair[0])).get(0).getTimestamp());
             }
             index++;
         }
-        assertEquals(data.length,index);
+        assertEquals(data.length, index);
     }
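
The timestamp assertion in the loop above encodes the handler's key invariant: cells written under revision r carry r as their HBase timestamp. A hedged sketch of the same check for a single row, with the row key, family, and qualifier taken from the test data:

    // Fetch row "1" and confirm its cell timestamp equals the committed
    // revision (1L); uses the same HBase client API as the test above.
    Result r = table.get(new Get(Bytes.toBytes("1")));
    KeyValue kv = r.getColumn(familyNameBytes, Bytes.toBytes("english")).get(0);
    assertEquals(1L, kv.getTimestamp());
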
 
     @Test
@@ -283,36 +282,36 @@ public class TestHBaseDirectOutputFormat
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, HCatUtil.serialize(allConf.getAllProperties()));
 
         String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + dbDir
-                + "'";
+            + "'";
         String tableQuery = "CREATE TABLE " + databaseName + "." + tableName +
-                "(key int, english string, spanish string) STORED BY " +
-                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
-                "TBLPROPERTIES (" +
-                "'hbase.columns.mapping'=':key," + familyName + ":english," + familyName +
-                ":spanish','hbase.table.name'='"+ hbaseTableName +"')";
+            "(key int, english string, spanish string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'" +
+            "TBLPROPERTIES (" +
+            "'hbase.columns.mapping'=':key," + familyName + ":english," + familyName +
+            ":spanish','hbase.table.name'='" + hbaseTableName + "')";
 
         assertEquals(0, hcatDriver.run(dbquery).getResponseCode());
         assertEquals(0, hcatDriver.run(tableQuery).getResponseCode());
 
         String data[] = {"1,english:ONE,spanish:UNO",
-                "2,english:TWO,spanish:DOS",
-                "3,english:THREE,spanish:TRES"};
+            "2,english:TWO,spanish:DOS",
+            "3,english:THREE,spanish:TRES"};
 
         Path inputPath = new Path(methodTestDir, "mr_input");
         getFileSystem().mkdirs(inputPath);
         // create multiple files so we can test with multiple mappers
         for (int i = 0; i < data.length; i++) {
             FSDataOutputStream os = getFileSystem().create(
-                    new Path(inputPath, "inputFile" + i + ".txt"));
+                new Path(inputPath, "inputFile" + i + ".txt"));
             os.write(Bytes.toBytes(data[i] + "\n"));
             os.close();
         }
 
         Path workingDir = new Path(methodTestDir, "mr_abort");
         OutputJobInfo outputJobInfo = OutputJobInfo.create(databaseName,
-                tableName, null);
+            tableName, null);
         Job job = configureJob(testName, conf, workingDir, MapWriteAbortTransaction.class,
-                outputJobInfo, inputPath);
+            outputJobInfo, inputPath);
         assertFalse(job.waitForCompletion(true));
 
         // verify that revision manager has it as aborted transaction
@@ -322,7 +321,7 @@ public class TestHBaseDirectOutputFormat
             for (String family : snapshot.getColumnFamilies()) {
                 assertEquals(1, snapshot.getRevision(family));
                 List<FamilyRevision> abortedWriteTransactions = rm.getAbortedWriteTransactions(
-                        hbaseTableName, family);
+                    hbaseTableName, family);
                 assertEquals(1, abortedWriteTransactions.size());
                 assertEquals(1, abortedWriteTransactions.get(0).getRevision());
             }
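
The point of this block: a failed job must leave its revision recorded as aborted, so later readers can skip cells stamped with it. A minimal sketch of that read side, assuming the same family name (the calls are the ones used just above):

    List<FamilyRevision> aborted =
        rm.getAbortedWriteTransactions(hbaseTableName, "my_family");
    for (FamilyRevision fr : aborted) {
        // A revision-aware reader filters out cells whose timestamp
        // equals fr.getRevision() instead of surfacing them.
        System.out.println("ignoring aborted revision " + fr.getRevision());
    }
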
@@ -339,15 +338,15 @@ public class TestHBaseDirectOutputFormat
         for (Result result : scanner) {
             String key = Bytes.toString(result.getRow());
             assertNotSame(MapWriteAbortTransaction.failedKey, key);
-            int index = Integer.parseInt(key)-1;
+            int index = Integer.parseInt(key) - 1;
             String vals[] = data[index].toString().split(",");
             for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
                 assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes(pair[0])));
                 assertEquals(pair[1],
-                        Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes(pair[0]))));
+                    Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes(pair[0]))));
                 assertEquals(1l, result.getColumn(familyNameBytes, Bytes.toBytes(pair[0])).get(0)
-                        .getTimestamp());
+                    .getTimestamp());
             }
             count++;
         }
@@ -355,7 +354,7 @@ public class TestHBaseDirectOutputFormat
 
         // verify that the inputformat returns empty results.
         Path outputDir = new Path(getTestDir(),
-                "mapred/testHBaseTableIgnoreAbortedTransactions");
+            "mapred/testHBaseTableIgnoreAbortedTransactions");
         FileSystem fs = getFileSystem();
         if (fs.exists(outputDir)) {
             fs.delete(outputDir, true);
@@ -365,7 +364,7 @@ public class TestHBaseDirectOutputFormat
         job.setMapperClass(MapReadAbortedTransaction.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName,
-                tableName, null);
+            tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -378,8 +377,8 @@ public class TestHBaseDirectOutputFormat
     }
 
     private Job configureJob(String jobName, Configuration conf,
-            Path workingDir, Class<? extends Mapper> mapperClass,
-            OutputJobInfo outputJobInfo, Path inputPath) throws IOException {
+                             Path workingDir, Class<? extends Mapper> mapperClass,
+                             OutputJobInfo outputJobInfo, Path inputPath) throws IOException {
         Job job = new Job(conf, jobName);
         job.setWorkingDirectory(workingDir);
         job.setJarByClass(this.getClass());
@@ -409,16 +408,16 @@ public class TestHBaseDirectOutputFormat
 
         @Override
         public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
-            OutputJobInfo jobInfo = (OutputJobInfo)HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
+            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
             HCatRecord record = new DefaultHCatRecord(3);
             HCatSchema schema = jobInfo.getOutputSchema();
             String vals[] = value.toString().split(",");
-            record.setInteger("key",schema,Integer.parseInt(vals[0]));
-            for(int i=1;i<vals.length;i++) {
+            record.setInteger("key", schema, Integer.parseInt(vals[0]));
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
-                record.set(pair[0],schema,pair[1]);
+                record.set(pair[0], schema, pair[1]);
             }
-            context.write(null,record);
+            context.write(null, record);
         }
     }
 
@@ -434,15 +433,15 @@ public class TestHBaseDirectOutputFormat
 
         @Override
         public void map(LongWritable key, Text value,
-                OutputCollector<BytesWritable, Put> output, Reporter reporter)
-                throws IOException {
+                        OutputCollector<BytesWritable, Put> output, Reporter reporter)
+            throws IOException {
             String vals[] = value.toString().split(",");
             Put put = new Put(Bytes.toBytes(vals[0]));
-            for(int i=1;i<vals.length;i++) {
+            for (int i = 1; i < vals.length; i++) {
                 String pair[] = vals[i].split(":");
                 put.add(Bytes.toBytes("my_family"),
-                        Bytes.toBytes(pair[0]),
-                        Bytes.toBytes(pair[1]));
+                    Bytes.toBytes(pair[0]),
+                    Bytes.toBytes(pair[1]));
             }
             output.collect(null, put);
         }
@@ -454,7 +453,7 @@ public class TestHBaseDirectOutputFormat
 
         @Override
         public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
-            OutputJobInfo jobInfo = (OutputJobInfo)HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
+            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
             HCatRecord record = new DefaultHCatRecord(3);
             HCatSchema schema = jobInfo.getOutputSchema();
             String vals[] = value.toString().split(",");
@@ -477,18 +476,18 @@ public class TestHBaseDirectOutputFormat
     }
 
     static class MapReadAbortedTransaction
-            extends
-            Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
+        extends
+        Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
 
         @Override
         public void run(Context context) throws IOException,
-                InterruptedException {
+            InterruptedException {
             setup(context);
             if (context.nextKeyValue()) {
                 map(context.getCurrentKey(), context.getCurrentValue(), context);
                 while (context.nextKeyValue()) {
                     map(context.getCurrentKey(), context.getCurrentValue(),
-                            context);
+                        context);
                 }
                 throw new IOException("There should have been no records");
             }
@@ -497,7 +496,7 @@ public class TestHBaseDirectOutputFormat
 
         @Override
         public void map(ImmutableBytesWritable key, HCatRecord value,
-                Context context) throws IOException, InterruptedException {
+                        Context context) throws IOException, InterruptedException {
             System.out.println("HCat record value" + value.toString());
         }
     }
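
MapReadAbortedTransaction above is a guard mapper: its only job is to prove the input format surfaced nothing. The same pattern trimmed to its core, as a hypothetical reusable variant (not code from this commit):

    @Override
    public void run(Context context) throws IOException, InterruptedException {
        setup(context);
        // Any record at all means aborted data leaked through the input format.
        if (context.nextKeyValue()) {
            throw new IOException("There should have been no records");
        }
        cleanup(context);
    }
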

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java Mon Sep 10 23:28:55 2012
@@ -76,19 +76,19 @@ import org.junit.Test;
 
 public class TestHBaseInputFormat extends SkeletonHBaseTest {
 
-    private static HiveConf   hcatConf;
+    private static HiveConf hcatConf;
     private static HCatDriver hcatDriver;
-    private final byte[] FAMILY     = Bytes.toBytes("testFamily");
+    private final byte[] FAMILY = Bytes.toBytes("testFamily");
     private final byte[] QUALIFIER1 = Bytes.toBytes("testQualifier1");
     private final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
 
-   public TestHBaseInputFormat() throws Exception {
+    public TestHBaseInputFormat() throws Exception {
         hcatConf = getHiveConf();
         hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
-                HCatSemanticAnalyzer.class.getName());
+            HCatSemanticAnalyzer.class.getName());
         URI fsuri = getFileSystem().getUri();
         Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(),
-                getTestDir());
+            getTestDir());
         hcatConf.set(HiveConf.ConfVars.HADOOPFS.varname, fsuri.toString());
         hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());
 
@@ -100,7 +100,7 @@ public class TestHBaseInputFormat extend
             }
         }
         HBaseConfiguration.merge(hcatConf,
-               RevisionManagerConfiguration.create());
+            RevisionManagerConfiguration.create());
 
 
         SessionState.start(new CliSessionState(hcatConf));
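
The configuration layering in this constructor matters for the tests below: revision-manager defaults are merged into hcatConf once, and each test then clones it and ships the full Hive property set to the tasks. A condensed sketch of the per-test pattern, using only calls that appear in this commit:

    // hcatConf already carries the merged revision-manager defaults:
    Configuration conf = new Configuration(hcatConf);
    conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
        HCatUtil.serialize(getHiveConf().getAllProperties()));
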
@@ -123,7 +123,7 @@ public class TestHBaseInputFormat extend
                 put.add(FAMILY, QUALIFIER2, i, Bytes.toBytes("textValue-" + i));
                 myPuts.add(put);
                 Transaction tsx = rm.beginWriteTransaction(tableName,
-                        columnFamilies);
+                    columnFamilies);
                 rm.commitWriteTransaction(tsx);
             }
         } finally {
@@ -134,14 +134,14 @@ public class TestHBaseInputFormat extend
         return myPuts;
     }
 
-   private void populateHBaseTable(String tName, int revisions) throws IOException {
+    private void populateHBaseTable(String tName, int revisions) throws IOException {
         List<Put> myPuts = generatePuts(revisions, tName);
         HTable table = new HTable(getHbaseConf(), Bytes.toBytes(tName));
         table.put(myPuts);
     }
 
     private long populateHBaseTableQualifier1(String tName, int value, Boolean commit)
-            throws IOException {
+        throws IOException {
         List<String> columnFamilies = Arrays.asList("testFamily");
         RevisionManager rm = null;
         List<Put> myPuts = new ArrayList<Put>();
@@ -154,7 +154,7 @@ public class TestHBaseInputFormat extend
             Put put = new Put(Bytes.toBytes("testRow"));
             revision = tsx.getRevisionNumber();
             put.add(FAMILY, QUALIFIER1, revision,
-                    Bytes.toBytes("textValue-" + value));
+                Bytes.toBytes("textValue-" + value));
             myPuts.add(put);
 
             // If commit is null it is left as a running transaction
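
The Boolean parameter is effectively a three-way switch. A sketch of the modes as the later tests exercise them; the true/false semantics are inferred from the test names, and only the null case is documented in the comment above:

    long committed = populateHBaseTableQualifier1(tName, 1, true);  // committed
    long aborted = populateHBaseTableQualifier1(tName, 2, false);   // aborted (assumed)
    long running = populateHBaseTableQualifier1(tName, 3, null);    // left running
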
@@ -183,11 +183,11 @@ public class TestHBaseInputFormat extend
         String db_dir = getTestDir() + "/hbasedb";
 
         String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '"
-                            + db_dir + "'";
+            + db_dir + "'";
         String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
-                              + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-                              + "TBLPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')" ;
+            + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
+            + "TBLPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')";
 
         CommandProcessorResponse responseOne = hcatDriver.run(dbquery);
         assertEquals(0, responseOne.getResponseCode());
@@ -201,7 +201,7 @@ public class TestHBaseInputFormat extend
         populateHBaseTable(hbaseTableName, 5);
         Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-                HCatUtil.serialize(getHiveConf().getAllProperties()));
+            HCatUtil.serialize(getHiveConf().getAllProperties()));
 
         // output settings
         Path outputDir = new Path(getTestDir(), "mapred/testHbaseTableMRRead");
@@ -217,7 +217,7 @@ public class TestHBaseInputFormat extend
 
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName, tableName,
-                null);
+            null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -232,7 +232,7 @@ public class TestHBaseInputFormat extend
         assertFalse(MapReadHTable.error);
         assertEquals(MapReadHTable.count, 1);
 
-        String dropTableQuery = "DROP TABLE " + hbaseTableName ;
+        String dropTableQuery = "DROP TABLE " + hbaseTableName;
         CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
         assertEquals(0, responseThree.getResponseCode());
 
@@ -251,11 +251,11 @@ public class TestHBaseInputFormat extend
         //Table name as specified by hbase.table.name property
         String hbaseTableName = "MyDB_" + tableName;
         String tableQuery = "CREATE TABLE " + tableName
-                              + "(key string, testqualifier1 string, testqualifier2 string) STORED BY "
-                              + "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-                              + "TBLPROPERTIES ('hbase.columns.mapping'="
-                              + "':key,testFamily:testQualifier1,testFamily:testQualifier2',"
-                              + "'hbase.table.name'='" + hbaseTableName+ "')" ;
+            + "(key string, testqualifier1 string, testqualifier2 string) STORED BY "
+            + "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
+            + "TBLPROPERTIES ('hbase.columns.mapping'="
+            + "':key,testFamily:testQualifier1,testFamily:testQualifier2',"
+            + "'hbase.table.name'='" + hbaseTableName + "')";
 
         CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
         assertEquals(0, responseTwo.getResponseCode());
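
Unlike the earlier test, this table sets 'hbase.table.name', so the physical HBase table is MyDB_<tableName> rather than a name the handler derives by default. A one-line sketch of why that matters when verifying results directly (HTable's String-name constructor assumed):

    // Scans must target the overridden name, not the Hive table name:
    HTable table = new HTable(getHbaseConf(), hbaseTableName);
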
@@ -268,7 +268,7 @@ public class TestHBaseInputFormat extend
 
         Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-                HCatUtil.serialize(getHiveConf().getAllProperties()));
+            HCatUtil.serialize(getHiveConf().getAllProperties()));
 
         // output settings
         Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableProjectionReadMR");
@@ -282,7 +282,7 @@ public class TestHBaseInputFormat extend
         job.setMapperClass(MapReadProjHTable.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
+            MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setOutputSchema(job, getProjectionSchema());
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
@@ -296,7 +296,7 @@ public class TestHBaseInputFormat extend
         assertFalse(MapReadProjHTable.error);
         assertEquals(MapReadProjHTable.count, 1);
 
-        String dropTableQuery = "DROP TABLE " + tableName ;
+        String dropTableQuery = "DROP TABLE " + tableName;
         CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
         assertEquals(0, responseThree.getResponseCode());
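
Projection is what keeps MapReadProjHTable's record at two fields: the output schema registered on the job restricts which qualifiers the input format materializes. A minimal sketch of that registration, assembled from calls that appear in this file:

    HCatSchema projection = new HCatSchema(new ArrayList<HCatFieldSchema>());
    projection.append(new HCatFieldSchema("key",
        HCatFieldSchema.Type.STRING, ""));
    projection.append(new HCatFieldSchema("testqualifier1",
        HCatFieldSchema.Type.STRING, ""));
    HCatInputFormat.setOutputSchema(job, projection);
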
 
@@ -309,10 +309,10 @@ public class TestHBaseInputFormat extend
 
         String tableName = newTableName("mytable");
         String tableQuery = "CREATE TABLE " + tableName
-                              + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-                              + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
-                                    "testFamily:testQualifier1,testFamily:testQualifier2')" ;
+            + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
+            + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
+            "testFamily:testQualifier1,testFamily:testQualifier2')";
 
         CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
         assertEquals(0, responseTwo.getResponseCode());
@@ -325,7 +325,7 @@ public class TestHBaseInputFormat extend
 
         Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-                HCatUtil.serialize(getHiveConf().getAllProperties()));
+            HCatUtil.serialize(getHiveConf().getAllProperties()));
 
         // output settings
         Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableProjectionReadMR");
@@ -341,7 +341,7 @@ public class TestHBaseInputFormat extend
         job.setInputFormat(HBaseInputFormat.class);
 
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
+            MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         //Configure projection schema
         job.set(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA, HCatUtil.serialize(getProjectionSchema()));
         Job newJob = new Job(job);
@@ -369,7 +369,7 @@ public class TestHBaseInputFormat extend
         assertFalse(MapReadProjHTable.error);
         assertEquals(MapReadProjHTable.count, 1);
 
-        String dropTableQuery = "DROP TABLE " + tableName ;
+        String dropTableQuery = "DROP TABLE " + tableName;
         CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
         assertEquals(0, responseThree.getResponseCode());
 
@@ -381,10 +381,10 @@ public class TestHBaseInputFormat extend
     public void TestHBaseTableIgnoreAbortedTransactions() throws Exception {
         String tableName = newTableName("mytable");
         String tableQuery = "CREATE TABLE " + tableName
-                              + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-                              + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
-                                    "testFamily:testQualifier1,testFamily:testQualifier2')" ;
+            + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
+            + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
+            "testFamily:testQualifier1,testFamily:testQualifier2')";
 
         CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
         assertEquals(0, responseTwo.getResponseCode());
@@ -399,7 +399,7 @@ public class TestHBaseInputFormat extend
 
         Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-                HCatUtil.serialize(getHiveConf().getAllProperties()));
+            HCatUtil.serialize(getHiveConf().getAllProperties()));
 
         Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableIgnoreAbortedTransactions");
         FileSystem fs = getFileSystem();
@@ -412,7 +412,7 @@ public class TestHBaseInputFormat extend
         MapReadHTable.resetCounters();
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
+            MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -428,7 +428,7 @@ public class TestHBaseInputFormat extend
         assertFalse(MapReadHTable.error);
         assertEquals(1, MapReadHTable.count);
 
-        String dropTableQuery = "DROP TABLE " + tableName ;
+        String dropTableQuery = "DROP TABLE " + tableName;
         CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
         assertEquals(0, responseThree.getResponseCode());
 
@@ -440,10 +440,10 @@ public class TestHBaseInputFormat extend
     public void TestHBaseTableIgnoreAbortedAndRunningTransactions() throws Exception {
         String tableName = newTableName("mytable");
         String tableQuery = "CREATE TABLE " + tableName
-                              + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-                              + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
-                                    "testFamily:testQualifier1,testFamily:testQualifier2')" ;
+            + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
+            "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
+            + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
+            "testFamily:testQualifier1,testFamily:testQualifier2')";
 
         CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
         assertEquals(0, responseTwo.getResponseCode());
@@ -462,7 +462,7 @@ public class TestHBaseInputFormat extend
 
         Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-                HCatUtil.serialize(getHiveConf().getAllProperties()));
+            HCatUtil.serialize(getHiveConf().getAllProperties()));
 
         Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableIgnoreAbortedTransactions");
         FileSystem fs = getFileSystem();
@@ -474,7 +474,7 @@ public class TestHBaseInputFormat extend
         job.setMapperClass(MapReadHTableRunningAbort.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
+            MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -489,7 +489,7 @@ public class TestHBaseInputFormat extend
         assertFalse(MapReadHTableRunningAbort.error);
         assertEquals(1, MapReadHTableRunningAbort.count);
 
-        String dropTableQuery = "DROP TABLE " + tableName ;
+        String dropTableQuery = "DROP TABLE " + tableName;
         CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
         assertEquals(0, responseThree.getResponseCode());
 
@@ -499,20 +499,20 @@ public class TestHBaseInputFormat extend
 
 
     static class MapReadHTable
-            extends
-            Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
+        extends
+        Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
 
         static boolean error = false;
         static int count = 0;
 
         @Override
         public void map(ImmutableBytesWritable key, HCatRecord value,
-                Context context) throws IOException, InterruptedException {
+                        Context context) throws IOException, InterruptedException {
             System.out.println("HCat record value" + value.toString());
             boolean correctValues = (value.size() == 3)
-                    && (value.get(0).toString()).equalsIgnoreCase("testRow")
-                    && (value.get(1).toString()).equalsIgnoreCase("textValue-5")
-                    && (value.get(2).toString()).equalsIgnoreCase("textValue-5");
+                && (value.get(0).toString()).equalsIgnoreCase("testRow")
+                && (value.get(1).toString()).equalsIgnoreCase("textValue-5")
+                && (value.get(2).toString()).equalsIgnoreCase("textValue-5");
 
             if (correctValues == false) {
                 error = true;
@@ -527,18 +527,19 @@ public class TestHBaseInputFormat extend
     }
 
     static class MapReadProjHTable
-            extends
-            Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
+        extends
+        Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
 
         static boolean error = false;
         static int count = 0;
+
         @Override
         public void map(ImmutableBytesWritable key, HCatRecord value,
-                Context context) throws IOException, InterruptedException {
+                        Context context) throws IOException, InterruptedException {
             System.out.println("HCat record value" + value.toString());
             boolean correctValues = (value.size() == 2)
-                    && (value.get(0).toString()).equalsIgnoreCase("testRow")
-                    && (value.get(1).toString()).equalsIgnoreCase("textValue-5");
+                && (value.get(0).toString()).equalsIgnoreCase("testRow")
+                && (value.get(1).toString()).equalsIgnoreCase("textValue-5");
 
             if (correctValues == false) {
                 error = true;
@@ -548,7 +549,7 @@ public class TestHBaseInputFormat extend
     }
 
     static class MapReadProjectionHTable
-            implements org.apache.hadoop.mapred.Mapper<ImmutableBytesWritable, Result, WritableComparable<?>, Text> {
+        implements org.apache.hadoop.mapred.Mapper<ImmutableBytesWritable, Result, WritableComparable<?>, Text> {
 
         static boolean error = false;
         static int count = 0;
@@ -563,15 +564,15 @@ public class TestHBaseInputFormat extend
 
         @Override
         public void map(ImmutableBytesWritable key, Result result,
-                OutputCollector<WritableComparable<?>, Text> output, Reporter reporter)
-                throws IOException {
+                        OutputCollector<WritableComparable<?>, Text> output, Reporter reporter)
+            throws IOException {
             System.out.println("Result " + result.toString());
             List<KeyValue> list = result.list();
             boolean correctValues = (list.size() == 1)
-                    && (Bytes.toString(list.get(0).getRow())).equalsIgnoreCase("testRow")
-                    && (Bytes.toString(list.get(0).getValue())).equalsIgnoreCase("textValue-5")
-                    && (Bytes.toString(list.get(0).getFamily())).equalsIgnoreCase("testFamily")
-                    && (Bytes.toString(list.get(0).getQualifier())).equalsIgnoreCase("testQualifier1");
+                && (Bytes.toString(list.get(0).getRow())).equalsIgnoreCase("testRow")
+                && (Bytes.toString(list.get(0).getValue())).equalsIgnoreCase("textValue-5")
+                && (Bytes.toString(list.get(0).getFamily())).equalsIgnoreCase("testFamily")
+                && (Bytes.toString(list.get(0).getQualifier())).equalsIgnoreCase("testQualifier1");
 
             if (correctValues == false) {
                 error = true;
@@ -581,20 +582,20 @@ public class TestHBaseInputFormat extend
     }
 
     static class MapReadHTableRunningAbort
-            extends
-            Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
+        extends
+        Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
 
         static boolean error = false;
         static int count = 0;
 
         @Override
         public void map(ImmutableBytesWritable key, HCatRecord value,
-                Context context) throws IOException, InterruptedException {
+                        Context context) throws IOException, InterruptedException {
             System.out.println("HCat record value" + value.toString());
             boolean correctValues = (value.size() == 3)
-                    && (value.get(0).toString()).equalsIgnoreCase("testRow")
-                    && (value.get(1).toString()).equalsIgnoreCase("textValue-3")
-                    && (value.get(2).toString()).equalsIgnoreCase("textValue-2");
+                && (value.get(0).toString()).equalsIgnoreCase("testRow")
+                && (value.get(1).toString()).equalsIgnoreCase("textValue-3")
+                && (value.get(2).toString()).equalsIgnoreCase("textValue-2");
 
             if (correctValues == false) {
                 error = true;
@@ -607,9 +608,9 @@ public class TestHBaseInputFormat extend
 
         HCatSchema schema = new HCatSchema(new ArrayList<HCatFieldSchema>());
         schema.append(new HCatFieldSchema("key", HCatFieldSchema.Type.STRING,
-                ""));
+            ""));
         schema.append(new HCatFieldSchema("testqualifier1",
-                HCatFieldSchema.Type.STRING, ""));
+            HCatFieldSchema.Type.STRING, ""));
         return schema;
     }
 


