From: hashut...@apache.org
Subject: svn commit: r1522583 [1/2] - in /hive/trunk/hcatalog: build-support/ant/ core/src/main/java/org/apache/hcatalog/mapreduce/ core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/ core/src/main/java/org/apache/hive/hcatalog/common/ core/src/ma...
Date: Thu, 12 Sep 2013 14:05:12 GMT
Author: hashutosh
Date: Thu Sep 12 14:05:11 2013
New Revision: 1522583

URL: http://svn.apache.org/r1522583
Log:
HIVE-5261 : Make the Hive HBase storage handler work from HCatalog, and use HiveStorageHandlers instead of HCatStorageHandlers (Viraj Bhat via Sushanth Sowmyan)
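
The practical upshot for storage-handler authors: rather than extending the HCatalog-specific abstract class, handlers now build directly on Hive's HiveStorageHandler interface, typically by extending DefaultStorageHandler the way FosterStorageHandler does below. A minimal sketch of a migrated handler (the MyStorageHandler name is illustrative, not part of this commit):

import java.util.Map;

import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.plan.TableDesc;

public class MyStorageHandler extends DefaultStorageHandler {

  @Override
  public void configureInputJobProperties(TableDesc tableDesc,
      Map<String, String> jobProperties) {
    // Copy whatever the underlying storage needs from tableDesc into
    // jobProperties; HCatalog now drives this through HiveStorageHandler.
  }

  @Override
  public void configureOutputJobProperties(TableDesc tableDesc,
      Map<String, String> jobProperties) {
    // Write-side mirror of the input hook.
  }
}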

Added:
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseStorageHandler.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHiveHBaseTableOutputFormat.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestPigHBaseStorageHandler.java
Removed:
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatStorageHandler.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/DummyStorageHandler.java
Modified:
    hive/trunk/hcatalog/build-support/ant/checkstyle.xml
    hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java
    hive/trunk/hcatalog/storage-handlers/hbase/pom.xml
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
    hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java

Modified: hive/trunk/hcatalog/build-support/ant/checkstyle.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/build-support/ant/checkstyle.xml?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/build-support/ant/checkstyle.xml (original)
+++ hive/trunk/hcatalog/build-support/ant/checkstyle.xml Thu Sep 12 14:05:11 2013
@@ -45,6 +45,7 @@
           <exclude name="storage-handlers/hbase/src/gen-java/**"/>
           <exclude name="storage-handlers/hbase/src/test/all-tests"/>
           <exclude name="storage-handlers/hbase/src/test/excluded-tests"/>
+          <exclude name="storage-handlers/hbase/metastore_db/**"/>
           <exclude name="storage-handlers/hbase/partitions*"/>
           <exclude name="storage-handlers/hbase/.partitions*"/>
           <exclude name="KEYS"/>

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatStorageHandler.java Thu Sep 12 14:05:11 2013
@@ -31,7 +31,7 @@ import org.apache.hadoop.mapred.OutputFo
 /**
  * The abstract Class HCatStorageHandler would server as the base class for all
  * the storage handlers required for non-native tables in HCatalog.
- * @deprecated Use/modify {@link org.apache.hive.hcatalog.mapreduce.HCatStorageHandler} instead
+ * @deprecated Use/modify {@link org.apache.hcatalog.mapreduce.HCatStorageHandler} instead
  */
 public abstract class HCatStorageHandler implements HiveStorageHandler {
 

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java Thu Sep 12 14:05:11 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.io.RCFi
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
@@ -44,7 +45,6 @@ import org.apache.hadoop.hive.ql.plan.Cr
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatUtil;
-import org.apache.hive.hcatalog.mapreduce.HCatStorageHandler;
 
 final class CreateTableHook extends HCatSemanticAnalyzerBase {
 
@@ -196,7 +196,7 @@ final class CreateTableHook extends HCat
     if (StringUtils.isEmpty(storageHandler)) {
     } else {
       try {
-        HCatStorageHandler storageHandlerInst = HCatUtil
+        HiveStorageHandler storageHandlerInst = HCatUtil
           .getStorageHandler(context.getConf(),
             desc.getStorageHandler(),
             desc.getSerName(),

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java Thu Sep 12 14:05:11 2013
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -63,7 +64,6 @@ import org.apache.hive.hcatalog.data.sch
 import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
 import org.apache.hive.hcatalog.mapreduce.FosterStorageHandler;
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatStorageHandler;
 import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.PartInfo;
@@ -371,7 +371,7 @@ public class HCatUtil {
    * @return storageHandler instance
    * @throws IOException
    */
-  public static HCatStorageHandler getStorageHandler(Configuration conf, StorerInfo storerInfo) throws IOException {
+  public static HiveStorageHandler getStorageHandler(Configuration conf, StorerInfo storerInfo) throws IOException {
     return getStorageHandler(conf,
       storerInfo.getStorageHandlerClass(),
       storerInfo.getSerdeClass(),
@@ -379,7 +379,7 @@ public class HCatUtil {
       storerInfo.getOfClass());
   }
 
-  public static HCatStorageHandler getStorageHandler(Configuration conf, PartInfo partitionInfo) throws IOException {
+  public static HiveStorageHandler getStorageHandler(Configuration conf, PartInfo partitionInfo) throws IOException {
     return HCatUtil.getStorageHandler(
       conf,
       partitionInfo.getStorageHandlerClassName(),
@@ -400,7 +400,7 @@ public class HCatUtil {
    * @return storageHandler instance
    * @throws IOException
    */
-  public static HCatStorageHandler getStorageHandler(Configuration conf,
+  public static HiveStorageHandler getStorageHandler(Configuration conf,
                              String storageHandler,
                              String serDe,
                              String inputFormat,
@@ -420,10 +420,10 @@ public class HCatUtil {
     }
 
     try {
-      Class<? extends HCatStorageHandler> handlerClass =
-        (Class<? extends HCatStorageHandler>) Class
+      Class<? extends HiveStorageHandler> handlerClass =
+        (Class<? extends HiveStorageHandler>) Class
           .forName(storageHandler, true, JavaUtils.getClassLoader());
-      return (HCatStorageHandler) ReflectionUtils.newInstance(
+      return (HiveStorageHandler) ReflectionUtils.newInstance(
         handlerClass, conf);
     } catch (ClassNotFoundException e) {
       throw new IOException("Error in loading storage handler."
@@ -444,8 +444,8 @@ public class HCatUtil {
   }
 
   public static Map<String, String>
-  getInputJobProperties(HCatStorageHandler storageHandler,
-              InputJobInfo inputJobInfo) {
+  getInputJobProperties(HiveStorageHandler storageHandler,
+      InputJobInfo inputJobInfo) {
     TableDesc tableDesc = new TableDesc(storageHandler.getSerDeClass(),
       storageHandler.getInputFormatClass(),
       storageHandler.getOutputFormatClass(),
@@ -454,6 +454,9 @@ public class HCatUtil {
       tableDesc.setJobProperties(new HashMap<String, String>());
     }
 
+    Properties mytableProperties = tableDesc.getProperties();
+    mytableProperties.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,inputJobInfo.getDatabaseName()+ "." + inputJobInfo.getTableName());
+
     Map<String, String> jobProperties = new HashMap<String, String>();
     try {
       tableDesc.getJobProperties().put(
@@ -474,7 +477,7 @@ public class HCatUtil {
   @InterfaceAudience.Private
   @InterfaceStability.Evolving
   public static void
-  configureOutputStorageHandler(HCatStorageHandler storageHandler,
+  configureOutputStorageHandler(HiveStorageHandler storageHandler,
                   Configuration conf,
                   OutputJobInfo outputJobInfo) {
     //TODO replace IgnoreKeyTextOutputFormat with a
@@ -489,6 +492,11 @@ public class HCatUtil {
       tableDesc.getJobProperties().put(el.getKey(), el.getValue());
     }
 
+    Properties mytableProperties = tableDesc.getProperties();
+    mytableProperties.setProperty(
+        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
+        outputJobInfo.getDatabaseName()+ "." + outputJobInfo.getTableName());
+
     Map<String, String> jobProperties = new HashMap<String, String>();
     try {
       tableDesc.getJobProperties().put(
@@ -498,6 +506,18 @@ public class HCatUtil {
       storageHandler.configureOutputJobProperties(tableDesc,
         jobProperties);
 
+      Map<String, String> tableJobProperties = tableDesc.getJobProperties();
+      if (tableJobProperties != null) {
+        if (tableJobProperties.containsKey(HCatConstants.HCAT_KEY_OUTPUT_INFO)) {
+          String jobString = tableJobProperties.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
+          if (jobString != null) {
+            if  (!jobProperties.containsKey(HCatConstants.HCAT_KEY_OUTPUT_INFO)) {
+              jobProperties.put(HCatConstants.HCAT_KEY_OUTPUT_INFO,
+                  tableJobProperties.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
+            }
+          }
+        }
+      }
       for (Map.Entry<String, String> el : jobProperties.entrySet()) {
         conf.set(el.getKey(), el.getValue());
       }

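Beyond the type swap, the two new blocks above stamp the qualified table name ("<db>.<table>") into the TableDesc properties under hive_metastoreConstants.META_TABLE_NAME before the handler's configure hooks run, so a handler invoked via HCatalog can resolve its backing table the same way it does when driven by Hive. A minimal sketch of a handler reading it back (the handler class and property key here are hypothetical):

import java.util.Map;

import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.plan.TableDesc;

public class TableNameAwareHandler extends DefaultStorageHandler {

  @Override
  public void configureInputJobProperties(TableDesc tableDesc,
      Map<String, String> jobProperties) {
    // Populated as "<db>.<table>" by HCatUtil.getInputJobProperties above.
    String qualifiedName = tableDesc.getProperties()
        .getProperty(hive_metastoreConstants.META_TABLE_NAME);
    jobProperties.put("example.backing.table", qualifiedName); // hypothetical key
  }
}
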
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java Thu Sep 12 14:05:11 2013
@@ -21,6 +21,7 @@ package org.apache.hive.hcatalog.mapredu
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -37,7 +38,7 @@ import org.apache.hive.hcatalog.data.HCa
  */
 class DefaultRecordWriterContainer extends RecordWriterContainer {
 
-  private final HCatStorageHandler storageHandler;
+  private final HiveStorageHandler storageHandler;
   private final SerDe serDe;
   private final OutputJobInfo jobInfo;
   private final ObjectInspector hcatRecordOI;

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java Thu Sep 12 14:05:11 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -75,7 +76,7 @@ class FileOutputCommitterContainer exten
 
   private Map<String, Map<String, String>> partitionsDiscoveredByPath;
   private Map<String, JobContext> contextDiscoveredByPath;
-  private final HCatStorageHandler cachedStorageHandler;
+  private final HiveStorageHandler cachedStorageHandler;
 
   HarOutputCommitterPostProcessor harProcessor = new HarOutputCommitterPostProcessor();
 

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java Thu Sep 12 14:05:11 2013
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -87,7 +88,7 @@ class FileOutputFormatContainer extends 
     OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil
       .deserialize(jobInfoString);
     StorerInfo storeInfo = jobInfo.getTableInfo().getStorerInfo();
-    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(
+    HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(
       context.getConfiguration(), storeInfo);
     Class<? extends SerDe> serde = storageHandler.getSerDeClass();
     SerDe sd = (SerDe) ReflectionUtils.newInstance(serde,

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java Thu Sep 12 14:05:11 2013
@@ -27,6 +27,7 @@ import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -51,7 +52,7 @@ import org.apache.hive.hcatalog.data.HCa
  */
 class FileRecordWriterContainer extends RecordWriterContainer {
 
-  private final HCatStorageHandler storageHandler;
+  private final HiveStorageHandler storageHandler;
   private final SerDe serDe;
   private final ObjectInspector objectInspector;
 
@@ -125,7 +126,7 @@ class FileRecordWriterContainer extends 
   /**
    * @return the storagehandler
    */
-  public HCatStorageHandler getStorageHandler() {
+  public HiveStorageHandler getStorageHandler() {
     return storageHandler;
   }
 

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java Thu Sep 12 14:05:11 2013
@@ -25,11 +25,13 @@ import org.apache.hadoop.hive.common.Fil
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.ql.io.RCFile;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatUtil;
@@ -44,7 +46,7 @@ import java.util.Map;
  *  artifacts of tables which don't define a SerDe. This StorageHandler assumes
  *  the supplied storage artifacts are for a file-based storage system.
  */
-public class FosterStorageHandler extends HCatStorageHandler {
+public class FosterStorageHandler extends DefaultStorageHandler {
 
   public Configuration conf;
   /** The directory under which data is initially written for a partitioned table */
@@ -92,6 +94,11 @@ public class FosterStorageHandler extend
   }
 
   @Override
+  public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
+    //do nothing currently
+  }
+
+  @Override
   public void configureInputJobProperties(TableDesc tableDesc,
                       Map<String, String> jobProperties) {
 
@@ -161,7 +168,11 @@ public class FosterStorageHandler extend
 
   }
 
-  @Override
+  public void configureTableJobProperties(TableDesc tableDesc,
+      Map<String, String> jobProperties) {
+    return;
+  }
+
   OutputFormatContainer getOutputFormatContainer(
     org.apache.hadoop.mapred.OutputFormat outputFormat) {
     return new FileOutputFormatContainer(outputFormat);

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseInputFormat.java Thu Sep 12 14:05:11 2013
@@ -29,6 +29,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
@@ -116,7 +117,7 @@ public abstract class HCatBaseInputForma
       return splits;
     }
 
-    HCatStorageHandler storageHandler;
+    HiveStorageHandler storageHandler;
     JobConf jobConf;
     //For each matching partition, call getSplits on the underlying InputFormat
     for (PartInfo partitionInfo : partitionInfoList) {
@@ -185,7 +186,7 @@ public abstract class HCatBaseInputForma
     JobContext jobContext = taskContext;
     Configuration conf = jobContext.getConfiguration();
 
-    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(
+    HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(
       conf, partitionInfo);
 
     JobConf jobConf = HCatUtil.getJobConfFromContext(jobContext);

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java Thu Sep 12 14:05:11 2013
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -78,12 +79,21 @@ public abstract class HCatBaseOutputForm
    * @return the output format instance
    * @throws IOException
    */
-  protected OutputFormat<WritableComparable<?>, HCatRecord> getOutputFormat(JobContext context) throws IOException {
+  protected OutputFormat<WritableComparable<?>, HCatRecord> getOutputFormat(JobContext context) 
+    throws IOException {
     OutputJobInfo jobInfo = getJobInfo(context);
-    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), jobInfo.getTableInfo().getStorerInfo());
+    HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), 
+        jobInfo.getTableInfo().getStorerInfo());
     //why do we need this?
     configureOutputStorageHandler(context);
-    return storageHandler.getOutputFormatContainer(ReflectionUtils.newInstance(storageHandler.getOutputFormatClass(), context.getConfiguration()));
+    if (storageHandler instanceof FosterStorageHandler) {
+      return new FileOutputFormatContainer(ReflectionUtils.newInstance(
+          storageHandler.getOutputFormatClass(),context.getConfiguration()));
+    }
+    else { 
+      return new DefaultOutputFormatContainer(ReflectionUtils.newInstance(
+          storageHandler.getOutputFormatClass(),context.getConfiguration()));
+    }
   }
 
   /**
@@ -134,7 +144,7 @@ public abstract class HCatBaseOutputForm
     Configuration conf = jobContext.getConfiguration();
     try {
       OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
-      HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, jobInfo.getTableInfo().getStorerInfo());
+      HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(jobContext.getConfiguration(),jobInfo.getTableInfo().getStorerInfo());
 
       Map<String, String> partitionValues = jobInfo.getPartitionValues();
       String location = jobInfo.getLocation();

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java Thu Sep 12 14:05:11 2013
@@ -29,6 +29,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -170,7 +171,7 @@ public class HCatOutputFormat extends HC
         partitionCols.add(schema.getName());
       }
 
-      HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
+      HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
 
       //Serialize the output info into the configuration
       outputJobInfo.setTableInfo(HCatTableInfo.valueOf(table.getTTable()));

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatRecordReader.java Thu Sep 12 14:05:11 2013
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.io.Writable;
@@ -58,7 +59,7 @@ class HCatRecordReader extends RecordRea
   private org.apache.hadoop.mapred.RecordReader<WritableComparable, Writable> baseRecordReader;
 
   /** The storage handler used */
-  private final HCatStorageHandler storageHandler;
+  private final HiveStorageHandler storageHandler;
 
   private Deserializer deserializer;
 
@@ -70,7 +71,7 @@ class HCatRecordReader extends RecordRea
   /**
    * Instantiates a new hcat record reader.
    */
-  public HCatRecordReader(HCatStorageHandler storageHandler,
+  public HCatRecordReader(HiveStorageHandler storageHandler,
               Map<String, String> valuesNotInDataCols) {
     this.storageHandler = storageHandler;
     this.valuesNotInDataCols = valuesNotInDataCols;
@@ -106,7 +107,7 @@ class HCatRecordReader extends RecordRea
   }
 
   private org.apache.hadoop.mapred.RecordReader createBaseRecordReader(HCatSplit hcatSplit,
-                                     HCatStorageHandler storageHandler, TaskAttemptContext taskContext) throws IOException {
+                                     HiveStorageHandler storageHandler, TaskAttemptContext taskContext) throws IOException {
 
     JobConf jobConf = HCatUtil.getJobConfFromContext(taskContext);
     HCatUtil.copyJobPropertiesToJobConf(hcatSplit.getPartitionInfo().getJobProperties(), jobConf);
@@ -116,7 +117,7 @@ class HCatRecordReader extends RecordRea
       InternalUtil.createReporter(taskContext));
   }
 
-  private void createDeserializer(HCatSplit hcatSplit, HCatStorageHandler storageHandler,
+  private void createDeserializer(HCatSplit hcatSplit, HiveStorageHandler storageHandler,
                   TaskAttemptContext taskContext) throws IOException {
 
     deserializer = ReflectionUtils.newInstance(storageHandler.getSerDeClass(),

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java Thu Sep 12 14:05:11 2013
@@ -27,6 +27,7 @@ import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -154,7 +155,7 @@ class InitializeInput {
     StorerInfo storerInfo = InternalUtil.extractStorerInfo(sd, parameters);
 
     Properties hcatProperties = new Properties();
-    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
+    HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
 
     // copy the properties from storageHandler to jobProperties
     Map<String, String> jobProperties = HCatUtil.getInputJobProperties(storageHandler, inputJobInfo);

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/PartInfo.java Thu Sep 12 14:05:11 2013
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 
 /** The Class used to serialize the partition information read from the metadata server that maps to a partition. */
@@ -63,7 +64,7 @@ public class PartInfo implements Seriali
    * @param jobProperties the job properties
    * @param tableInfo the table information
    */
-  public PartInfo(HCatSchema partitionSchema, HCatStorageHandler storageHandler,
+  public PartInfo(HCatSchema partitionSchema, HiveStorageHandler storageHandler,
           String location, Properties hcatProperties,
           Map<String, String> jobProperties, HCatTableInfo tableInfo) {
     this.partitionSchema = partitionSchema;

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/security/StorageDelegationAuthorizationProvider.java Thu Sep 12 14:05:11 2013
@@ -37,7 +37,6 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProviderBase;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hive.hcatalog.mapreduce.HCatStorageHandler;
 
 /**
  * A HiveAuthorizationProvider which delegates the authorization requests to 
@@ -84,8 +83,8 @@ public class StorageDelegationAuthorizat
     HiveStorageHandler handler = table.getStorageHandler();
 
     if (handler != null) {
-      if (handler instanceof HCatStorageHandler) {
-        return ((HCatStorageHandler) handler).getAuthorizationProvider();
+      if (handler instanceof HiveStorageHandler) {
+        return ((HiveStorageHandler) handler).getAuthorizationProvider();
       } else {
         String authProviderClass = authProviders.get(handler.getClass().getCanonicalName());
 

Modified: hive/trunk/hcatalog/storage-handlers/hbase/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/pom.xml?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/pom.xml (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/pom.xml Thu Sep 12 14:05:11 2013
@@ -54,6 +54,12 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hcatalog-pig-adapter</artifactId>
+      <version>${hcatalog.version}</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
       <version>${zookeeper.version}</version>
@@ -68,6 +74,12 @@
 
     <!-- test scope -->
     <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hcatalog-pig-adapter</artifactId>
+      <version>${hcatalog.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
       <version>${commons-io.version}</version>

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java Thu Sep 12 14:05:11 2013
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.serde2.SerDe;
@@ -66,7 +67,6 @@ import org.apache.hive.hcatalog.mapreduc
 import org.apache.hive.hcatalog.mapreduce.HCatTableInfo;
 import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.HCatStorageHandler;
 import org.apache.thrift.TBase;
 import org.apache.zookeeper.ZooKeeper;
 
@@ -78,7 +78,7 @@ import com.google.common.util.concurrent
  * tables through HCatalog. The implementation is very similar to the
  * HiveHBaseStorageHandler, with more details to suit HCatalog.
  */
-public class HBaseHCatStorageHandler extends HCatStorageHandler implements HiveMetaHook, Configurable {
+public class HBaseHCatStorageHandler extends  DefaultStorageHandler implements HiveMetaHook, Configurable {
 
   public final static String DEFAULT_PREFIX = "default.";
   private final static String PROPERTY_INT_OUTPUT_LOCATION = "hcat.hbase.mapreduce.intermediateOutputLocation";
@@ -87,6 +87,7 @@ public class HBaseHCatStorageHandler ext
   private Configuration jobConf;
   private HBaseAdmin admin;
 
+  @Deprecated
   @Override
   public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
     // Populate jobProperties with input table name, table columns, RM snapshot,
@@ -135,6 +136,7 @@ public class HBaseHCatStorageHandler ext
     }
   }
 
+  @Deprecated
   @Override
   public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
     // Populate jobProperties with output table name, hbase-default.xml, hbase-site.xml, OutputJobInfo
@@ -198,6 +200,7 @@ public class HBaseHCatStorageHandler ext
   * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler#
   * getAuthorizationProvider()
   */
+  @Deprecated
   @Override
   public HiveAuthorizationProvider getAuthorizationProvider()
     throws HiveException {
@@ -215,6 +218,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #commitCreateTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void commitCreateTable(Table table) throws MetaException {
   }
@@ -229,6 +233,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #commitDropTable(org.apache.hadoop.hive.metastore.api.Table, boolean)
    */
+  @Deprecated
   @Override
   public void commitDropTable(Table tbl, boolean deleteData)
     throws MetaException {
@@ -244,6 +249,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #preCreateTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void preCreateTable(Table tbl) throws MetaException {
     boolean isExternal = MetaStoreUtils.isExternalTable(tbl);
@@ -262,7 +268,7 @@ public class HBaseHCatStorageHandler ext
       if (hbaseColumnsMapping == null) {
         throw new MetaException(
           "No hbase.columns.mapping defined in table"
-            + " properties.");
+              + " properties.");
       }
 
       List<String> hbaseColumnFamilies = new ArrayList<String>();
@@ -346,6 +352,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #preDropTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void preDropTable(Table table) throws MetaException {
   }
@@ -358,6 +365,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #rollbackCreateTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void rollbackCreateTable(Table table) throws MetaException {
     checkDeleteTable(table);
@@ -371,6 +379,7 @@ public class HBaseHCatStorageHandler ext
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler
    * #rollbackDropTable(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Deprecated
   @Override
   public void rollbackDropTable(Table table) throws MetaException {
   }
@@ -380,6 +389,7 @@ public class HBaseHCatStorageHandler ext
    *
    * @see org.apache.hive.hcatalog.storagehandler.HCatStorageHandler#getMetaHook()
    */
+  @Deprecated
   @Override
   public HiveMetaHook getMetaHook() {
     return this;
@@ -432,6 +442,7 @@ public class HBaseHCatStorageHandler ext
     return qualifiedName;
   }
 
+  @Deprecated
   @Override
   public Class<? extends InputFormat> getInputFormatClass() {
     return HBaseInputFormat.class;
@@ -450,6 +461,7 @@ public class HBaseHCatStorageHandler ext
   * @see
   * org.apache.hive.hcatalog.storagehandler.HCatStorageHandler#getSerDeClass()
   */
+  @Deprecated
   @Override
   public Class<? extends SerDe> getSerDeClass()
     throws UnsupportedOperationException {
@@ -460,6 +472,7 @@ public class HBaseHCatStorageHandler ext
     return jobConf;
   }
 
+  @Deprecated
   @Override
   public Configuration getConf() {
 
@@ -469,6 +482,7 @@ public class HBaseHCatStorageHandler ext
     return hbaseConf;
   }
 
+  @Deprecated
   @Override
   public void setConf(Configuration conf) {
     //setConf is called both during DDL operations and  mapred read/write jobs.

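With HCatStorageHandler removed and the org.apache.hcatalog HBase handler deprecated wholesale above, the supported route from HCatalog to HBase is Hive's own handler; the revised tests later in this message create their tables accordingly, moving hbase.columns.mapping from TBLPROPERTIES into SERDEPROPERTIES. A sketch of that DDL as issued through the test driver (table and family names are illustrative):

String tableQuery = "CREATE TABLE mytable"
  + " (key string, testqualifier1 string, testqualifier2 string)"
  + " STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
  + " WITH SERDEPROPERTIES ('hbase.columns.mapping'="
  + "':key,testFamily:testQualifier1,testFamily:testQualifier2')";
CommandProcessorResponse response = hcatDriver.run(tableQuery);
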
Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java Thu Sep 12 14:05:11 2013
@@ -36,6 +36,7 @@ public interface RevisionManager {
   /**
    * Initialize the revision manager.
    */
+  @Deprecated
   public void initialize(Configuration conf);
 
   /**
@@ -43,6 +44,7 @@ public interface RevisionManager {
    *
    * @throws IOException
    */
+  @Deprecated
   public void open() throws IOException;
 
   /**
@@ -50,6 +52,7 @@ public interface RevisionManager {
    *
    * @throws IOException
    */
+  @Deprecated
   public void close() throws IOException;
 
   /**
@@ -57,12 +60,14 @@ public interface RevisionManager {
    * @param table the hbase table name
    * @param columnFamilies the column families in the table
    */
+  @Deprecated
   public void createTable(String table, List<String> columnFamilies) throws IOException;
 
   /**
    * Remove table data from revision manager for a dropped table.
    * @param table the hbase table name
    */
+  @Deprecated
   public void dropTable(String table) throws IOException;
 
   /**
@@ -73,6 +78,7 @@ public interface RevisionManager {
    * @return a new Transaction
    * @throws IOException
    */
+  @Deprecated
   public Transaction beginWriteTransaction(String table, List<String> families)
     throws IOException;
 
@@ -85,6 +91,7 @@ public interface RevisionManager {
    * @return a new Transaction
    * @throws IOException
    */
+  @Deprecated
   public Transaction beginWriteTransaction(String table,
                        List<String> families, long keepAlive) throws IOException;
 
@@ -94,6 +101,7 @@ public interface RevisionManager {
    * @param transaction
    * @throws IOException
    */
+  @Deprecated
   public void commitWriteTransaction(Transaction transaction)
     throws IOException;
 
@@ -103,6 +111,7 @@ public interface RevisionManager {
    * @param transaction
    * @throws IOException
    */
+  @Deprecated
   public void abortWriteTransaction(Transaction transaction)
     throws IOException;
 
@@ -114,8 +123,9 @@ public interface RevisionManager {
    * @return a list of aborted WriteTransactions
    * @throws java.io.IOException
    */
+  @Deprecated
   public List<FamilyRevision> getAbortedWriteTransactions(String table,
-                              String columnFamily) throws IOException;
+      String columnFamily) throws IOException;
 
   /**
    * Create the latest snapshot of the table.
@@ -124,6 +134,7 @@ public interface RevisionManager {
    * @return a new snapshot
    * @throws IOException
    */
+  @Deprecated
   public TableSnapshot createSnapshot(String tableName) throws IOException;
 
   /**
@@ -134,6 +145,7 @@ public interface RevisionManager {
    * @return a new snapshot
    * @throws IOException
    */
+  @Deprecated
   public TableSnapshot createSnapshot(String tableName, long revision)
     throws IOException;
 
@@ -143,6 +155,7 @@ public interface RevisionManager {
    * @param transaction
    * @throws IOException
    */
+  @Deprecated
   public void keepAlive(Transaction transaction) throws IOException;
 
 }

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java Thu Sep 12 14:05:11 2013
@@ -172,8 +172,12 @@ public abstract class SkeletonHBaseTest 
     protected int usageCount = 0;
 
     public Context(String handle) {
-      testDir = new File(TEST_DIR + "/test_" + handle + "_" + Math.abs(new Random().nextLong()) + "/").getPath();
-      System.out.println("Cluster work directory: " + testDir);
+      try {
+        testDir = new File(TEST_DIR + "/test_" + handle + "_" + Math.abs(new Random().nextLong()) + "/").getCanonicalPath();
+        System.out.println("Cluster work directory: " + testDir);
+      } catch (IOException e) {
+        throw new IllegalStateException("Failed to generate testDir", e);
+      }
     }
 
     public void start() {

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java?rev=1522583&r1=1522582&r2=1522583&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java Thu Sep 12 14:05:11 2013
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.hcatalog.hbase;
 
 import static org.junit.Assert.assertEquals;
@@ -33,29 +34,22 @@ import java.util.Map.Entry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.hbase.HBaseSerDe;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
@@ -67,12 +61,8 @@ import org.apache.hive.hcatalog.common.H
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.hcatalog.hbase.snapshot.RevisionManager;
-import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
-import org.apache.hcatalog.hbase.snapshot.Transaction;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
-import org.apache.hive.hcatalog.mapreduce.PartInfo;
 import org.junit.Test;
 
 public class TestHBaseInputFormat extends SkeletonHBaseTest {
@@ -100,9 +90,6 @@ public class TestHBaseInputFormat extend
         hcatConf.set(el.getKey(), el.getValue());
       }
     }
-    HBaseConfiguration.merge(hcatConf,
-      RevisionManagerConfiguration.create());
-
 
     SessionState.start(new CliSessionState(hcatConf));
     hcatDriver = new HCatDriver();
@@ -112,26 +99,14 @@ public class TestHBaseInputFormat extend
   private List<Put> generatePuts(int num, String tableName) throws IOException {
 
     List<String> columnFamilies = Arrays.asList("testFamily");
-    RevisionManager rm = null;
     List<Put> myPuts;
-    try {
-      rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(hcatConf);
-      rm.open();
-      myPuts = new ArrayList<Put>();
-      for (int i = 1; i <= num; i++) {
-        Put put = new Put(Bytes.toBytes("testRow"));
-        put.add(FAMILY, QUALIFIER1, i, Bytes.toBytes("textValue-" + i));
-        put.add(FAMILY, QUALIFIER2, i, Bytes.toBytes("textValue-" + i));
-        myPuts.add(put);
-        Transaction tsx = rm.beginWriteTransaction(tableName,
-          columnFamilies);
-        rm.commitWriteTransaction(tsx);
-      }
-    } finally {
-      if (rm != null)
-        rm.close();
+    myPuts = new ArrayList<Put>();
+    for (int i = 1; i <= num; i++) {
+      Put put = new Put(Bytes.toBytes("testRow"));
+      put.add(FAMILY, QUALIFIER1, i, Bytes.toBytes("textValue-" + i));
+      put.add(FAMILY, QUALIFIER2, i, Bytes.toBytes("textValue-" + i));
+      myPuts.add(put);
     }
-
     return myPuts;
   }
 
@@ -141,40 +116,6 @@ public class TestHBaseInputFormat extend
     table.put(myPuts);
   }
 
-  private long populateHBaseTableQualifier1(String tName, int value, Boolean commit)
-    throws IOException {
-    List<String> columnFamilies = Arrays.asList("testFamily");
-    RevisionManager rm = null;
-    List<Put> myPuts = new ArrayList<Put>();
-    long revision;
-    try {
-      rm = HBaseRevisionManagerUtil.getOpenedRevisionManager(hcatConf);
-      rm.open();
-      Transaction tsx = rm.beginWriteTransaction(tName, columnFamilies);
-
-      Put put = new Put(Bytes.toBytes("testRow"));
-      revision = tsx.getRevisionNumber();
-      put.add(FAMILY, QUALIFIER1, revision,
-        Bytes.toBytes("textValue-" + value));
-      myPuts.add(put);
-
-      // If commit is null it is left as a running transaction
-      if (commit != null) {
-        if (commit) {
-          rm.commitWriteTransaction(tsx);
-        } else {
-          rm.abortWriteTransaction(tsx);
-        }
-      }
-    } finally {
-      if (rm != null)
-        rm.close();
-    }
-    HTable table = new HTable(getHbaseConf(), Bytes.toBytes(tName));
-    table.put(myPuts);
-    return revision;
-  }
-
   @Test
   public void TestHBaseTableReadMR() throws Exception {
     String tableName = newTableName("MyTable");
@@ -184,11 +125,11 @@ public class TestHBaseInputFormat extend
     String db_dir = new Path(getTestDir(), "hbasedb").toString();
 
     String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '"
-      + db_dir + "'";
+                            + db_dir + "'";
     String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
       + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-      "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-      + "TBLPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')";
+      "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
+      + " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')" ;
 
     CommandProcessorResponse responseOne = hcatDriver.run(dbquery);
     assertEquals(0, responseOne.getResponseCode());
@@ -204,6 +145,8 @@ public class TestHBaseInputFormat extend
     conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
       HCatUtil.serialize(getHiveConf().getAllProperties()));
 
+    conf.set(HBaseSerDe.HBASE_TABLE_NAME,hbaseTableName);
+    conf.set(TableInputFormat.INPUT_TABLE, hbaseTableName);
     // output settings
     Path outputDir = new Path(getTestDir(), "mapred/testHbaseTableMRRead");
     FileSystem fs = getFileSystem();
@@ -217,7 +160,9 @@ public class TestHBaseInputFormat extend
     MapReadHTable.resetCounters();
 
     job.setInputFormatClass(HCatInputFormat.class);
-    HCatInputFormat.setInput(job.getConfiguration(), databaseName, tableName);
+    InputJobInfo inputJobInfo = InputJobInfo.create(databaseName, tableName,
+                null);
+    HCatInputFormat.setInput(job, inputJobInfo);
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, outputDir);
     job.setMapOutputKeyClass(BytesWritable.class);
@@ -231,7 +176,7 @@ public class TestHBaseInputFormat extend
     assertFalse(MapReadHTable.error);
     assertEquals(MapReadHTable.count, 1);
 
-    String dropTableQuery = "DROP TABLE " + hbaseTableName;
+    String dropTableQuery = "DROP TABLE " + hbaseTableName ;
     CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
     assertEquals(0, responseThree.getResponseCode());
 
@@ -251,10 +196,10 @@ public class TestHBaseInputFormat extend
     String hbaseTableName = "MyDB_" + tableName;
     String tableQuery = "CREATE TABLE " + tableName
       + "(key string, testqualifier1 string, testqualifier2 string) STORED BY "
-      + "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-      + "TBLPROPERTIES ('hbase.columns.mapping'="
-      + "':key,testFamily:testQualifier1,testFamily:testQualifier2',"
-      + "'hbase.table.name'='" + hbaseTableName + "')";
+      + "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
+      + " WITH  SERDEPROPERTIES ('hbase.columns.mapping'="
+      + "':key,testFamily:testQualifier1,testFamily:testQualifier2')"
+      + " TBLPROPERTIES ('hbase.table.name'='" + hbaseTableName+ "')" ;
 
     CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
     assertEquals(0, responseTwo.getResponseCode());
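
Here the same SERDEPROPERTIES move is paired with 'hbase.table.name' staying in TBLPROPERTIES: the column mapping belongs to the SerDe, while the physical table name is a table-level property. One observable consequence, sketched under the assumption that the handler creates only the physical table in HBase:

    // After the CREATE TABLE above, the HBase-side table carries the
    // physical name from 'hbase.table.name', not the Hive table name.
    HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
    assertTrue(hAdmin.tableExists(hbaseTableName));  // "MyDB_" + tableName
    assertFalse(hAdmin.tableExists(tableName));      // Hive name exists only in the metastore
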
@@ -280,8 +225,10 @@ public class TestHBaseInputFormat extend
     job.setJarByClass(this.getClass());
     job.setMapperClass(MapReadProjHTable.class);
     job.setInputFormatClass(HCatInputFormat.class);
+    InputJobInfo inputJobInfo = InputJobInfo.create(
+      MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
     HCatInputFormat.setOutputSchema(job, getProjectionSchema());
-    HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+    HCatInputFormat.setInput(job, inputJobInfo);
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, outputDir);
     job.setMapOutputKeyClass(BytesWritable.class);
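
Projection is pushed down by registering an output schema alongside the input: the mapper then sees only the listed fields (here two of the table's three columns, which is what MapReadProjHTable asserts). A sketch of building such a schema, along the lines of the test's getProjectionSchema() (the empty strings are field comments; HCatFieldSchema's constructor declares HCatException):

    // Sketch: a two-column projection over the three-column test table.
    HCatSchema projection = new HCatSchema(new ArrayList<HCatFieldSchema>());
    projection.append(new HCatFieldSchema("key", HCatFieldSchema.Type.STRING, ""));
    projection.append(new HCatFieldSchema("testqualifier1", HCatFieldSchema.Type.STRING, ""));
    HCatInputFormat.setOutputSchema(job, projection);
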
@@ -301,193 +248,6 @@ public class TestHBaseInputFormat extend
     assertFalse(isHbaseTableThere);
   }
 
-  @Test
-  public void TestHBaseInputFormatProjectionReadMR() throws Exception {
-
-    String tableName = newTableName("mytable");
-    String tableQuery = "CREATE TABLE " + tableName
-      + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-      "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-      + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
-      "testFamily:testQualifier1,testFamily:testQualifier2')";
-
-    CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
-    assertEquals(0, responseTwo.getResponseCode());
-
-    HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
-    boolean doesTableExist = hAdmin.tableExists(tableName);
-    assertTrue(doesTableExist);
-
-    populateHBaseTable(tableName, 5);
-
-    Configuration conf = new Configuration(hcatConf);
-    conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-      HCatUtil.serialize(getHiveConf().getAllProperties()));
-
-    // output settings
-    Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableProjectionReadMR");
-    FileSystem fs = getFileSystem();
-    if (fs.exists(outputDir)) {
-      fs.delete(outputDir, true);
-    }
-    // create job
-    JobConf job = new JobConf(conf);
-    job.setJobName("hbase-scan-column");
-    job.setJarByClass(this.getClass());
-    job.setMapperClass(MapReadProjectionHTable.class);
-    job.setInputFormat(HBaseInputFormat.class);
-
-    //Configure projection schema
-    job.set(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA, HCatUtil.serialize(getProjectionSchema()));
-    Job newJob = new Job(job);
-    HCatInputFormat.setInput(newJob, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
-    String inputJobString = newJob.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO);
-    InputJobInfo info = (InputJobInfo) HCatUtil.deserialize(inputJobString);
-    job.set(HCatConstants.HCAT_KEY_JOB_INFO, inputJobString);
-    for (PartInfo partinfo : info.getPartitions()) {
-      for (Entry<String, String> entry : partinfo.getJobProperties().entrySet())
-        job.set(entry.getKey(), entry.getValue());
-    }
-    assertEquals("testFamily:testQualifier1", job.get(TableInputFormat.SCAN_COLUMNS));
-
-    job.setOutputFormat(org.apache.hadoop.mapred.TextOutputFormat.class);
-    org.apache.hadoop.mapred.TextOutputFormat.setOutputPath(job, outputDir);
-    job.setMapOutputKeyClass(BytesWritable.class);
-    job.setMapOutputValueClass(Text.class);
-    job.setOutputKeyClass(BytesWritable.class);
-    job.setOutputValueClass(Text.class);
-    job.setNumReduceTasks(0);
-
-    RunningJob runJob = JobClient.runJob(job);
-    runJob.waitForCompletion();
-    assertTrue(runJob.isSuccessful());
-    assertFalse(MapReadProjHTable.error);
-    assertEquals(MapReadProjHTable.count, 1);
-
-    String dropTableQuery = "DROP TABLE " + tableName;
-    CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
-    assertEquals(0, responseThree.getResponseCode());
-
-    boolean isHbaseTableThere = hAdmin.tableExists(tableName);
-    assertFalse(isHbaseTableThere);
-  }
-
-  @Test
-  public void TestHBaseTableIgnoreAbortedTransactions() throws Exception {
-    String tableName = newTableName("mytable");
-    String tableQuery = "CREATE TABLE " + tableName
-      + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-      "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-      + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
-      "testFamily:testQualifier1,testFamily:testQualifier2')";
-
-    CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
-    assertEquals(0, responseTwo.getResponseCode());
-
-    HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
-    boolean doesTableExist = hAdmin.tableExists(tableName);
-    assertTrue(doesTableExist);
-
-    populateHBaseTable(tableName, 5);
-    populateHBaseTableQualifier1(tableName, 6, false);
-    populateHBaseTableQualifier1(tableName, 7, false);
-
-    Configuration conf = new Configuration(hcatConf);
-    conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-      HCatUtil.serialize(getHiveConf().getAllProperties()));
-
-    Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableIgnoreAbortedTransactions");
-    FileSystem fs = getFileSystem();
-    if (fs.exists(outputDir)) {
-      fs.delete(outputDir, true);
-    }
-    Job job = new Job(conf, "hbase-aborted-transaction");
-    job.setJarByClass(this.getClass());
-    job.setMapperClass(MapReadHTable.class);
-    MapReadHTable.resetCounters();
-    job.setInputFormatClass(HCatInputFormat.class);
-    HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
-    job.setOutputFormatClass(TextOutputFormat.class);
-    TextOutputFormat.setOutputPath(job, outputDir);
-    job.setMapOutputKeyClass(BytesWritable.class);
-    job.setMapOutputValueClass(Text.class);
-    job.setOutputKeyClass(BytesWritable.class);
-    job.setOutputValueClass(Text.class);
-    job.setNumReduceTasks(0);
-    assertTrue(job.waitForCompletion(true));
-    // Verify that the records do not contain aborted transaction
-    // revisions 6 and 7 for testFamily:testQualifier1 and
-    // fetches revision 5 for both testQualifier1 and testQualifier2
-    assertFalse(MapReadHTable.error);
-    assertEquals(1, MapReadHTable.count);
-
-    String dropTableQuery = "DROP TABLE " + tableName;
-    CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
-    assertEquals(0, responseThree.getResponseCode());
-
-    boolean isHbaseTableThere = hAdmin.tableExists(tableName);
-    assertFalse(isHbaseTableThere);
-  }
-
-  @Test
-  public void TestHBaseTableIgnoreAbortedAndRunningTransactions() throws Exception {
-    String tableName = newTableName("mytable");
-    String tableQuery = "CREATE TABLE " + tableName
-      + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
-      "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
-      + "TBLPROPERTIES ('hbase.columns.mapping'=':key," +
-      "testFamily:testQualifier1,testFamily:testQualifier2')";
-
-    CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
-    assertEquals(0, responseTwo.getResponseCode());
-
-    HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
-    boolean doesTableExist = hAdmin.tableExists(tableName);
-    assertTrue(doesTableExist);
-
-    populateHBaseTable(tableName, 2);
-    populateHBaseTableQualifier1(tableName, 3, Boolean.TRUE); //Committed transaction
-    populateHBaseTableQualifier1(tableName, 4, null); //Running transaction
-    populateHBaseTableQualifier1(tableName, 5, Boolean.FALSE);  //Aborted transaction
-    populateHBaseTableQualifier1(tableName, 6, Boolean.TRUE); //Committed transaction
-    populateHBaseTableQualifier1(tableName, 7, null); //Running Transaction
-    populateHBaseTableQualifier1(tableName, 8, Boolean.FALSE); //Aborted Transaction
-
-    Configuration conf = new Configuration(hcatConf);
-    conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-      HCatUtil.serialize(getHiveConf().getAllProperties()));
-
-    Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableIgnoreAbortedTransactions");
-    FileSystem fs = getFileSystem();
-    if (fs.exists(outputDir)) {
-      fs.delete(outputDir, true);
-    }
-    Job job = new Job(conf, "hbase-running-aborted-transaction");
-    job.setJarByClass(this.getClass());
-    job.setMapperClass(MapReadHTableRunningAbort.class);
-    job.setInputFormatClass(HCatInputFormat.class);
-    HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
-    job.setOutputFormatClass(TextOutputFormat.class);
-    TextOutputFormat.setOutputPath(job, outputDir);
-    job.setMapOutputKeyClass(BytesWritable.class);
-    job.setMapOutputValueClass(Text.class);
-    job.setOutputKeyClass(BytesWritable.class);
-    job.setOutputValueClass(Text.class);
-    job.setNumReduceTasks(0);
-    assertTrue(job.waitForCompletion(true));
-    // Verify that the records do not contain running and aborted transaction
-    // and it fetches revision 2 for testQualifier1 and testQualifier2
-    assertFalse(MapReadHTableRunningAbort.error);
-    assertEquals(1, MapReadHTableRunningAbort.count);
-
-    String dropTableQuery = "DROP TABLE " + tableName;
-    CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
-    assertEquals(0, responseThree.getResponseCode());
-
-    boolean isHbaseTableThere = hAdmin.tableExists(tableName);
-    assertFalse(isHbaseTableThere);
-  }
-
 
   static class MapReadHTable
     extends
@@ -499,7 +259,6 @@ public class TestHBaseInputFormat extend
     @Override
     public void map(ImmutableBytesWritable key, HCatRecord value,
             Context context) throws IOException, InterruptedException {
-      System.out.println("HCat record value" + value.toString());
       boolean correctValues = (value.size() == 3)
         && (value.get(0).toString()).equalsIgnoreCase("testRow")
         && (value.get(1).toString()).equalsIgnoreCase("textValue-5")
@@ -523,11 +282,9 @@ public class TestHBaseInputFormat extend
 
     static boolean error = false;
     static int count = 0;
-
     @Override
     public void map(ImmutableBytesWritable key, HCatRecord value,
             Context context) throws IOException, InterruptedException {
-      System.out.println("HCat record value" + value.toString());
       boolean correctValues = (value.size() == 2)
         && (value.get(0).toString()).equalsIgnoreCase("testRow")
         && (value.get(1).toString()).equalsIgnoreCase("textValue-5");
@@ -539,62 +296,6 @@ public class TestHBaseInputFormat extend
     }
   }
 
-  static class MapReadProjectionHTable
-    implements org.apache.hadoop.mapred.Mapper<ImmutableBytesWritable, Result, WritableComparable<?>, Text> {
-
-    static boolean error = false;
-    static int count = 0;
-
-    @Override
-    public void configure(JobConf job) {
-    }
-
-    @Override
-    public void close() throws IOException {
-    }
-
-    @Override
-    public void map(ImmutableBytesWritable key, Result result,
-            OutputCollector<WritableComparable<?>, Text> output, Reporter reporter)
-      throws IOException {
-      System.out.println("Result " + result.toString());
-      List<KeyValue> list = result.list();
-      boolean correctValues = (list.size() == 1)
-        && (Bytes.toString(list.get(0).getRow())).equalsIgnoreCase("testRow")
-        && (Bytes.toString(list.get(0).getValue())).equalsIgnoreCase("textValue-5")
-        && (Bytes.toString(list.get(0).getFamily())).equalsIgnoreCase("testFamily")
-        && (Bytes.toString(list.get(0).getQualifier())).equalsIgnoreCase("testQualifier1");
-
-      if (correctValues == false) {
-        error = true;
-      }
-      count++;
-    }
-  }
-
-  static class MapReadHTableRunningAbort
-    extends
-    Mapper<ImmutableBytesWritable, HCatRecord, WritableComparable<?>, Text> {
-
-    static boolean error = false;
-    static int count = 0;
-
-    @Override
-    public void map(ImmutableBytesWritable key, HCatRecord value,
-            Context context) throws IOException, InterruptedException {
-      System.out.println("HCat record value" + value.toString());
-      boolean correctValues = (value.size() == 3)
-        && (value.get(0).toString()).equalsIgnoreCase("testRow")
-        && (value.get(1).toString()).equalsIgnoreCase("textValue-3")
-        && (value.get(2).toString()).equalsIgnoreCase("textValue-2");
-
-      if (correctValues == false) {
-        error = true;
-      }
-      count++;
-    }
-  }
-
   private HCatSchema getProjectionSchema() throws HCatException {
 
     HCatSchema schema = new HCatSchema(new ArrayList<HCatFieldSchema>());


