incubator-hcatalog-commits mailing list archives

From tof...@apache.org
Subject svn commit: r1197369 - in /incubator/hcatalog/trunk: ./ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/ storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/
Date Fri, 04 Nov 2011 00:22:01 GMT
Author: toffer
Date: Fri Nov  4 00:22:01 2011
New Revision: 1197369

URL: http://svn.apache.org/viewvc?rev=1197369&view=rev
Log:
HCATALOG-151 Fixed native table names used for tables stored in non-default DBs in HBaseInputStorageDriver (avandana via toffer)
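
Background: a table created through HBaseHCatStorageHandler in a non-default Hive database is materialized in HBase under the qualified name "<database>.<table>" (the updated test below asserts exactly that via HBaseAdmin.tableExists), but HBaseInputStorageDriver was setting TableInputFormat.INPUT_TABLE to the unqualified Hive table name, so reads against such tables looked up a nonexistent HBase table. The new getFullyQualifiedName() helper centralizes the naming rule, and the input driver now uses it.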

Modified:
    incubator/hcatalog/trunk/CHANGES.txt
    incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
    incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputStorageDriver.java
    incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputStorageDriver.java

Modified: incubator/hcatalog/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1197369&r1=1197368&r2=1197369&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Fri Nov  4 00:22:01 2011
@@ -23,6 +23,8 @@ Trunk (unreleased changes)
   INCOMPATIBLE CHANGES
 
   NEW FEATURES
+  HCAT-151. Fixed native table names used for tables stored in non-default DBs in HBaseInputStorageDriver (avandana via toffer)
+
   HCAT-143. Projection pushdown for HBaseInputStorageDriver (avandana via toffer)
 
   HCAT-121. TextStorageOutputDriver for Pig (daijyc via hashutosh)  

Modified: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java?rev=1197369&r1=1197368&r2=1197369&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (original)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java Fri Nov  4 00:22:01 2011
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hcatalog.mapreduce.HCatInputStorageDriver;
 import org.apache.hcatalog.mapreduce.HCatOutputStorageDriver;
+import org.apache.hcatalog.mapreduce.HCatTableInfo;
 import org.apache.hcatalog.storagehandler.HCatStorageHandler;
 
 /**
@@ -381,4 +382,18 @@ public class HBaseHCatStorageHandler ext
         }
     }
 
+    static String getFullyQualifiedName(HCatTableInfo tableInfo){
+        String qualifiedName;
+        String databaseName = tableInfo.getDatabaseName();
+        String tableName = tableInfo.getTableName();
+
+        if ((databaseName == null) || (databaseName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME))) {
+            qualifiedName = tableName;
+        } else {
+            qualifiedName = databaseName + "." + tableName;
+        }
+
+        return qualifiedName;
+    }
+
 }
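
For illustration only, a minimal standalone sketch of the naming rule the new helper encodes. This is hypothetical code, not part of the commit: qualify() stands in for getFullyQualifiedName(), and the "default" literal mirrors MetaStoreUtils.DEFAULT_DATABASE_NAME.

    // Hypothetical sketch of the qualified-name rule (not from this commit).
    public class QualifiedNameSketch {
        // "default" mirrors MetaStoreUtils.DEFAULT_DATABASE_NAME.
        static String qualify(String databaseName, String tableName) {
            // Tables in the default database keep their bare name...
            if (databaseName == null || databaseName.equals("default")) {
                return tableName;
            }
            // ...all others are addressed in HBase as "<database>.<table>".
            return databaseName + "." + tableName;
        }

        public static void main(String[] args) {
            System.out.println(qualify(null, "mytable"));         // mytable
            System.out.println(qualify("default", "mytable"));    // mytable
            System.out.println(qualify("mydatabase", "mytable")); // mydatabase.mytable
        }
    }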

Modified: incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputStorageDriver.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputStorageDriver.java?rev=1197369&r1=1197368&r2=1197369&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputStorageDriver.java (original)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputStorageDriver.java Fri Nov  4 00:22:01 2011
@@ -98,7 +98,8 @@ public class HBaseInputStorageDriver ext
     public InputFormat<ImmutableBytesWritable, Result> getInputFormat(
             Properties hcatProperties) {
         HBaseInputFormat tableInputFormat = new HBaseInputFormat();
-        jobConf.set(TableInputFormat.INPUT_TABLE, tableInfo.getTableName());
+        String hbaseTableName = HBaseHCatStorageHandler.getFullyQualifiedName(tableInfo);
+        jobConf.set(TableInputFormat.INPUT_TABLE, hbaseTableName);
         jobConf.set(TableInputFormat.SCAN_COLUMNS, scanColumns);
         tableInputFormat.setConf(jobConf);
         // TODO: Make the caching configurable by the user
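
With this change the scan targets the same HBase table name the storage handler used at creation time: the bare table name for the default database, "<database>.<table>" otherwise.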

Modified: incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputStorageDriver.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputStorageDriver.java?rev=1197369&r1=1197368&r2=1197369&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputStorageDriver.java (original)
+++ incubator/hcatalog/trunk/storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputStorageDriver.java Fri Nov  4 00:22:01 2011
@@ -17,36 +17,37 @@
  */
 package org.apache.hcatalog.hbase;
 
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.net.URI;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hive.hbase.HBaseSerDe;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hcatalog.common.HCatConstants;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.common.HCatUtil;
@@ -59,6 +60,8 @@ import org.junit.Test;
 
 public class TestHBaseInputStorageDriver extends SkeletonHBaseTest {
 
+    private static HiveConf   hcatConf;
+    private static HCatDriver hcatDriver;
     private final byte[] FAMILY     = Bytes.toBytes("testFamily");
     private final byte[] QUALIFIER1 = Bytes.toBytes("testQualifier1");
     private final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
@@ -76,71 +79,66 @@ public class TestHBaseInputStorageDriver
         return myPuts;
     }
 
-    private void registerHBaseTable(String tableName) throws Exception {
+    public void Initialize() throws Exception {
+        hcatConf = getHiveConf();
+        hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
+                HCatSemanticAnalyzer.class.getName());
+        URI fsuri = getFileSystem().getUri();
+        Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(),
+                getTestDir());
+        hcatConf.set(HiveConf.ConfVars.HADOOPFS.varname, fsuri.toString());
+        hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());
 
-        String databaseName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
-        HiveMetaStoreClient client = getCluster().getHiveMetaStoreClient();
-        try {
-            client.dropTable(databaseName, tableName);
-        } catch (Exception e) {
-        } // can fail with NoSuchObjectException
-
-        Table tbl = new Table();
-        tbl.setDbName(databaseName);
-        tbl.setTableName(tableName);
-        tbl.setTableType(TableType.EXTERNAL_TABLE.toString());
-        tbl.setPartitionKeys(new ArrayList<FieldSchema>());
-        Map<String, String> tableParams = new HashMap<String, String>();
-        tableParams.put(HCatConstants.HCAT_ISD_CLASS,
-                HBaseInputStorageDriver.class.getName());
-        tableParams.put(HCatConstants.HCAT_OSD_CLASS, "NotRequired");
-        tableParams.put(HBaseConstants.PROPERTY_COLUMN_MAPPING_KEY,
-                ":key,testFamily:testQualifier1,testFamily:testQualifier2");
-        tableParams.put(Constants.SERIALIZATION_FORMAT, "9");
-        tableParams.put(Constants.SERIALIZATION_NULL_FORMAT, "NULL");
-        tbl.setParameters(tableParams);
-
-        StorageDescriptor sd = new StorageDescriptor();
-        sd.setCols(HCatUtil.getFieldSchemaList(getSchema().getFields()));
-        sd.setBucketCols(new ArrayList<String>(3));
-        sd.setSerdeInfo(new SerDeInfo());
-        sd.getSerdeInfo().setName(tbl.getTableName());
-        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
-        sd.getSerdeInfo().getParameters()
-                .put(Constants.SERIALIZATION_FORMAT, "9");
-        sd.getSerdeInfo().setSerializationLib(HBaseSerDe.class.getName());
-        sd.setInputFormat(HBaseInputFormat.class.getName());
-        sd.setOutputFormat("NotRequired");
+        //Add hbase properties
 
-        tbl.setSd(sd);
-        client.createTable(tbl);
+        for (Map.Entry<String, String> el : getHbaseConf()) {
+            if (el.getKey().startsWith("hbase.")) {
+                hcatConf.set(el.getKey(), el.getValue());
+            }
+        }
+
+        SessionState.start(new CliSessionState(hcatConf));
+        hcatDriver = new HCatDriver();
 
     }
 
-    public void populateTable(String tableName) throws IOException {
+    public void populateHBaseTable(String tName) throws IOException {
         List<Put> myPuts = generatePuts(10);
-        HTable table = new HTable(getHbaseConf(), Bytes.toBytes(tableName));
+        HTable table = new HTable(getHbaseConf(), Bytes.toBytes(tName));
         table.put(myPuts);
     }
 
     @Test
     public void TestHBaseTableReadMR() throws Exception {
-        String tableName  = "testtableone";
-        Configuration conf = new Configuration();
-        // include hbase config in conf file
-        for (Map.Entry<String, String> el : getHbaseConf()) {
-            if (el.getKey().startsWith("hbase.")) {
-                conf.set(el.getKey(), el.getValue());
-            }
-        }
+        Initialize();
+        String tableName = newTableName("mytable");
+        String databaseName = newTableName("mydatabase");
+        String db_dir = getTestDir() + "/hbasedb";
+
+        String dbquery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '"
+                            + db_dir + "'";
+        String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
+                              + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
+                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
+                              + "TBLPROPERTIES ('hcat.isd'='org.apache.hcatalog.hbase.HBaseInputStorageDriver', " +
+                              "'hcat.osd'='org.apache.hcatalog.hbase.HBaseOutputStorageDriver'," +
+                              "'hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')";
+
+        CommandProcessorResponse responseOne = hcatDriver.run(dbquery);
+        assertEquals(0, responseOne.getResponseCode());
+        CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
+        assertEquals(0, responseTwo.getResponseCode());
+
+        HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
+        String hbaseTableName = databaseName + "." + tableName;
+        boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
+        assertTrue(doesTableExist);
 
+        populateHBaseTable(hbaseTableName);
+        Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
                 HCatUtil.serialize(getHiveConf().getAllProperties()));
 
-        // create Hbase table using admin
-        createTable(tableName, new String[] { "testFamily" });
-        registerHBaseTable(tableName);
-        populateTable(tableName);
         // output settings
         Path outputDir = new Path(getTestDir(), "mapred/testHbaseTableMRRead");
         FileSystem fs = getFileSystem();
@@ -151,10 +149,10 @@ public class TestHBaseInputStorageDriver
         Job job = new Job(conf, "hbase-mr-read-test");
         job.setJarByClass(this.getClass());
         job.setMapperClass(MapReadHTable.class);
+
         job.setInputFormatClass(HCatInputFormat.class);
-        InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+        InputJobInfo inputJobInfo = InputJobInfo.create(databaseName, tableName,
+                null, null, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -165,27 +163,44 @@ public class TestHBaseInputStorageDriver
         job.setNumReduceTasks(0);
         assertTrue(job.waitForCompletion(true));
         assertTrue(MapReadHTable.error == false);
+
+        String dropTableQuery = "DROP TABLE " + hbaseTableName ;
+        CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
+        assertEquals(0, responseThree.getResponseCode());
+
+        boolean isHbaseTableThere = hAdmin.tableExists(hbaseTableName);
+        assertTrue(isHbaseTableThere == false);
+
+        String dropDB = "DROP DATABASE " + databaseName;
+        CommandProcessorResponse responseFour = hcatDriver.run(dropDB);
+        assertEquals(0, responseFour.getResponseCode());
     }
 
     @Test
     public void TestHBaseTableProjectionReadMR() throws Exception {
 
-        String tableName = "testtabletwo";
-        Configuration conf = new Configuration();
-        // include hbase config in conf file
-        for (Map.Entry<String, String> el : getHbaseConf()) {
-            if (el.getKey().startsWith("hbase.")) {
-                conf.set(el.getKey(), el.getValue());
-            }
-        }
+        Initialize();
+        String tableName = newTableName("mytable");
+        String tableQuery = "CREATE TABLE " + tableName
+                              + "(key string, testqualifier1 string, testqualifier2 string) STORED BY " +
+                              "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler'"
+                              + "TBLPROPERTIES ('hcat.isd'='org.apache.hcatalog.hbase.HBaseInputStorageDriver', " +
+                              "'hcat.osd'='org.apache.hcatalog.hbase.HBaseOutputStorageDriver'," +
+                              "'hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')";
+
+        CommandProcessorResponse responseTwo = hcatDriver.run(tableQuery);
+        assertEquals(0, responseTwo.getResponseCode());
+
+        HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
+        boolean doesTableExist = hAdmin.tableExists(tableName);
+        assertTrue(doesTableExist);
 
+        populateHBaseTable(tableName);
+
+        Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
                 HCatUtil.serialize(getHiveConf().getAllProperties()));
 
-        // create Hbase table using admin
-        createTable(tableName, new String[] { "testFamily" });
-        registerHBaseTable(tableName);
-        populateTable(tableName);
         // output settings
         Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableProjectionReadMR");
         FileSystem fs = getFileSystem();
@@ -211,6 +226,13 @@ public class TestHBaseInputStorageDriver
         job.setNumReduceTasks(0);
         assertTrue(job.waitForCompletion(true));
         assertTrue(MapReadHTable.error == false);
+
+        String dropTableQuery = "DROP TABLE " + tableName ;
+        CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
+        assertEquals(0, responseThree.getResponseCode());
+
+        boolean isHbaseTableThere = hAdmin.tableExists(tableName);
+        assertTrue(isHbaseTableThere == false);
     }
 
 
@@ -253,18 +275,6 @@ public class TestHBaseInputStorageDriver
         }
     }
 
-    private HCatSchema getSchema() throws HCatException {
-
-        HCatSchema schema = new HCatSchema(new ArrayList<HCatFieldSchema>());
-        schema.append(new HCatFieldSchema("key", HCatFieldSchema.Type.STRING,
-                ""));
-        schema.append(new HCatFieldSchema("testqualifier1",
-                HCatFieldSchema.Type.STRING, ""));
-        schema.append(new HCatFieldSchema("testqualifier2",
-                HCatFieldSchema.Type.STRING, ""));
-        return schema;
-    }
-
  private HCatSchema getProjectionSchema() throws HCatException {
 
         HCatSchema schema = new HCatSchema(new ArrayList<HCatFieldSchema>());
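
Overall, the tests now create their fixtures through HCat DDL (CREATE DATABASE ... LOCATION and CREATE TABLE ... STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler') instead of hand-building metastore Table objects, verify through HBaseAdmin that the backing HBase table exists under the qualified name before reading it, and check that DROP TABLE removes it from HBase.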


