hbase-commits mailing list archives

From: st...@apache.org
Subject: git commit: HBASE-8361 Bulk load and other utilities should not create tables for user (Ashish Singhi)
Date: Mon, 13 Oct 2014 04:52:20 GMT
Repository: hbase
Updated Branches:
  refs/heads/master ab42b9ffe -> 8e9a8b002


HBASE-8361 Bulk load and other utilities should not create tables for user (Ashish Singhi)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8e9a8b00
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8e9a8b00
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8e9a8b00

Branch: refs/heads/master
Commit: 8e9a8b002ff088b9ea05aa6188d3c2bedec79a5a
Parents: ab42b9f
Author: stack <stack@apache.org>
Authored: Sun Oct 12 21:52:01 2014 -0700
Committer: stack <stack@apache.org>
Committed: Sun Oct 12 21:52:01 2014 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/mapreduce/ImportTsv.java       | 19 +++++++++++++++----
 .../hbase/mapreduce/LoadIncrementalHFiles.java  | 20 ++++++++++++++++----
 .../hadoop/hbase/mapreduce/TestImportTsv.java   | 16 +++++++++++++++-
 .../mapreduce/TestLoadIncrementalHFiles.java    | 10 ++++++++++
 4 files changed, 56 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/8e9a8b00/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
index a953c3e..5b375e9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
@@ -94,6 +95,7 @@ public class ImportTsv extends Configured implements Tool {
   final static String DEFAULT_ATTRIBUTES_SEPERATOR = "=>";
   final static String DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR = ",";
   final static Class DEFAULT_MAPPER = TsvImporterMapper.class;
+  public final static String CREATE_TABLE_CONF_KEY = "create.table";
 
   public static class TsvParser {
     /**
@@ -432,10 +434,16 @@ public class ImportTsv extends Configured implements Tool {
 
     if (hfileOutPath != null) {
       if (!admin.tableExists(tableName)) {
-        LOG.warn(format("Table '%s' does not exist.", tableName));
-        // TODO: this is backwards. Instead of depending on the existence of a table,
-        // create a sane splits file for HFileOutputFormat based on data sampling.
-        createTable(admin, tableName, columns);
+        String errorMsg = format("Table '%s' does not exist.", tableName);
+        if ("yes".equalsIgnoreCase(conf.get(CREATE_TABLE_CONF_KEY, "yes"))) {
+          LOG.warn(errorMsg);
+          // TODO: this is backwards. Instead of depending on the existence of a table,
+          // create a sane splits file for HFileOutputFormat based on data sampling.
+          createTable(admin, tableName, columns);
+        } else {
+          LOG.error(errorMsg);
+          throw new TableNotFoundException(errorMsg);
+        }
       }
       HTable table = new HTable(conf, tableName);
       job.setReducerClass(PutSortReducer.class);
@@ -534,6 +542,9 @@ public class ImportTsv extends Configured implements Tool {
       "  -D" + MAPPER_CONF_KEY + "=my.Mapper - A user-defined Mapper to use instead of "
+
       DEFAULT_MAPPER.getName() + "\n" +
       "  -D" + JOB_NAME_CONF_KEY + "=jobName - use the specified mapreduce job name for the
import\n" +
+      "  -D" + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by this
tool\n" +
+      "  Note: if you set this to 'no', then the target table must already exist in HBase\n"
+
+      "\n" +
       "For performance consider the following options:\n" +
       "  -Dmapreduce.map.speculative=false\n" +
       "  -Dmapreduce.reduce.speculative=false";

http://git-wip-us.apache.org/repos/asf/hbase/blob/8e9a8b00/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 612b87e..855417d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -18,6 +18,8 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import static java.lang.String.format;
+
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InterruptedIOException;
@@ -114,6 +116,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
   public static final String MAX_FILES_PER_REGION_PER_FAMILY
     = "hbase.mapreduce.bulkload.max.hfiles.perRegion.perFamily";
   private static final String ASSIGN_SEQ_IDS = "hbase.mapreduce.bulkload.assign.sequenceNumbers";
+  public final static String CREATE_TABLE_CONF_KEY = "create.table";
 
   private int maxFilesPerRegionPerFamily;
   private boolean assignSeqIds;
@@ -148,9 +151,10 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
   }
 
   private void usage() {
-    System.err.println("usage: " + NAME +
-        " /path/to/hfileoutputformat-output " +
-        "tablename");
+    System.err.println("usage: " + NAME + " /path/to/hfileoutputformat-output tablename" + "\n -D"
+        + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by this tool\n"
+        + "  Note: if you set this to 'no', then the target table must already exist in HBase\n"
+        + "\n");
   }
 
   /**
@@ -906,7 +910,15 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
     TableName tableName = TableName.valueOf(args[1]);
 
     boolean tableExists = this.doesTableExist(tableName);
-    if (!tableExists) this.createTable(tableName,dirPath);
+    if (!tableExists) {
+      if ("yes".equalsIgnoreCase(getConf().get(CREATE_TABLE_CONF_KEY, "yes"))) {
+        this.createTable(tableName, dirPath);
+      } else {
+        String errorMsg = format("Table '%s' does not exist.", tableName);
+        LOG.error(errorMsg);
+        throw new TableNotFoundException(errorMsg);
+      }
+    }
 
     Path hfofDir = new Path(dirPath);
     HTable table = new HTable(getConf(), tableName);
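
The analogous sketch for the bulk-load tool, mirroring the new test below (the
driver class name, directory, and table are hypothetical). run() now throws
TableNotFoundException when the table is absent and create.table=no:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;

    public class BulkLoadNoCreate { // hypothetical driver class
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // With create.table=no, the target table must already exist.
        conf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no");
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        // args: <hfile output dir> <table>; fails fast if 'myTable' is missing.
        loader.run(new String[] { "/tmp/bulk-out", "myTable" });
      }
    }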

http://git-wip-us.apache.org/repos/asf/hbase/blob/8e9a8b00/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index e3b3495..2acddd3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -229,7 +230,20 @@ public class TestImportTsv implements Configurable {
     String data = "KEY\u001bVALUE4\u001bVALUE8\n";
     doMROnTableTest(util, FAMILY, data, args, 4);
   }
-  
+
+  @Test(expected = TableNotFoundException.class)
+  public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
+    String table = "test-" + UUID.randomUUID();
+    String[] args =
+        new String[] { table, "/inputFile" };
+
+    Configuration conf = new Configuration(util.getConfiguration());
+    conf.set(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A");
+    conf.set(ImportTsv.BULK_OUTPUT_CONF_KEY, "/output");
+    conf.set(ImportTsv.CREATE_TABLE_CONF_KEY, "no");
+    ImportTsv.createSubmittableJob(conf, args);
+  }
+
   protected static Tool doMROnTableTest(HBaseTestingUtility util, String family,
       String data, String[] args) throws Exception {
     return doMROnTableTest(util, family, data, args, 1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8e9a8b00/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 5a36d10..d3019ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -422,5 +423,14 @@ public class TestLoadIncrementalHFiles {
         + MAX_FILES_PER_REGION_PER_FAMILY + " hfiles"));
     }
   }
+
+  @Test(expected = TableNotFoundException.class)
+  public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
+    Configuration conf = util.getConfiguration();
+    conf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no");
+    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
+    String[] args = { "directory", "nonExistingTable" };
+    loader.run(args);
+  }
 }
 

