hbase-commits mailing list archives

From e...@apache.org
Subject hbase git commit: HBASE-13328 LoadIncrementalHFile.doBulkLoad(Path, HTable) should handle managed connections
Date Fri, 27 Mar 2015 21:54:27 GMT
Repository: hbase
Updated Branches:
  refs/heads/branch-1 b9a615e73 -> 4bda365ab


HBASE-13328 LoadIncrementalHFile.doBulkLoad(Path,HTable) should handle managed connections


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4bda365a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4bda365a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4bda365a

Branch: refs/heads/branch-1
Commit: 4bda365ab1b47f4442b62d18192e9baa2823c6df
Parents: b9a615e
Author: Enis Soztutar <enis@apache.org>
Authored: Fri Mar 27 14:49:58 2015 -0700
Committer: Enis Soztutar <enis@apache.org>
Committed: Fri Mar 27 14:50:23 2015 -0700

----------------------------------------------------------------------
 .../hbase/mapreduce/LoadIncrementalHFiles.java      | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/4bda365a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 61b00bb..3032251 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.NeedUnmanagedConnectionException;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.RegionServerCallable;
 import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
@@ -245,6 +246,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
       this.hfilePath = hfilePath;
     }
 
+    @Override
     public String toString() {
       return "family:"+ Bytes.toString(family) + " path:" + hfilePath.toString();
     }
@@ -288,7 +290,17 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
   public void doBulkLoad(Path hfofDir, final HTable table)
     throws TableNotFoundException, IOException
   {
-    doBulkLoad(hfofDir, table.getConnection().getAdmin(), table, table.getRegionLocator());
+    Admin admin = null;
+    try {
+      try {
+        admin = table.getConnection().getAdmin();
+      } catch (NeedUnmanagedConnectionException ex) {
+        admin = new HBaseAdmin(table.getConfiguration());
+      }
+      doBulkLoad(hfofDir, admin, table, table.getRegionLocator());
+    } finally {
+      admin.close();
+    }
   }
 
   /**
@@ -436,6 +448,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
       final Collection<LoadQueueItem> lqis =  e.getValue();
 
       final Callable<List<LoadQueueItem>> call = new Callable<List<LoadQueueItem>>() {
+        @Override
         public List<LoadQueueItem> call() throws Exception {
           List<LoadQueueItem> toRetry =
               tryAtomicRegionLoad(conn, table.getName(), first, lqis);
@@ -512,6 +525,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
       final LoadQueueItem item = queue.remove();
 
       final Callable<List<LoadQueueItem>> call = new Callable<List<LoadQueueItem>>() {
+        @Override
         public List<LoadQueueItem> call() throws Exception {
           List<LoadQueueItem> splits = groupOrSplit(regionGroups, item, table, startEndKeys);
           return splits;
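
----------------------------------------------------------------------

For readers of the patch, a minimal caller-side sketch of the case this change
addresses, assuming the HBase 1.x (branch-1) client API; the table name
"mytable" and the HFile directory "/tmp/hfiles" are illustrative, not taken
from the commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;

public class BulkLoadExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);

    // The deprecated HTable(Configuration, TableName) constructor sets up a
    // managed connection internally. Asking a managed connection for an Admin
    // throws NeedUnmanagedConnectionException, which previously broke
    // doBulkLoad(Path, HTable); the patch falls back to a standalone
    // HBaseAdmin in that case and closes it when the load finishes.
    HTable table = new HTable(conf, TableName.valueOf("mytable"));
    try {
      // Directory of pre-generated HFiles (e.g. HFileOutputFormat2 output).
      loader.doBulkLoad(new Path("/tmp/hfiles"), table);
    } finally {
      table.close();
    }
  }
}

Callers that manage their own connection can keep using the four-argument
doBulkLoad(Path, Admin, Table, RegionLocator) overload invoked inside the patch.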

