hbase-commits mailing list archives

From chia7...@apache.org
Subject [01/19] hbase git commit: HBASE-18839 Apply RegionInfo to code base
Date Thu, 28 Sep 2017 12:31:29 GMT
Repository: hbase
Updated Branches:
  refs/heads/branch-2 d26b8f8dd -> 6693f45fa


http://git-wip-us.apache.org/repos/asf/hbase/blob/6693f45f/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
index beef02b..6fa455a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
@@ -34,8 +34,24 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -43,7 +59,6 @@ import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
@@ -57,6 +72,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 
 @Ignore // Until after HBASE-14614 goes in.
 @Category({MiscTests.class, LargeTests.class})
@@ -111,10 +127,10 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
   @Test(timeout=180000)
   public void testFixAssignmentsWhenMETAinTransition() throws Exception {
     MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
-    admin.unassign(HRegionInfo.FIRST_META_REGIONINFO.getRegionName(), true);
-    assignmentManager.offlineRegion(HRegionInfo.FIRST_META_REGIONINFO);
+    admin.unassign(RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionName(), true);
+    assignmentManager.offlineRegion(RegionInfoBuilder.FIRST_META_REGIONINFO);
     new MetaTableLocator().deleteMetaLocation(cluster.getMaster().getZooKeeper());
-    assertFalse(regionStates.isRegionOnline(HRegionInfo.FIRST_META_REGIONINFO));
+    assertFalse(regionStates.isRegionOnline(RegionInfoBuilder.FIRST_META_REGIONINFO));
     HBaseFsck hbck = doFsck(conf, true);
     assertErrors(hbck, new HBaseFsck.ErrorReporter.ERROR_CODE[] { HBaseFsck.ErrorReporter.ERROR_CODE.UNKNOWN, HBaseFsck.ErrorReporter.ERROR_CODE.NO_META_REGION,
         HBaseFsck.ErrorReporter.ERROR_CODE.NULL_META_REGION });
@@ -134,7 +150,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
       assertEquals(ROWKEYS.length, countRows());
 
       // Now let's mess it up, by adding a region with a duplicate startkey
-      HRegionInfo hriDupe =
+      RegionInfo hriDupe =
           createRegion(tbl.getTableDescriptor(), Bytes.toBytes("A"), Bytes.toBytes("A2"));
       TEST_UTIL.assignRegion(hriDupe);
 
@@ -172,7 +188,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
       assertEquals(ROWKEYS.length, countRows());
 
       // Now let's mess it up, by adding a region with a duplicate startkey
-      HRegionInfo hriDupe =
+      RegionInfo hriDupe =
           createRegion(tbl.getTableDescriptor(), Bytes.toBytes("A"), Bytes.toBytes("B"));
       TEST_UTIL.assignRegion(hriDupe);
 
@@ -221,7 +237,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
       assertEquals(ROWKEYS.length, countRows());
 
       // Mess it up by creating an overlap in the metadata
-      HRegionInfo hriOverlap =
+      RegionInfo hriOverlap =
           createRegion(tbl.getTableDescriptor(), Bytes.toBytes("A2"), Bytes.toBytes("B"));
       TEST_UTIL.assignRegion(hriOverlap);
 
@@ -397,11 +413,11 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
 
       // Mess it up by creating an overlap
       MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
-      HRegionInfo hriOverlap1 =
+      RegionInfo hriOverlap1 =
           createRegion(tbl.getTableDescriptor(), Bytes.toBytes("A"), Bytes.toBytes("AB"));
       TEST_UTIL.assignRegion(hriOverlap1);
 
-      HRegionInfo hriOverlap2 =
+      RegionInfo hriOverlap2 =
           createRegion(tbl.getTableDescriptor(), Bytes.toBytes("AB"), Bytes.toBytes("B"));
       TEST_UTIL.assignRegion(hriOverlap2);
 
@@ -491,7 +507,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
     Scan scan = new Scan();
     scan.setStartRow(Bytes.toBytes(tableName+",,"));
     ResultScanner scanner = meta.getScanner(scan);
-    HRegionInfo hri = null;
+    RegionInfo hri = null;
 
     Result res = scanner.next();
     ServerName currServer =
@@ -515,7 +531,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
         put.addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
             Bytes.toBytes(sn.getStartcode()));
         meta.put(put);
-        hri = MetaTableAccessor.getHRegionInfo(res);
+        hri = MetaTableAccessor.getRegionInfo(res);
         break;
       }
     }
@@ -555,7 +571,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
       // Mess it up by leaving a hole in the meta data
       admin.disableTable(tableName);
       deleteRegion(conf, tbl.getTableDescriptor(), Bytes.toBytes("B"), Bytes.toBytes("C"), true,
-        true, false, true, HRegionInfo.DEFAULT_REPLICA_ID);
+        true, false, true, RegionInfo.DEFAULT_REPLICA_ID);
       admin.enableTable(tableName);
 
       HBaseFsck hbck = doFsck(conf, false);
@@ -607,7 +623,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
       admin.enableCatalogJanitor(false);
       meta = connection.getTable(TableName.META_TABLE_NAME, tableExecutorService);
       HRegionLocation loc = this.connection.getRegionLocation(table, SPLITS[0], false);
-      HRegionInfo hriParent = loc.getRegionInfo();
+      RegionInfo hriParent = loc.getRegionInfo();
 
       // Split Region A just before B
       this.connection.getAdmin().split(table, Bytes.toBytes("A@"));

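A note for readers tracing the change above: HBASE-18839 moves constants and construction from the concrete HRegionInfo class to the RegionInfo interface and its RegionInfoBuilder. A minimal standalone sketch of the new idiom (class and table names here are illustrative, not from the patch):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionInfoMigrationSketch {
  public static void main(String[] args) {
    // Old style: new HRegionInfo(tableName, startKey, endKey).
    // New style: an immutable RegionInfo built via RegionInfoBuilder.
    RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf("t1"))
        .setStartKey(Bytes.toBytes("A"))
        .setEndKey(Bytes.toBytes("A2"))
        .build();
    // Constants referenced in the hunks above now resolve as:
    RegionInfo meta = RegionInfoBuilder.FIRST_META_REGIONINFO; // was HRegionInfo.FIRST_META_REGIONINFO
    int replicaId = RegionInfo.DEFAULT_REPLICA_ID;             // was HRegionInfo.DEFAULT_REPLICA_ID
    System.out.println(hri.getRegionNameAsString() + " " + meta.getEncodedName() + " " + replicaId);
  }
}
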
http://git-wip-us.apache.org/repos/asf/hbase/blob/6693f45f/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
index a7c4ad1..e6070dd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -43,6 +42,8 @@ import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -170,7 +171,7 @@ public class OfflineMetaRebuildTestCore {
     }
 
     for (HRegionLocation e : regions) {
-      HRegionInfo hri = e.getRegionInfo();
+      RegionInfo hri = e.getRegionInfo();
       ServerName hsa = e.getServerName();
       if (Bytes.compareTo(hri.getStartKey(), startKey) == 0
           && Bytes.compareTo(hri.getEndKey(), endKey) == 0) {
@@ -199,11 +200,14 @@ public class OfflineMetaRebuildTestCore {
     dumpMeta(htd);
   }
 
-  protected HRegionInfo createRegion(Configuration conf, final Table htbl,
+  protected RegionInfo createRegion(Configuration conf, final Table htbl,
       byte[] startKey, byte[] endKey) throws IOException {
     Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
     HTableDescriptor htd = htbl.getTableDescriptor();
-    HRegionInfo hri = new HRegionInfo(htbl.getName(), startKey, endKey);
+    RegionInfo hri = RegionInfoBuilder.newBuilder(htbl.getName())
+        .setStartKey(startKey)
+        .setEndKey(endKey)
+        .build();
 
     LOG.info("manually adding regioninfo and hdfs data: " + hri.toString());
     Path rootDir = FSUtils.getRootDir(conf);
@@ -213,7 +217,7 @@ public class OfflineMetaRebuildTestCore {
     fs.mkdirs(p);
     Path riPath = new Path(p, HRegionFileSystem.REGION_INFO_FILE);
     FSDataOutputStream out = fs.create(riPath);
-    out.write(hri.toDelimitedByteArray());
+    out.write(RegionInfo.toDelimitedByteArray(hri));
     out.close();
 
     // add to meta.
@@ -230,8 +234,8 @@ public class OfflineMetaRebuildTestCore {
     ResultScanner scanner = meta.getScanner(s);
     List<Delete> dels = new ArrayList<>();
     for (Result r : scanner) {
-      HRegionInfo info =
-          MetaTableAccessor.getHRegionInfo(r);
+      RegionInfo info =
+          MetaTableAccessor.getRegionInfo(r);
       if(info != null && !info.getTable().getNamespaceAsString()
           .equals(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR)) {
         Delete d = new Delete(r.getRow());

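Note the serialization change in createRegion above: toDelimitedByteArray is no longer an instance method on HRegionInfo but a static helper on the RegionInfo interface. A minimal sketch (table name illustrative):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;

public class RegionInfoSerializationSketch {
  public static void main(String[] args) throws Exception {
    RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf("t1")).build();
    // Old: hri.toDelimitedByteArray() on HRegionInfo.
    // New: static helper on the interface, as used in the hunk above.
    byte[] bytes = RegionInfo.toDelimitedByteArray(hri);
    System.out.println("serialized " + bytes.length + " bytes");
  }
}
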
http://git-wip-us.apache.org/repos/asf/hbase/blob/6693f45f/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java
index a666279..472eadd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java
@@ -21,14 +21,14 @@ package org.apache.hadoop.hbase.wal;
 
 import java.io.IOException;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
+import org.apache.yetus.audience.InterfaceAudience;
 
 // imports for things that haven't moved yet
-import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
 
 /**
 * This is a utility class, used by tests, which fails operation specified by FailureType enum
@@ -44,11 +44,11 @@ public class FaultyFSLog extends FSHLog {
       throws IOException {
     super(fs, rootDir, logName, conf);
   }
-  
+
   public void setFailureType(FailureType fType) {
     this.ft = fType;
   }
-  
+
   @Override
   public void sync(long txid) throws IOException {
     if (this.ft == FailureType.SYNC) {
@@ -58,7 +58,7 @@ public class FaultyFSLog extends FSHLog {
   }
 
   @Override
-  public long append(HRegionInfo info, WALKey key,
+  public long append(RegionInfo info, WALKey key,
       WALEdit edits, boolean inMemstore) throws IOException {
     if (this.ft == FailureType.APPEND) {
       throw new IOException("append");

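The only API change in FaultyFSLog is the append signature (HRegionInfo becomes RegionInfo); its usage is unchanged. A hedged sketch of driving it in a test, where the path is a placeholder and the nested location of the FailureType enum is assumed (the APPEND and SYNC values themselves appear in the hunks above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.wal.FaultyFSLog;

public class FaultyWALSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    // Constructor signature taken from the super() call shown in the diff.
    FaultyFSLog wal = new FaultyFSLog(fs, new Path("/tmp/faulty-wal"), "testlog", conf);
    wal.setFailureType(FaultyFSLog.FailureType.APPEND);
    // Any subsequent wal.append(RegionInfo, WALKey, WALEdit, boolean) now throws IOException.
  }
}
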
http://git-wip-us.apache.org/repos/asf/hbase/blob/6693f45f/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index 0204230..75aba03 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hbase.wal;
 
+import static com.codahale.metrics.MetricRegistry.name;
+
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -31,7 +33,6 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
@@ -42,40 +43,40 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MockRegionServerServices;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.LogRoller;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
+import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
+import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
 import org.apache.hadoop.hbase.trace.SpanReceiverHost;
-import org.apache.hadoop.hbase.wal.WALProvider.Writer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.wal.WALProvider.Writer;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.htrace.Sampler;
 import org.apache.htrace.Trace;
 import org.apache.htrace.TraceScope;
 import org.apache.htrace.impl.ProbabilitySampler;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import com.codahale.metrics.ConsoleReporter;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.Meter;
-import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.MetricRegistry;
 
 // imports for things that haven't moved from regionserver.wal yet.
-import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
-import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
-import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
-
-import static com.codahale.metrics.MetricRegistry.name;
 
 /**
  * This class runs performance benchmarks for {@link WAL}.
@@ -184,7 +185,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
             Put put = setupPut(rand, key, value, numFamilies);
             WALEdit walEdit = new WALEdit();
             addFamilyMapToWALEdit(put.getFamilyCellMap(), walEdit);
-            HRegionInfo hri = region.getRegionInfo();
+            RegionInfo hri = region.getRegionInfo();
             final WALKey logkey =
                 new WALKey(hri.getEncodedNameAsBytes(), hri.getTable(), now, mvcc, scopes);
             wal.append(hri, logkey, walEdit, true);
@@ -347,7 +348,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
         long putTime = runBenchmark(benchmarks, numThreads);
         logBenchmarkResult("Summary: threads=" + numThreads + ", iterations=" + numIterations
+
           ", syncInterval=" + syncInterval, numIterations * numThreads, putTime);
-        
+
         for (int i = 0; i < numRegions; i++) {
           if (regions[i] != null) {
             closeRegion(regions[i]);
@@ -449,7 +450,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
   private static void logBenchmarkResult(String testName, long numTests, long totalTime) {
     float tsec = totalTime / 1000.0f;
     LOG.info(String.format("%s took %.3fs %.3fops/s", testName, tsec, numTests / tsec));
-    
+
   }
 
   private void printUsageAndExit() {
@@ -493,7 +494,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
   private HRegion openRegion(final FileSystem fs, final Path dir, final HTableDescriptor htd,
       final WALFactory wals, final long whenToRoll, final LogRoller roller) throws IOException {
     // Initialize HRegion
-    HRegionInfo regionInfo = new HRegionInfo(htd.getTableName());
+    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(htd.getTableName()).build();
     // Initialize WAL
     final WAL wal =
         wals.getWAL(regionInfo.getEncodedNameAsBytes(), regionInfo.getTable().getNamespace());
@@ -529,7 +530,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
         }
       });
     }
-     
+
     return HRegion.createHRegion(regionInfo, dir, getConf(), htd, wal);
   }
 

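In openRegion above, the single-argument HRegionInfo constructor becomes a builder with no keys set; the resulting RegionInfo spans the table's entire key range, and its encoded name is what keys the WAL lookup. A minimal sketch (table name illustrative):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;

public class WholeTableRegionSketch {
  public static void main(String[] args) {
    // Was: new HRegionInfo(htd.getTableName()).
    RegionInfo ri = RegionInfoBuilder.newBuilder(TableName.valueOf("perfTable")).build();
    // No start/end key set => empty byte[] boundaries, i.e. the whole table.
    System.out.println("startKeyLen=" + ri.getStartKey().length
        + " endKeyLen=" + ri.getEndKey().length
        + " encoded=" + ri.getEncodedName());
  }
}
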
http://git-wip-us.apache.org/repos/asf/hbase/blob/6693f45f/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 48a365d..c1e761a 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -20,6 +20,11 @@ package org.apache.hadoop.hbase.thrift;
 
 import static org.apache.hadoop.hbase.util.Bytes.getBytes;
 
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.AuthorizeCallback;
+import javax.security.sasl.Sasl;
+import javax.security.sasl.SaslServer;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
@@ -39,12 +44,6 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.Sasl;
-import javax.security.sasl.SaslServer;
-
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionGroup;
@@ -55,7 +54,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
@@ -63,7 +61,6 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
@@ -72,6 +69,7 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.OperationWithAttributes;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -83,7 +81,6 @@ import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
 import org.apache.hadoop.hbase.security.SecurityUtil;
 import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.thrift.CallQueue.Call;
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
 import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
@@ -123,18 +120,23 @@ import org.apache.thrift.transport.TSaslServerTransport;
 import org.apache.thrift.transport.TServerSocket;
 import org.apache.thrift.transport.TServerTransport;
 import org.apache.thrift.transport.TTransportFactory;
-
+import org.apache.yetus.audience.InterfaceAudience;
 import org.eclipse.jetty.http.HttpVersion;
-import org.eclipse.jetty.server.*;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
-import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SslConnectionFactory;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.util.thread.QueuedThreadPool;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 /**
  * ThriftServerRunner - this class starts up a Thrift server which implements
  * the Hbase API specified in the Hbase.thrift IDL file.
@@ -893,7 +895,7 @@ public class ThriftServerRunner implements Runnable {
         List<HRegionLocation> regionLocations = locator.getAllRegionLocations();
         List<TRegionInfo> results = new ArrayList<>(regionLocations.size());
         for (HRegionLocation regionLocation : regionLocations) {
-          HRegionInfo info = regionLocation.getRegionInfo();
+          RegionInfo info = regionLocation.getRegionInfo();
           ServerName serverName = regionLocation.getServerName();
           TRegionInfo region = new TRegionInfo();
           region.serverName = ByteBuffer.wrap(
@@ -987,7 +989,7 @@ public class ThriftServerRunner implements Runnable {
      */
     public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, byte[] family,
        byte[] qualifier, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1033,7 +1035,7 @@ public class ThriftServerRunner implements Runnable {
     protected List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, byte[] family,
        byte[] qualifier, long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes)
         throws IOError {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1085,7 +1087,7 @@ public class ThriftServerRunner implements Runnable {
     public List<TRowResult> getRowWithColumnsTs(
         ByteBuffer tableName, ByteBuffer row, List<ByteBuffer> columns,
         long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1151,7 +1153,7 @@ public class ThriftServerRunner implements Runnable {
                                                  List<ByteBuffer> rows,
         List<ByteBuffer> columns, long timestamp,
         Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      
+
       Table table= null;
       try {
         List<Get> gets = new ArrayList<>(rows.size());
@@ -1500,7 +1502,7 @@ public class ThriftServerRunner implements Runnable {
     public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
         Map<ByteBuffer, ByteBuffer> attributes)
         throws IOError {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1555,7 +1557,7 @@ public class ThriftServerRunner implements Runnable {
     public int scannerOpen(ByteBuffer tableName, ByteBuffer startRow,
         List<ByteBuffer> columns,
         Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1585,7 +1587,7 @@ public class ThriftServerRunner implements Runnable {
         ByteBuffer stopRow, List<ByteBuffer> columns,
         Map<ByteBuffer, ByteBuffer> attributes)
         throws IOError, TException {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1616,7 +1618,7 @@ public class ThriftServerRunner implements Runnable {
                                      List<ByteBuffer> columns,
         Map<ByteBuffer, ByteBuffer> attributes)
         throws IOError, TException {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1648,7 +1650,7 @@ public class ThriftServerRunner implements Runnable {
     public int scannerOpenTs(ByteBuffer tableName, ByteBuffer startRow,
         List<ByteBuffer> columns, long timestamp,
         Map<ByteBuffer, ByteBuffer> attributes) throws IOError, TException {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1679,7 +1681,7 @@ public class ThriftServerRunner implements Runnable {
         ByteBuffer stopRow, List<ByteBuffer> columns, long timestamp,
         Map<ByteBuffer, ByteBuffer> attributes)
         throws IOError, TException {
-      
+
       Table table = null;
       try {
         table = getTable(tableName);
@@ -1709,7 +1711,7 @@ public class ThriftServerRunner implements Runnable {
     @Override
     public Map<ByteBuffer, ColumnDescriptor> getColumnDescriptors(
         ByteBuffer tableName) throws IOError, TException {
-      
+
       Table table = null;
       try {
         TreeMap<ByteBuffer, ColumnDescriptor> columns = new TreeMap<>();
@@ -1741,7 +1743,7 @@ public class ThriftServerRunner implements Runnable {
         throw getIOError(e);
       }
     }
-    
+
     @Override
     public TRegionInfo getRegionInfo(ByteBuffer searchRow) throws IOError {
       try {
@@ -1755,9 +1757,9 @@ public class ThriftServerRunner implements Runnable {
         }
 
         // find region start and end keys
-        HRegionInfo regionInfo = MetaTableAccessor.getHRegionInfo(startRowResult);
+        RegionInfo regionInfo = MetaTableAccessor.getRegionInfo(startRowResult);
         if (regionInfo == null) {
-          throw new IOException("HRegionInfo REGIONINFO was null or " +
+          throw new IOException("RegionInfo REGIONINFO was null or " +
                                 " empty in Meta for row="
                                 + Bytes.toStringBinary(row));
         }
@@ -1787,7 +1789,7 @@ public class ThriftServerRunner implements Runnable {
       scan.setReversed(true);
       scan.addFamily(family);
       scan.setStartRow(row);
-      Table table = getTable(tableName);      
+      Table table = getTable(tableName);
       try (ResultScanner scanner = table.getScanner(scan)) {
         return scanner.next();
       } finally{

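Several hunks in this patch swap MetaTableAccessor.getHRegionInfo(Result) for getRegionInfo(Result), which returns the RegionInfo interface. A hedged end-to-end sketch of the new call, assuming a reachable cluster via the classpath configuration (the null check matters because hbase:meta also holds non-region rows):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public class MetaScanSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table meta = conn.getTable(TableName.META_TABLE_NAME);
         ResultScanner scanner = meta.getScanner(new Scan())) {
      for (Result r : scanner) {
        // Old: MetaTableAccessor.getHRegionInfo(r); the new accessor returns RegionInfo.
        RegionInfo info = MetaTableAccessor.getRegionInfo(r);
        if (info != null) {
          System.out.println(info.getRegionNameAsString());
        }
      }
    }
  }
}
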
