drill-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From a..@apache.org
Subject [1/2] drill git commit: DRILL-4199: Add Support for HBase 1.X
Date Tue, 14 Jun 2016 23:57:47 GMT
Repository: drill
Updated Branches:
  refs/heads/master 6286c0a4b -> c2d9959e0


http://git-wip-us.apache.org/repos/asf/drill/blob/c2d9959e/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java
index ce7d585..b054bfa 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java
@@ -23,39 +23,36 @@ import java.util.List;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
 import org.junit.Test;
 
 public class TestHBaseQueries extends BaseHBaseTest {
 
   @Test
   public void testWithEmptyFirstAndLastRegion() throws Exception {
-    HBaseAdmin admin = HBaseTestsSuite.getAdmin();
-    String tableName = "drill_ut_empty_regions";
-    HTable table = null;
+    HBaseAdmin admin = (HBaseAdmin) HBaseTestsSuite.getAdmin();
+    TableName tableName = TableName.valueOf("drill_ut_empty_regions");
 
-    try {
-    HTableDescriptor desc = new HTableDescriptor(tableName);
-    desc.addFamily(new HColumnDescriptor("f"));
-    admin.createTable(desc, Arrays.copyOfRange(TestTableGenerator.SPLIT_KEYS, 0, 2));
+    try (Table table = HBaseTestsSuite.getConnection().getTable(tableName);) {
+      HTableDescriptor desc = new HTableDescriptor(tableName);
+      desc.addFamily(new HColumnDescriptor("f"));
+      admin.createTable(desc, Arrays.copyOfRange(TestTableGenerator.SPLIT_KEYS, 0, 2));
 
-    table = new HTable(admin.getConfiguration(), tableName);
-    Put p = new Put("b".getBytes());
-    p.add("f".getBytes(), "c".getBytes(), "1".getBytes());
-    table.put(p);
+      Put p = new Put("b".getBytes());
+      p.addColumn("f".getBytes(), "c".getBytes(), "1".getBytes());
+      table.put(p);
 
-    setColumnWidths(new int[] {8, 15});
-    runHBaseSQLVerifyCount("SELECT *\n"
-        + "FROM\n"
-        + "  hbase.`" + tableName + "` tableName\n"
-        , 1);
+      setColumnWidths(new int[] {8, 15});
+      runHBaseSQLVerifyCount("SELECT *\n"
+          + "FROM\n"
+          + "  hbase.`" + tableName + "` tableName\n"
+          , 1);
     } finally {
       try {
-        if (table != null) {
-          table.close();
-        }
         admin.disableTable(tableName);
         admin.deleteTable(tableName);
       } catch (Exception e) { } // ignore
@@ -63,20 +60,16 @@ public class TestHBaseQueries extends BaseHBaseTest {
 
   }
 
-
   @Test
   public void testWithEmptyTable() throws Exception {
-    HBaseAdmin admin = HBaseTestsSuite.getAdmin();
-    String tableName = "drill_ut_empty_table";
-    HTable table = null;
+    Admin admin = HBaseTestsSuite.getAdmin();
+    TableName tableName = TableName.valueOf("drill_ut_empty_table");
 
-    try {
+    try (Table table = HBaseTestsSuite.getConnection().getTable(tableName);) {
       HTableDescriptor desc = new HTableDescriptor(tableName);
       desc.addFamily(new HColumnDescriptor("f"));
       admin.createTable(desc, Arrays.copyOfRange(TestTableGenerator.SPLIT_KEYS, 0, 2));
 
-      table = new HTable(admin.getConfiguration(), tableName);
-
       setColumnWidths(new int[] {8, 15});
       runHBaseSQLVerifyCount("SELECT row_key, count(*)\n"
           + "FROM\n"
@@ -84,14 +77,12 @@ public class TestHBaseQueries extends BaseHBaseTest {
           , 0);
     } finally {
       try {
-        if (table != null) {
-          table.close();
-        }
         admin.disableTable(tableName);
         admin.deleteTable(tableName);
       } catch (Exception e) { } // ignore
     }
   }
+
   @Test
   public void testCastEmptyStrings() throws Exception {
     try {
@@ -106,4 +97,5 @@ public class TestHBaseQueries extends BaseHBaseTest {
         test("alter system reset `drill.exec.functions.cast_empty_string_to_null`;");
     }
   }
+
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/c2d9959e/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java
index 96c3668..391a616 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java
@@ -17,42 +17,20 @@
  */
 package org.apache.drill.hbase;
 
-import static org.apache.drill.TestBuilder.listOf;
-import static org.apache.drill.TestBuilder.mapOf;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import io.netty.buffer.DrillBuf;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import mockit.Injectable;
-
 import org.apache.drill.BaseTestQuery;
-import org.apache.drill.TestBuilder;
-import org.apache.drill.exec.compile.ClassTransformer;
-import org.apache.drill.exec.compile.ClassTransformer.ScalarReplacementOption;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
 import org.apache.drill.exec.proto.UserBitShared.QueryType;
 import org.apache.drill.exec.record.RecordBatchLoader;
-import org.apache.drill.exec.rpc.RpcException;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
-import org.apache.drill.exec.rpc.user.UserServer;
-import org.apache.drill.exec.server.Drillbit;
-import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
-import org.apache.drill.exec.server.options.OptionValue;
-import org.apache.drill.exec.server.options.OptionValue.OptionType;
-import org.apache.drill.exec.util.ByteBufUtil.HadoopWritables;
-import org.apache.drill.exec.util.VectorUtil;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.VarCharVector;
-import org.joda.time.DateTime;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.base.Charsets;

http://git-wip-us.apache.org/repos/asf/drill/blob/c2d9959e/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
index f4f3e93..73df7e4 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
@@ -24,9 +24,15 @@ import java.util.Random;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.BufferedMutator;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Order;
+import org.apache.hadoop.hbase.util.OrderedBytes;
+import org.apache.hadoop.hbase.util.PositionedByteRange;
+import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
 
 public class TestTableGenerator {
 
@@ -39,7 +45,7 @@ public class TestTableGenerator {
   static final byte[] FAMILY_F = {'f'};
   static final byte[] COLUMN_C = {'c'};
 
-  public static void generateHBaseDataset1(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDataset1(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -54,81 +60,80 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     Put p = new Put("a1".getBytes());
-    p.add("f".getBytes(), "c1".getBytes(), "1".getBytes());
-    p.add("f".getBytes(), "c2".getBytes(), "2".getBytes());
-    p.add("f".getBytes(), "c3".getBytes(), "3".getBytes());
-    p.add("f".getBytes(), "c4".getBytes(), "4".getBytes());
-    p.add("f".getBytes(), "c5".getBytes(), "5".getBytes());
-    p.add("f".getBytes(), "c6".getBytes(), "6".getBytes());
-    table.put(p);
+    p.addColumn("f".getBytes(), "c1".getBytes(), "1".getBytes());
+    p.addColumn("f".getBytes(), "c2".getBytes(), "2".getBytes());
+    p.addColumn("f".getBytes(), "c3".getBytes(), "3".getBytes());
+    p.addColumn("f".getBytes(), "c4".getBytes(), "4".getBytes());
+    p.addColumn("f".getBytes(), "c5".getBytes(), "5".getBytes());
+    p.addColumn("f".getBytes(), "c6".getBytes(), "6".getBytes());
+    table.mutate(p);
 
     p = new Put("a2".getBytes());
-    p.add("f".getBytes(), "c1".getBytes(), "1".getBytes());
-    p.add("f".getBytes(), "c2".getBytes(), "2".getBytes());
-    p.add("f".getBytes(), "c3".getBytes(), "3".getBytes());
-    p.add("f".getBytes(), "c4".getBytes(), "4".getBytes());
-    p.add("f".getBytes(), "c5".getBytes(), "5".getBytes());
-    p.add("f".getBytes(), "c6".getBytes(), "6".getBytes());
-    table.put(p);
+    p.addColumn("f".getBytes(), "c1".getBytes(), "1".getBytes());
+    p.addColumn("f".getBytes(), "c2".getBytes(), "2".getBytes());
+    p.addColumn("f".getBytes(), "c3".getBytes(), "3".getBytes());
+    p.addColumn("f".getBytes(), "c4".getBytes(), "4".getBytes());
+    p.addColumn("f".getBytes(), "c5".getBytes(), "5".getBytes());
+    p.addColumn("f".getBytes(), "c6".getBytes(), "6".getBytes());
+    table.mutate(p);
 
     p = new Put("a3".getBytes());
-    p.add("f".getBytes(), "c1".getBytes(), "1".getBytes());
-    p.add("f".getBytes(), "c3".getBytes(), "2".getBytes());
-    p.add("f".getBytes(), "c5".getBytes(), "3".getBytes());
-    p.add("f".getBytes(), "c7".getBytes(), "4".getBytes());
-    p.add("f".getBytes(), "c8".getBytes(), "5".getBytes());
-    p.add("f".getBytes(), "c9".getBytes(), "6".getBytes());
-    table.put(p);
+    p.addColumn("f".getBytes(), "c1".getBytes(), "1".getBytes());
+    p.addColumn("f".getBytes(), "c3".getBytes(), "2".getBytes());
+    p.addColumn("f".getBytes(), "c5".getBytes(), "3".getBytes());
+    p.addColumn("f".getBytes(), "c7".getBytes(), "4".getBytes());
+    p.addColumn("f".getBytes(), "c8".getBytes(), "5".getBytes());
+    p.addColumn("f".getBytes(), "c9".getBytes(), "6".getBytes());
+    table.mutate(p);
 
     p = new Put(new byte[]{'b', '4', 0});
-    p.add("f".getBytes(), "c1".getBytes(), "1".getBytes());
-    p.add("f2".getBytes(), "c2".getBytes(), "2".getBytes());
-    p.add("f".getBytes(), "c3".getBytes(), "3".getBytes());
-    p.add("f2".getBytes(), "c4".getBytes(), "4".getBytes());
-    p.add("f".getBytes(), "c5".getBytes(), "5".getBytes());
-    p.add("f2".getBytes(), "c6".getBytes(), "6".getBytes());
-    table.put(p);
+    p.addColumn("f".getBytes(), "c1".getBytes(), "1".getBytes());
+    p.addColumn("f2".getBytes(), "c2".getBytes(), "2".getBytes());
+    p.addColumn("f".getBytes(), "c3".getBytes(), "3".getBytes());
+    p.addColumn("f2".getBytes(), "c4".getBytes(), "4".getBytes());
+    p.addColumn("f".getBytes(), "c5".getBytes(), "5".getBytes());
+    p.addColumn("f2".getBytes(), "c6".getBytes(), "6".getBytes());
+    table.mutate(p);
 
     p = new Put("b4".getBytes());
-    p.add("f".getBytes(), "c1".getBytes(), "1".getBytes());
-    p.add("f2".getBytes(), "c2".getBytes(), "2".getBytes());
-    p.add("f".getBytes(), "c3".getBytes(), "3".getBytes());
-    p.add("f2".getBytes(), "c4".getBytes(), "4".getBytes());
-    p.add("f".getBytes(), "c5".getBytes(), "5".getBytes());
-    p.add("f2".getBytes(), "c6".getBytes(), "6".getBytes());
-    table.put(p);
+    p.addColumn("f".getBytes(), "c1".getBytes(), "1".getBytes());
+    p.addColumn("f2".getBytes(), "c2".getBytes(), "2".getBytes());
+    p.addColumn("f".getBytes(), "c3".getBytes(), "3".getBytes());
+    p.addColumn("f2".getBytes(), "c4".getBytes(), "4".getBytes());
+    p.addColumn("f".getBytes(), "c5".getBytes(), "5".getBytes());
+    p.addColumn("f2".getBytes(), "c6".getBytes(), "6".getBytes());
+    table.mutate(p);
 
     p = new Put("b5".getBytes());
-    p.add("f2".getBytes(), "c1".getBytes(), "1".getBytes());
-    p.add("f".getBytes(), "c2".getBytes(), "2".getBytes());
-    p.add("f2".getBytes(), "c3".getBytes(), "3".getBytes());
-    p.add("f".getBytes(), "c4".getBytes(), "4".getBytes());
-    p.add("f2".getBytes(), "c5".getBytes(), "5".getBytes());
-    p.add("f".getBytes(), "c6".getBytes(), "6".getBytes());
-    table.put(p);
+    p.addColumn("f2".getBytes(), "c1".getBytes(), "1".getBytes());
+    p.addColumn("f".getBytes(), "c2".getBytes(), "2".getBytes());
+    p.addColumn("f2".getBytes(), "c3".getBytes(), "3".getBytes());
+    p.addColumn("f".getBytes(), "c4".getBytes(), "4".getBytes());
+    p.addColumn("f2".getBytes(), "c5".getBytes(), "5".getBytes());
+    p.addColumn("f".getBytes(), "c6".getBytes(), "6".getBytes());
+    table.mutate(p);
 
     p = new Put("b6".getBytes());
-    p.add("f".getBytes(), "c1".getBytes(), "1".getBytes());
-    p.add("f2".getBytes(), "c3".getBytes(), "2".getBytes());
-    p.add("f".getBytes(), "c5".getBytes(), "3".getBytes());
-    p.add("f2".getBytes(), "c7".getBytes(), "4".getBytes());
-    p.add("f".getBytes(), "c8".getBytes(), "5".getBytes());
-    p.add("f2".getBytes(), "c9".getBytes(), "6".getBytes());
-    table.put(p);
+    p.addColumn("f".getBytes(), "c1".getBytes(), "1".getBytes());
+    p.addColumn("f2".getBytes(), "c3".getBytes(), "2".getBytes());
+    p.addColumn("f".getBytes(), "c5".getBytes(), "3".getBytes());
+    p.addColumn("f2".getBytes(), "c7".getBytes(), "4".getBytes());
+    p.addColumn("f".getBytes(), "c8".getBytes(), "5".getBytes());
+    p.addColumn("f2".getBytes(), "c9".getBytes(), "6".getBytes());
+    table.mutate(p);
 
     p = new Put("b7".getBytes());
-    p.add("f".getBytes(), "c1".getBytes(), "1".getBytes());
-    p.add("f".getBytes(), "c2".getBytes(), "2".getBytes());
-    table.put(p);
+    p.addColumn("f".getBytes(), "c1".getBytes(), "1".getBytes());
+    p.addColumn("f".getBytes(), "c2".getBytes(), "2".getBytes());
+    table.mutate(p);
 
-    table.flushCommits();
     table.close();
   }
 
-  public static void generateHBaseDataset2(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDataset2(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -143,7 +148,7 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     int rowCount = 0;
     byte[] bytes = null;
@@ -156,9 +161,9 @@ public class TestTableGenerator {
         Put p = new Put((""+rowKeyChar+iteration).getBytes());
         for (int j = 1; j <= numColumns; j++) {
           bytes = new byte[5000]; random.nextBytes(bytes);
-          p.add("f".getBytes(), ("c"+j).getBytes(), bytes);
+          p.addColumn("f".getBytes(), ("c"+j).getBytes(), bytes);
         }
-        table.put(p);
+        table.mutate(p);
 
         ++rowKeyChar;
         ++rowCount;
@@ -166,13 +171,12 @@ public class TestTableGenerator {
       ++iteration;
     }
 
-    table.flushCommits();
     table.close();
 
     admin.flush(tableName);
   }
 
-  public static void generateHBaseDataset3(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDataset3(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -187,34 +191,33 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     for (int i = 0; i <= 100; ++i) {
       Put p = new Put((String.format("%03d", i)).getBytes());
-      p.add(FAMILY_F, COLUMN_C, String.format("value %03d", i).getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %03d", i).getBytes());
+      table.mutate(p);
     }
     for (int i = 0; i <= 1000; ++i) {
       Put p = new Put((String.format("%04d", i)).getBytes());
-      p.add(FAMILY_F, COLUMN_C, String.format("value %04d", i).getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %04d", i).getBytes());
+      table.mutate(p);
     }
 
     Put p = new Put("%_AS_PREFIX_ROW1".getBytes());
-    p.add(FAMILY_F, COLUMN_C, "dummy".getBytes());
-    table.put(p);
+    p.addColumn(FAMILY_F, COLUMN_C, "dummy".getBytes());
+    table.mutate(p);
 
     p = new Put("%_AS_PREFIX_ROW2".getBytes());
-    p.add(FAMILY_F, COLUMN_C, "dummy".getBytes());
-    table.put(p);
+    p.addColumn(FAMILY_F, COLUMN_C, "dummy".getBytes());
+    table.mutate(p);
 
-    table.flushCommits();
     table.close();
 
     admin.flush(tableName);
   }
 
-  public static void generateHBaseDatasetCompositeKeyDate(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetCompositeKeyDate(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -229,7 +232,7 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     Date startDate = new Date(1408924800000L);
     long startTime  = startDate.getTime();
@@ -246,15 +249,14 @@ public class TestTableGenerator {
       }
 
       Put p = new Put(rowKey);
-      p.add(FAMILY_F, COLUMN_C, "dummy".getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, "dummy".getBytes());
+      table.mutate(p);
     }
 
-    table.flushCommits();
     table.close();
   }
 
-  public static void generateHBaseDatasetCompositeKeyTime(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetCompositeKeyTime(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -269,7 +271,7 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     long startTime  = 0;
     long MILLISECONDS_IN_A_SEC  = (long)1000;
@@ -287,8 +289,8 @@ public class TestTableGenerator {
       }
 
       Put p = new Put(rowKey);
-      p.add(FAMILY_F, COLUMN_C, "dummy".getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, "dummy".getBytes());
+      table.mutate(p);
 
       if (interval == smallInterval) {
         interval = largeInterval;
@@ -297,11 +299,10 @@ public class TestTableGenerator {
       }
     }
 
-    table.flushCommits();
     table.close();
   }
 
-  public static void generateHBaseDatasetCompositeKeyInt(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetCompositeKeyInt(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -316,7 +317,7 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     int startVal = 0;
     int stopVal = 1000;
@@ -330,15 +331,14 @@ public class TestTableGenerator {
       }
 
       Put p = new Put(rowKey);
-      p.add(FAMILY_F, COLUMN_C, "dummy".getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, "dummy".getBytes());
+      table.mutate(p);
     }
 
-    table.flushCommits();
     table.close();
   }
 
-  public static void generateHBaseDatasetDoubleOB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetDoubleOB(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -353,26 +353,23 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     for (double i = 0.5; i <= 100.00; i += 0.75) {
-        byte[] bytes = new byte[9];
-        org.apache.hadoop.hbase.util.PositionedByteRange br =
-                new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9);
-        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br, i,
-                org.apache.hadoop.hbase.util.Order.ASCENDING);
+      byte[] bytes = new byte[9];
+      PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 9);
+      OrderedBytes.encodeFloat64(br, i, Order.ASCENDING);
       Put p = new Put(bytes);
-      p.add(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes());
+      table.mutate(p);
     }
 
-    table.flushCommits();
     table.close();
 
     admin.flush(tableName);
   }
 
-  public static void generateHBaseDatasetFloatOB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetFloatOB(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -387,60 +384,54 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     for (float i = (float)0.5; i <= 100.00; i += 0.75) {
       byte[] bytes = new byte[5];
-      org.apache.hadoop.hbase.util.PositionedByteRange br =
-              new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5);
-      org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br, i,
-              org.apache.hadoop.hbase.util.Order.ASCENDING);
+      PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 5);
+      OrderedBytes.encodeFloat32(br, i,Order.ASCENDING);
       Put p = new Put(bytes);
-      p.add(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes());
+      table.mutate(p);
     }
 
-    table.flushCommits();
     table.close();
 
     admin.flush(tableName);
   }
 
-  public static void generateHBaseDatasetBigIntOB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetBigIntOB(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
     }
 
-   HTableDescriptor desc = new HTableDescriptor(tableName);
-  desc.addFamily(new HColumnDescriptor(FAMILY_F));
+    HTableDescriptor desc = new HTableDescriptor(tableName);
+    desc.addFamily(new HColumnDescriptor(FAMILY_F));
 
-  if (numberRegions > 1) {
-    admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1));
-  } else {
-    admin.createTable(desc);
-  }
+    if (numberRegions > 1) {
+      admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1));
+    } else {
+      admin.createTable(desc);
+    }
 
-  HTable table = new HTable(admin.getConfiguration(), tableName);
-  long startTime = (long)1438034423 * 1000;
-  for (long i = startTime; i <= startTime + 100; i ++) {
-    byte[] bytes = new byte[9];
-    org.apache.hadoop.hbase.util.PositionedByteRange br =
-            new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9);
-    org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br, i,
-            org.apache.hadoop.hbase.util.Order.ASCENDING);
-    Put p = new Put(bytes);
-    p.add(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes());
-    table.put(p);
-  }
+    BufferedMutator table = conn.getBufferedMutator(tableName);
+    long startTime = (long)1438034423 * 1000;
+    for (long i = startTime; i <= startTime + 100; i ++) {
+      byte[] bytes = new byte[9];
+      PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 9);
+      OrderedBytes.encodeInt64(br, i, Order.ASCENDING);
+      Put p = new Put(bytes);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes());
+      table.mutate(p);
+    }
 
-  table.flushCommits();
-  table.close();
+    table.close();
 
-  admin.flush(tableName);
+    admin.flush(tableName);
   }
 
-  public static void generateHBaseDatasetIntOB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetIntOB(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -455,26 +446,23 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     for (int i = -49; i <= 100; i ++) {
       byte[] bytes = new byte[5];
-      org.apache.hadoop.hbase.util.PositionedByteRange br =
-              new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5);
-      org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br, i,
-              org.apache.hadoop.hbase.util.Order.ASCENDING);
+      PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 5);
+      OrderedBytes.encodeInt32(br, i, Order.ASCENDING);
       Put p = new Put(bytes);
-      p.add(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes());
+      table.mutate(p);
     }
 
-    table.flushCommits();
     table.close();
 
     admin.flush(tableName);
   }
 
-  public static void generateHBaseDatasetDoubleOBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetDoubleOBDesc(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -489,26 +477,23 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     for (double i = 0.5; i <= 100.00; i += 0.75) {
-        byte[] bytes = new byte[9];
-        org.apache.hadoop.hbase.util.PositionedByteRange br =
-                new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9);
-        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br, i,
-                org.apache.hadoop.hbase.util.Order.DESCENDING);
+      byte[] bytes = new byte[9];
+      PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 9);
+      OrderedBytes.encodeFloat64(br, i, Order.DESCENDING);
       Put p = new Put(bytes);
-      p.add(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes());
+      table.mutate(p);
     }
 
-    table.flushCommits();
     table.close();
 
     admin.flush(tableName);
   }
 
-  public static void generateHBaseDatasetFloatOBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetFloatOBDesc(Connection conn, Admin admin, TableName tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -523,61 +508,55 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     for (float i = (float)0.5; i <= 100.00; i += 0.75) {
       byte[] bytes = new byte[5];
-      org.apache.hadoop.hbase.util.PositionedByteRange br =
-              new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5);
-      org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br, i,
-              org.apache.hadoop.hbase.util.Order.DESCENDING);
+      PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 5);
+      OrderedBytes.encodeFloat32(br, i, Order.DESCENDING);
       Put p = new Put(bytes);
-      p.add(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes());
+      table.mutate(p);
     }
 
-    table.flushCommits();
     table.close();
 
     admin.flush(tableName);
   }
 
-  public static void generateHBaseDatasetBigIntOBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception {
+  public static void generateHBaseDatasetBigIntOBDesc(Connection conn, Admin admin, TableName
tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
     }
 
-   HTableDescriptor desc = new HTableDescriptor(tableName);
-  desc.addFamily(new HColumnDescriptor(FAMILY_F));
+    HTableDescriptor desc = new HTableDescriptor(tableName);
+    desc.addFamily(new HColumnDescriptor(FAMILY_F));
 
-  if (numberRegions > 1) {
-    admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1));
-  } else {
-    admin.createTable(desc);
-  }
+    if (numberRegions > 1) {
+      admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1));
+    } else {
+      admin.createTable(desc);
+    }
 
-  HTable table = new HTable(admin.getConfiguration(), tableName);
-  long startTime = (long)1438034423 * 1000;
-  for (long i = startTime; i <= startTime + 100; i ++) {
-    byte[] bytes = new byte[9];
-    org.apache.hadoop.hbase.util.PositionedByteRange br =
-            new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9);
-    org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br, i,
-            org.apache.hadoop.hbase.util.Order.DESCENDING);
-    Put p = new Put(bytes);
-    p.add(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes());
-    table.put(p);
-  }
+    BufferedMutator table = conn.getBufferedMutator(tableName);
+    long startTime = (long)1438034423 * 1000;
+    for (long i = startTime; i <= startTime + 100; i ++) {
+      byte[] bytes = new byte[9];
+      PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 9);
+      OrderedBytes.encodeInt64(br, i, Order.DESCENDING);
+      Put p = new Put(bytes);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes());
+      table.mutate(p);
+    }
 
-  table.flushCommits();
-  table.close();
+    table.close();
 
-  admin.flush(tableName);
+    admin.flush(tableName);
   }
 
 
-  public static void generateHBaseDatasetIntOBDesc(HBaseAdmin admin, String tableName, int
numberRegions) throws Exception {
+  public static void generateHBaseDatasetIntOBDesc(Connection conn, Admin admin, TableName
tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -592,26 +571,23 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     for (int i = -49; i <= 100; i ++) {
       byte[] bytes = new byte[5];
-      org.apache.hadoop.hbase.util.PositionedByteRange br =
-              new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5);
-      org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br, i,
-              org.apache.hadoop.hbase.util.Order.DESCENDING);
+      PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 5);
+      OrderedBytes.encodeInt32(br, i, Order.DESCENDING);
       Put p = new Put(bytes);
-      p.add(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes());
-      table.put(p);
+      p.addColumn(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes());
+      table.mutate(p);
     }
 
-    table.flushCommits();
     table.close();
 
     admin.flush(tableName);
   }
 
-  public static void generateHBaseDatasetNullStr(HBaseAdmin admin, String tableName, int
numberRegions) throws Exception {
+  public static void generateHBaseDatasetNullStr(Connection conn, Admin admin, TableName
tableName, int numberRegions) throws Exception {
     if (admin.tableExists(tableName)) {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
@@ -625,16 +601,16 @@ public class TestTableGenerator {
       admin.createTable(desc);
     }
 
-    HTable table = new HTable(admin.getConfiguration(), tableName);
+    BufferedMutator table = conn.getBufferedMutator(tableName);
 
     Put p = new Put("a1".getBytes());
-    p.add("f".getBytes(), "c1".getBytes(), "".getBytes());
-    p.add("f".getBytes(), "c2".getBytes(), "".getBytes());
-    p.add("f".getBytes(), "c3".getBytes(), "5".getBytes());
-    p.add("f".getBytes(), "c4".getBytes(), "".getBytes());
-    table.put(p);
+    p.addColumn("f".getBytes(), "c1".getBytes(), "".getBytes());
+    p.addColumn("f".getBytes(), "c2".getBytes(), "".getBytes());
+    p.addColumn("f".getBytes(), "c3".getBytes(), "5".getBytes());
+    p.addColumn("f".getBytes(), "c4".getBytes(), "".getBytes());
+    table.mutate(p);
 
-    table.flushCommits();
     table.close();
   }
+
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/c2d9959e/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/test/Drill2130StorageHBaseHamcrestConfigurationTest.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/test/Drill2130StorageHBaseHamcrestConfigurationTest.java
b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/test/Drill2130StorageHBaseHamcrestConfigurationTest.java
index b52654d..f1a21b0 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/test/Drill2130StorageHBaseHamcrestConfigurationTest.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/test/Drill2130StorageHBaseHamcrestConfigurationTest.java
@@ -17,13 +17,15 @@
  */
 package org.apache.drill.hbase.test;
 
-import org.junit.Test;
+import static org.hamcrest.CoreMatchers.equalTo;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
-import static org.hamcrest.CoreMatchers.equalTo;
+
+import org.junit.Test;
 
 
 public class Drill2130StorageHBaseHamcrestConfigurationTest {
+  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Drill2130StorageHBaseHamcrestConfigurationTest.class);
 
   @SuppressWarnings("unused")
   private org.hamcrest.MatcherAssert forCompileTimeCheckForNewEnoughHamcrest;
@@ -38,7 +40,7 @@ public class Drill2130StorageHBaseHamcrestConfigurationTest {
              + "  Got NoSuchMethodError;  e: " + e );
     }
     catch ( AssertionError e ) {
-      System.out.println( "Class path seems fine re new JUnit vs. old Hamcrest."
+      logger.info("Class path seems fine re new JUnit vs. old Hamcrest."
                           + " (Got AssertionError, not NoSuchMethodError.)" );
     }
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/c2d9959e/contrib/storage-hbase/src/test/resources/logback.xml
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/test/resources/logback.xml b/contrib/storage-hbase/src/test/resources/logback.xml
index c0a79a8..8d3fc81 100644
--- a/contrib/storage-hbase/src/test/resources/logback.xml
+++ b/contrib/storage-hbase/src/test/resources/logback.xml
@@ -52,9 +52,17 @@
   </logger>
 
   <logger name="org.apache.hadoop" additivity="false">
-    <level value="info" />
+    <level value="warn" />
+    <appender-ref ref="STDOUT" />
+    <appender-ref ref="SOCKET" />
+    <appender-ref ref="FILE" />
+  </logger>
+
+  <logger name="org.apache.hadoop.hbase.client.ConnectionManager" additivity="false">
+    <level value="error" />
     <appender-ref ref="STDOUT" />
     <appender-ref ref="SOCKET" />
+    <appender-ref ref="FILE" />
   </logger>
 
   <root>

http://git-wip-us.apache.org/repos/asf/drill/blob/c2d9959e/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
index d981342..d0f4125 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
@@ -41,6 +41,7 @@ import org.apache.drill.exec.store.sys.store.provider.CachingPersistentStoreProv
 import org.apache.drill.exec.store.sys.PersistentStoreProvider;
 import org.apache.drill.exec.store.sys.PersistentStoreRegistry;
 import org.apache.drill.exec.store.sys.store.provider.LocalPersistentStoreProvider;
+import org.apache.drill.exec.util.GuavaPatcher;
 import org.apache.drill.exec.work.WorkManager;
 import org.apache.zookeeper.Environment;
 
@@ -54,6 +55,12 @@ public class Drillbit implements AutoCloseable {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Drillbit.class);
 
   static {
+    /*
+     * The HBase client uses an older version of Guava's Stopwatch API,
+     * while Drill ships with 18.x, which has changed the scope of
+     * these APIs to 'package'; this code makes them accessible.
+     */
+    GuavaPatcher.patch();
     Environment.logEnv("Drillbit environment: ", logger);
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/c2d9959e/exec/java-exec/src/main/java/org/apache/drill/exec/util/GuavaPatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/GuavaPatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/GuavaPatcher.java
new file mode 100644
index 0000000..0bb13d8
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/GuavaPatcher.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.util;
+
+import java.lang.reflect.Modifier;
+
+import javassist.ClassPool;
+import javassist.CtClass;
+import javassist.CtConstructor;
+import javassist.CtMethod;
+import javassist.CtNewMethod;
+
+public class GuavaPatcher {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(GuavaPatcher.class);
+
+  private static boolean patched;
+
+  public static synchronized void patch() {
+    if (!patched) {
+      try {
+        patchStopwatch();
+        patchCloseables();
+        patched = true;
+      } catch (Throwable e) {
+        logger.warn("Unable to patch Guava classes.", e);
+      }
+    }
+  }
+
+  /**
+   * Makes Guava's Stopwatch look like the old version for compatibility with hbase-server
(for test purposes).
+   */
+  private static void patchStopwatch() throws Exception {
+
+    ClassPool cp = ClassPool.getDefault();
+    CtClass cc = cp.get("com.google.common.base.Stopwatch");
+
+    // Expose the constructor for Stopwatch for old libraries that use the pattern new Stopwatch().start().
+    for (CtConstructor c : cc.getConstructors()) {
+      if (!Modifier.isStatic(c.getModifiers())) {
+        c.setModifiers(Modifier.PUBLIC);
+      }
+    }
+
+    // Add back the Stopwatch.elapsedMillis() method for old consumers.
+    CtMethod newmethod = CtNewMethod.make(
+        "public long elapsedMillis() { return elapsed(java.util.concurrent.TimeUnit.MILLISECONDS);
}", cc);
+    cc.addMethod(newmethod);
+
+    // Load the modified class instead of the original.
+    cc.toClass();
+
+    logger.info("Google's Stopwatch patched for old HBase Guava version.");
+  }
+
+  private static void patchCloseables() throws Exception {
+
+    ClassPool cp = ClassPool.getDefault();
+    CtClass cc = cp.get("com.google.common.io.Closeables");
+
+
+    // Add back the Closeables.closeQuietly() method for old consumers.
+    CtMethod newmethod = CtNewMethod.make(
+        "public static void closeQuietly(java.io.Closeable closeable) { try{closeable.close();}catch(Exception
e){} }",
+        cc);
+    cc.addMethod(newmethod);
+
+    // Load the modified class instead of the original.
+    cc.toClass();
+
+    logger.info("Google's Closeables patched for old HBase Guava version.");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/c2d9959e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 058036b..4e15aea 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1127,7 +1127,7 @@
           <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-client</artifactId>
-            <version>0.98.7-hadoop2</version>
+            <version>1.1.3</version>
             <exclusions>
               <exclusion>
                 <groupId>org.mortbay.jetty</groupId>
@@ -1239,10 +1239,14 @@
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-testing-util</artifactId>
             <classifier>tests</classifier>
-            <version>0.98.7-hadoop2</version>
+            <version>1.1.3</version>
             <scope>test</scope>
             <exclusions>
               <exclusion>
+                <groupId>javax.servlet</groupId>
+                <artifactId>servlet-api</artifactId>
+              </exclusion>
+              <exclusion>
                 <groupId>org.mortbay.jetty</groupId>
                 <artifactId>servlet-api-2.5</artifactId>
               </exclusion>
@@ -1390,7 +1394,7 @@
         <alt-hadoop>mapr</alt-hadoop>
         <rat.excludeSubprojects>true</rat.excludeSubprojects>
         <hive.version>1.2.0-mapr-1601</hive.version>
-        <hbase.version>0.98.12-mapr-1602-m7-5.1.0</hbase.version>
+        <hbase.version>1.1.1-mapr-1602-m7-5.1.0</hbase.version>
         <hadoop.version>2.7.0-mapr-1602</hadoop.version>
         <mapr.core.version>5.1.0-mapr</mapr.core.version>
       </properties>


Mime
View raw message