hbase-commits mailing list archives

From: mbau...@apache.org
Subject: svn commit: r1236031 [7/7] - in /hbase/trunk/src: main/java/org/apache/hadoop/hbase/ main/java/org/apache/hadoop/hbase/io/ main/java/org/apache/hadoop/hbase/io/encoding/ main/java/org/apache/hadoop/hbase/io/hfile/ main/java/org/apache/hadoop/hbase/mapr...
Date: Thu, 26 Jan 2012 02:59:00 GMT
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java?rev=1236031&r1=1236030&r2=1236031&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java Thu Jan 26 02:58:57 2012
@@ -66,7 +66,7 @@ public class TestLoadTestKVGenerator {
     for (int i = 0; i < 1000; ++i) {
       String k = LoadTestKVGenerator.md5PrefixedKey(i);
       assertFalse(keys.contains(k));
-      assertTrue(k.endsWith(":" + i));
+      assertTrue(k.endsWith("-" + i));
       keys.add(k);
     }
   }
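
For context, the assertion change above pins the key format produced by LoadTestKVGenerator.md5PrefixedKey to a "-" separator between the hash prefix and the row index, where a ":" was expected before. The snippet below is a minimal, self-contained sketch of such a key generator, not the actual LoadTestKVGenerator code: the class name, the 8-character prefix length, and the digest handling are illustrative assumptions only.

    import java.security.MessageDigest;

    public class Md5PrefixedKeySketch {

      // Number of hex characters of the MD5 digest kept as the key prefix (assumed value).
      private static final int PREFIX_LENGTH = 8;

      // Builds a key of the form "<md5 prefix>-<i>", matching the "-" separator asserted above.
      public static String md5PrefixedKey(long i) {
        try {
          MessageDigest md5 = MessageDigest.getInstance("MD5");
          byte[] digest = md5.digest(String.valueOf(i).getBytes("UTF-8"));
          StringBuilder hex = new StringBuilder();
          for (byte b : digest) {
            hex.append(String.format("%02x", b));
          }
          return hex.substring(0, PREFIX_LENGTH) + "-" + i;
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }

      public static void main(String[] args) {
        // Prints "cfcd2084-0" for this sketch (the MD5 digest of "0" starts with cfcd2084).
        System.out.println(md5PrefixedKey(0));
      }
    }

Prefixing keys with a hash spreads sequential row indexes across the key space, which keeps the load test from concentrating all writes on a single region.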

Added: hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java?rev=1236031&view=auto
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java (added)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java Thu Jan 26 02:58:57 2012
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.junit.experimental.categories.Category;
+import org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Runs a load test on a mini HBase cluster with data block encoding turned on.
+ * Compared to other load-test-style unit tests, this one writes a smaller
+ * amount of data, but goes through all available data block encoding
+ * algorithms.
+ */
+@Category(LargeTests.class)
+public class TestMiniClusterLoadEncoded extends TestMiniClusterLoadParallel {
+
+  /** We do not alternate the multi-put flag in this test. */
+  private static final boolean USE_MULTI_PUT = true;
+
+  @Parameters
+  public static Collection<Object[]> parameters() {
+    List<Object[]> parameters = new ArrayList<Object[]>();
+    for (DataBlockEncoding dataBlockEncoding : DataBlockEncoding.values() ) {
+      parameters.add(new Object[]{dataBlockEncoding});
+    }
+    return parameters;
+  }
+
+  public TestMiniClusterLoadEncoded(DataBlockEncoding encoding) {
+    super(USE_MULTI_PUT, encoding);
+  }
+
+  /**
+   * Use a smaller number of keys in this test.
+   */
+  @Override
+  protected int numKeys() {
+    return 3000;
+  }
+
+}
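
The new test above carries no @RunWith annotation of its own; it relies on JUnit 4's Parameterized runner set up on its superclasses (note the org.junit.runner.RunWith and Parameterized imports in the diffs below) and contributes only the @Parameters method that enumerates every DataBlockEncoding value. As a reminder of how that runner feeds parameters into the constructor, here is a minimal, standalone sketch; the class name, the String-typed parameter, and the listed encoding names are placeholders for illustration, not HBase code.

    import static org.junit.Assert.assertNotNull;

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import org.junit.runners.Parameterized.Parameters;

    @RunWith(Parameterized.class)
    public class ParameterizedRunnerSketch {

      private final String encodingName;

      // The runner calls this constructor once for each Object[] returned by
      // parameters(), so every @Test method runs once per entry.
      public ParameterizedRunnerSketch(String encodingName) {
        this.encodingName = encodingName;
      }

      @Parameters
      public static Collection<Object[]> parameters() {
        return Arrays.asList(new Object[][] {
            { "NONE" }, { "PREFIX" }, { "DIFF" }, { "FAST_DIFF" }
        });
      }

      @Test
      public void runsOncePerParameter() {
        assertNotNull(encodingName);
      }
    }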

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java?rev=1236031&r1=1236030&r2=1236031&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java Thu Jan 26 02:58:57 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.util;
 import static org.junit.Assert.assertEquals;
 
 import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
@@ -33,18 +34,19 @@ import org.junit.runners.Parameterized;
 public class TestMiniClusterLoadParallel
     extends TestMiniClusterLoadSequential {
 
-  public TestMiniClusterLoadParallel(boolean isMultiPut) {
-    super(isMultiPut);
+  public TestMiniClusterLoadParallel(boolean isMultiPut,
+      DataBlockEncoding encoding) {
+    super(isMultiPut, encoding);
   }
 
-  @Test(timeout=120000)
+  @Test(timeout=TIMEOUT_MS)
   public void loadTest() throws Exception {
     prepareForLoadTest();
 
     readerThreads.linkToWriter(writerThreads);
 
-    writerThreads.start(0, NUM_KEYS, NUM_THREADS);
-    readerThreads.start(0, NUM_KEYS, NUM_THREADS);
+    writerThreads.start(0, numKeys, NUM_THREADS);
+    readerThreads.start(0, numKeys, NUM_THREADS);
 
     writerThreads.waitForFinish();
     readerThreads.waitForFinish();
@@ -52,7 +54,7 @@ public class TestMiniClusterLoadParallel
     assertEquals(0, writerThreads.getNumWriteFailures());
     assertEquals(0, readerThreads.getNumReadFailures());
     assertEquals(0, readerThreads.getNumReadErrors());
-    assertEquals(NUM_KEYS, readerThreads.getNumUniqueKeysVerified());
+    assertEquals(numKeys, readerThreads.getNumUniqueKeysVerified());
   }
 
 }

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java?rev=1236031&r1=1236030&r2=1236031&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java Thu Jan 26 02:58:57 2012
@@ -19,14 +19,21 @@ package org.apache.hadoop.hbase.util;
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -48,25 +55,39 @@ public class TestMiniClusterLoadSequenti
 
   protected static final byte[] TABLE = Bytes.toBytes("load_test_tbl");
   protected static final byte[] CF = Bytes.toBytes("load_test_cf");
-  protected static final long NUM_KEYS = 10000;
   protected static final int NUM_THREADS = 8;
   protected static final int NUM_RS = 2;
+  protected static final int TIMEOUT_MS = 120000;
   protected static final HBaseTestingUtility TEST_UTIL =
       new HBaseTestingUtility();
 
   protected final Configuration conf = TEST_UTIL.getConfiguration();
   protected final boolean isMultiPut;
+  protected final DataBlockEncoding dataBlockEncoding;
 
   protected MultiThreadedWriter writerThreads;
   protected MultiThreadedReader readerThreads;
+  protected int numKeys;
 
-  public TestMiniClusterLoadSequential(boolean isMultiPut) {
+  protected Compression.Algorithm compression = Compression.Algorithm.NONE;
+
+  public TestMiniClusterLoadSequential(boolean isMultiPut,
+      DataBlockEncoding dataBlockEncoding) {
     this.isMultiPut = isMultiPut;
+    this.dataBlockEncoding = dataBlockEncoding;
+    conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
   }
 
   @Parameters
   public static Collection<Object[]> parameters() {
-    return HBaseTestingUtility.BOOLEAN_PARAMETERIZED;
+    List<Object[]> parameters = new ArrayList<Object[]>();
+    for (boolean multiPut : new boolean[]{false, true}) {
+      for (DataBlockEncoding dataBlockEncoding : new DataBlockEncoding[] {
+          DataBlockEncoding.NONE, DataBlockEncoding.PREFIX }) {
+        parameters.add(new Object[]{multiPut, dataBlockEncoding});
+      }
+    }
+    return parameters;
   }
 
   @Before
@@ -81,22 +102,28 @@ public class TestMiniClusterLoadSequenti
     TEST_UTIL.shutdownMiniCluster();
   }
 
-  @Test(timeout=120000)
+  @Test(timeout=TIMEOUT_MS)
   public void loadTest() throws Exception {
     prepareForLoadTest();
+    runLoadTestOnExistingTable();
+  }
 
-    writerThreads.start(0, NUM_KEYS, NUM_THREADS);
+  protected void runLoadTestOnExistingTable() throws IOException {
+    writerThreads.start(0, numKeys, NUM_THREADS);
     writerThreads.waitForFinish();
     assertEquals(0, writerThreads.getNumWriteFailures());
 
-    readerThreads.start(0, NUM_KEYS, NUM_THREADS);
+    readerThreads.start(0, numKeys, NUM_THREADS);
     readerThreads.waitForFinish();
     assertEquals(0, readerThreads.getNumReadFailures());
     assertEquals(0, readerThreads.getNumReadErrors());
-    assertEquals(NUM_KEYS, readerThreads.getNumKeysVerified());
+    assertEquals(numKeys, readerThreads.getNumKeysVerified());
   }
 
   protected void prepareForLoadTest() throws IOException {
+    LOG.info("Starting load test: dataBlockEncoding=" + dataBlockEncoding +
+        ", isMultiPut=" + isMultiPut);
+    numKeys = numKeys();
     HBaseAdmin admin = new HBaseAdmin(conf);
     while (admin.getClusterStatus().getServers().size() < NUM_RS) {
       LOG.info("Sleeping until " + NUM_RS + " RSs are online");
@@ -104,8 +131,9 @@ public class TestMiniClusterLoadSequenti
     }
     admin.close();
 
-    int numRegions =
-        HBaseTestingUtility.createPreSplitLoadTestTable(conf, TABLE, CF);
+    int numRegions = HBaseTestingUtility.createPreSplitLoadTestTable(conf,
+        TABLE, CF, compression, dataBlockEncoding);
+
     TEST_UTIL.waitUntilAllRegionsAssigned(numRegions);
 
     writerThreads = new MultiThreadedWriter(conf, TABLE, CF);
@@ -113,4 +141,13 @@ public class TestMiniClusterLoadSequenti
     readerThreads = new MultiThreadedReader(conf, TABLE, CF, 100);
   }
 
+  protected int numKeys() {
+    return 10000;
+  }
+
+  protected HColumnDescriptor getColumnDesc(HBaseAdmin admin)
+      throws TableNotFoundException, IOException {
+    return admin.getTableDescriptor(TABLE).getFamily(CF);
+  }
+
 }
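
The getColumnDesc helper added at the end of TestMiniClusterLoadSequential hands subclasses the HColumnDescriptor of the load-test column family. One plausible use, sketched below, is a check that the requested data block encoding was actually applied to the table. This method is not part of the committed patch; it leans on the fields and helpers already declared in the class above, and it assumes HColumnDescriptor exposes a getDataBlockEncoding() accessor, whose exact name at this revision is an assumption.

      // Hypothetical addition to TestMiniClusterLoadSequential, for illustration only.
      protected void assertEncodingApplied() throws Exception {
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
          HColumnDescriptor cf = getColumnDesc(admin);
          // Assumed accessor; checks the encoding requested via the test parameters.
          assertEquals(dataBlockEncoding, cf.getDataBlockEncoding());
        } finally {
          admin.close();
        }
      }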


