hbase-commits mailing list archives

From: ramkris...@apache.org
Subject: svn commit: r1525269 [8/8] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-common/src/main/java/org/apache/hadoop/hbase/ hbase-common/src/main/java/org/apa...
Date: Sat, 21 Sep 2013 18:01:35 GMT
Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java?rev=1525269&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
(added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
Sat Sep 21 18:01:32 2013
@@ -0,0 +1,83 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.codec.Codec.Decoder;
+import org.apache.hadoop.hbase.codec.Codec.Encoder;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestWALCellCodecWithCompression {
+
+  @Test
+  public void testEncodeDecodeKVsWithTags() throws Exception {
+    WALCellCodec codec = new WALCellCodec(new Configuration(false), new CompressionContext(
+        LRUDictionary.class, false));
+    ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+    Encoder encoder = codec.getEncoder(bos);
+    encoder.write(createKV(1));
+    encoder.write(createKV(0));
+    encoder.write(createKV(2));
+
+    InputStream is = new ByteArrayInputStream(bos.toByteArray());
+    Decoder decoder = codec.getDecoder(is);
+    decoder.advance();
+    KeyValue kv = (KeyValue) decoder.current();
+    List<Tag> tags = kv.getTags();
+    assertEquals(1, tags.size());
+    assertEquals("tagValue1", Bytes.toString(tags.get(0).getValue()));
+    decoder.advance();
+    kv = (KeyValue) decoder.current();
+    tags = kv.getTags();
+    assertEquals(0, tags.size());
+    decoder.advance();
+    kv = (KeyValue) decoder.current();
+    tags = kv.getTags();
+    assertEquals(2, tags.size());
+    assertEquals("tagValue1", Bytes.toString(tags.get(0).getValue()));
+    assertEquals("tagValue2", Bytes.toString(tags.get(1).getValue()));
+  }
+
+  private KeyValue createKV(int noOfTags) {
+    byte[] row = Bytes.toBytes("myRow");
+    byte[] cf = Bytes.toBytes("myCF");
+    byte[] q = Bytes.toBytes("myQualifier");
+    byte[] value = Bytes.toBytes("myValue");
+    List<Tag> tags = new ArrayList<Tag>(noOfTags);
+    for (int i = 1; i <= noOfTags; i++) {
+      tags.add(new Tag((byte) i, Bytes.toBytes("tagValue" + i)));
+    }
+    return new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags);
+  }
+}

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
Sat Sep 21 18:01:32 2013
@@ -59,6 +59,7 @@ import org.apache.hadoop.hbase.client.Re
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.DefaultStoreEngine;
@@ -326,8 +327,10 @@ public class TestWALReplay {
     HLog wal = createWAL(this.conf);
     HRegion region = HRegion.openHRegion(hri, htd, wal, this.conf);
     Path f =  new Path(basedir, "hfile");
+    HFileContext context = new HFileContext();
     HFile.Writer writer =
-      HFile.getWriterFactoryNoCache(conf).withPath(fs, f).create();
+    HFile.getWriterFactoryNoCache(conf).withPath(fs, f)
+        .withFileContext(context).create();
     byte [] family = htd.getFamilies().iterator().next().getName();
     byte [] row = tableName.getName();
     writer.append(new KeyValue(row, family, family, row));

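The recurring change in the test diffs above and below is that HFile.Writer creation now passes an HFileContext through withFileContext(...) before create(). A minimal sketch of the new pattern, assuming default HFileContext settings; the path and configuration here are illustrative, and only the builder calls are taken from the diff:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.io.hfile.HFileContext;

    public class HFileContextSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/tmp/example.hfile");  // illustrative path
        HFileContext context = new HFileContext();   // defaults; tunables omitted
        HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
            .withPath(fs, path)
            .withFileContext(context)                // the call this commit adds everywhere
            .create();
        writer.close();
      }
    }
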
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
Sat Sep 21 18:01:32 2013
@@ -67,6 +67,7 @@ import org.apache.hadoop.hbase.coprocess
 import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -842,8 +843,10 @@ public class TestAccessController extend
       HFile.Writer writer = null;
       long now = System.currentTimeMillis();
       try {
+        HFileContext context = new HFileContext();
         writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
             .withPath(fs, path)
+            .withFileContext(context)
             .create();
         // subtract 2 since numRows doesn't include boundary keys
         for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, true, numRows-2)) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
Sat Sep 21 18:01:32 2013
@@ -21,18 +21,19 @@ import java.io.InterruptedIOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Random;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.PerformanceEvaluation;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -101,6 +102,13 @@ public class LoadTestTool extends Abstra
   public static final String OPT_INMEMORY = "in_memory";
   public static final String OPT_USAGE_IN_MEMORY = "Tries to keep the HFiles of the CF " +
      "inmemory as far as possible.  Not guaranteed that reads are always served from inmemory";
+  public static final String OPT_USETAGS = "usetags";
+  public static final String OPT_USAGE_USETAG = "Adds tags with every KV.  This option would be used" +
+      " only if the HFileV3 version is used";
+
+  public static final String OPT_NUM_TAGS = "num_tags";
+  public static final String OPT_USAGE_NUM_TAGS = "Specifies the minimum and maximum number of tags to be" +
+      " added per KV";
 
   protected static final String OPT_KEY_WINDOW = "key_window";
   protected static final String OPT_WRITE = "write";
@@ -136,6 +144,9 @@ public class LoadTestTool extends Abstra
   protected Compression.Algorithm compressAlgo;
   protected BloomType bloomType;
   private boolean inMemoryCF;
+  private boolean useTags;
+  private int minNumTags = 1;
+  private int maxNumTags = 1;
   // Writer options
   protected int numWriterThreads = DEFAULT_NUM_THREADS;
   protected int minColsPerKey, maxColsPerKey;
@@ -241,6 +252,8 @@ public class LoadTestTool extends Abstra
         "separate updates for every column in a row");
     addOptNoArg(OPT_ENCODE_IN_CACHE_ONLY, OPT_ENCODE_IN_CACHE_ONLY_USAGE);
     addOptNoArg(OPT_INMEMORY, OPT_USAGE_IN_MEMORY);
+    addOptNoArg(OPT_USETAGS, OPT_USAGE_USETAG);
+    addOptWithArg(OPT_NUM_TAGS,  OPT_USAGE_NUM_TAGS + " The default is 1:1");
 
     addOptWithArg(OPT_NUM_KEYS, "The number of keys to read/write");
     addOptWithArg(OPT_START_KEY, "The first key to read/write " +
@@ -379,6 +392,19 @@ public class LoadTestTool extends Abstra
         BloomType.valueOf(bloomStr);
     
     inMemoryCF = cmd.hasOption(OPT_INMEMORY);
+    useTags = cmd.hasOption(OPT_USETAGS);
+    if (useTags) {
+      if (cmd.hasOption(OPT_NUM_TAGS)) {
+        String[] readOpts = splitColonSeparated(OPT_NUM_TAGS, 1, 2);
+        int colIndex = 0;
+        minNumTags = parseInt(readOpts[colIndex++], 1, 100);
+        if (colIndex < readOpts.length) {
+          maxNumTags = parseInt(readOpts[colIndex++], 1, 100);
+        }
+      }
+      System.out.println("Using tags, number of tags per KV: min=" + minNumTags + ", max="
+          + maxNumTags);
+    }
     
   }
 
@@ -445,17 +471,20 @@ public class LoadTestTool extends Abstra
 
     if (isWrite) {
       System.out.println("Starting to write data...");
-      writerThreads.start(startKey, endKey, numWriterThreads);
+      writerThreads.start(startKey, endKey, numWriterThreads, useTags, minNumTags, maxNumTags);
     }
 
     if (isUpdate) {
+      LOG.info("Starting to mutate data...");
       System.out.println("Starting to mutate data...");
-      updaterThreads.start(startKey, endKey, numUpdaterThreads);
+      // TODO: currently append and increment operations are not tested with tags.
+      // Will update this after it is done.
+      updaterThreads.start(startKey, endKey, numUpdaterThreads, true, minNumTags, maxNumTags);
     }
 
     if (isRead) {
       System.out.println("Starting to read data...");
-      readerThreads.start(startKey, endKey, numReaderThreads);
+      readerThreads.start(startKey, endKey, numReaderThreads, useTags, 0, 0);
     }
 
     if (isWrite) {
@@ -484,6 +513,27 @@ public class LoadTestTool extends Abstra
     return success ? EXIT_SUCCESS : EXIT_FAILURE;
   }
 
+  static byte[] generateData(final Random r, int length) {
+    byte [] b = new byte [length];
+    int i = 0;
+
+    for(i = 0; i < (length-8); i += 8) {
+      b[i] = (byte) (65 + r.nextInt(26));
+      b[i+1] = b[i];
+      b[i+2] = b[i];
+      b[i+3] = b[i];
+      b[i+4] = b[i];
+      b[i+5] = b[i];
+      b[i+6] = b[i];
+      b[i+7] = b[i];
+    }
+
+    byte a = (byte) (65 + r.nextInt(26));
+    for(; i < length; i++) {
+      b[i] = a;
+    }
+    return b;
+  }
   public static void main(String[] args) {
     new LoadTestTool().doStaticMain(args);
   }
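For reference, a hedged sketch of invoking the tool with the new tag options end to end. The usetags and num_tags flag strings come from the constants added above; the remaining flags and values are assumptions for illustration, not taken from this commit:

    // Sketch: run LoadTestTool with tags enabled, drawing 2 to 5 tags per KV.
    // Only -usetags and -num_tags are grounded in the diff above.
    public class LoadTestWithTagsSketch {
      public static void main(String[] args) {
        new LoadTestTool().doStaticMain(new String[] {
            "-write", "3:64",          // assumed write spec (cols per key : data size)
            "-num_keys", "100000",     // assumed value for the OPT_NUM_KEYS flag above
            "-usetags",                // OPT_USETAGS: attach tags to every KV (HFile v3 only)
            "-num_tags", "2:5"         // OPT_NUM_TAGS: min:max, parsed by splitColonSeparated
        });
      }
    }
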

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
Sat Sep 21 18:01:32 2013
@@ -68,6 +68,10 @@ public abstract class MultiThreadedActio
   protected AtomicLong totalOpTimeMs = new AtomicLong();
   protected boolean verbose = false;
 
+  protected boolean useTags = false;
+  protected int minNumTags = 1;
+  protected int maxNumTags = 1;
+
   protected LoadTestDataGenerator dataGenerator = null;
 
   /**
@@ -149,11 +153,14 @@ public abstract class MultiThreadedActio
     this.actionLetter = actionLetter;
   }
 
-  public void start(long startKey, long endKey, int numThreads)
-      throws IOException {
+  public void start(long startKey, long endKey, int numThreads, boolean useTags, int minNumTags,
+      int maxNumTags) throws IOException {
     this.startKey = startKey;
     this.endKey = endKey;
     this.numThreads = numThreads;
+    this.useTags = useTags;
+    this.minNumTags = minNumTags;
+    this.maxNumTags = maxNumTags;
     (new Thread(new ProgressReporter(actionLetter))).start();
   }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
Sat Sep 21 18:01:32 2013
@@ -91,9 +91,9 @@ public class MultiThreadedReader extends
   }
 
   @Override
-  public void start(long startKey, long endKey, int numThreads)
-      throws IOException {
-    super.start(startKey, endKey, numThreads);
+  public void start(long startKey, long endKey, int numThreads, boolean useTags,
+    int minNumTags, int maxNumTags) throws IOException {
+    super.start(startKey, endKey, numThreads, useTags, minNumTags, maxNumTags);
     if (verbose) {
       LOG.debug("Reading keys [" + startKey + ", " + endKey + ")");
     }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
Sat Sep 21 18:01:32 2013
@@ -79,9 +79,9 @@ public class MultiThreadedUpdater extend
   }
 
   @Override
-  public void start(long startKey, long endKey, int numThreads)
-      throws IOException {
-    super.start(startKey, endKey, numThreads);
+  public void start(long startKey, long endKey, int numThreads, boolean useTags, int minNumTags,
+      int maxNumTags) throws IOException {
+    super.start(startKey, endKey, numThreads, useTags, minNumTags, maxNumTags);
 
     if (verbose) {
       LOG.debug("Updating keys [" + startKey + ", " + endKey + ")");

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
Sat Sep 21 18:01:32 2013
@@ -24,6 +24,7 @@ import static org.apache.hadoop.hbase.ut
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashSet;
+import java.util.Random;
 import java.util.Set;
 
 import org.apache.commons.logging.Log;
@@ -31,6 +32,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
@@ -43,6 +45,11 @@ public class MultiThreadedWriter extends
 
   private boolean isMultiPut = false;
 
+  private Random random = new Random();
+  // TODO: Make this configurable
+  private int minTagLength = 16;
+  private int maxTagLength = 512;
+
   public MultiThreadedWriter(LoadTestDataGenerator dataGen, Configuration conf,
       TableName tableName) {
     super(dataGen, conf, tableName, "W");
@@ -54,9 +61,9 @@ public class MultiThreadedWriter extends
   }
 
   @Override
-  public void start(long startKey, long endKey, int numThreads)
-      throws IOException {
-    super.start(startKey, endKey, numThreads);
+  public void start(long startKey, long endKey, int numThreads, boolean useTags,
+       int minNumTags, int maxNumTags) throws IOException {
+    super.start(startKey, endKey, numThreads, useTags, minNumTags, maxNumTags);
 
     if (verbose) {
       LOG.debug("Inserting keys [" + startKey + ", " + endKey + ")");
@@ -89,9 +96,26 @@ public class MultiThreadedWriter extends
           int columnCount = 0;
           for (byte[] cf : columnFamilies) {
             byte[][] columns = dataGenerator.generateColumnsForCf(rowKey, cf);
+            int numTags;
+            if (minNumTags == maxNumTags) {
+              numTags = minNumTags;
+            } else {
+              numTags = minNumTags + random.nextInt(maxNumTags - minNumTags);
+            }
+            Tag[] tags = new Tag[numTags];
             for (byte[] column : columns) {
               byte[] value = dataGenerator.generateValue(rowKey, cf, column);
-              put.add(cf, column, value);
+              byte[] tag = LoadTestTool.generateData(random,
+                    minTagLength + random.nextInt(maxTagLength - minTagLength));
+              if (useTags) {
+                for (int n = 0; n < numTags; n++) {
+                  Tag t = new Tag((byte) n, tag);
+                  tags[n] = t;
+                }
+                put.add(cf, column, value, tags);
+              } else {
+                put.add(cf, column, value);
+              }
               ++columnCount;
               if (!isMultiPut) {
                 insert(table, put, rowKeyBase);

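One detail of the writer hunk above is worth noting: Random.nextInt(bound) excludes its bound, so minNumTags + random.nextInt(maxNumTags - minNumTags) samples from [minNumTags, maxNumTags) and never yields maxNumTags itself. A standalone sketch with illustrative values:

    import java.util.Random;

    public class TagCountDrawSketch {
      public static void main(String[] args) {
        Random random = new Random();
        int minNumTags = 2, maxNumTags = 5;
        // nextInt(3) returns 0, 1, or 2, so numTags is 2, 3, or 4 -- never 5.
        int numTags = (minNumTags == maxNumTags)
            ? minNumTags
            : minNumTags + random.nextInt(maxNumTags - minNumTags);
        System.out.println("numTags = " + numTags);
      }
    }
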
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
Sat Sep 21 18:01:32 2013
@@ -83,9 +83,9 @@ public abstract class MultiThreadedWrite
   }
 
   @Override
-  public void start(long startKey, long endKey, int numThreads)
-      throws IOException {
-    super.start(startKey, endKey, numThreads);
+  public void start(long startKey, long endKey, int numThreads, boolean useTags, int minNumTags,
+      int maxNumTags) throws IOException {
+    super.start(startKey, endKey, numThreads, useTags, minNumTags, maxNumTags);
 
     nextKeyToWrite.set(startKey);
     wroteUpToKey.set(startKey - 1);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
Sat Sep 21 18:01:32 2013
@@ -81,7 +81,7 @@ public class RestartMetaTest extends Abs
       minColDataSize, maxColDataSize, minColsPerKey, maxColsPerKey, LoadTestTool.COLUMN_FAMILY);
     MultiThreadedWriter writer = new MultiThreadedWriter(dataGen, conf, TABLE_NAME);
     writer.setMultiPut(true);
-    writer.start(startKey, endKey, numThreads);
+    writer.start(startKey, endKey, numThreads, false, 0, 0);
     System.out.printf("Started loading data...");
     writer.waitForFinish();
     System.out.printf("Finished loading data...");

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
Sat Sep 21 18:01:32 2013
@@ -45,8 +45,8 @@ public class TestMiniClusterLoadParallel
 
     readerThreads.linkToWriter(writerThreads);
 
-    writerThreads.start(0, numKeys, NUM_THREADS);
-    readerThreads.start(0, numKeys, NUM_THREADS);
+    writerThreads.start(0, numKeys, NUM_THREADS, false, 0, 0);
+    readerThreads.start(0, numKeys, NUM_THREADS, false, 0, 0);
 
     writerThreads.waitForFinish();
     readerThreads.waitForFinish();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
(original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
Sat Sep 21 18:01:32 2013
@@ -129,11 +129,11 @@ public class TestMiniClusterLoadSequenti
   }
 
   protected void runLoadTestOnExistingTable() throws IOException {
-    writerThreads.start(0, numKeys, NUM_THREADS);
+    writerThreads.start(0, numKeys, NUM_THREADS, false, 0, 0);
     writerThreads.waitForFinish();
     assertEquals(0, writerThreads.getNumWriteFailures());
 
-    readerThreads.start(0, numKeys, NUM_THREADS);
+    readerThreads.start(0, numKeys, NUM_THREADS, false, 0, 0);
     readerThreads.waitForFinish();
     assertEquals(0, readerThreads.getNumReadFailures());
     assertEquals(0, readerThreads.getNumReadErrors());

Modified: hbase/trunk/hbase-server/src/test/resources/mapred-site.xml
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/resources/mapred-site.xml?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
Binary files - no diff available.


