hbase-commits mailing list archives

From: ramkris...@apache.org
Subject: svn commit: r1530567 [2/2] - in /hbase/trunk: hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ hbase-server/src/main/...
Date: Wed, 09 Oct 2013 11:21:04 GMT
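
Every hunk below makes the same change: test code that configured an HFileContext
through setters now obtains one from the fluent HFileContextBuilder. A minimal
before/after sketch of the pattern — the builder methods and the 4096 block size
are taken from the first hunk, while the wrapper class and method names are
invented for illustration:

    import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
    import org.apache.hadoop.hbase.io.hfile.HFileContext;
    import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

    public class HFileContextMigrationSketch {
      // Before this commit: create a context and configure it in place with setters.
      static HFileContext oldStyle(Algorithm compressAlgo) {
        HFileContext context = new HFileContext();
        context.setBlocksize(4096);
        context.setCompressAlgo(compressAlgo);
        return context;
      }

      // After this commit: chain the options and build the context in one expression.
      static HFileContext newStyle(Algorithm compressAlgo) {
        return new HFileContextBuilder()
            .withBlockSize(4096)
            .withCompressionAlgo(compressAlgo)
            .build();
      }
    }

The richer variants further down set withIncludesMvcc, withIncludesTags,
withHBaseCheckSum, withChecksumType and withBytesPerCheckSum in the same chained
style.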
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java Wed Oct  9 11:21:03 2013
@@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.KeyValue.
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
@@ -94,9 +93,10 @@ public class TestHFileWriterV2 {
   private void writeDataAndReadFromHFile(Path hfilePath,
       Algorithm compressAlgo, int entryCount, boolean findMidKey) throws IOException {
 
-    HFileContext context = new HFileContext();
-    context.setBlocksize(4096);
-    context.setCompressAlgo(compressAlgo);
+    HFileContext context = new HFileContextBuilder()
+                           .withBlockSize(4096)
+                           .withCompressionAlgo(compressAlgo)
+                           .build();
     HFileWriterV2 writer = (HFileWriterV2)
         new HFileWriterV2.WriterFactoryV2(conf, new CacheConfig(conf))
             .withPath(fs, hfilePath)
@@ -137,11 +137,12 @@ public class TestHFileWriterV2 {
     assertEquals(2, trailer.getMajorVersion());
     assertEquals(entryCount, trailer.getEntryCount());
 
-    HFileContext meta = new HFileContext();
-    meta.setUsesHBaseChecksum(true);
-    meta.setIncludesMvcc(false);
-    meta.setIncludesTags(false);
-    meta.setCompressAlgo(compressAlgo);
+    HFileContext meta = new HFileContextBuilder()
+                        .withHBaseCheckSum(true)
+                        .withIncludesMvcc(false)
+                        .withIncludesTags(false)
+                        .withCompressionAlgo(compressAlgo)
+                        .build();
     
     HFileBlock.FSReader blockReader = new HFileBlock.FSReaderV2(fsdis, fileSize, meta);
     // Comparator class name is stored in the trailer in version 2.

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java Wed Oct  9 11:21:03 2013
@@ -115,9 +115,9 @@ public class TestHFileWriterV3 {
 
   private void writeDataAndReadFromHFile(Path hfilePath,
      Algorithm compressAlgo, int entryCount, boolean findMidKey, boolean useTags) throws IOException {
-    HFileContext context = new HFileContext();
-    context.setBlocksize(4096);
-    context.setCompressAlgo(compressAlgo);
+    HFileContext context = new HFileContextBuilder()
+                           .withBlockSize(4096)
+                           .withCompressionAlgo(compressAlgo).build();
     HFileWriterV3 writer = (HFileWriterV3)
         new HFileWriterV3.WriterFactoryV3(conf, new CacheConfig(conf))
             .withPath(fs, hfilePath)
@@ -168,11 +168,11 @@ public class TestHFileWriterV3 {
 
     assertEquals(3, trailer.getMajorVersion());
     assertEquals(entryCount, trailer.getEntryCount());
-    HFileContext meta = new HFileContext();
-    meta.setCompressAlgo(compressAlgo);
-    meta.setIncludesMvcc(false);
-    meta.setIncludesTags(useTags);
-    meta.setUsesHBaseChecksum(true);
+    HFileContext meta = new HFileContextBuilder()
+                        .withCompressionAlgo(compressAlgo)
+                        .withIncludesMvcc(false)
+                        .withIncludesTags(useTags)
+                        .withHBaseCheckSum(true).build();
     HFileBlock.FSReader blockReader =
         new HFileBlock.FSReaderV2(fsdis, fileSize, meta);
    // Comparator class name is stored in the trailer in version 2.

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java Wed Oct  9 11:21:03 2013
@@ -57,8 +57,7 @@ public class TestReseekTo {
       TEST_UTIL.getConfiguration().setInt("hfile.format.version", 3);
     }
     CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
-    HFileContext context = new HFileContext();
-    context.setBlocksize(4000);
+    HFileContext context = new HFileContextBuilder().withBlockSize(4000).build();
     HFile.Writer writer = HFile.getWriterFactory(
         TEST_UTIL.getConfiguration(), cacheConf)
             .withOutputStream(fout)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java Wed Oct  9 11:21:03 2013
@@ -22,9 +22,12 @@ import java.io.IOException;
 
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.RawComparator;
 import org.junit.experimental.categories.Category;
 
 /**
@@ -73,8 +76,7 @@ public class TestSeekTo extends HBaseTes
     }
     FSDataOutputStream fout = this.fs.create(ncTFile);
     int blocksize = toKV("a", tagUsage).getLength() * 3;
-    HFileContext context = new HFileContext();
-    context.setBlocksize(blocksize);
+    HFileContext context = new HFileContextBuilder().withBlockSize(blocksize).build();
     HFile.Writer writer = HFile.getWriterFactoryNoCache(conf).withOutputStream(fout)
         .withFileContext(context)
         // NOTE: This test is dependent on this deprecated nonstandard

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Wed Oct  9 11:21:03 2013
@@ -18,10 +18,22 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.TreeMap;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -29,6 +41,7 @@ import org.apache.hadoop.hbase.io.compre
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
@@ -38,11 +51,6 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.IOException;
-import java.util.TreeMap;
-
-import static org.junit.Assert.*;
-
 /**
  * Test cases for the "load" half of the HFileOutputFormat bulk load
  * functionality. These tests run faster than the full MR cluster
@@ -262,9 +270,10 @@ public class TestLoadIncrementalHFiles {
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows) throws IOException
   {
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(BLOCKSIZE);
-    meta.setCompressAlgo(COMPRESSION);
+    HFileContext meta = new HFileContextBuilder()
+                        .withBlockSize(BLOCKSIZE)
+                        .withCompressionAlgo(COMPRESSION)
+                        .build();
     HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
         .withPath(fs, path)
         .withFileContext(meta)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java Wed Oct  9 11:21:03 2013
@@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.HConstant
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.io.BytesWritable;
 
@@ -183,9 +183,8 @@ public class CreateRandomStoreFile {
           Integer.valueOf(cmdLine.getOptionValue(INDEX_BLOCK_SIZE_OPTION)));
     }
 
-    HFileContext meta = new HFileContext();
-    meta.setCompressAlgo(compr);
-    meta.setBlocksize(blockSize);
+    HFileContext meta = new HFileContextBuilder().withCompressionAlgo(compr)
+                        .withBlockSize(blockSize).build();
     StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf,
         new CacheConfig(conf), fs)
             .withOutputDir(outputDir)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java Wed Oct  9 11:21:03 2013
@@ -45,9 +45,10 @@ import org.apache.hadoop.hbase.io.compre
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.encoding.EncodedDataBlock;
-import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileReaderV2;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -214,10 +215,10 @@ public class DataBlockEncodingTool {
         continue;
       }
       DataBlockEncoder d = encoding.getEncoder();
-      HFileContext meta = new HFileContext();
-      meta.setCompressAlgo(Compression.Algorithm.NONE);
-      meta.setIncludesMvcc(includesMemstoreTS);
-      meta.setIncludesTags(useTag);
+      HFileContext meta = new HFileContextBuilder()
+                          .withCompressionAlgo(Compression.Algorithm.NONE)
+                          .withIncludesMvcc(includesMemstoreTS)
+                          .withIncludesTags(useTag).build();
       codecs.add(new EncodedDataBlock(d, encoding, rawKVs, meta ));
     }
   }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java Wed Oct  9 11:21:03 2013
@@ -36,13 +36,16 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
-import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.TestHFileWriterV2;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.ByteBloomFilter;
@@ -292,8 +295,7 @@ public class TestCompoundBloomFilter {
         BLOOM_BLOCK_SIZES[t]);
     conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
     cacheConf = new CacheConfig(conf);
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(BLOCK_SIZES[t]);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCK_SIZES[t]).build();
     StoreFile.Writer w = new StoreFile.WriterBuilder(conf, cacheConf, fs)
             .withOutputDir(TEST_UTIL.getDataTestDir())
             .withBloomType(bt)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java Wed Oct  9 11:21:03 2013
@@ -37,12 +37,19 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PositionedReadable;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -73,8 +80,7 @@ public class TestFSErrorsExposed {
     FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
     FileSystem fs = new HFileSystem(faultyfs);
     CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(2*1024);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
     StoreFile.Writer writer = new StoreFile.WriterBuilder(
         util.getConfiguration(), cacheConf, hfs)
             .withOutputDir(hfilePath)
@@ -125,8 +131,7 @@ public class TestFSErrorsExposed {
     FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
     HFileSystem fs = new HFileSystem(faultyfs);
     CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(2 * 1024);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
     StoreFile.Writer writer = new StoreFile.WriterBuilder(
         util.getConfiguration(), cacheConf, hfs)
             .withOutputDir(hfilePath)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java Wed Oct  9 11:21:03 2013
@@ -27,9 +27,16 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
+import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.RegionServerCallable;
@@ -38,12 +45,12 @@ import org.apache.hadoop.hbase.client.Re
 import org.apache.hadoop.hbase.client.RpcRetryingCaller;
 import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest;
@@ -51,9 +58,9 @@ import org.apache.hadoop.hbase.protobuf.
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 import com.google.common.collect.Lists;
-import org.junit.experimental.categories.Category;
 
 /**
  * Tests bulk loading of HFiles and shows the atomicity or lack of atomicity of
@@ -89,9 +96,9 @@ public class TestHRegionServerBulkLoad {
    */
   public static void createHFile(FileSystem fs, Path path, byte[] family,
       byte[] qualifier, byte[] value, int numRows) throws IOException {
-    HFileContext context = new HFileContext();
-    context.setBlocksize(BLOCKSIZE);
-    context.setCompressAlgo(COMPRESSION);
+    HFileContext context = new HFileContextBuilder().withBlockSize(BLOCKSIZE)
+                            .withCompressionAlgo(COMPRESSION)
+                            .build();
     HFile.Writer writer = HFile
         .getWriterFactory(conf, new CacheConfig(conf))
         .withPath(fs, path)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Wed Oct  9 11:21:03 2013
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.io.encodi
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
@@ -321,8 +322,7 @@ public class TestStore extends TestCase 
     long seqid = f.getMaxSequenceId();
     Configuration c = HBaseConfiguration.create();
     FileSystem fs = FileSystem.get(c);
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(StoreFile.DEFAULT_BLOCKSIZE_SMALL);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL).build();
     StoreFile.Writer w = new StoreFile.WriterBuilder(c, new CacheConfig(c),
         fs)
             .withOutputDir(storedir)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Wed Oct  9 11:21:03 2013
@@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.CacheStats;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
@@ -96,8 +97,7 @@ public class TestStoreFile extends HBase
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
       conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);
 
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(2 * 1024);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(2*1024).build();
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
             .withFilePath(regionFs.createTempName())
             .withFileContext(meta)
@@ -148,8 +148,7 @@ public class TestStoreFile extends HBase
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
       conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);
 
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(8 * 1024);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
             .withFilePath(regionFs.createTempName())
@@ -194,8 +193,7 @@ public class TestStoreFile extends HBase
     FSUtils.setRootDir(testConf, this.testDir);
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
       testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(8 * 1024);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
 
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
@@ -240,8 +238,7 @@ public class TestStoreFile extends HBase
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
       testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
 
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(8 * 1024);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it. <root>/<tablename>/<rgn>/<cf>/<file>
     StoreFile.Writer writer = new StoreFile.WriterBuilder(testConf, cacheConf, this.fs)
             .withFilePath(regionFs.createTempName())
@@ -503,10 +500,9 @@ public class TestStoreFile extends HBase
 
     // write the file
     Path f = new Path(ROOT_DIR, getName());
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(StoreFile.DEFAULT_BLOCKSIZE_SMALL);
-    meta.setChecksumType(CKTYPE);
-    meta.setBytesPerChecksum(CKBYTES);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+                        .withChecksumType(CKTYPE)
+                        .withBytesPerCheckSum(CKBYTES).build();
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
             .withFilePath(f)
@@ -526,10 +522,10 @@ public class TestStoreFile extends HBase
     // write the file
     Path f = new Path(ROOT_DIR, getName());
 
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(StoreFile.DEFAULT_BLOCKSIZE_SMALL);
-    meta.setChecksumType(CKTYPE);
-    meta.setBytesPerChecksum(CKBYTES);
+    HFileContext meta = new HFileContextBuilder()
+                        .withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+                        .withChecksumType(CKTYPE)
+                        .withBytesPerCheckSum(CKBYTES).build();
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
             .withFilePath(f)
@@ -582,8 +578,7 @@ public class TestStoreFile extends HBase
   public void testReseek() throws Exception {
     // write the file
     Path f = new Path(ROOT_DIR, getName());
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(8 * 1024);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
             .withFilePath(f)
@@ -626,10 +621,9 @@ public class TestStoreFile extends HBase
     for (int x : new int[]{0,1}) {
       // write the file
       Path f = new Path(ROOT_DIR, getName() + x);
-      HFileContext meta = new HFileContext();
-      meta.setBlocksize(StoreFile.DEFAULT_BLOCKSIZE_SMALL);
-      meta.setChecksumType(CKTYPE);
-      meta.setBytesPerChecksum(CKBYTES);
+      HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+          .withChecksumType(CKTYPE)
+          .withBytesPerCheckSum(CKBYTES).build();
       // Make a store file and write data to it.
       StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
               .withFilePath(f)
@@ -782,8 +776,7 @@ public class TestStoreFile extends HBase
     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
     Path storedir = new Path(new Path(this.testDir, "7e0102"), "familyname");
     Path dir = new Path(storedir, "1234567890");
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(8 * 1024);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
             .withOutputDir(dir)
@@ -969,10 +962,10 @@ public class TestStoreFile extends HBase
       totalSize += kv.getLength() + 1;
     }
     int blockSize = totalSize / numBlocks;
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(blockSize);
-    meta.setChecksumType(CKTYPE);
-    meta.setBytesPerChecksum(CKBYTES);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(blockSize)
+                        .withChecksumType(CKTYPE)
+                        .withBytesPerCheckSum(CKBYTES)
+                        .build();
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
             .withFilePath(path)
@@ -1005,12 +998,12 @@ public class TestStoreFile extends HBase
             dataBlockEncoderAlgo,
             dataBlockEncoderAlgo);
     cacheConf = new CacheConfig(conf);
-    HFileContext meta = new HFileContext();
-    meta.setBlocksize(HConstants.DEFAULT_BLOCKSIZE);
-    meta.setChecksumType(CKTYPE);
-    meta.setBytesPerChecksum(CKBYTES);
-    meta.setEncodingOnDisk(dataBlockEncoderAlgo);
-    meta.setEncodingInCache(dataBlockEncoderAlgo);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+        .withChecksumType(CKTYPE)
+        .withBytesPerCheckSum(CKBYTES)
+        .withDataBlockEncodingInCache(dataBlockEncoderAlgo)
+        .withDataBlockEncodingOnDisk(dataBlockEncoderAlgo)
+        .build();
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
             .withFilePath(path)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java Wed Oct  9 11:21:03 2013
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.client.Re
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.DefaultStoreEngine;
@@ -327,7 +328,7 @@ public class TestWALReplay {
     HLog wal = createWAL(this.conf);
     HRegion region = HRegion.openHRegion(hri, htd, wal, this.conf);
     Path f =  new Path(basedir, "hfile");
-    HFileContext context = new HFileContext();
+    HFileContext context = new HFileContextBuilder().build();
     HFile.Writer writer =
     HFile.getWriterFactoryNoCache(conf).withPath(fs, f)
         .withFileContext(context).create();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java?rev=1530567&r1=1530566&r2=1530567&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java Wed Oct  9 11:21:03 2013
@@ -68,6 +68,7 @@ import org.apache.hadoop.hbase.coprocess
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -843,7 +844,7 @@ public class TestAccessController extend
       HFile.Writer writer = null;
       long now = System.currentTimeMillis();
       try {
-        HFileContext context = new HFileContext();
+        HFileContext context = new HFileContextBuilder().build();
         writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
             .withPath(fs, path)
             .withFileContext(context)


