From: stack@apache.org
To: commits@hbase.apache.org
Subject: svn commit: r1410496 - in /hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase: regionserver/TestStore.java util/TestCompressionTest.java
Date: Fri, 16 Nov 2012 17:29:20 -0000

Author: stack
Date: Fri Nov 16 17:29:19 2012
New Revision: 1410496

URL: http://svn.apache.org/viewvc?rev=1410496&view=rev
Log:
HBASE-7178 Compression tests

Modified:
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1410496&r1=1410495&r2=1410496&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Fri Nov 16 17:29:19 2012
@@ -51,7 +51,10 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
@@ -151,6 +154,35 @@ public class TestStore extends TestCase
     store = new HStore(basedir, region, hcd, fs, conf);
   }
 
+  /**
+   * Verify that compression and data block encoding are respected by the
+   * Store.createWriterInTmp() method, used on store flush.
+   */
+  public void testCreateWriter() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    FileSystem fs = FileSystem.get(conf);
+
+    HColumnDescriptor hcd = new HColumnDescriptor(family);
+    hcd.setCompressionType(Compression.Algorithm.GZ);
+    hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);
+    init(getName(), conf, hcd);
+
+    // Test createWriterInTmp()
+    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false);
+    Path path = writer.getPath();
+    writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
+    writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
+    writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3)));
+    writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4)));
+    writer.close();
+
+    // Verify that compression and encoding settings are respected
+    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
+    assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm());
+    assertEquals(hcd.getDataBlockEncoding(), reader.getEncodingOnDisk());
+    reader.close();
+  }
+
   public void testDeleteExpiredStoreFiles() throws Exception {
     int storeFileNum = 4;
     int ttl = 4;
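The new testCreateWriter() above exercises the same writer path a memstore flush
uses, checking that the resulting HFile carries the column family's compression
(GZ) and data block encoding (DIFF). For context, the sketch below shows roughly
how those column-family settings are declared through the client API of this era.
It is not part of the commit; the table name "t1" and family name "f" are made up:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.io.compress.Compression;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

    public class CreateCompressedTable {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);

        // Same two settings the test asserts on: GZ compression and DIFF
        // data block encoding, attached to the column family descriptor.
        HColumnDescriptor hcd = new HColumnDescriptor("f");
        hcd.setCompressionType(Compression.Algorithm.GZ);
        hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);

        HTableDescriptor htd = new HTableDescriptor("t1");
        htd.addFamily(hcd);
        admin.createTable(htd);
        admin.close();
      }
    }

Once such a table exists, every flush and compaction of family "f" should produce
store files with exactly the properties testCreateWriter() verifies via HFile.Reader.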
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java?rev=1410496&r1=1410495&r2=1410496&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java Fri Nov 16 17:29:19 2012
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hbase.util;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -38,10 +40,10 @@ import static org.junit.Assert.*;
 
 @Category(SmallTests.class)
 public class TestCompressionTest {
+  static final Log LOG = LogFactory.getLog(TestCompressionTest.class);
 
   @Test
-  public void testTestCompression() {
-
+  public void testExceptionCaching() {
     // This test will fail if you run the tests with LZO compression available.
     try {
       CompressionTest.testCompression(Compression.Algorithm.LZO);
@@ -60,73 +62,81 @@ public class TestCompressionTest {
       assertNull(e.getCause());
     }
 
-    assertFalse(CompressionTest.testCompression("LZO"));
+  }
+
+  @Test
+  public void testTestCompression() {
     assertTrue(CompressionTest.testCompression("NONE"));
     assertTrue(CompressionTest.testCompression("GZ"));
 
-    if (isCompressionAvailable("org.apache.hadoop.io.compress.SnappyCodec")) {
-      if (NativeCodeLoader.isNativeCodeLoaded()) {
-        try {
-          System.loadLibrary("snappy");
+    if (NativeCodeLoader.isNativeCodeLoaded()) {
+      nativeCodecTest("LZO", "lzo2", "com.hadoop.compression.lzo.LzoCodec");
+      nativeCodecTest("LZ4", null, "org.apache.hadoop.io.compress.Lz4Codec");
+      nativeCodecTest("SNAPPY", "snappy", "org.apache.hadoop.io.compress.SnappyCodec");
+    } else {
+      // Hadoop nativelib is not available
+      LOG.debug("Native code not loaded");
+      assertFalse(CompressionTest.testCompression("LZO"));
+      assertFalse(CompressionTest.testCompression("LZ4"));
+      assertFalse(CompressionTest.testCompression("SNAPPY"));
+    }
+  }
+
+  private boolean isCompressionAvailable(String codecClassName) {
+    try {
+      Thread.currentThread().getContextClassLoader().loadClass(codecClassName);
+      return true;
+    } catch (Exception ex) {
+      return false;
+    }
+  }
 
-          try {
+  /**
+   * Verify CompressionTest.testCompression() on a native codec.
+   */
+  private void nativeCodecTest(String codecName, String libName, String codecClassName) {
+    if (isCompressionAvailable(codecClassName)) {
+      try {
+        if (libName != null) {
+          System.loadLibrary(libName);
+        }
+
+        try {
           Configuration conf = new Configuration();
           CompressionCodec codec = (CompressionCodec)
-            ReflectionUtils.newInstance(
-              conf.getClassByName("org.apache.hadoop.io.compress.SnappyCodec"), conf);
+            ReflectionUtils.newInstance(conf.getClassByName(codecClassName), conf);
 
           DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
-          CompressionOutputStream deflateFilter =
-            codec.createOutputStream(compressedDataBuffer);
+          CompressionOutputStream deflateFilter = codec.createOutputStream(compressedDataBuffer);
 
           byte[] data = new byte[1024];
-          DataOutputStream deflateOut = new DataOutputStream(
-            new BufferedOutputStream(deflateFilter));
+          DataOutputStream deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
           deflateOut.write(data, 0, data.length);
           deflateOut.flush();
           deflateFilter.finish();
 
-          // Snappy Codec class, Snappy nativelib and Hadoop nativelib with
-          // Snappy JNIs are present
-          assertTrue(CompressionTest.testCompression("SNAPPY"));
-        }
-        catch (UnsatisfiedLinkError ex) {
-          // Hadoop nativelib does not have Snappy JNIs
-
-          // cannot assert the codec here because the current logic of
-          // CompressionTest checks only classloading, not the codec
-          // usage.
-        }
-        catch (Exception ex) {
-        }
+          // Codec class, codec nativelib and Hadoop nativelib with codec JNIs are present
+          assertTrue(CompressionTest.testCompression(codecName));
+        } catch (UnsatisfiedLinkError e) {
+          // Hadoop nativelib does not have codec JNIs.
+          // cannot assert the codec here because the current logic of
+          // CompressionTest checks only classloading, not the codec
+          // usage.
+          LOG.debug("No JNI for codec '" + codecName + "' " + e.getMessage());
+        } catch (Exception e) {
+          LOG.error(codecName, e);
         }
-        catch (UnsatisfiedLinkError ex) {
-          // Snappy nativelib is not available
-          assertFalse(CompressionTest.testCompression("SNAPPY"));
-        }
-      }
-      else {
-        // Hadoop nativelib is not available
-        assertFalse(CompressionTest.testCompression("SNAPPY"));
+      } catch (UnsatisfiedLinkError e) {
+        // nativelib is not available
+        LOG.debug("Native lib not available: " + codecName);
+        assertFalse(CompressionTest.testCompression(codecName));
       }
-    }
-    else {
-      // Snappy Codec class is not available
-      assertFalse(CompressionTest.testCompression("SNAPPY"));
-    }
-  }
-
-  private boolean isCompressionAvailable(String codecClassName) {
-    try {
-      Thread.currentThread().getContextClassLoader().loadClass(codecClassName);
-      return true;
-    }
-    catch (Exception ex) {
-      return false;
+    } else {
+      // Compression Codec class is not available
+      LOG.debug("Codec class not available: " + codecName);
+      assertFalse(CompressionTest.testCompression(codecName));
     }
   }
-
-
 }