From: misty@apache.org
To: commits@hbase.apache.org
Reply-To: dev@hbase.apache.org
Date: Tue, 21 Mar 2017 16:35:58 -0000
Message-Id: <67d4ed219cf640d98c62c56b015b7597@git.apache.org>
Subject: [02/52] [partial] hbase-site git commit: Published site at 1cfd22bf43c9b64afae35d9bf16f764d0da80cab.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/22cff34f/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html
index 608d19a..f42fb90 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html
@@ -606,119 +606,118 @@
598    Path snapshotDir = SnapshotDescriptionUtils.getSnapshotsDir(rootDir);
599    FileStatus[] snapshots = fs.listStatus(snapshotDir,
600      new SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter(fs));
-601    List<SnapshotDescription> snapshotLists =
-602      new ArrayList<SnapshotDescription>(snapshots.length);
-603    for (FileStatus snapshotDirStat: snapshots) {
-604      HBaseProtos.SnapshotDescription snapshotDesc =
-605        SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDirStat.getPath());
-606      snapshotLists.add(ProtobufUtil.createSnapshotDesc(snapshotDesc));
-607    }
-608    return snapshotLists;
-609  }
-610
-611  /**
-612   * Gets the store files map for snapshot
-613   * @param conf the {@link Configuration} to use
-614   * @param snapshot {@link SnapshotDescription} to get stats from
-615   * @param exec the {@link ExecutorService} to use
-616   * @param filesMap {@link Map} the map to put the mapping entries
-617   * @param uniqueHFilesArchiveSize {@link AtomicLong} the accumulated store file size in archive
-618   * @param uniqueHFilesSize {@link AtomicLong} the accumulated store file size shared
-619   * @param uniqueHFilesMobSize {@link AtomicLong} the accumulated mob store file size shared
-620   * @return the snapshot stats
-621   */
-622  private static void getSnapshotFilesMap(final Configuration conf,
-623      final SnapshotDescription snapshot, final ExecutorService exec,
-624      final ConcurrentHashMap<Path, Integer> filesMap,
-625      final AtomicLong uniqueHFilesArchiveSize, final AtomicLong uniqueHFilesSize,
-626      final AtomicLong uniqueHFilesMobSize) throws IOException {
-627    HBaseProtos.SnapshotDescription snapshotDesc =
-628      ProtobufUtil.createHBaseProtosSnapshotDesc(snapshot);
-629    Path rootDir = FSUtils.getRootDir(conf);
-630    final FileSystem fs = FileSystem.get(rootDir.toUri(), conf);
-631
-632    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotDesc, rootDir);
-633    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, snapshotDesc);
-634    SnapshotReferenceUtil.concurrentVisitReferencedFiles(conf, fs, manifest, exec,
-635      new SnapshotReferenceUtil.SnapshotVisitor() {
-636        @Override public void storeFile(final HRegionInfo regionInfo, final String family,
-637            final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
-638          if (!storeFile.hasReference()) {
-639            HFileLink link = HFileLink.build(conf, snapshot.getTableName(),
-640              regionInfo.getEncodedName(), family, storeFile.getName());
-641            long size;
-642            Integer count;
-643            Path p;
-644            AtomicLong al;
-645            int c = 0;
-646
-647            if (fs.exists(link.getArchivePath())) {
-648              p = link.getArchivePath();
-649              al = uniqueHFilesArchiveSize;
-650              size = fs.getFileStatus(p).getLen();
-651            } else if (fs.exists(link.getMobPath())) {
-652              p = link.getMobPath();
-653              al = uniqueHFilesMobSize;
-654              size = fs.getFileStatus(p).getLen();
-655            } else {
-656              p = link.getOriginPath();
-657              al = uniqueHFilesSize;
-658              size = link.getFileStatus(fs).getLen();
-659            }
-660
-661            // If it has been counted, do not double count
-662            count = filesMap.get(p);
-663            if (count != null) {
-664              c = count.intValue();
-665            } else {
-666              al.addAndGet(size);
-667            }
-668
-669            filesMap.put(p, ++c);
-670          }
-671        }
-672      });
-673  }
-674
-675  /**
-676   * Returns the map of store files based on path for all snapshots
-677   * @param conf the {@link Configuration} to use
-678   * @param uniqueHFilesArchiveSize pass out the size for store files in archive
-679   * @param uniqueHFilesSize pass out the size for store files shared
-680   * @param uniqueHFilesMobSize pass out the size for mob store files shared
-681   * @return the map of store files
-682   */
-683  public static Map<Path, Integer> getSnapshotsFilesMap(final Configuration conf,
-684      AtomicLong uniqueHFilesArchiveSize, AtomicLong uniqueHFilesSize,
-685      AtomicLong uniqueHFilesMobSize) throws IOException {
-686    List<SnapshotDescription> snapshotList = getSnapshotList(conf);
+601    List<SnapshotDescription> snapshotLists = new ArrayList<>(snapshots.length);
+602    for (FileStatus snapshotDirStat: snapshots) {
+603      HBaseProtos.SnapshotDescription snapshotDesc =
+604        SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDirStat.getPath());
+605      snapshotLists.add(ProtobufUtil.createSnapshotDesc(snapshotDesc));
+606    }
+607    return snapshotLists;
+608  }
+609
+610  /**
+611   * Gets the store files map for snapshot
+612   * @param conf the {@link Configuration} to use
+613   * @param snapshot {@link SnapshotDescription} to get stats from
+614   * @param exec the {@link ExecutorService} to use
+615   * @param filesMap {@link Map} the map to put the mapping entries
+616   * @param uniqueHFilesArchiveSize {@link AtomicLong} the accumulated store file size in archive
+617   * @param uniqueHFilesSize {@link AtomicLong} the accumulated store file size shared
+618   * @param uniqueHFilesMobSize {@link AtomicLong} the accumulated mob store file size shared
+619   * @return the snapshot stats
+620   */
+621  private static void getSnapshotFilesMap(final Configuration conf,
+622      final SnapshotDescription snapshot, final ExecutorService exec,
+623      final ConcurrentHashMap<Path, Integer> filesMap,
+624      final AtomicLong uniqueHFilesArchiveSize, final AtomicLong uniqueHFilesSize,
+625      final AtomicLong uniqueHFilesMobSize) throws IOException {
+626    HBaseProtos.SnapshotDescription snapshotDesc =
+627      ProtobufUtil.createHBaseProtosSnapshotDesc(snapshot);
+628    Path rootDir = FSUtils.getRootDir(conf);
+629    final FileSystem fs = FileSystem.get(rootDir.toUri(), conf);
+630
+631    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotDesc, rootDir);
+632    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, snapshotDesc);
+633    SnapshotReferenceUtil.concurrentVisitReferencedFiles(conf, fs, manifest, exec,
+634      new SnapshotReferenceUtil.SnapshotVisitor() {
+635        @Override public void storeFile(final HRegionInfo regionInfo, final String family,
+636            final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
+637          if (!storeFile.hasReference()) {
+638            HFileLink link = HFileLink.build(conf, snapshot.getTableName(),
+639              regionInfo.getEncodedName(), family, storeFile.getName());
+640            long size;
+641            Integer count;
+642            Path p;
+643            AtomicLong al;
+644            int c = 0;
+645
+646            if (fs.exists(link.getArchivePath())) {
+647              p = link.getArchivePath();
+648              al = uniqueHFilesArchiveSize;
+649              size = fs.getFileStatus(p).getLen();
+650            } else if (fs.exists(link.getMobPath())) {
+651              p = link.getMobPath();
+652              al = uniqueHFilesMobSize;
+653              size = fs.getFileStatus(p).getLen();
+654            } else {
+655              p = link.getOriginPath();
+656              al = uniqueHFilesSize;
+657              size = link.getFileStatus(fs).getLen();
+658            }
+659
+660            // If it has been counted, do not double count
+661            count = filesMap.get(p);
+662            if (count != null) {
+663              c = count.intValue();
+664            } else {
+665              al.addAndGet(size);
+666            }
+667
+668            filesMap.put(p, ++c);
+669          }
+670        }
+671      });
+672  }
+673
+674  /**
+675   * Returns the map of store files based on path for all snapshots
+676   * @param conf the {@link Configuration} to use
+677   * @param uniqueHFilesArchiveSize pass out the size for store files in archive
+678   * @param uniqueHFilesSize pass out the size for store files shared
+679   * @param uniqueHFilesMobSize pass out the size for mob store files shared
+680   * @return the map of store files
+681   */
+682  public static Map<Path, Integer> getSnapshotsFilesMap(final Configuration conf,
+683      AtomicLong uniqueHFilesArchiveSize, AtomicLong uniqueHFilesSize,
+684      AtomicLong uniqueHFilesMobSize) throws IOException {
+685    List<SnapshotDescription> snapshotList = getSnapshotList(conf);
+686
687
-688
-689    if (snapshotList.isEmpty()) {
-690      return Collections.emptyMap();
-691    }
-692
-693    ConcurrentHashMap<Path, Integer> fileMap = new ConcurrentHashMap<>();
-694
-695    ExecutorService exec = SnapshotManifest.createExecutor(conf, "SnapshotsFilesMapping");
-696
-697    try {
-698      for (final SnapshotDescription snapshot : snapshotList) {
-699        getSnapshotFilesMap(conf, snapshot, exec, fileMap, uniqueHFilesArchiveSize,
-700          uniqueHFilesSize, uniqueHFilesMobSize);
-701      }
-702    } finally {
-703      exec.shutdown();
-704    }
-705
-706    return fileMap;
-707  }
+688    if (snapshotList.isEmpty()) {
+689      return Collections.emptyMap();
+690    }
+691
+692    ConcurrentHashMap<Path, Integer> fileMap = new ConcurrentHashMap<>();
+693
+694    ExecutorService exec = SnapshotManifest.createExecutor(conf, "SnapshotsFilesMapping");
+695
+696    try {
+697      for (final SnapshotDescription snapshot : snapshotList) {
+698        getSnapshotFilesMap(conf, snapshot, exec, fileMap, uniqueHFilesArchiveSize,
+699          uniqueHFilesSize, uniqueHFilesMobSize);
+700      }
+701    } finally {
+702      exec.shutdown();
+703    }
+704
+705    return fileMap;
+706  }
+707
708
-709
-710  public static void main(String[] args) {
-711    new SnapshotInfo().doStaticMain(args);
-712  }
-713}
+709  public static void main(String[] args) {
+710    new SnapshotInfo().doStaticMain(args);
+711  }
+712}
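Aside from the renumbering and diamond cleanup, the getSnapshotFilesMap visitor above is a compact reference-counting idiom: the first time a store file path is seen, its size is added to the matching AtomicLong (archive, mob, or shared); every later sighting only bumps the per-path count in the shared map. A minimal, self-contained sketch of that accounting follows; the String path and supplied size are hypothetical stand-ins for the HFileLink path and FileStatus length resolved inside the real SnapshotVisitor, and, as in the original, the get/put pair is not one atomic step.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

// Sketch of SnapshotInfo's unique-size accounting pattern.
public class UniqueSizeAccounting {
  static final ConcurrentHashMap<String, Integer> filesMap = new ConcurrentHashMap<>();
  static final AtomicLong uniqueSize = new AtomicLong();

  static void account(String path, long size) {
    Integer count = filesMap.get(path);
    int c = 0;
    if (count != null) {
      c = count.intValue();     // seen before: count the reference, not the size
    } else {
      uniqueSize.addAndGet(size);  // first sighting: charge the size once
    }
    filesMap.put(path, ++c);
  }

  public static void main(String[] args) {
    account("archive/cf/hfile-a", 100);  // first snapshot referencing the file
    account("archive/cf/hfile-a", 100);  // second snapshot sharing the same file
    // Prints {archive/cf/hfile-a=2} uniqueSize=100: two references, one size.
    System.out.println(filesMap + " uniqueSize=" + uniqueSize);
  }
}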
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/22cff34f/apidocs/src-html/org/apache/hadoop/hbase/types/StructBuilder.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/types/StructBuilder.html b/apidocs/src-html/org/apache/hadoop/hbase/types/StructBuilder.html
index e22a251..81d9266 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/types/StructBuilder.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/types/StructBuilder.html
@@ -38,7 +38,7 @@
030@InterfaceStability.Evolving
031public class StructBuilder {
032
-033  protected final List<DataType<?>> fields = new ArrayList<DataType<?>>();
+033  protected final List<DataType<?>> fields = new ArrayList<>();
034
035  /**
036   * Create an empty {@code StructBuilder}.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/22cff34f/apidocs/src-html/org/apache/hadoop/hbase/util/ByteRangeUtils.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/ByteRangeUtils.html b/apidocs/src-html/org/apache/hadoop/hbase/util/ByteRangeUtils.html
index f6d2601..91f2d1c 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/ByteRangeUtils.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/ByteRangeUtils.html
@@ -57,7 +57,7 @@
049
050  public static ArrayList<byte[]> copyToNewArrays(Collection<ByteRange> ranges) {
051    if (ranges == null) {
-052      return new ArrayList<byte[]>(0);
+052      return new ArrayList<>(0);
053    }
054    ArrayList<byte[]> arrays = Lists.newArrayListWithCapacity(ranges.size());
055    for (ByteRange range : ranges) {
@@ -68,7 +68,7 @@
060
061  public static ArrayList<ByteRange> fromArrays(Collection<byte[]> arrays) {
062    if (arrays == null) {
-063      return new ArrayList<ByteRange>(0);
+063      return new ArrayList<>(0);
064    }
065    ArrayList<ByteRange> ranges = Lists.newArrayListWithCapacity(arrays.size());
066    for (byte[] array : arrays) {

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/22cff34f/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
index 9eaa194..fb1cd2e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Counter.html
@@ -102,7 +102,7 @@
094  }
095
096  private Counter(Cell initCell) {
-097    containerRef = new AtomicReference<Container>(new Container(initCell));
+097    containerRef = new AtomicReference<>(new Container(initCell));
098  }
099
100  private static int hash() {
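The StructBuilder, ByteRangeUtils, and Counter hunks above are all the same mechanical refactor: dropping explicit constructor type arguments in favor of the Java 7 diamond operator, which lets the compiler infer them from the declared type. A minimal before/after sketch (the class name here is illustrative, not from HBase):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;

public class DiamondDemo {
  public static void main(String[] args) {
    // Pre-Java 7 style, as on the removed lines:
    List<String> before = new ArrayList<String>();
    // Diamond style, as on the added lines; the compiler infers <String>:
    List<String> after = new ArrayList<>();
    // Inference also works through a constructor argument, as in Counter's field:
    AtomicReference<List<String>> ref = new AtomicReference<>(after);
    before.add("x");
    System.out.println(before + " " + ref.get());
  }
}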
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/22cff34f/apidocs/src-html/org/apache/hadoop/hbase/util/EncryptionTest.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/EncryptionTest.html b/apidocs/src-html/org/apache/hadoop/hbase/util/EncryptionTest.html
index b719325..580ed2a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/EncryptionTest.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/EncryptionTest.html
@@ -48,122 +48,121 @@
040public class EncryptionTest {
041  private static final Log LOG = LogFactory.getLog(EncryptionTest.class);
042
-043  static final Map<String, Boolean> keyProviderResults = new ConcurrentHashMap<String, Boolean>();
-044  static final Map<String, Boolean> cipherProviderResults =
-045    new ConcurrentHashMap<String, Boolean>();
-046  static final Map<String, Boolean> cipherResults = new ConcurrentHashMap<String, Boolean>();
-047
-048  private EncryptionTest() {
-049  }
-050
-051  /**
-052   * Check that the configured key provider can be loaded and initialized, or
-053   * throw an exception.
-054   *
-055   * @param conf
-056   * @throws IOException
-057   */
-058  public static void testKeyProvider(final Configuration conf) throws IOException {
-059    String providerClassName = conf.get(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY,
-060      KeyStoreKeyProvider.class.getName());
-061    Boolean result = keyProviderResults.get(providerClassName);
-062    if (result == null) {
-063      try {
-064        Encryption.getKeyProvider(conf);
-065        keyProviderResults.put(providerClassName, true);
-066      } catch (Exception e) { // most likely a RuntimeException
-067        keyProviderResults.put(providerClassName, false);
-068        throw new IOException("Key provider " + providerClassName + " failed test: " +
-069          e.getMessage(), e);
-070      }
-071    } else if (result.booleanValue() == false) {
-072      throw new IOException("Key provider " + providerClassName + " previously failed test");
-073    }
-074  }
-075
-076  /**
-077   * Check that the configured cipher provider can be loaded and initialized, or
-078   * throw an exception.
-079   *
-080   * @param conf
-081   * @throws IOException
-082   */
-083  public static void testCipherProvider(final Configuration conf) throws IOException {
-084    String providerClassName = conf.get(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY,
-085      DefaultCipherProvider.class.getName());
-086    Boolean result = cipherProviderResults.get(providerClassName);
-087    if (result == null) {
-088      try {
-089        Encryption.getCipherProvider(conf);
-090        cipherProviderResults.put(providerClassName, true);
-091      } catch (Exception e) { // most likely a RuntimeException
-092        cipherProviderResults.put(providerClassName, false);
-093        throw new IOException("Cipher provider " + providerClassName + " failed test: " +
-094          e.getMessage(), e);
-095      }
-096    } else if (result.booleanValue() == false) {
-097      throw new IOException("Cipher provider " + providerClassName + " previously failed test");
-098    }
-099  }
-100
-101  /**
-102   * Check that the specified cipher can be loaded and initialized, or throw
-103   * an exception. Verifies key and cipher provider configuration as a
-104   * prerequisite for cipher verification.
-105   *
-106   * @param conf
-107   * @param cipher
-108   * @param key
-109   * @throws IOException
-110   */
-111  public static void testEncryption(final Configuration conf, final String cipher,
-112      byte[] key) throws IOException {
-113    if (cipher == null) {
-114      return;
-115    }
-116    testKeyProvider(conf);
-117    testCipherProvider(conf);
-118    Boolean result = cipherResults.get(cipher);
-119    if (result == null) {
-120      try {
-121        Encryption.Context context = Encryption.newContext(conf);
-122        context.setCipher(Encryption.getCipher(conf, cipher));
-123        if (key == null) {
-124          // Make a random key since one was not provided
-125          context.setKey(context.getCipher().getRandomKey());
-126        } else {
-127          // This will be a wrapped key from schema
-128          context.setKey(EncryptionUtil.unwrapKey(conf,
-129            conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase"),
-130            key));
-131        }
-132        byte[] iv = null;
-133        if (context.getCipher().getIvLength() > 0) {
-134          iv = new byte[context.getCipher().getIvLength()];
-135          Bytes.random(iv);
-136        }
-137        byte[] plaintext = new byte[1024];
-138        Bytes.random(plaintext);
-139        ByteArrayOutputStream out = new ByteArrayOutputStream();
-140        Encryption.encrypt(out, new ByteArrayInputStream(plaintext), context, iv);
-141        byte[] ciphertext = out.toByteArray();
-142        out.reset();
-143        Encryption.decrypt(out, new ByteArrayInputStream(ciphertext), plaintext.length,
-144          context, iv);
-145        byte[] test = out.toByteArray();
-146        if (!Bytes.equals(plaintext, test)) {
-147          throw new IOException("Did not pass encrypt/decrypt test");
-148        }
-149        cipherResults.put(cipher, true);
-150      } catch (Exception e) {
-151        cipherResults.put(cipher, false);
-152        throw new IOException("Cipher " + cipher + " failed test: " + e.getMessage(), e);
-153      }
-154    } else if (result.booleanValue() == false) {
-155      throw new IOException("Cipher " + cipher + " previously failed test");
-156    }
-157  }
-158}
+043  static final Map<String, Boolean> keyProviderResults = new ConcurrentHashMap<>();
+044  static final Map<String, Boolean> cipherProviderResults = new ConcurrentHashMap<>();
+045  static final Map<String, Boolean> cipherResults = new ConcurrentHashMap<>();
+046
+047  private EncryptionTest() {
+048  }
+049
+050  /**
+051   * Check that the configured key provider can be loaded and initialized, or
+052   * throw an exception.
+053   *
+054   * @param conf
+055   * @throws IOException
+056   */
+057  public static void testKeyProvider(final Configuration conf) throws IOException {
+058    String providerClassName = conf.get(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY,
+059      KeyStoreKeyProvider.class.getName());
+060    Boolean result = keyProviderResults.get(providerClassName);
+061    if (result == null) {
+062      try {
+063        Encryption.getKeyProvider(conf);
+064        keyProviderResults.put(providerClassName, true);
+065      } catch (Exception e) { // most likely a RuntimeException
+066        keyProviderResults.put(providerClassName, false);
+067        throw new IOException("Key provider " + providerClassName + " failed test: " +
+068          e.getMessage(), e);
+069      }
+070    } else if (result.booleanValue() == false) {
+071      throw new IOException("Key provider " + providerClassName + " previously failed test");
+072    }
+073  }
+074
+075  /**
+076   * Check that the configured cipher provider can be loaded and initialized, or
+077   * throw an exception.
+078   *
+079   * @param conf
+080   * @throws IOException
+081   */
+082  public static void testCipherProvider(final Configuration conf) throws IOException {
+083    String providerClassName = conf.get(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY,
+084      DefaultCipherProvider.class.getName());
+085    Boolean result = cipherProviderResults.get(providerClassName);
+086    if (result == null) {
+087      try {
+088        Encryption.getCipherProvider(conf);
+089        cipherProviderResults.put(providerClassName, true);
+090      } catch (Exception e) { // most likely a RuntimeException
+091        cipherProviderResults.put(providerClassName, false);
+092        throw new IOException("Cipher provider " + providerClassName + " failed test: " +
+093          e.getMessage(), e);
+094      }
+095    } else if (result.booleanValue() == false) {
+096      throw new IOException("Cipher provider " + providerClassName + " previously failed test");
+097    }
+098  }
+099
+100  /**
+101   * Check that the specified cipher can be loaded and initialized, or throw
+102   * an exception. Verifies key and cipher provider configuration as a
+103   * prerequisite for cipher verification.
+104   *
+105   * @param conf
+106   * @param cipher
+107   * @param key
+108   * @throws IOException
+109   */
+110  public static void testEncryption(final Configuration conf, final String cipher,
+111      byte[] key) throws IOException {
+112    if (cipher == null) {
+113      return;
+114    }
+115    testKeyProvider(conf);
+116    testCipherProvider(conf);
+117    Boolean result = cipherResults.get(cipher);
+118    if (result == null) {
+119      try {
+120        Encryption.Context context = Encryption.newContext(conf);
+121        context.setCipher(Encryption.getCipher(conf, cipher));
+122        if (key == null) {
+123          // Make a random key since one was not provided
+124          context.setKey(context.getCipher().getRandomKey());
+125        } else {
+126          // This will be a wrapped key from schema
+127          context.setKey(EncryptionUtil.unwrapKey(conf,
+128            conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase"),
+129            key));
+130        }
+131        byte[] iv = null;
+132        if (context.getCipher().getIvLength() > 0) {
+133          iv = new byte[context.getCipher().getIvLength()];
+134          Bytes.random(iv);
+135        }
+136        byte[] plaintext = new byte[1024];
+137        Bytes.random(plaintext);
+138        ByteArrayOutputStream out = new ByteArrayOutputStream();
+139        Encryption.encrypt(out, new ByteArrayInputStream(plaintext), context, iv);
+140        byte[] ciphertext = out.toByteArray();
+141        out.reset();
+142        Encryption.decrypt(out, new ByteArrayInputStream(ciphertext), plaintext.length,
+143          context, iv);
+144        byte[] test = out.toByteArray();
+145        if (!Bytes.equals(plaintext, test)) {
+146          throw new IOException("Did not pass encrypt/decrypt test");
+147        }
+148        cipherResults.put(cipher, true);
+149      } catch (Exception e) {
+150        cipherResults.put(cipher, false);
+151        throw new IOException("Cipher " + cipher + " failed test: " + e.getMessage(), e);
+152      }
+153    } else if (result.booleanValue() == false) {
+154      throw new IOException("Cipher " + cipher + " previously failed test");
+155    }
+156  }
+157}
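Beyond the diamond cleanup on its three result maps, EncryptionTest is built around one memoization pattern: each probe runs at most once per provider or cipher name, its Boolean outcome is cached in a ConcurrentHashMap, and a name that failed once fails fast on every later call. A stripped-down sketch of that pattern (checkOnce and Check are hypothetical names, not HBase API):

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class MemoizedCheck {
  interface Check { void run() throws Exception; }   // hypothetical probe

  static final Map<String, Boolean> results = new ConcurrentHashMap<>();

  static void checkOnce(String key, Check probe) throws IOException {
    Boolean result = results.get(key);
    if (result == null) {
      try {
        probe.run();
        results.put(key, true);              // remember the pass
      } catch (Exception e) {                // most likely a RuntimeException
        results.put(key, false);             // remember the failure
        throw new IOException(key + " failed test: " + e.getMessage(), e);
      }
    } else if (!result.booleanValue()) {
      throw new IOException(key + " previously failed test");
    }
  }

  public static void main(String[] args) throws IOException {
    checkOnce("goodProvider", () -> { });    // runs the probe, caches true
    checkOnce("goodProvider", () -> { });    // answered from the cache
    System.out.println(results);             // {goodProvider=true}
  }
}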
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/22cff34f/apidocs/src-html/org/apache/hadoop/hbase/util/OrderedBytes.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/OrderedBytes.html b/apidocs/src-html/org/apache/hadoop/hbase/util/OrderedBytes.html
index 804d5fb..cd07094 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/OrderedBytes.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/OrderedBytes.html
@@ -349,7 +349,7 @@
341
342  /**
343   * Perform unsigned comparison between two long values. Conforms to the same interface as
-344   * {@link Comparator#compare(Object, Object)}.
+344   * {@link org.apache.hadoop.hbase.CellComparator#COMPARATOR#compare(Object, Object)}.
345   */
346  private static int unsignedCmp(long x1, long x2) {
347    int cmp;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/22cff34f/apidocs/src-html/org/apache/hadoop/hbase/util/Pair.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Pair.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Pair.html
index 1fc3f98..dae63a9 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Pair.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Pair.html
@@ -72,7 +72,7 @@
064   * @return a new pair containing the passed arguments
065   */
066  public static <T1,T2> Pair<T1,T2> newPair(T1 a, T2 b) {
-067    return new Pair<T1,T2>(a, b);
+067    return new Pair<>(a, b);
068  }
069
070  /**
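The OrderedBytes hunk above only retargets the javadoc {@link} on unsignedCmp; the method body itself is cut off at the hunk boundary. For orientation, unsigned comparison of two signed longs can be done by offsetting both values by Long.MIN_VALUE to flip the sign bit — a sketch of the general technique, not necessarily the exact body HBase uses:

public class UnsignedCmpDemo {
  // Adding Long.MIN_VALUE flips the sign bit, mapping unsigned order onto
  // signed order. Since Java 8, Long.compareUnsigned(x1, x2) does the same.
  static int unsignedCmp(long x1, long x2) {
    return Long.compare(x1 + Long.MIN_VALUE, x2 + Long.MIN_VALUE);
  }

  public static void main(String[] args) {
    System.out.println(unsignedCmp(1L, -1L));           // -1: 0xFF...FF is the largest unsigned value
    System.out.println(unsignedCmp(-1L, 1L));           // 1
    System.out.println(unsignedCmp(5L, 5L));            // 0
    System.out.println(Long.compareUnsigned(1L, -1L));  // same ordering
  }
}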