hbase-commits mailing list archives

From apurt...@apache.org
Subject svn commit: r1545536 [2/4] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/security/ hbase-client/src/test/java/org/apache/hadoop/hbase/security/ hbase-common/src/main/java/org/a...
Date Tue, 26 Nov 2013 04:22:35 GMT
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java Tue Nov 26 04:22:33 2013
@@ -18,15 +18,22 @@ package org.apache.hadoop.hbase.io.encod
 
 import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.NONE;
 
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.security.SecureRandom;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.TagCompressionContext;
 import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.crypto.Cipher;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
+import org.apache.hadoop.hbase.io.crypto.Encryptor;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.Compressor;
 
@@ -42,29 +49,35 @@ import com.google.common.base.Preconditi
 @InterfaceAudience.Private
 public class HFileBlockDefaultEncodingContext implements
     HFileBlockEncodingContext {
-
   private byte[] onDiskBytesWithHeader;
   private byte[] uncompressedBytesWithHeader;
   private BlockType blockType;
   private final DataBlockEncoding encodingAlgo;
 
+  private ByteArrayOutputStream encodedStream = new ByteArrayOutputStream();
+  private DataOutputStream dataOut = new DataOutputStream(encodedStream);
+
+  private byte[] dummyHeader;
+
+  // Compression state
+
   /** Compressor, which is also reused between consecutive blocks. */
   private Compressor compressor;
-
   /** Compression output stream */
   private CompressionOutputStream compressionStream;
-
   /** Underlying stream to write compressed bytes to */
   private ByteArrayOutputStream compressedByteStream;
 
-  private ByteArrayOutputStream encodedStream = new ByteArrayOutputStream();
-  private DataOutputStream dataOut = new DataOutputStream(encodedStream);
-
-  private byte[] dummyHeader;
-
   private HFileContext fileContext;
   private TagCompressionContext tagCompressionContext;
 
+  // Encryption state
+
+  /** Underlying stream to write encrypted bytes to */
+  private ByteArrayOutputStream cryptoByteStream;
+  /** Initialization vector */
+  private byte[] iv;
+
   /**
    * @param encoding encoding used
    * @param headerBytes dummy header bytes
@@ -73,9 +86,9 @@ public class HFileBlockDefaultEncodingCo
   public HFileBlockDefaultEncodingContext(DataBlockEncoding encoding, byte[] headerBytes,
       HFileContext fileContext) {
     this.encodingAlgo = encoding;
+    this.fileContext = fileContext;
     Compression.Algorithm compressionAlgorithm =
         fileContext.getCompression() == null ? NONE : fileContext.getCompression();
-    this.fileContext = fileContext;
     if (compressionAlgorithm != NONE) {
       compressor = compressionAlgorithm.getCompressor();
       compressedByteStream = new ByteArrayOutputStream();
@@ -89,6 +102,14 @@ public class HFileBlockDefaultEncodingCo
                 + compressionAlgorithm, e);
       }
     }
+
+    Encryption.Context cryptoContext = fileContext.getEncryptionContext();
+    if (cryptoContext != Encryption.Context.NONE) {
+      cryptoByteStream = new ByteArrayOutputStream();
+      iv = new byte[cryptoContext.getCipher().getIvLength()];
+      new SecureRandom().nextBytes(iv);
+    }
+
     dummyHeader = Preconditions.checkNotNull(headerBytes,
       "Please pass HConstants.HFILEBLOCK_DUMMY_HEADER instead of null for param headerBytes");
   }
@@ -138,20 +159,91 @@ public class HFileBlockDefaultEncodingCo
   protected void compressAfterEncoding(byte[] uncompressedBytesWithHeader,
       BlockType blockType, byte[] headerBytes) throws IOException {
     this.uncompressedBytesWithHeader = uncompressedBytesWithHeader;
-    if (this.fileContext.getCompression() != NONE) {
-      compressedByteStream.reset();
-      compressedByteStream.write(headerBytes);
-      compressionStream.resetState();
-      compressionStream.write(uncompressedBytesWithHeader,
+
+    Encryption.Context cryptoContext = fileContext.getEncryptionContext();
+    if (cryptoContext != Encryption.Context.NONE) {
+
+      // Encrypted block format:
+      // +--------------------------+
+      // | vint plaintext length    |
+      // +--------------------------+
+      // | vint iv length           |
+      // +--------------------------+
+      // | iv data ...              |
+      // +--------------------------+
+      // | encrypted block data ... |
+      // +--------------------------+
+
+      cryptoByteStream.reset();
+      // Write the block header (plaintext)
+      cryptoByteStream.write(headerBytes);
+
+      InputStream in;
+      int plaintextLength;
+      // Run any compression before encryption
+      if (fileContext.getCompression() != Compression.Algorithm.NONE) {
+        compressedByteStream.reset();
+        compressionStream.resetState();
+        compressionStream.write(uncompressedBytesWithHeader,
+            headerBytes.length, uncompressedBytesWithHeader.length - headerBytes.length);
+        compressionStream.flush();
+        compressionStream.finish();
+        byte[] plaintext = compressedByteStream.toByteArray();
+        plaintextLength = plaintext.length;
+        in = new ByteArrayInputStream(plaintext);
+      } else {
+        plaintextLength = uncompressedBytesWithHeader.length - headerBytes.length;
+        in = new ByteArrayInputStream(uncompressedBytesWithHeader,
+          headerBytes.length, plaintextLength);
+      }
+
+      if (plaintextLength > 0) {
+
+        Cipher cipher = cryptoContext.getCipher();
+        Encryptor encryptor = cipher.getEncryptor();
+        encryptor.setKey(cryptoContext.getKey());
+
+        // Write the encryption header and IV (plaintext)
+        int ivLength = iv.length;
+        StreamUtils.writeRawVInt32(cryptoByteStream, plaintextLength);
+        StreamUtils.writeRawVInt32(cryptoByteStream, ivLength);
+        if (ivLength > 0) {
+          Encryption.incrementIv(iv);
+          encryptor.setIv(iv);
+          cryptoByteStream.write(iv);
+        }
+
+        // Write the block contents (ciphertext)
+        Encryption.encrypt(cryptoByteStream, in, encryptor);
+
+        onDiskBytesWithHeader = cryptoByteStream.toByteArray();
+
+      } else {
+
+        StreamUtils.writeRawVInt32(cryptoByteStream, 0);
+        StreamUtils.writeRawVInt32(cryptoByteStream, 0);
+        onDiskBytesWithHeader = cryptoByteStream.toByteArray();
+
+      }
+
+    } else {
+
+      if (this.fileContext.getCompression() != NONE) {
+        compressedByteStream.reset();
+        compressedByteStream.write(headerBytes);
+        compressionStream.resetState();
+        compressionStream.write(uncompressedBytesWithHeader,
           headerBytes.length, uncompressedBytesWithHeader.length
               - headerBytes.length);
+        compressionStream.flush();
+        compressionStream.finish();
+        onDiskBytesWithHeader = compressedByteStream.toByteArray();
+      } else {
+        onDiskBytesWithHeader = uncompressedBytesWithHeader;
+      }
 
-      compressionStream.flush();
-      compressionStream.finish();
-      onDiskBytesWithHeader = compressedByteStream.toByteArray();
-    } else {
-      onDiskBytesWithHeader = uncompressedBytesWithHeader;
     }
+
     this.blockType = blockType;
   }
 

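For reference, the on-disk layout written above (vint plaintext length, vint IV length, the IV bytes, then the ciphertext) would be consumed on the read path roughly as in the sketch below. This is only an illustration, not the decode path that ships with this commit; StreamUtils.readRawVarint32 is assumed to be the read-side counterpart of writeRawVInt32 (it is not shown in this diff), and commons-io IOUtils.readFully is used as in TestAES further down.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.util.StreamUtils;

public class EncryptedBlockReadSketch {
  /** Inverse of compressAfterEncoding above; 'body' is the block payload after the header. */
  public static byte[] readBlockPayload(byte[] body, Encryption.Context cryptoContext)
      throws IOException {
    ByteArrayInputStream in = new ByteArrayInputStream(body);
    int plaintextLength = StreamUtils.readRawVarint32(in); // assumed counterpart of writeRawVInt32
    int ivLength = StreamUtils.readRawVarint32(in);        // assumed counterpart of writeRawVInt32
    if (plaintextLength == 0) {
      return new byte[0];                                  // nothing was encrypted for this block
    }
    Decryptor decryptor = cryptoContext.getCipher().getDecryptor();
    decryptor.setKey(cryptoContext.getKey());
    if (ivLength > 0) {
      byte[] iv = new byte[ivLength];
      IOUtils.readFully(in, iv);
      decryptor.setIv(iv);
    }
    decryptor.reset();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encryption.decrypt(out, in, plaintextLength, decryptor);
    // If a compression codec was configured, 'out' now holds compressed bytes that
    // still need to go through the codec's decompressor.
    return out.toByteArray();
  }
}
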
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java Tue Nov 26 04:22:33 2013
@@ -19,7 +19,8 @@ package org.apache.hadoop.hbase.io.hfile
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -43,7 +44,7 @@ public class HFileContext implements Hea
   /**Whether tags are to be included in the Read/Write**/
   private boolean includesTags;
   /**Compression algorithm used**/
-  private Algorithm compressAlgo = Algorithm.NONE;
+  private Compression.Algorithm compressAlgo = Compression.Algorithm.NONE;
   /** Whether tags to be compressed or not**/
   private boolean compressTags;
   /** the checksum type **/
@@ -53,10 +54,13 @@ public class HFileContext implements Hea
   /** Number of uncompressed bytes we allow per block. */
   private int blocksize = HConstants.DEFAULT_BLOCKSIZE;
   private DataBlockEncoding encoding = DataBlockEncoding.NONE;
+  /** Encryption algorithm and key used */
+  private Encryption.Context cryptoContext = Encryption.Context.NONE;
 
   //Empty constructor.  Go with setters
   public HFileContext() {
   }
+
   /**
    * Copy constructor
    * @param context
@@ -71,11 +75,13 @@ public class HFileContext implements Hea
     this.bytesPerChecksum = context.bytesPerChecksum;
     this.blocksize = context.blocksize;
     this.encoding = context.encoding;
+    this.cryptoContext = context.cryptoContext;
   }
 
   public HFileContext(boolean useHBaseChecksum, boolean includesMvcc, boolean includesTags,
-      Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
-      int bytesPerChecksum, int blockSize, DataBlockEncoding encoding) {
+      Compression.Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
+      int bytesPerChecksum, int blockSize, DataBlockEncoding encoding,
+      Encryption.Context cryptoContext) {
     this.usesHBaseChecksum = useHBaseChecksum;
     this.includesMvcc =  includesMvcc;
     this.includesTags = includesTags;
@@ -87,12 +93,17 @@ public class HFileContext implements Hea
     if (encoding != null) {
       this.encoding = encoding;
     }
+    this.cryptoContext = cryptoContext;
   }
 
-  public Algorithm getCompression() {
+  public Compression.Algorithm getCompression() {
     return compressAlgo;
   }
 
+  public void setCompression(Compression.Algorithm compressAlgo) {
+    this.compressAlgo = compressAlgo;
+  }
+
   public boolean isUseHBaseChecksum() {
     return usesHBaseChecksum;
   }
@@ -137,6 +148,18 @@ public class HFileContext implements Hea
     return encoding;
   }
 
+  public void setDataBlockEncoding(DataBlockEncoding encoding) {
+    this.encoding = encoding;
+  }
+
+  public Encryption.Context getEncryptionContext() {
+    return cryptoContext;
+  }
+
+  public void setEncryptionContext(Encryption.Context cryptoContext) {
+    this.cryptoContext = cryptoContext;
+  }
+
   /**
    * HeapSize implementation
    * NOTE : The heapsize should be altered as and when new state variable are added
@@ -145,8 +168,8 @@ public class HFileContext implements Hea
   @Override
   public long heapSize() {
     long size = ClassSize.align(ClassSize.OBJECT +
-        // Algorithm reference, encoding, checksumtype
-        3 * ClassSize.REFERENCE +
+        // Algorithm reference, encoding, checksumtype, Encryption.Context reference
+        4 * ClassSize.REFERENCE +
         2 * Bytes.SIZEOF_INT +
         // usesHBaseChecksum, includesMvcc, includesTags and compressTags
         4 * Bytes.SIZEOF_BOOLEAN);
@@ -161,4 +184,23 @@ public class HFileContext implements Hea
       throw new AssertionError(); // Won't happen
     }
   }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("HFileContext [");
+    sb.append(" usesHBaseChecksum="); sb.append(usesHBaseChecksum);
+    sb.append(" checksumType=");      sb.append(checksumType);
+    sb.append(" bytesPerChecksum=");  sb.append(bytesPerChecksum);
+    sb.append(" blocksize=");         sb.append(blocksize);
+    sb.append(" encoding=");          sb.append(encoding);
+    sb.append(" includesMvcc=");      sb.append(includesMvcc);
+    sb.append(" includesTags=");      sb.append(includesTags);
+    sb.append(" compressAlgo=");      sb.append(compressAlgo);
+    sb.append(" compressTags=");      sb.append(compressTags);
+    sb.append(" cryptoContext=[ ");   sb.append(cryptoContext);      sb.append(" ]");
+    sb.append(" ]");
+    return sb.toString();
+  }
+
 }

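A quick illustration of the new mutators and toString on HFileContext (a sketch, not part of the commit): only the default Encryption.Context.NONE is shown here, since constructing a context that carries a real cipher and key is outside this file.

import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFileContext;

public class HFileContextExample {
  public static void main(String[] args) {
    HFileContext base = new HFileContext();       // defaults: NONE compression, NONE encryption
    HFileContext copy = new HFileContext(base);   // copy constructor now carries cryptoContext over
    copy.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
    copy.setEncryptionContext(Encryption.Context.NONE);
    System.out.println(copy);                     // toString() now reports cryptoContext as well
  }
}
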
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java Tue Nov 26 04:22:33 2013
@@ -20,8 +20,10 @@ package org.apache.hadoop.hbase.io.hfile
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.util.ChecksumType;
+
 /**
  * A builder that helps in building up the HFileContext 
  */
@@ -48,6 +50,8 @@ public class HFileContextBuilder {
   /** Number of uncompressed bytes we allow per block. */
   private int blocksize = HConstants.DEFAULT_BLOCKSIZE;
   private DataBlockEncoding encoding = DataBlockEncoding.NONE;
+  /** Crypto context */
+  private Encryption.Context cryptoContext = Encryption.Context.NONE;
 
   public HFileContextBuilder withHBaseCheckSum(boolean useHBaseCheckSum) {
     this.usesHBaseChecksum = useHBaseCheckSum;
@@ -94,8 +98,13 @@ public class HFileContextBuilder {
     return this;
   }
 
+  public HFileContextBuilder withEncryptionContext(Encryption.Context cryptoContext) {
+    this.cryptoContext = cryptoContext;
+    return this;
+  }
+
   public HFileContext build() {
     return new HFileContext(usesHBaseChecksum, includesMvcc, includesTags, compression,
-      compressTags, checksumType, bytesPerChecksum, blocksize, encoding);
+      compressTags, checksumType, bytesPerChecksum, blocksize, encoding, cryptoContext);
   }
 }

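The builder route, correspondingly. A minimal sketch assuming an Encryption.Context has already been set up with a cipher and key (that setup is not part of this file); only builder methods visible in this diff are used.

import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class HFileContextBuilderExample {
  /** Builds an HFileContext whose blocks will be written with encryption enabled. */
  public static HFileContext encryptedContext(Encryption.Context cryptoContext) {
    return new HFileContextBuilder()
        .withHBaseCheckSum(true)
        .withEncryptionContext(cryptoContext)
        .build();
  }
}
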
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java Tue Nov 26 04:22:33 2013
@@ -1984,4 +1984,32 @@ public class Bytes {
     RNG.nextBytes(buf);
     System.arraycopy(buf, 0, b, offset, length);
   }
+
+  /**
+   * Convert a byte array into a hex string
+   * @param b
+   */
+  public static String toHex(byte[] b) {
+    checkArgument(b.length > 0, "length must be greater than 0");
+    return String.format("%x", new BigInteger(1, b));
+  }
+
+  /**
+   * Create a byte array from a string of hex digits. The length of the
+   * string must be a multiple of 2
+   * @param hex
+   */
+  public static byte[] fromHex(String hex) {
+    checkArgument(hex.length() > 0, "length must be greater than 0");
+    checkArgument(hex.length() % 2 == 0, "length must be a multiple of 2");
+    // Make sure letters are upper case
+    hex = hex.toUpperCase();
+    byte[] b = new byte[hex.length() / 2];
+    for (int i = 0; i < b.length; i++) {
+      b[i] = (byte)((toBinaryFromHex((byte)hex.charAt(2 * i)) << 4) +
+        toBinaryFromHex((byte)hex.charAt((2 * i + 1))));
+    }
+    return b;
+  }
+
 }

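For illustration, a round trip through the new hex helpers using the NIST key material that TestAES below relies on (a sketch, not part of the commit):

import org.apache.hadoop.hbase.util.Bytes;

public class HexRoundTrip {
  public static void main(String[] args) {
    // Key bytes from the NIST SP 800-38A vectors used in TestAES.
    byte[] key = Bytes.fromHex("2b7e151628aed2a6abf7158809cf4f3c");
    System.out.println(Bytes.toHex(key));   // prints 2b7e151628aed2a6abf7158809cf4f3c
    // Note: toHex() formats via BigInteger, so an array whose first bytes are zero
    // would print without those leading zeros.
  }
}
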
Added: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java (added)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import java.security.Key;
+import javax.crypto.spec.SecretKeySpec;
+
+/**
+ * Return a fixed secret key for AES for testing.
+ */
+public class KeyProviderForTesting implements KeyProvider {
+
+  @Override
+  public void init(String parameters) { }
+
+  @Override
+  public Key getKey(String name) {
+    return new SecretKeySpec(Encryption.hash128(name), "AES");
+  }
+
+  @Override
+  public Key[] getKeys(String[] aliases) {
+    Key[] result = new Key[aliases.length];
+    for (int i = 0; i < aliases.length; i++) {
+      result[i] = new SecretKeySpec(Encryption.hash128(aliases[i]), "AES");
+    }
+    return result;
+  }
+
+}

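How the test provider gets wired up and used, mirroring what TestKeyProvider and the integration test further down do; the configuration key name and Encryption.getKeyProvider call are taken from those tests (a sketch, not part of the commit):

import java.security.Key;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProvider;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;

public class KeyProviderUsage {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    KeyProvider provider = Encryption.getKeyProvider(conf);
    // The same alias always hashes to the same 128-bit AES key, which is what
    // makes this provider convenient for tests.
    Key key = provider.getKey("hbase");
    System.out.println(key.getAlgorithm() + " key, " + key.getEncoded().length + " bytes");
  }
}
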
Added: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java (added)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.security.Key;
+import java.util.Arrays;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.io.crypto.aes.AES;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestCipherProvider {
+
+  public static class MyCipherProvider implements CipherProvider {
+    private Configuration conf;
+    @Override
+    public Configuration getConf() {
+      return conf;
+    }
+
+    @Override
+    public void setConf(Configuration conf) {
+      this.conf = conf;
+    }
+
+    @Override
+    public String getName() {
+      return MyCipherProvider.class.getName();
+    }
+
+    @Override
+    public String[] getSupportedCiphers() {
+      return new String[] { "TEST" };
+    }
+
+    @Override
+    public Cipher getCipher(String name) {
+      if (name.equals("TEST")) {
+        return new Cipher(this) {
+          @Override
+          public String getName() {
+            return "TEST";
+          }
+
+          @Override
+          public int getKeyLength() {
+            return 0;
+          }
+
+          @Override
+          public int getIvLength() {
+            return 0;
+          }
+
+          @Override
+          public Key getRandomKey() {
+            return null;
+          }
+
+          @Override
+          public Encryptor getEncryptor() {
+            return null;
+          }
+
+          @Override
+          public Decryptor getDecryptor() {
+            return null;
+          }
+
+          @Override
+          public OutputStream createEncryptionStream(OutputStream out, Context context, byte[] iv)
+              throws IOException {
+            return null;
+          }
+
+          @Override
+          public OutputStream createEncryptionStream(OutputStream out, Encryptor encryptor)
+              throws IOException {
+            return null;
+          }
+
+          @Override
+          public InputStream createDecryptionStream(InputStream in, Context context, byte[] iv)
+              throws IOException {
+            return null;
+          }
+
+          @Override
+          public InputStream createDecryptionStream(InputStream in, Decryptor decryptor)
+              throws IOException {
+            return null;
+          }
+        };
+      }
+      return null;
+    }
+  }
+
+  @Test
+  public void testCustomProvider() {
+    Configuration conf = HBaseConfiguration.create();
+    conf.set(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY, MyCipherProvider.class.getName());
+    CipherProvider provider = Encryption.getCipherProvider(conf);
+    assertTrue(provider instanceof MyCipherProvider);
+    assertTrue(Arrays.asList(provider.getSupportedCiphers()).contains("TEST"));
+    Cipher a = Encryption.getCipher(conf, "TEST");
+    assertNotNull(a);
+    assertTrue(a.getProvider() instanceof MyCipherProvider);
+    assertEquals(a.getName(), "TEST");
+    assertEquals(a.getKeyLength(), 0);
+  }
+
+  @Test
+  public void testDefaultProvider() {
+    Configuration conf = HBaseConfiguration.create();
+    CipherProvider provider = Encryption.getCipherProvider(conf);
+    assertTrue(provider instanceof DefaultCipherProvider);
+    assertTrue(Arrays.asList(provider.getSupportedCiphers()).contains("AES"));
+    Cipher a = Encryption.getCipher(conf, "AES");
+    assertNotNull(a);
+    assertTrue(a.getProvider() instanceof DefaultCipherProvider);
+    assertEquals(a.getName(), "AES");
+    assertEquals(a.getKeyLength(), AES.KEY_LENGTH);
+  }
+
+}

Added: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java (added)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import static org.junit.Assert.*;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.security.Key;
+
+import javax.crypto.spec.SecretKeySpec;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestEncryption {
+
+  private static final Log LOG = LogFactory.getLog(TestEncryption.class);
+
+  @Test
+  public void testSmallBlocks() throws Exception {
+    byte[] key = new byte[16];
+    Bytes.random(key);
+    byte[] iv = new byte[16];
+    Bytes.random(iv);
+    for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 } ) {
+      checkTransformSymmetry(key, iv, getRandomBlock(size));
+    }
+  }
+
+  @Test
+  public void testLargeBlocks() throws Exception {
+    byte[] key = new byte[16];
+    Bytes.random(key);
+    byte[] iv = new byte[16];
+    Bytes.random(iv);
+    for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 } ) {
+      checkTransformSymmetry(key, iv, getRandomBlock(size));
+    }
+  }
+
+  @Test
+  public void testOddSizedBlocks() throws Exception {
+    byte[] key = new byte[16];
+    Bytes.random(key);
+    byte[] iv = new byte[16];
+    Bytes.random(iv);
+    for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 } ) {
+      checkTransformSymmetry(key, iv, getRandomBlock(size));
+    }
+  }
+
+  @Test
+  public void testTypicalHFileBlocks() throws Exception {
+    byte[] key = new byte[16];
+    Bytes.random(key);
+    byte[] iv = new byte[16];
+    Bytes.random(iv);
+    for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 } ) {
+      checkTransformSymmetry(key, iv, getRandomBlock(size));
+    }
+  }
+
+  private void checkTransformSymmetry(byte[] keyBytes, byte[] iv, byte[] plaintext)
+      throws Exception {
+    LOG.info("checkTransformSymmetry: AES, plaintext length = " + plaintext.length);
+
+    Configuration conf = HBaseConfiguration.create();
+    Cipher aes = Encryption.getCipher(conf, "AES");
+    Key key = new SecretKeySpec(keyBytes, "AES");
+
+    Encryptor e = aes.getEncryptor();
+    e.setKey(key);
+    e.setIv(iv);
+    e.reset();
+    ByteArrayOutputStream encOut = new ByteArrayOutputStream();
+    Encryption.encrypt(encOut, plaintext, 0, plaintext.length, e);
+    byte[] encrypted = encOut.toByteArray();
+
+    Decryptor d = aes.getDecryptor();
+    d.setKey(key);
+    d.setIv(iv);
+    d.reset();
+    ByteArrayInputStream encIn = new ByteArrayInputStream(encrypted);
+    ByteArrayOutputStream decOut = new ByteArrayOutputStream();
+    Encryption.decrypt(decOut, encIn, plaintext.length, d);
+
+    byte[] result = decOut.toByteArray();
+    assertEquals("Decrypted result has different length than plaintext",
+      result.length, plaintext.length);
+    assertTrue("Transformation was not symmetric",
+      Bytes.equals(result, plaintext));
+  }
+
+  private byte[] getRandomBlock(int size) {
+    byte[] b = new byte[size];
+    Bytes.random(b);
+    return b;
+  }
+
+}

Added: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java (added)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.security.Key;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.io.crypto.aes.AES;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestKeyProvider {
+
+  @Test
+  public void testTestProvider() {
+    Configuration conf = HBaseConfiguration.create();
+    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+    KeyProvider provider = Encryption.getKeyProvider(conf);
+    assertNotNull("Null returned for provider", provider);
+    assertTrue("Provider is not the expected type", provider instanceof KeyProviderForTesting);
+
+    Key key = provider.getKey("foo");
+    assertNotNull("Test provider did not return a key as expected", key);
+    assertEquals("Test provider did not create a key for AES", key.getAlgorithm(), "AES");
+    assertEquals("Test provider did not create a key of adequate length",
+      key.getEncoded().length, AES.KEY_LENGTH);
+  }
+
+}

Added: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java (added)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.net.URLEncoder;
+import java.security.Key;
+import java.security.KeyStore;
+import java.security.MessageDigest;
+import java.util.Properties;
+
+import javax.crypto.spec.SecretKeySpec;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.SmallTests;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestKeyStoreKeyProvider {
+
+  static final Log LOG = LogFactory.getLog(TestKeyStoreKeyProvider.class);
+  static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
+  static final String ALIAS = "test";
+  static final String PASSWORD = "password";
+
+  static byte[] KEY;
+  static File storeFile;
+  static File passwordFile;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes());
+    // Create a JCEKS store containing a test secret key
+    KeyStore store = KeyStore.getInstance("JCEKS");
+    store.load(null, PASSWORD.toCharArray());
+    store.setEntry(ALIAS,
+      new KeyStore.SecretKeyEntry(new SecretKeySpec(KEY, "AES")),
+      new KeyStore.PasswordProtection(PASSWORD.toCharArray()));
+    // Create the test directory
+    String dataDir = TEST_UTIL.getDataTestDir().toString();
+    new File(dataDir).mkdirs();
+    // Write the keystore file
+    storeFile = new File(dataDir, "keystore.jks");
+    FileOutputStream os = new FileOutputStream(storeFile);
+    try {
+      store.store(os, PASSWORD.toCharArray());
+    } finally {
+      os.close();
+    }
+    // Write the password file
+    Properties p = new Properties();
+    p.setProperty(ALIAS, PASSWORD);
+    passwordFile = new File(dataDir, "keystore.pw");
+    os = new FileOutputStream(passwordFile);
+    try {
+      p.store(os, "");
+    } finally {
+      os.close();
+    }
+  }
+
+  @Test(timeout=30000)
+  public void testKeyStoreKeyProviderWithPassword() throws Exception {
+    KeyProvider provider = new KeyStoreKeyProvider();
+    provider.init("jceks://" + storeFile.getAbsolutePath() + "?password=" + PASSWORD);
+    Key key = provider.getKey(ALIAS);
+    assertNotNull(key);
+    byte[] keyBytes = key.getEncoded();
+    assertEquals(keyBytes.length, KEY.length);
+    for (int i = 0; i < KEY.length; i++) {
+      assertEquals(keyBytes[i], KEY[i]);
+    }
+  }
+
+  @Test(timeout=30000)
+  public void testKeyStoreKeyProviderWithPasswordFile() throws Exception {
+    KeyProvider provider = new KeyStoreKeyProvider();
+    provider.init("jceks://" + storeFile.getAbsolutePath() + "?passwordFile=" +
+      URLEncoder.encode(passwordFile.getAbsolutePath(), "UTF-8"));
+    Key key = provider.getKey(ALIAS);
+    assertNotNull(key);
+    byte[] keyBytes = key.getEncoded();
+    assertEquals(keyBytes.length, KEY.length);
+    for (int i = 0; i < KEY.length; i++) {
+      assertEquals(keyBytes[i], KEY[i]);
+    }
+  }
+}

Added: hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java (added)
+++ hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hbase.io.crypto.aes;
+
+import static org.junit.Assert.*;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.security.AccessController;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivilegedAction;
+import java.security.Provider;
+import java.security.SecureRandom;
+import java.security.SecureRandomSpi;
+import java.security.Security;
+
+import javax.crypto.spec.SecretKeySpec;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.io.crypto.Cipher;
+import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
+import org.apache.hadoop.hbase.io.crypto.Encryptor;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestAES {
+
+  // Validation for AES in CTR mode with a 128 bit key
+  // From NIST Special Publication 800-38A
+  @Test
+  public void testAESAlgorithm() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    Cipher aes = Encryption.getCipher(conf, "AES");
+    assertEquals(aes.getKeyLength(), AES.KEY_LENGTH);
+    assertEquals(aes.getIvLength(), AES.IV_LENGTH);
+    Encryptor e = aes.getEncryptor();
+    e.setKey(new SecretKeySpec(Bytes.fromHex("2b7e151628aed2a6abf7158809cf4f3c"), "AES"));
+    e.setIv(Bytes.fromHex("f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff"));
+
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    OutputStream cout = e.createEncryptionStream(out);
+    cout.write(Bytes.fromHex("6bc1bee22e409f96e93d7e117393172a"));
+    cout.write(Bytes.fromHex("ae2d8a571e03ac9c9eb76fac45af8e51"));
+    cout.write(Bytes.fromHex("30c81c46a35ce411e5fbc1191a0a52ef"));
+    cout.write(Bytes.fromHex("f69f2445df4f9b17ad2b417be66c3710"));
+    cout.close();
+
+    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
+    byte[] b = new byte[16];
+    IOUtils.readFully(in, b);
+    assertTrue("Failed #1", Bytes.equals(b, Bytes.fromHex("874d6191b620e3261bef6864990db6ce")));
+    IOUtils.readFully(in, b);
+    assertTrue("Failed #2", Bytes.equals(b, Bytes.fromHex("9806f66b7970fdff8617187bb9fffdff")));
+    IOUtils.readFully(in, b);
+    assertTrue("Failed #3", Bytes.equals(b, Bytes.fromHex("5ae4df3edbd5d35e5b4f09020db03eab")));
+    IOUtils.readFully(in, b);
+    assertTrue("Failed #4", Bytes.equals(b, Bytes.fromHex("1e031dda2fbe03d1792170a0f3009cee")));
+  }
+
+  @Test
+  public void testAlternateRNG() throws Exception {
+    Security.addProvider(new TestProvider());
+
+    Configuration conf = new Configuration();
+    conf.set(AES.RNG_ALGORITHM_KEY, "TestRNG");
+    conf.set(AES.RNG_PROVIDER_KEY, "TEST");
+    DefaultCipherProvider.getInstance().setConf(conf);
+
+    AES aes = new AES(DefaultCipherProvider.getInstance());
+    assertEquals("AES did not find alternate RNG", aes.getRNG().getAlgorithm(),
+      "TestRNG");
+  }
+
+  static class TestProvider extends Provider {
+    private static final long serialVersionUID = 1L;
+    public TestProvider() {
+      super("TEST", 1.0, "Test provider");
+      AccessController.doPrivileged(new PrivilegedAction<Object>() {
+        public Object run() {
+          put("SecureRandom.TestRNG", TestAES.class.getName() + "$TestRNG");
+          return null;
+        }
+      });
+    }
+  }
+
+  // Must be public for instantiation by the SecureRandom SPI
+  public static class TestRNG extends SecureRandomSpi {
+    private static final long serialVersionUID = 1L;
+    private SecureRandom rng;
+
+    public TestRNG() {
+      try {
+        rng = java.security.SecureRandom.getInstance("SHA1PRNG");
+      } catch (NoSuchAlgorithmException e) {
+        fail("Unable to create SecureRandom instance");
+      }
+    }
+
+    @Override
+    protected void engineSetSeed(byte[] seed) {
+      rng.setSeed(seed);
+    }
+
+    @Override
+    protected void engineNextBytes(byte[] bytes) {
+      rng.nextBytes(bytes);
+    }
+
+    @Override
+    protected byte[] engineGenerateSeed(int numBytes) {
+      return rng.generateSeed(numBytes);
+    }
+  }
+
+}

Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+import java.security.Key;
+import java.security.SecureRandom;
+
+import javax.crypto.spec.SecretKeySpec;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Waiter.Predicate;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.aes.AES;
+import org.apache.hadoop.hbase.io.hfile.HFileReaderV3;
+import org.apache.hadoop.hbase.io.hfile.HFileWriterV3;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.wal.HLog;
+import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
+import org.apache.hadoop.hbase.security.EncryptionUtil;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+import org.junit.Before;
+import org.junit.experimental.categories.Category;
+
+@Category(IntegrationTests.class)
+public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {
+
+  private static final Log LOG;
+  static {
+    Logger.getLogger(HFileReaderV3.class).setLevel(Level.TRACE);
+    Logger.getLogger(HFileWriterV3.class).setLevel(Level.TRACE);
+    Logger.getLogger(SecureProtobufLogReader.class).setLevel(Level.TRACE);
+    Logger.getLogger(SecureProtobufLogWriter.class).setLevel(Level.TRACE);
+    LOG = LogFactory.getLog(IntegrationTestIngestWithEncryption.class);
+  }
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    // Inject test key provider
+    // Set up configuration
+    IntegrationTestingUtility testUtil = getTestingUtil(conf);
+    testUtil.getConfiguration().setInt("hfile.format.version", 3);
+    testUtil.getConfiguration().set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY,
+      KeyProviderForTesting.class.getName());
+    testUtil.getConfiguration().set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
+    testUtil.getConfiguration().setClass("hbase.regionserver.hlog.reader.impl",
+      SecureProtobufLogReader.class, HLog.Reader.class);
+    testUtil.getConfiguration().setClass("hbase.regionserver.hlog.writer.impl",
+      SecureProtobufLogWriter.class, HLog.Writer.class);
+    testUtil.getConfiguration().setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
+    // Flush frequently
+    testUtil.getConfiguration().setInt(HRegion.MEMSTORE_PERIODIC_FLUSH_INTERVAL, 120000);
+
+    // Initialize the cluster. This invokes LoadTestTool -init_only, which
+    // will create the test table, appropriately pre-split
+    super.setUp();
+
+    // Create the test encryption key
+    SecureRandom rng = new SecureRandom();
+    byte[] keyBytes = new byte[AES.KEY_LENGTH];
+    rng.nextBytes(keyBytes);
+    Key cfKey = new SecretKeySpec(keyBytes, "AES");
+
+    // Update the test table schema so HFiles from this point will be written with
+    // encryption features enabled.
+    final HBaseAdmin admin = testUtil.getHBaseAdmin();
+    HTableDescriptor tableDescriptor =
+        new HTableDescriptor(admin.getTableDescriptor(Bytes.toBytes(getTablename())));
+    for (HColumnDescriptor columnDescriptor: tableDescriptor.getColumnFamilies()) {
+      columnDescriptor.setEncryptionType("AES");
+      columnDescriptor.setEncryptionKey(EncryptionUtil.wrapKey(testUtil.getConfiguration(),
+        "hbase", cfKey));
+      LOG.info("Updating CF schema for " + getTablename() + "." +
+        columnDescriptor.getNameAsString());
+      admin.disableTable(getTablename());
+      admin.modifyColumn(getTablename(), columnDescriptor);
+      admin.enableTable(getTablename());
+      testUtil.waitFor(10000, 1000, true, new Predicate<IOException>() {
+        @Override
+        public boolean evaluate() throws IOException {
+          return admin.isTableAvailable(getTablename());
+        }
+      });
+    }
+
+  }
+
+}

Added: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/EncryptionProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/EncryptionProtos.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/EncryptionProtos.java (added)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/EncryptionProtos.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,951 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: Encryption.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class EncryptionProtos {
+  private EncryptionProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface WrappedKeyOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string algorithm = 1;
+    /**
+     * <code>required string algorithm = 1;</code>
+     */
+    boolean hasAlgorithm();
+    /**
+     * <code>required string algorithm = 1;</code>
+     */
+    java.lang.String getAlgorithm();
+    /**
+     * <code>required string algorithm = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getAlgorithmBytes();
+
+    // required uint32 length = 2;
+    /**
+     * <code>required uint32 length = 2;</code>
+     */
+    boolean hasLength();
+    /**
+     * <code>required uint32 length = 2;</code>
+     */
+    int getLength();
+
+    // required bytes data = 3;
+    /**
+     * <code>required bytes data = 3;</code>
+     */
+    boolean hasData();
+    /**
+     * <code>required bytes data = 3;</code>
+     */
+    com.google.protobuf.ByteString getData();
+
+    // optional bytes iv = 4;
+    /**
+     * <code>optional bytes iv = 4;</code>
+     */
+    boolean hasIv();
+    /**
+     * <code>optional bytes iv = 4;</code>
+     */
+    com.google.protobuf.ByteString getIv();
+
+    // optional fixed32 crc = 5;
+    /**
+     * <code>optional fixed32 crc = 5;</code>
+     */
+    boolean hasCrc();
+    /**
+     * <code>optional fixed32 crc = 5;</code>
+     */
+    int getCrc();
+  }
+  /**
+   * Protobuf type {@code WrappedKey}
+   */
+  public static final class WrappedKey extends
+      com.google.protobuf.GeneratedMessage
+      implements WrappedKeyOrBuilder {
+    // Use WrappedKey.newBuilder() to construct.
+    private WrappedKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private WrappedKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final WrappedKey defaultInstance;
+    public static WrappedKey getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public WrappedKey getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private WrappedKey(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              algorithm_ = input.readBytes();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              length_ = input.readUInt32();
+              break;
+            }
+            case 26: {
+              bitField0_ |= 0x00000004;
+              data_ = input.readBytes();
+              break;
+            }
+            case 34: {
+              bitField0_ |= 0x00000008;
+              iv_ = input.readBytes();
+              break;
+            }
+            case 45: {
+              bitField0_ |= 0x00000010;
+              crc_ = input.readFixed32();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.class, org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<WrappedKey> PARSER =
+        new com.google.protobuf.AbstractParser<WrappedKey>() {
+      public WrappedKey parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new WrappedKey(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<WrappedKey> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required string algorithm = 1;
+    public static final int ALGORITHM_FIELD_NUMBER = 1;
+    private java.lang.Object algorithm_;
+    /**
+     * <code>required string algorithm = 1;</code>
+     */
+    public boolean hasAlgorithm() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string algorithm = 1;</code>
+     */
+    public java.lang.String getAlgorithm() {
+      java.lang.Object ref = algorithm_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          algorithm_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string algorithm = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getAlgorithmBytes() {
+      java.lang.Object ref = algorithm_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        algorithm_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // required uint32 length = 2;
+    public static final int LENGTH_FIELD_NUMBER = 2;
+    private int length_;
+    /**
+     * <code>required uint32 length = 2;</code>
+     */
+    public boolean hasLength() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>required uint32 length = 2;</code>
+     */
+    public int getLength() {
+      return length_;
+    }
+
+    // required bytes data = 3;
+    public static final int DATA_FIELD_NUMBER = 3;
+    private com.google.protobuf.ByteString data_;
+    /**
+     * <code>required bytes data = 3;</code>
+     */
+    public boolean hasData() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>required bytes data = 3;</code>
+     */
+    public com.google.protobuf.ByteString getData() {
+      return data_;
+    }
+
+    // optional bytes iv = 4;
+    public static final int IV_FIELD_NUMBER = 4;
+    private com.google.protobuf.ByteString iv_;
+    /**
+     * <code>optional bytes iv = 4;</code>
+     */
+    public boolean hasIv() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional bytes iv = 4;</code>
+     */
+    public com.google.protobuf.ByteString getIv() {
+      return iv_;
+    }
+
+    // optional fixed32 crc = 5;
+    public static final int CRC_FIELD_NUMBER = 5;
+    private int crc_;
+    /**
+     * <code>optional fixed32 crc = 5;</code>
+     */
+    public boolean hasCrc() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <code>optional fixed32 crc = 5;</code>
+     */
+    public int getCrc() {
+      return crc_;
+    }
+
+    private void initFields() {
+      algorithm_ = "";
+      length_ = 0;
+      data_ = com.google.protobuf.ByteString.EMPTY;
+      iv_ = com.google.protobuf.ByteString.EMPTY;
+      crc_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasAlgorithm()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasLength()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasData()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getAlgorithmBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeUInt32(2, length_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, data_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeBytes(4, iv_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeFixed32(5, crc_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getAlgorithmBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(2, length_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, data_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(4, iv_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFixed32Size(5, crc_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey other = (org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey) obj;
+
+      boolean result = true;
+      result = result && (hasAlgorithm() == other.hasAlgorithm());
+      if (hasAlgorithm()) {
+        result = result && getAlgorithm()
+            .equals(other.getAlgorithm());
+      }
+      result = result && (hasLength() == other.hasLength());
+      if (hasLength()) {
+        result = result && (getLength()
+            == other.getLength());
+      }
+      result = result && (hasData() == other.hasData());
+      if (hasData()) {
+        result = result && getData()
+            .equals(other.getData());
+      }
+      result = result && (hasIv() == other.hasIv());
+      if (hasIv()) {
+        result = result && getIv()
+            .equals(other.getIv());
+      }
+      result = result && (hasCrc() == other.hasCrc());
+      if (hasCrc()) {
+        result = result && (getCrc()
+            == other.getCrc());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasAlgorithm()) {
+        hash = (37 * hash) + ALGORITHM_FIELD_NUMBER;
+        hash = (53 * hash) + getAlgorithm().hashCode();
+      }
+      if (hasLength()) {
+        hash = (37 * hash) + LENGTH_FIELD_NUMBER;
+        hash = (53 * hash) + getLength();
+      }
+      if (hasData()) {
+        hash = (37 * hash) + DATA_FIELD_NUMBER;
+        hash = (53 * hash) + getData().hashCode();
+      }
+      if (hasIv()) {
+        hash = (37 * hash) + IV_FIELD_NUMBER;
+        hash = (53 * hash) + getIv().hashCode();
+      }
+      if (hasCrc()) {
+        hash = (37 * hash) + CRC_FIELD_NUMBER;
+        hash = (53 * hash) + getCrc();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code WrappedKey}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKeyOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.class, org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        algorithm_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        length_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        data_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000004);
+        iv_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000008);
+        crc_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000010);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey build() {
+        org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey result = new org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.algorithm_ = algorithm_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.length_ = length_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.data_ = data_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.iv_ = iv_;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        result.crc_ = crc_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.getDefaultInstance()) return this;
+        if (other.hasAlgorithm()) {
+          bitField0_ |= 0x00000001;
+          algorithm_ = other.algorithm_;
+          onChanged();
+        }
+        if (other.hasLength()) {
+          setLength(other.getLength());
+        }
+        if (other.hasData()) {
+          setData(other.getData());
+        }
+        if (other.hasIv()) {
+          setIv(other.getIv());
+        }
+        if (other.hasCrc()) {
+          setCrc(other.getCrc());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasAlgorithm()) {
+          
+          return false;
+        }
+        if (!hasLength()) {
+          
+          return false;
+        }
+        if (!hasData()) {
+          
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required string algorithm = 1;
+      private java.lang.Object algorithm_ = "";
+      /**
+       * <code>required string algorithm = 1;</code>
+       */
+      public boolean hasAlgorithm() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string algorithm = 1;</code>
+       */
+      public java.lang.String getAlgorithm() {
+        java.lang.Object ref = algorithm_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          algorithm_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string algorithm = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getAlgorithmBytes() {
+        java.lang.Object ref = algorithm_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          algorithm_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string algorithm = 1;</code>
+       */
+      public Builder setAlgorithm(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000001;
+        algorithm_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string algorithm = 1;</code>
+       */
+      public Builder clearAlgorithm() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        algorithm_ = getDefaultInstance().getAlgorithm();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string algorithm = 1;</code>
+       */
+      public Builder setAlgorithmBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000001;
+        algorithm_ = value;
+        onChanged();
+        return this;
+      }
+
+      // required uint32 length = 2;
+      private int length_ ;
+      /**
+       * <code>required uint32 length = 2;</code>
+       */
+      public boolean hasLength() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>required uint32 length = 2;</code>
+       */
+      public int getLength() {
+        return length_;
+      }
+      /**
+       * <code>required uint32 length = 2;</code>
+       */
+      public Builder setLength(int value) {
+        bitField0_ |= 0x00000002;
+        length_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required uint32 length = 2;</code>
+       */
+      public Builder clearLength() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        length_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // required bytes data = 3;
+      private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>required bytes data = 3;</code>
+       */
+      public boolean hasData() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>required bytes data = 3;</code>
+       */
+      public com.google.protobuf.ByteString getData() {
+        return data_;
+      }
+      /**
+       * <code>required bytes data = 3;</code>
+       */
+      public Builder setData(com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000004;
+        data_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required bytes data = 3;</code>
+       */
+      public Builder clearData() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        data_ = getDefaultInstance().getData();
+        onChanged();
+        return this;
+      }
+
+      // optional bytes iv = 4;
+      private com.google.protobuf.ByteString iv_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes iv = 4;</code>
+       */
+      public boolean hasIv() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional bytes iv = 4;</code>
+       */
+      public com.google.protobuf.ByteString getIv() {
+        return iv_;
+      }
+      /**
+       * <code>optional bytes iv = 4;</code>
+       */
+      public Builder setIv(com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000008;
+        iv_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional bytes iv = 4;</code>
+       */
+      public Builder clearIv() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        iv_ = getDefaultInstance().getIv();
+        onChanged();
+        return this;
+      }
+
+      // optional fixed32 crc = 5;
+      private int crc_ ;
+      /**
+       * <code>optional fixed32 crc = 5;</code>
+       */
+      public boolean hasCrc() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
+      }
+      /**
+       * <code>optional fixed32 crc = 5;</code>
+       */
+      public int getCrc() {
+        return crc_;
+      }
+      /**
+       * <code>optional fixed32 crc = 5;</code>
+       */
+      public Builder setCrc(int value) {
+        bitField0_ |= 0x00000010;
+        crc_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional fixed32 crc = 5;</code>
+       */
+      public Builder clearCrc() {
+        bitField0_ = (bitField0_ & ~0x00000010);
+        crc_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:WrappedKey)
+    }
+
+    static {
+      defaultInstance = new WrappedKey(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:WrappedKey)
+  }
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_WrappedKey_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_WrappedKey_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\020Encryption.proto\"V\n\nWrappedKey\022\021\n\talgo" +
+      "rithm\030\001 \002(\t\022\016\n\006length\030\002 \002(\r\022\014\n\004data\030\003 \002(" +
+      "\014\022\n\n\002iv\030\004 \001(\014\022\013\n\003crc\030\005 \001(\007BC\n*org.apache" +
+      ".hadoop.hbase.protobuf.generatedB\020Encryp" +
+      "tionProtosH\001\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_WrappedKey_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_WrappedKey_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_WrappedKey_descriptor,
+              new java.lang.String[] { "Algorithm", "Length", "Data", "Iv", "Crc", });
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}

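[Editor's note, not part of the commit] For readers following the diff, here is a minimal usage sketch assembled only from the accessors visible in the generated WrappedKey class above (newBuilder, setAlgorithm, setLength, setData, setIv, build, parseFrom). The key and IV byte arrays are hypothetical placeholders, not values taken from HBase itself.

    // Hedged sketch: build and round-trip a WrappedKey message using the
    // generated API shown in the diff above. keyBytes/ivBytes are placeholders.
    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey;

    public class WrappedKeyExample {
      public static void main(String[] args) throws Exception {
        byte[] keyBytes = new byte[16];   // hypothetical wrapped key material
        byte[] ivBytes  = new byte[16];   // hypothetical initialization vector

        // algorithm, length and data are required fields; iv and crc are optional.
        WrappedKey wrapped = WrappedKey.newBuilder()
            .setAlgorithm("AES")
            .setLength(keyBytes.length)
            .setData(ByteString.copyFrom(keyBytes))
            .setIv(ByteString.copyFrom(ivBytes))
            .build();

        // Serialize and parse back, as a key provider or file writer might.
        byte[] serialized = wrapped.toByteArray();
        WrappedKey parsed = WrappedKey.parseFrom(serialized);
        System.out.println(parsed.getAlgorithm() + " key, "
            + parsed.getLength() + " bytes, iv present: " + parsed.hasIv());
      }
    }
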
Modified: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java (original)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java Tue Nov 26 04:22:33 2013
@@ -864,6 +864,16 @@ public final class HFileProtos {
      * <code>optional uint32 compression_codec = 12;</code>
      */
     int getCompressionCodec();
+
+    // optional bytes encryption_key = 13;
+    /**
+     * <code>optional bytes encryption_key = 13;</code>
+     */
+    boolean hasEncryptionKey();
+    /**
+     * <code>optional bytes encryption_key = 13;</code>
+     */
+    com.google.protobuf.ByteString getEncryptionKey();
   }
   /**
    * Protobuf type {@code FileTrailerProto}
@@ -980,6 +990,11 @@ public final class HFileProtos {
               compressionCodec_ = input.readUInt32();
               break;
             }
+            case 106: {
+              bitField0_ |= 0x00001000;
+              encryptionKey_ = input.readBytes();
+              break;
+            }
           }
         }
       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -1239,6 +1254,22 @@ public final class HFileProtos {
       return compressionCodec_;
     }
 
+    // optional bytes encryption_key = 13;
+    public static final int ENCRYPTION_KEY_FIELD_NUMBER = 13;
+    private com.google.protobuf.ByteString encryptionKey_;
+    /**
+     * <code>optional bytes encryption_key = 13;</code>
+     */
+    public boolean hasEncryptionKey() {
+      return ((bitField0_ & 0x00001000) == 0x00001000);
+    }
+    /**
+     * <code>optional bytes encryption_key = 13;</code>
+     */
+    public com.google.protobuf.ByteString getEncryptionKey() {
+      return encryptionKey_;
+    }
+
     private void initFields() {
       fileInfoOffset_ = 0L;
       loadOnOpenDataOffset_ = 0L;
@@ -1252,6 +1283,7 @@ public final class HFileProtos {
       lastDataBlockOffset_ = 0L;
       comparatorClassName_ = "";
       compressionCodec_ = 0;
+      encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -1301,6 +1333,9 @@ public final class HFileProtos {
       if (((bitField0_ & 0x00000800) == 0x00000800)) {
         output.writeUInt32(12, compressionCodec_);
       }
+      if (((bitField0_ & 0x00001000) == 0x00001000)) {
+        output.writeBytes(13, encryptionKey_);
+      }
       getUnknownFields().writeTo(output);
     }
 
@@ -1358,6 +1393,10 @@ public final class HFileProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeUInt32Size(12, compressionCodec_);
       }
+      if (((bitField0_ & 0x00001000) == 0x00001000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(13, encryptionKey_);
+      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -1441,6 +1480,11 @@ public final class HFileProtos {
         result = result && (getCompressionCodec()
             == other.getCompressionCodec());
       }
+      result = result && (hasEncryptionKey() == other.hasEncryptionKey());
+      if (hasEncryptionKey()) {
+        result = result && getEncryptionKey()
+            .equals(other.getEncryptionKey());
+      }
       result = result &&
           getUnknownFields().equals(other.getUnknownFields());
       return result;
@@ -1502,6 +1546,10 @@ public final class HFileProtos {
         hash = (37 * hash) + COMPRESSION_CODEC_FIELD_NUMBER;
         hash = (53 * hash) + getCompressionCodec();
       }
+      if (hasEncryptionKey()) {
+        hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER;
+        hash = (53 * hash) + getEncryptionKey().hashCode();
+      }
       hash = (29 * hash) + getUnknownFields().hashCode();
       memoizedHashCode = hash;
       return hash;
@@ -1639,6 +1687,8 @@ public final class HFileProtos {
         bitField0_ = (bitField0_ & ~0x00000400);
         compressionCodec_ = 0;
         bitField0_ = (bitField0_ & ~0x00000800);
+        encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00001000);
         return this;
       }
 
@@ -1715,6 +1765,10 @@ public final class HFileProtos {
           to_bitField0_ |= 0x00000800;
         }
         result.compressionCodec_ = compressionCodec_;
+        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+          to_bitField0_ |= 0x00001000;
+        }
+        result.encryptionKey_ = encryptionKey_;
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -1769,6 +1823,9 @@ public final class HFileProtos {
         if (other.hasCompressionCodec()) {
           setCompressionCodec(other.getCompressionCodec());
         }
+        if (other.hasEncryptionKey()) {
+          setEncryptionKey(other.getEncryptionKey());
+        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -2233,6 +2290,42 @@ public final class HFileProtos {
         return this;
       }
 
+      // optional bytes encryption_key = 13;
+      private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes encryption_key = 13;</code>
+       */
+      public boolean hasEncryptionKey() {
+        return ((bitField0_ & 0x00001000) == 0x00001000);
+      }
+      /**
+       * <code>optional bytes encryption_key = 13;</code>
+       */
+      public com.google.protobuf.ByteString getEncryptionKey() {
+        return encryptionKey_;
+      }
+      /**
+       * <code>optional bytes encryption_key = 13;</code>
+       */
+      public Builder setEncryptionKey(com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00001000;
+        encryptionKey_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional bytes encryption_key = 13;</code>
+       */
+      public Builder clearEncryptionKey() {
+        bitField0_ = (bitField0_ & ~0x00001000);
+        encryptionKey_ = getDefaultInstance().getEncryptionKey();
+        onChanged();
+        return this;
+      }
+
       // @@protoc_insertion_point(builder_scope:FileTrailerProto)
     }
 
@@ -2265,7 +2358,7 @@ public final class HFileProtos {
     java.lang.String[] descriptorData = {
       "\n\013HFile.proto\032\013HBase.proto\"3\n\rFileInfoPr" +
       "oto\022\"\n\tmap_entry\030\001 \003(\0132\017.BytesBytesPair\"" +
-      "\371\002\n\020FileTrailerProto\022\030\n\020file_info_offset" +
+      "\221\003\n\020FileTrailerProto\022\030\n\020file_info_offset" +
       "\030\001 \001(\004\022 \n\030load_on_open_data_offset\030\002 \001(\004" +
       "\022$\n\034uncompressed_data_index_size\030\003 \001(\004\022 " +
       "\n\030total_uncompressed_bytes\030\004 \001(\004\022\030\n\020data" +
@@ -2274,9 +2367,9 @@ public final class HFileProtos {
       "dex_levels\030\010 \001(\r\022\037\n\027first_data_block_off" +
       "set\030\t \001(\004\022\036\n\026last_data_block_offset\030\n \001(",
       "\004\022\035\n\025comparator_class_name\030\013 \001(\t\022\031\n\021comp" +
-      "ression_codec\030\014 \001(\rBA\n*org.apache.hadoop" +
-      ".hbase.protobuf.generatedB\013HFileProtosH\001" +
-      "\210\001\001\240\001\001"
+      "ression_codec\030\014 \001(\r\022\026\n\016encryption_key\030\r " +
+      "\001(\014BA\n*org.apache.hadoop.hbase.protobuf." +
+      "generatedB\013HFileProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -2294,7 +2387,7 @@ public final class HFileProtos {
           internal_static_FileTrailerProto_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_FileTrailerProto_descriptor,
-              new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", });
+              new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", "EncryptionKey", });
           return null;
         }
       };

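[Editor's note, not part of the commit] The hunks above add an optional encryption_key field (tag 13) to FileTrailerProto. The hedged sketch below shows how the generated builder and accessors from this diff could be exercised; FileTrailerProto.newBuilder() and the wrappedKeyBytes value are assumed here purely for illustration.

    // Hedged sketch: set and read back the new optional encryption_key field.
    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto;

    public class FileTrailerEncryptionKeyExample {
      public static void main(String[] args) {
        byte[] wrappedKeyBytes = new byte[48];  // hypothetical wrapped data key blob

        FileTrailerProto trailer = FileTrailerProto.newBuilder()
            .setCompressionCodec(0)
            .setEncryptionKey(ByteString.copyFrom(wrappedKeyBytes))
            .build();

        // The field is optional, so readers should check hasEncryptionKey() first.
        if (trailer.hasEncryptionKey()) {
          ByteString key = trailer.getEncryptionKey();
          System.out.println("trailer carries a wrapped key of " + key.size() + " bytes");
        }
      }
    }
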

