hbase-commits mailing list archives

From apurt...@apache.org
Subject hbase git commit: HBASE-15707 ImportTSV bulk output does not support tags with hfile.format.version=3 (huaxiang sun)
Date Wed, 04 May 2016 21:11:49 GMT
Repository: hbase
Updated Branches:
  refs/heads/0.98 20c67123f -> 03e6b75a2


HBASE-15707 ImportTSV bulk output does not support tags with hfile.format.version=3 (huaxiang sun)

Conflicts:
	hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/03e6b75a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/03e6b75a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/03e6b75a

Branch: refs/heads/0.98
Commit: 03e6b75a2cd1bca1c2291fa714fa8f861a023149
Parents: 20c6712
Author: tedyu <yuzhihong@gmail.com>
Authored: Wed Apr 27 13:40:54 2016 -0700
Committer: Andrew Purtell <apurtell@apache.org>
Committed: Wed May 4 14:11:33 2016 -0700

----------------------------------------------------------------------
 .../hbase/mapreduce/HFileOutputFormat2.java     |  6 ++
 .../hbase/mapreduce/TestHFileOutputFormat2.java | 78 +++++++++++++++++++-
 2 files changed, 83 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
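
The diffstat above is the whole patch: a format-version check in HFileOutputFormat2 plus a test that round-trips a TTL tag. For readers who want to exercise the change, the sketch below (not part of the patch; the class and method names are illustrative) assumes the same 0.98-era APIs the new test uses. The key point is setting hfile.format.version to at least HFile.MIN_FORMAT_VERSION_WITH_TAGS so the RecordWriter created by HFileOutputFormat2 keeps cell tags such as TTL.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class TagAwareJobSetupSketch {
  // Returns a Job whose HFile output will retain cell tags, because the configuration
  // requests HFile format version 3 (HFile.MIN_FORMAT_VERSION_WITH_TAGS) or later.
  static Job newBulkOutputJob(Configuration base, Path outputDir) throws Exception {
    Configuration conf = new Configuration(base);
    conf.setInt("hfile.format.version", HFile.MIN_FORMAT_VERSION_WITH_TAGS);
    Job job = new Job(conf);                        // same constructor the test below uses
    job.setOutputFormatClass(HFileOutputFormat2.class);
    FileOutputFormat.setOutputPath(job, outputDir);
    return job;
  }
}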


http://git-wip-us.apache.org/repos/asf/hbase/blob/03e6b75a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index 579ed56..425b5ce 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -294,6 +295,11 @@ public class HFileOutputFormat2
                                     .withChecksumType(HStore.getChecksumType(conf))
                                     .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
                                     .withBlockSize(blockSize);
+
+        if (HFile.getFormatVersion(conf) >= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {
+          contextBuilder.withIncludesTags(true);
+        }
+
         contextBuilder.withDataBlockEncoding(encoding);
         HFileContext hFileContext = contextBuilder.build();
                                     

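The hunk above is the core of the fix: the HFileContext handed to each writer only includes tags when the configured hfile.format.version can persist them. As a standalone illustration (a sketch, not the committed code; the class and method names here are hypothetical), the version-gated pattern looks like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class TagAwareContextSketch {
  // Build an HFileContext that includes tags only when the configured HFile format
  // version (hfile.format.version) is new enough to persist them, i.e. v3 or later.
  static HFileContext buildContext(Configuration conf, int blockSize) {
    HFileContextBuilder contextBuilder = new HFileContextBuilder()
        .withBlockSize(blockSize);
    if (HFile.getFormatVersion(conf) >= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {
      contextBuilder.withIncludesTags(true);
    }
    return contextBuilder.build();
  }
}
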
http://git-wip-us.apache.org/repos/asf/hbase/blob/03e6b75a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 407e23e..efa52a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -26,12 +26,17 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.NoSuchElementException;
 import java.util.Random;
 import java.util.Set;
+import java.util.Stack;
 import java.util.concurrent.Callable;
 
 import org.apache.commons.logging.Log;
@@ -55,6 +60,8 @@ import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.PerformanceEvaluation;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -67,6 +74,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
+import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
@@ -233,7 +241,7 @@ public class TestHFileOutputFormat2  {
     TaskAttemptContext context = null;
     Path dir =
       util.getDataTestDir("test_TIMERANGE_present");
-    LOG.info("Timerange dir writing to dir: "+ dir);
+    LOG.info("Timerange dir writing to dir: " + dir);
     try {
       // build a record writer using HFileOutputFormat2
       Job job = new Job(conf);
@@ -329,6 +337,74 @@ public class TestHFileOutputFormat2  {
     assertTrue(files.length > 0);
   }
 
+  /**
+   * Test that {@link HFileOutputFormat2} RecordWriter writes tags such as ttl into
+   * hfile.
+   */
+  @Test
+  public void test_WritingTagData()
+      throws Exception {
+    Configuration conf = new Configuration(this.util.getConfiguration());
+    final String HFILE_FORMAT_VERSION_CONF_KEY = "hfile.format.version";
+    conf.setInt(HFILE_FORMAT_VERSION_CONF_KEY, HFile.MIN_FORMAT_VERSION_WITH_TAGS);
+    RecordWriter<ImmutableBytesWritable, Cell> writer = null;
+    TaskAttemptContext context = null;
+    Path dir =
+        util.getDataTestDir("WritingTagData");
+    try {
+      Job job = new Job(conf);
+      FileOutputFormat.setOutputPath(job, dir);
+      context = createTestTaskAttemptContext(job);
+      HFileOutputFormat2 hof = new HFileOutputFormat2();
+      writer = hof.getRecordWriter(context);
+      final byte [] b = Bytes.toBytes("b");
+
+      KeyValue kv = new KeyValue(b, b, b, HConstants.LATEST_TIMESTAMP, b, new Tag[] {
+          new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(978670)) });
+      writer.write(new ImmutableBytesWritable(), kv);
+      writer.close(context);
+      writer = null;
+      FileSystem fs = dir.getFileSystem(conf);
+      for (FileStatus keyFileStatus: listFiles(fs, dir)) {
+        HFile.Reader reader = HFile.createReader(fs, keyFileStatus.getPath(), new CacheConfig(conf),
+            conf);
+        HFileScanner scanner = reader.getScanner(false, false, false);
+        scanner.seekTo();
+        Cell cell = scanner.getKeyValue();
+
+        Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(),
+            cell.getTagsOffset(), cell.getTagsLengthUnsigned());
+        assertTrue(tagsIterator.hasNext());
+        assertTrue(tagsIterator.next().getType() == TagType.TTL_TAG_TYPE);
+      }
+    } finally {
+      if (writer != null && context != null) writer.close(context);
+      dir.getFileSystem(conf).delete(dir, true);
+    }
+  }
+
+  private List<FileStatus> listFiles(FileSystem fs, Path dir) throws IOException {
+    List<FileStatus> list = new ArrayList<FileStatus>();
+    Stack<Path> stack = new Stack<Path>();
+    stack.push(dir);
+    while (!stack.isEmpty()) {
+      Path p = stack.pop();
+      FileStatus stat = fs.getFileStatus(p);
+      if (stat.isDir()) {
+        for (FileStatus s: fs.listStatus(p)) {
+          if (s.isDir()) {
+            stack.push(s.getPath());
+          } else {
+            list.add(s);
+          }
+        }
+      } else {
+        list.add(stat);
+      }
+    }
+    return list;
+  }
+
   @Test
   public void testJobConfiguration() throws Exception {
     Configuration conf = new Configuration(this.util.getConfiguration());


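As a companion to the new test above, here is a minimal sketch (assuming the same 0.98-era APIs the test uses; the helper class, method name, and ttl parameter are illustrative, not part of the patch) of how a mapper feeding HFileOutputFormat2 could attach a TTL tag to the cells it emits:

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.RecordWriter;

public class TtlTagEmitSketch {
  // Wrap the value in a KeyValue carrying a TTL tag; with hfile.format.version=3 the
  // writer persists the tag, which is exactly what the new test asserts.
  static void emitWithTtl(RecordWriter<ImmutableBytesWritable, Cell> writer,
      byte[] row, byte[] family, byte[] qualifier, byte[] value, int ttl)
      throws IOException, InterruptedException {
    KeyValue kv = new KeyValue(row, family, qualifier, HConstants.LATEST_TIMESTAMP, value,
        new Tag[] { new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)) });
    writer.write(new ImmutableBytesWritable(row), kv);
  }
}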