hbase-commits mailing list archives

From bus...@apache.org
Subject [16/21] hbase git commit: HBASE-12522 Backport of write-ahead-log refactoring and follow-ons.
Date Tue, 02 Dec 2014 17:20:55 GMT
http://git-wip-us.apache.org/repos/asf/hbase/blob/8959828f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
index bda1972..9fd2a37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
@@ -22,39 +22,22 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.EOFException;
 import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.TreeMap;
 import java.util.UUID;
-import java.util.concurrent.CountDownLatch;
 
-
-import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
-import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;
-import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;
-import org.apache.hadoop.hbase.regionserver.SequenceId;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ByteString;
 
 /**
  * A Key for an entry in the change log.
@@ -65,97 +48,29 @@ import com.google.protobuf.ByteString;
  *
  * <p>Some Transactional edits (START, COMMIT, ABORT) will not have an
  * associated row.
+ * @deprecated use WALKey
  */
-// TODO: Key and WALEdit are never used separately, or in one-to-many relation, for practical
-//       purposes. They need to be merged into HLogEntry.
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION)
-public class HLogKey implements WritableComparable<HLogKey>, SequenceId {
+@Deprecated
+public class HLogKey extends WALKey implements Writable {
   public static final Log LOG = LogFactory.getLog(HLogKey.class);
 
-  // should be < 0 (@see #readFields(DataInput))
-  // version 2 supports HLog compression
-  enum Version {
-    UNVERSIONED(0),
-    // Initial number we put on HLogKey when we introduced versioning.
-    INITIAL(-1),
-    // Version -2 introduced a dictionary compression facility.  Only this
-    // dictionary-based compression is available in version -2.
-    COMPRESSED(-2);
-
-    final int code;
-    static final Version[] byCode;
-    static {
-      byCode = Version.values();
-      for (int i = 0; i < byCode.length; i++) {
-        if (byCode[i].code != -1 * i) {
-          throw new AssertionError("Values in this enum should be descending by one");
-        }
-      }
-    }
-
-    Version(int code) {
-      this.code = code;
-    }
-
-    boolean atLeast(Version other) {
-      return code <= other.code;
-    }
-
-    static Version fromCode(int code) {
-      return byCode[code * -1];
-    }
-  }
-
-  /*
-   * This is used for reading the log entries created by the previous releases
-   * (0.94.11) which write the clusters information to the scopes of WALEdit.
-   */
-  private static final String PREFIX_CLUSTER_KEY = ".";
-
-
-  private static final Version VERSION = Version.COMPRESSED;
-
-  //  The encoded region name.
-  private byte [] encodedRegionName;
-  private TableName tablename;
-  private long logSeqNum;
-  private long origLogSeqNum = 0;
-  private CountDownLatch seqNumAssignedLatch = new CountDownLatch(1);
-  // Time at which this edit was written.
-  private long writeTime;
-
-  // The first element in the list is the cluster id on which the change has originated
-  private List<UUID> clusterIds;
-
-  private NavigableMap<byte[], Integer> scopes;
-
-  private long nonceGroup = HConstants.NO_NONCE;
-  private long nonce = HConstants.NO_NONCE;
-  static final List<UUID> EMPTY_UUIDS = Collections.unmodifiableList(new ArrayList<UUID>());
-
-  private CompressionContext compressionContext;
-
   public HLogKey() {
-    init(null, null, 0L, HConstants.LATEST_TIMESTAMP,
-        new ArrayList<UUID>(), HConstants.NO_NONCE, HConstants.NO_NONCE);
+    super();
   }
 
   @VisibleForTesting
   public HLogKey(final byte[] encodedRegionName, final TableName tablename, long logSeqNum,
       final long now, UUID clusterId) {
-    List<UUID> clusterIds = new ArrayList<UUID>();
-    clusterIds.add(clusterId);
-    init(encodedRegionName, tablename, logSeqNum, now, clusterIds,
-        HConstants.NO_NONCE, HConstants.NO_NONCE);
+    super(encodedRegionName, tablename, logSeqNum, now, clusterId);
   }
 
   public HLogKey(final byte[] encodedRegionName, final TableName tablename) {
-    this(encodedRegionName, tablename, System.currentTimeMillis());
+    super(encodedRegionName, tablename);
   }
 
   public HLogKey(final byte[] encodedRegionName, final TableName tablename, final long now) {
-    init(encodedRegionName, tablename, HLog.NO_SEQUENCE_ID, now,
-        EMPTY_UUIDS, HConstants.NO_NONCE, HConstants.NO_NONCE);
+    super(encodedRegionName, tablename, now);
   }
 
   /**
@@ -173,7 +88,7 @@ public class HLogKey implements WritableComparable<HLogKey>, SequenceId {
    */
   public HLogKey(final byte [] encodedRegionName, final TableName tablename,
       long logSeqNum, final long now, List<UUID> clusterIds, long nonceGroup, long nonce) {
-    init(encodedRegionName, tablename, logSeqNum, now, clusterIds, nonceGroup, nonce);
+    super(encodedRegionName, tablename, logSeqNum, now, clusterIds, nonceGroup, nonce);
   }
 
   /**
@@ -191,8 +106,7 @@ public class HLogKey implements WritableComparable<HLogKey>, SequenceId {
    */
   public HLogKey(final byte [] encodedRegionName, final TableName tablename,
       final long now, List<UUID> clusterIds, long nonceGroup, long nonce) {
-    init(encodedRegionName, tablename, HLog.NO_SEQUENCE_ID, now, clusterIds,
-      nonceGroup, nonce);
+    super(encodedRegionName, tablename, now, clusterIds, nonceGroup, nonce);
   }
 
   /**
@@ -209,240 +123,7 @@ public class HLogKey implements WritableComparable<HLogKey>, SequenceId {
    */
   public HLogKey(final byte [] encodedRegionName, final TableName tablename, long logSeqNum,
       long nonceGroup, long nonce) {
-    init(encodedRegionName, tablename, logSeqNum, EnvironmentEdgeManager.currentTime(),
-      EMPTY_UUIDS, nonceGroup, nonce);
-  }
-
-  protected void init(final byte [] encodedRegionName, final TableName tablename,
-      long logSeqNum, final long now, List<UUID> clusterIds, long nonceGroup, long nonce) {
-    this.logSeqNum = logSeqNum;
-    this.writeTime = now;
-    this.clusterIds = clusterIds;
-    this.encodedRegionName = encodedRegionName;
-    this.tablename = tablename;
-    this.nonceGroup = nonceGroup;
-    this.nonce = nonce;
-  }
-
-  /**
-   * @param compressionContext Compression context to use
-   */
-  public void setCompressionContext(CompressionContext compressionContext) {
-    this.compressionContext = compressionContext;
-  }
-
-  /** @return encoded region name */
-  public byte [] getEncodedRegionName() {
-    return encodedRegionName;
-  }
-
-  /** @return table name */
-  public TableName getTablename() {
-    return tablename;
-  }
-
-  /** @return log sequence number */
-  public long getLogSeqNum() {
-    return this.logSeqNum;
-  }
-
-  /**
-   * Allow that the log sequence id to be set post-construction and release all waiters on assigned
-   * sequence number.
-   * @param sequence
-   */
-  void setLogSeqNum(final long sequence) {
-    this.logSeqNum = sequence;
-    this.seqNumAssignedLatch.countDown();
-  }
-
-  /**
-   * Used to set original seq Id for HLogKey during wal replay
-   * @param seqId
-   */
-  public void setOrigLogSeqNum(final long seqId) {
-    this.origLogSeqNum = seqId;
-  }
-  
-  /**
-   * Return a positive long if current HLogKey is created from a replay edit
-   * @return original sequence number of the WALEdit
-   */
-  public long getOrigLogSeqNum() {
-    return this.origLogSeqNum;
-  }
-  
-  /**
-   * Wait for sequence number is assigned & return the assigned value
-   * @return long the new assigned sequence number
-   * @throws InterruptedException
-   */
-  @Override
-  public long getSequenceId() throws IOException {
-    try {
-      this.seqNumAssignedLatch.await();
-    } catch (InterruptedException ie) {
-      LOG.warn("Thread interrupted waiting for next log sequence number");
-      InterruptedIOException iie = new InterruptedIOException();
-      iie.initCause(ie);
-      throw iie;
-    }
-    return this.logSeqNum;
-  }
-
-  /**
-   * @return the write time
-   */
-  public long getWriteTime() {
-    return this.writeTime;
-  }
-
-  public NavigableMap<byte[], Integer> getScopes() {
-    return scopes;
-  }
-
-  /** @return The nonce group */
-  public long getNonceGroup() {
-    return nonceGroup;
-  }
-
-  /** @return The nonce */
-  public long getNonce() {
-    return nonce;
-  }
-
-  public void setScopes(NavigableMap<byte[], Integer> scopes) {
-    this.scopes = scopes;
-  }
-
-  public void readOlderScopes(NavigableMap<byte[], Integer> scopes) {
-    if (scopes != null) {
-      Iterator<Map.Entry<byte[], Integer>> iterator = scopes.entrySet()
-          .iterator();
-      while (iterator.hasNext()) {
-        Map.Entry<byte[], Integer> scope = iterator.next();
-        String key = Bytes.toString(scope.getKey());
-        if (key.startsWith(PREFIX_CLUSTER_KEY)) {
-          addClusterId(UUID.fromString(key.substring(PREFIX_CLUSTER_KEY
-              .length())));
-          iterator.remove();
-        }
-      }
-      if (scopes.size() > 0) {
-        this.scopes = scopes;
-      }
-    }
-  }
-
-  /**
-   * Marks that the cluster with the given clusterId has consumed the change
-   */
-  public void addClusterId(UUID clusterId) {
-    if (!clusterIds.contains(clusterId)) {
-      clusterIds.add(clusterId);
-    }
-  }
-
-  /**
-   * @return the set of cluster Ids that have consumed the change
-   */
-  public List<UUID> getClusterIds() {
-    return clusterIds;
-  }
-
-  /**
-   * @return the cluster id on which the change has originated. It there is no such cluster, it
-   *         returns DEFAULT_CLUSTER_ID (cases where replication is not enabled)
-   */
-  public UUID getOriginatingClusterId(){
-    return clusterIds.isEmpty() ? HConstants.DEFAULT_CLUSTER_ID : clusterIds.get(0);
-  }
-
-  @Override
-  public String toString() {
-    return tablename + "/" + Bytes.toString(encodedRegionName) + "/" +
-      logSeqNum;
-  }
-
-  /**
-   * Produces a string map for this key. Useful for programmatic use and
-   * manipulation of the data stored in an HLogKey, for example, printing
-   * as JSON.
-   *
-   * @return a Map containing data from this key
-   */
-  public Map<String, Object> toStringMap() {
-    Map<String, Object> stringMap = new HashMap<String, Object>();
-    stringMap.put("table", tablename);
-    stringMap.put("region", Bytes.toStringBinary(encodedRegionName));
-    stringMap.put("sequence", logSeqNum);
-    return stringMap;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj) {
-      return true;
-    }
-    if (obj == null || getClass() != obj.getClass()) {
-      return false;
-    }
-    return compareTo((HLogKey)obj) == 0;
-  }
-
-  @Override
-  public int hashCode() {
-    int result = Bytes.hashCode(this.encodedRegionName);
-    result ^= this.logSeqNum;
-    result ^= this.writeTime;
-    return result;
-  }
-
-  @Override
-  public int compareTo(HLogKey o) {
-    int result = Bytes.compareTo(this.encodedRegionName, o.encodedRegionName);
-    if (result == 0) {
-      if (this.logSeqNum < o.logSeqNum) {
-        result = -1;
-      } else if (this.logSeqNum  > o.logSeqNum) {
-        result = 1;
-      }
-      if (result == 0) {
-        if (this.writeTime < o.writeTime) {
-          result = -1;
-        } else if (this.writeTime > o.writeTime) {
-          return 1;
-        }
-      }
-    }
-    // why isn't cluster id accounted for?
-    return result;
-  }
-
-  /**
-   * Drop this instance's tablename byte array and instead
-   * hold a reference to the provided tablename. This is not
-   * meant to be a general purpose setter - it's only used
-   * to collapse references to conserve memory.
-   */
-  void internTableName(TableName tablename) {
-    // We should not use this as a setter - only to swap
-    // in a new reference to the same table name.
-    assert tablename.equals(this.tablename);
-    this.tablename = tablename;
-  }
-
-  /**
-   * Drop this instance's region name byte array and instead
-   * hold a reference to the provided region name. This is not
-   * meant to be a general purpose setter - it's only used
-   * to collapse references to conserve memory.
-   */
-  void internEncodedRegionName(byte []encodedRegionName) {
-    // We should not use this as a setter - only to swap
-    // in a new reference to the same table name.
-    assert Bytes.equals(this.encodedRegionName, encodedRegionName);
-    this.encodedRegionName = encodedRegionName;
+    super(encodedRegionName, tablename, logSeqNum, nonceGroup, nonce);
   }
 
   @Override
@@ -486,13 +167,13 @@ public class HLogKey implements WritableComparable<HLogKey>, SequenceId {
     // encodes the length of encodedRegionName.
     // If < 0 we just read the version and the next vint is the length.
     // @see Bytes#readByteArray(DataInput)
-    this.scopes = null; // writable HLogKey does not contain scopes
+    setScopes(null); // writable HLogKey does not contain scopes
     int len = WritableUtils.readVInt(in);
     byte[] tablenameBytes = null;
     if (len < 0) {
       // what we just read was the version
       version = Version.fromCode(len);
-      // We only compress V2 of HLogkey.
+      // We only compress V2 of WALkey.
       // If compression is on, the length is handled by the dictionary
       if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
         len = WritableUtils.readVInt(in);
@@ -541,87 +222,4 @@ public class HLogKey implements WritableComparable<HLogKey>, SequenceId {
     // Do not need to read the clusters information as we are using protobufs from 0.95
   }
 
-  public WALKey.Builder getBuilder(WALCellCodec.ByteStringCompressor compressor)
-  throws IOException {
-    WALKey.Builder builder = WALKey.newBuilder();
-    if (compressionContext == null) {
-      builder.setEncodedRegionName(ByteStringer.wrap(this.encodedRegionName));
-      builder.setTableName(ByteStringer.wrap(this.tablename.getName()));
-    } else {
-      builder.setEncodedRegionName(compressor.compress(this.encodedRegionName,
-        compressionContext.regionDict));
-      builder.setTableName(compressor.compress(this.tablename.getName(),
-        compressionContext.tableDict));
-    }
-    builder.setLogSequenceNumber(this.logSeqNum);
-    builder.setWriteTime(writeTime);
-    if(this.origLogSeqNum > 0) {
-      builder.setOrigSequenceNumber(this.origLogSeqNum);
-    }
-    if (this.nonce != HConstants.NO_NONCE) {
-      builder.setNonce(nonce);
-    }
-    if (this.nonceGroup != HConstants.NO_NONCE) {
-      builder.setNonceGroup(nonceGroup);
-    }
-    HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
-    for (UUID clusterId : clusterIds) {
-      uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
-      uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
-      builder.addClusterIds(uuidBuilder.build());
-    }
-    if (scopes != null) {
-      for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
-        ByteString family = (compressionContext == null) ? ByteStringer.wrap(e.getKey())
-            : compressor.compress(e.getKey(), compressionContext.familyDict);
-        builder.addScopes(FamilyScope.newBuilder()
-            .setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
-      }
-    }
-    return builder;
-  }
-
-  public void readFieldsFromPb(
-      WALKey walKey, WALCellCodec.ByteStringUncompressor uncompressor) throws IOException {
-    if (this.compressionContext != null) {
-      this.encodedRegionName = uncompressor.uncompress(
-          walKey.getEncodedRegionName(), compressionContext.regionDict);
-      byte[] tablenameBytes = uncompressor.uncompress(
-          walKey.getTableName(), compressionContext.tableDict);
-      this.tablename = TableName.valueOf(tablenameBytes);
-    } else {
-      this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
-      this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
-    }
-    clusterIds.clear();
-    if (walKey.hasClusterId()) {
-      //When we are reading the older log (0.95.1 release)
-      //This is definitely the originating cluster
-      clusterIds.add(new UUID(walKey.getClusterId().getMostSigBits(), walKey.getClusterId()
-          .getLeastSigBits()));
-    }
-    for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
-      clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
-    }
-    if (walKey.hasNonceGroup()) {
-      this.nonceGroup = walKey.getNonceGroup();
-    }
-    if (walKey.hasNonce()) {
-      this.nonce = walKey.getNonce();
-    }
-    this.scopes = null;
-    if (walKey.getScopesCount() > 0) {
-      this.scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
-      for (FamilyScope scope : walKey.getScopesList()) {
-        byte[] family = (compressionContext == null) ? scope.getFamily().toByteArray() :
-          uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
-        this.scopes.put(family, scope.getScopeType().getNumber());
-      }
-    }
-    this.logSeqNum = walKey.getLogSequenceNumber();
-    this.writeTime = walKey.getWriteTime();
-    if(walKey.hasOrigSequenceNumber()) {
-      this.origLogSeqNum = walKey.getOrigSequenceNumber();
-    }
-  }
 }

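Taken together, this hunk leaves HLogKey as a thin, deprecated Writable shim over the new org.apache.hadoop.hbase.wal.WALKey: every constructor now just delegates via super(...). A minimal migration sketch follows, assuming hypothetical table and region names; the seven-argument constructor used here is the same one the super() calls above delegate to.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.UUID;

    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.wal.WALKey;

    public class WALKeyMigrationSketch {
      public static void main(String[] args) {
        // Placeholder identifiers, for illustration only.
        byte[] encodedRegionName = Bytes.toBytes("1588230740");
        TableName table = TableName.valueOf("exampleTable");
        List<UUID> clusterIds = new ArrayList<UUID>();
        clusterIds.add(UUID.randomUUID());

        // Before this commit (now deprecated):
        // HLogKey key = new HLogKey(encodedRegionName, table, 42L,
        //     System.currentTimeMillis(), clusterIds,
        //     HConstants.NO_NONCE, HConstants.NO_NONCE);

        // After: construct the superclass directly with the same arguments.
        WALKey key = new WALKey(encodedRegionName, table, 42L,
            System.currentTimeMillis(), clusterIds,
            HConstants.NO_NONCE, HConstants.NO_NONCE);
        System.out.println(key); // table/region/sequence, as toString() did on HLogKey
      }
    }
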
http://git-wip-us.apache.org/repos/asf/hbase/blob/8959828f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
index 48915cf..914ac47 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
@@ -23,7 +23,6 @@ import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
@@ -38,13 +37,11 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.regionserver.wal.HLog.Reader;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.wal.WALPrettyPrinter;
 import org.codehaus.jackson.map.ObjectMapper;
 
 /**
@@ -59,36 +56,18 @@ import org.codehaus.jackson.map.ObjectMapper;
  * 
  * It can also toggle output of values.
  * 
+ * @deprecated use the "hbase wal" command
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HLogPrettyPrinter {
-  private boolean outputValues;
-  private boolean outputJSON;
-  // The following enable filtering by sequence, region, and row, respectively
-  private long sequence;
-  private String region;
-  private String row;
-  // enable in order to output a single list of transactions from several files
-  private boolean persistentOutput;
-  private boolean firstTxn;
-  // useful for programatic capture of JSON output
-  private PrintStream out;
-  // for JSON encoding
-  private static final ObjectMapper MAPPER = new ObjectMapper();
+@Deprecated
+public class HLogPrettyPrinter extends WALPrettyPrinter {
 
   /**
    * Basic constructor that simply initializes values to reasonable defaults.
    */
   public HLogPrettyPrinter() {
-    outputValues = false;
-    outputJSON = false;
-    sequence = -1;
-    region = null;
-    row = null;
-    persistentOutput = false;
-    firstTxn = true;
-    out = System.out;
+    this(false, false, -1l, null, null, false, System.out);
   }
 
   /**
@@ -119,289 +98,11 @@ public class HLogPrettyPrinter {
   public HLogPrettyPrinter(boolean outputValues, boolean outputJSON,
       long sequence, String region, String row, boolean persistentOutput,
       PrintStream out) {
-    this.outputValues = outputValues;
-    this.outputJSON = outputJSON;
-    this.sequence = sequence;
-    this.region = region;
-    this.row = row;
-    this.persistentOutput = persistentOutput;
-    if (persistentOutput) {
-      beginPersistentOutput();
-    }
-    this.out = out;
-    this.firstTxn = true;
-  }
-
-  /**
-   * turns value output on
-   */
-  public void enableValues() {
-    outputValues = true;
-  }
-
-  /**
-   * turns value output off
-   */
-  public void disableValues() {
-    outputValues = false;
-  }
-
-  /**
-   * turns JSON output on
-   */
-  public void enableJSON() {
-    outputJSON = true;
-  }
-
-  /**
-   * turns JSON output off, and turns on "pretty strings" for human consumption
-   */
-  public void disableJSON() {
-    outputJSON = false;
-  }
-
-  /**
-   * sets the region by which output will be filtered
-   * 
-   * @param sequence
-   *          when nonnegative, serves as a filter; only log entries with this
-   *          sequence id will be printed
-   */
-  public void setSequenceFilter(long sequence) {
-    this.sequence = sequence;
-  }
-
-  /**
-   * sets the region by which output will be filtered
-   * 
-   * @param region
-   *          when not null, serves as a filter; only log entries from this
-   *          region will be printed
-   */
-  public void setRegionFilter(String region) {
-    this.region = region;
-  }
-
-  /**
-   * sets the region by which output will be filtered
-   * 
-   * @param row
-   *          when not null, serves as a filter; only log entries from this row
-   *          will be printed
-   */
-  public void setRowFilter(String row) {
-    this.row = row;
-  }
-
-  /**
-   * enables output as a single, persistent list. at present, only relevant in
-   * the case of JSON output.
-   */
-  public void beginPersistentOutput() {
-    if (persistentOutput)
-      return;
-    persistentOutput = true;
-    firstTxn = true;
-    if (outputJSON)
-      out.print("[");
-  }
-
-  /**
-   * ends output of a single, persistent list. at present, only relevant in the
-   * case of JSON output.
-   */
-  public void endPersistentOutput() {
-    if (!persistentOutput)
-      return;
-    persistentOutput = false;
-    if (outputJSON)
-      out.print("]");
-  }
-
-  /**
-   * reads a log file and outputs its contents, one transaction at a time, as
-   * specified by the currently configured options
-   * 
-   * @param conf
-   *          the HBase configuration relevant to this log file
-   * @param p
-   *          the path of the log file to be read
-   * @throws IOException
-   *           may be unable to access the configured filesystem or requested
-   *           file.
-   */
-  public void processFile(final Configuration conf, final Path p)
-      throws IOException {
-    FileSystem fs = FileSystem.get(conf);
-    if (!fs.exists(p)) {
-      throw new FileNotFoundException(p.toString());
-    }
-    if (!fs.isFile(p)) {
-      throw new IOException(p + " is not a file");
-    }
-    if (outputJSON && !persistentOutput) {
-      out.print("[");
-      firstTxn = true;
-    }
-    Reader log = HLogFactory.createReader(fs, p, conf);
-    try {
-      FSHLog.Entry entry;
-      while ((entry = log.next()) != null) {
-        HLogKey key = entry.getKey();
-        WALEdit edit = entry.getEdit();
-        // begin building a transaction structure
-        Map<String, Object> txn = key.toStringMap();
-        long writeTime = key.getWriteTime();
-        // check output filters
-        if (sequence >= 0 && ((Long) txn.get("sequence")) != sequence)
-          continue;
-        if (region != null && !((String) txn.get("region")).equals(region))
-          continue;
-        // initialize list into which we will store atomic actions
-        List<Map> actions = new ArrayList<Map>();
-        for (Cell cell : edit.getCells()) {
-          // add atomic operation to txn
-          Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell));
-          if (outputValues) op.put("value", Bytes.toStringBinary(cell.getValue()));
-          // check row output filter
-          if (row == null || ((String) op.get("row")).equals(row))
-            actions.add(op);
-        }
-        if (actions.size() == 0)
-          continue;
-        txn.put("actions", actions);
-        if (outputJSON) {
-          // JSON output is a straightforward "toString" on the txn object
-          if (firstTxn)
-            firstTxn = false;
-          else
-            out.print(",");
-          // encode and print JSON
-          out.print(MAPPER.writeValueAsString(txn));
-        } else {
-          // Pretty output, complete with indentation by atomic action
-          out.println("Sequence " + txn.get("sequence") + " "
-              + "from region " + txn.get("region") + " " + "in table "
-              + txn.get("table") + " at write timestamp: " + new Date(writeTime));
-          for (int i = 0; i < actions.size(); i++) {
-            Map op = actions.get(i);
-            out.println("  Action:");
-            out.println("    row: " + op.get("row"));
-            out.println("    column: " + op.get("family") + ":"
-                + op.get("qualifier"));
-            out.println("    timestamp: "
-                + (new Date((Long) op.get("timestamp"))));
-            if(op.get("tag") != null) {
-              out.println("    tag: " + op.get("tag"));
-            }
-            if (outputValues)
-              out.println("    value: " + op.get("value"));
-          }
-        }
-      }
-    } finally {
-      log.close();
-    }
-    if (outputJSON && !persistentOutput) {
-      out.print("]");
-    }
-  }
-
-  private static Map<String, Object> toStringMap(Cell cell) {
-    Map<String, Object> stringMap = new HashMap<String, Object>();
-    stringMap.put("row",
-        Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
-    stringMap.put("family", Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(),
-                cell.getFamilyLength()));
-    stringMap.put("qualifier",
-        Bytes.toStringBinary(cell.getQualifierArray(), cell.getQualifierOffset(),
-            cell.getQualifierLength()));
-    stringMap.put("timestamp", cell.getTimestamp());
-    stringMap.put("vlen", cell.getValueLength());
-    if (cell.getTagsLength() > 0) {
-      List<String> tagsString = new ArrayList<String>();
-      Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
-          cell.getTagsLength());
-      while (tagsIterator.hasNext()) {
-        Tag tag = tagsIterator.next();
-        tagsString.add((tag.getType()) + ":"
-            + Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()));
-      }
-      stringMap.put("tag", tagsString);
-    }
-    return stringMap;
+    super(outputValues, outputJSON, sequence, region, row, persistentOutput, out);
   }
 
   public static void main(String[] args) throws IOException {
-    run(args);
+    WALPrettyPrinter.main(args);
   }
 
-  /**
-   * Pass one or more log file names and formatting options and it will dump out
-   * a text version of the contents on <code>stdout</code>.
-   * 
-   * @param args
-   *          Command line arguments
-   * @throws IOException
-   *           Thrown upon file system errors etc.
-   * @throws ParseException
-   *           Thrown if command-line parsing fails.
-   */
-  public static void run(String[] args) throws IOException {
-    // create options
-    Options options = new Options();
-    options.addOption("h", "help", false, "Output help message");
-    options.addOption("j", "json", false, "Output JSON");
-    options.addOption("p", "printvals", false, "Print values");
-    options.addOption("r", "region", true,
-        "Region to filter by. Pass region name; e.g. 'hbase:meta,,1'");
-    options.addOption("s", "sequence", true,
-        "Sequence to filter by. Pass sequence number.");
-    options.addOption("w", "row", true, "Row to filter by. Pass row name.");
-
-    HLogPrettyPrinter printer = new HLogPrettyPrinter();
-    CommandLineParser parser = new PosixParser();
-    List files = null;
-    try {
-      CommandLine cmd = parser.parse(options, args);
-      files = cmd.getArgList();
-      if (files.size() == 0 || cmd.hasOption("h")) {
-        HelpFormatter formatter = new HelpFormatter();
-        formatter.printHelp("HLog <filename...>", options, true);
-        System.exit(-1);
-      }
-      // configure the pretty printer using command line options
-      if (cmd.hasOption("p"))
-        printer.enableValues();
-      if (cmd.hasOption("j"))
-        printer.enableJSON();
-      if (cmd.hasOption("r"))
-        printer.setRegionFilter(cmd.getOptionValue("r"));
-      if (cmd.hasOption("s"))
-        printer.setSequenceFilter(Long.parseLong(cmd.getOptionValue("s")));
-      if (cmd.hasOption("w"))
-        printer.setRowFilter(cmd.getOptionValue("w"));
-    } catch (ParseException e) {
-      e.printStackTrace();
-      HelpFormatter formatter = new HelpFormatter();
-      formatter.printHelp("HFile filename(s) ", options, true);
-      System.exit(-1);
-    }
-    // get configuration, file system, and process the given files
-    Configuration conf = HBaseConfiguration.create();
-    FSUtils.setFsDefault(conf, FSUtils.getRootDir(conf));
-
-    // begin output
-    printer.beginPersistentOutput();
-    for (Object f : files) {
-      Path file = new Path((String) f);
-      FileSystem fs = file.getFileSystem(conf);
-      if (!fs.exists(file)) {
-        System.err.println("ERROR, file doesnt exist: " + file);
-        return;
-      }
-      printer.processFile(conf, file);
-    }
-    printer.endPersistentOutput();
-  }
 }


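HLogPrettyPrinter is likewise reduced to a deprecated shim: both the constructor and main() now forward to org.apache.hadoop.hbase.wal.WALPrettyPrinter. A usage sketch, assuming a placeholder WAL path; the flags are the ones the removed run() method parsed (-j JSON output, -p print values, -r/-s/-w filter by region/sequence/row).

    import org.apache.hadoop.hbase.wal.WALPrettyPrinter;

    public class WALDumpSketch {
      public static void main(String[] args) throws Exception {
        // Dump a WAL file as JSON; the path below is a placeholder.
        WALPrettyPrinter.main(new String[] { "-j", "/hbase/WALs/example-wal" });
      }
    }

From the shell, the @deprecated note above points at the equivalent entry point: hbase wal -j /hbase/WALs/example-wal.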