hbase-commits mailing list archives

From git-site-r...@apache.org
Subject [34/51] [partial] hbase-site git commit: Published site at .
Date Thu, 28 Sep 2017 15:14:42 GMT
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/67deb422/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 38d76b6..5478df1 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -90,115 +90,115 @@
 <span class="sourceLineNo">082</span>import org.apache.hadoop.io.SequenceFile;<a name="line.82"></a>
 <span class="sourceLineNo">083</span>import org.apache.hadoop.io.Text;<a name="line.83"></a>
 <span class="sourceLineNo">084</span>import org.apache.hadoop.mapreduce.Job;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.mapreduce.OutputFormat;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.mapreduce.RecordWriter;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.91"></a>
-<span class="sourceLineNo">092</span><a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>/**<a name="line.95"></a>
-<span class="sourceLineNo">096</span> * Writes HFiles. Passed Cells must arrive in order.<a name="line.96"></a>
-<span class="sourceLineNo">097</span> * Writes current time as the sequence id for the file. Sets the major compacted<a name="line.97"></a>
-<span class="sourceLineNo">098</span> * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * all HFiles being written.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> * &lt;p&gt;<a name="line.100"></a>
-<span class="sourceLineNo">101</span> * Using this class as part of a MapReduce job is best done<a name="line.101"></a>
-<span class="sourceLineNo">102</span> * using {@link #configureIncrementalLoad(Job, TableDescriptor, RegionLocator)}.<a name="line.102"></a>
-<span class="sourceLineNo">103</span> */<a name="line.103"></a>
-<span class="sourceLineNo">104</span>@InterfaceAudience.Public<a name="line.104"></a>
-<span class="sourceLineNo">105</span>public class HFileOutputFormat2<a name="line.105"></a>
-<span class="sourceLineNo">106</span>    extends FileOutputFormat&lt;ImmutableBytesWritable, Cell&gt; {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final Log LOG = LogFactory.getLog(HFileOutputFormat2.class);<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  static class TableInfo {<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    private TableDescriptor tableDesctiptor;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    private RegionLocator regionLocator;<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>    public TableInfo(TableDescriptor tableDesctiptor, RegionLocator regionLocator) {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      this.tableDesctiptor = tableDesctiptor;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      this.regionLocator = regionLocator;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>    /**<a name="line.117"></a>
-<span class="sourceLineNo">118</span>     * The modification for the returned HTD doesn't affect the inner TD.<a name="line.118"></a>
-<span class="sourceLineNo">119</span>     * @return A clone of inner table descriptor<a name="line.119"></a>
-<span class="sourceLineNo">120</span>     * @deprecated use {@link #getTableDescriptor}<a name="line.120"></a>
-<span class="sourceLineNo">121</span>     */<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    @Deprecated<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    public HTableDescriptor getHTableDescriptor() {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      return new HTableDescriptor(tableDesctiptor);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>    public TableDescriptor getTableDescriptor() {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      return tableDesctiptor;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>    public RegionLocator getRegionLocator() {<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      return regionLocator;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    }<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  }<a name="line.134"></a>
-<span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>  protected static final byte[] tableSeparator = ";".getBytes(StandardCharsets.UTF_8);<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>  protected static byte[] combineTableNameSuffix(byte[] tableName,<a name="line.138"></a>
-<span class="sourceLineNo">139</span>                                       byte[] suffix ) {<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    return Bytes.add(tableName, tableSeparator, suffix);<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  }<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>  // The following constants are private since these are used by<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  // HFileOutputFormat2 to internally transfer data between job setup and<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  // reducer run using conf.<a name="line.145"></a>
-<span class="sourceLineNo">146</span>  // These should not be changed by the client.<a name="line.146"></a>
-<span class="sourceLineNo">147</span>  static final String COMPRESSION_FAMILIES_CONF_KEY =<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      "hbase.hfileoutputformat.families.compression";<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  static final String BLOOM_TYPE_FAMILIES_CONF_KEY =<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      "hbase.hfileoutputformat.families.bloomtype";<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  static final String BLOCK_SIZE_FAMILIES_CONF_KEY =<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      "hbase.mapreduce.hfileoutputformat.blocksize";<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  static final String DATABLOCK_ENCODING_FAMILIES_CONF_KEY =<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      "hbase.mapreduce.hfileoutputformat.families.datablock.encoding";<a name="line.154"></a>
-<span class="sourceLineNo">155</span><a name="line.155"></a>
-<span class="sourceLineNo">156</span>  // This constant is public since the client can modify this when setting<a name="line.156"></a>
-<span class="sourceLineNo">157</span>  // up their conf object and thus refer to this symbol.<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  // It is present for backwards compatibility reasons. Use it only to<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  // override the auto-detection of datablock encoding.<a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static final String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY =<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      "hbase.mapreduce.hfileoutputformat.datablock.encoding";<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>  /**<a name="line.163"></a>
-<span class="sourceLineNo">164</span>   * Keep locality while generating HFiles for bulkload. See HBASE-12596<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   */<a name="line.165"></a>
-<span class="sourceLineNo">166</span>  public static final String LOCALITY_SENSITIVE_CONF_KEY =<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      "hbase.bulkload.locality.sensitive.enabled";<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private static final boolean DEFAULT_LOCALITY_SENSITIVE = true;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>  static final String OUTPUT_TABLE_NAME_CONF_KEY =<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      "hbase.mapreduce.hfileoutputformat.table.name";<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  static final String MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY =<a name="line.171"></a>
-<span class="sourceLineNo">172</span>          "hbase.mapreduce.use.multi.table.hfileoutputformat";<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  public static final String STORAGE_POLICY_PROPERTY = "hbase.hstore.storagepolicy";<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  public static final String STORAGE_POLICY_PROPERTY_CF_PREFIX = STORAGE_POLICY_PROPERTY + ".";<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  @Override<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  public RecordWriter&lt;ImmutableBytesWritable, Cell&gt; getRecordWriter(<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      final TaskAttemptContext context) throws IOException, InterruptedException {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    return createRecordWriter(context);<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  }<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  protected static byte[] getTableNameSuffixedWithFamily(byte[] tableName, byte[] family) {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return combineTableNameSuffix(tableName, family);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  }<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  static &lt;V extends Cell&gt; RecordWriter&lt;ImmutableBytesWritable, V&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      createRecordWriter(final TaskAttemptContext context)<a name="line.188"></a>
-<span class="sourceLineNo">189</span>          throws IOException {<a name="line.189"></a>
-<span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>    // Get the path of the temporary output file<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    final Path outputPath = FileOutputFormat.getOutputPath(context);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    final Path outputDir = new FileOutputCommitter(outputPath, context).getWorkPath();<a name="line.193"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.mapreduce.OutputCommitter;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.mapreduce.OutputFormat;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.mapreduce.RecordWriter;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.92"></a>
+<span class="sourceLineNo">093</span><a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
+<span class="sourceLineNo">095</span><a name="line.95"></a>
+<span class="sourceLineNo">096</span>/**<a name="line.96"></a>
+<span class="sourceLineNo">097</span> * Writes HFiles. Passed Cells must arrive in order.<a name="line.97"></a>
+<span class="sourceLineNo">098</span> * Writes current time as the sequence id for the file. Sets the major compacted<a name="line.98"></a>
+<span class="sourceLineNo">099</span> * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll<a name="line.99"></a>
+<span class="sourceLineNo">100</span> * all HFiles being written.<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * &lt;p&gt;<a name="line.101"></a>
+<span class="sourceLineNo">102</span> * Using this class as part of a MapReduce job is best done<a name="line.102"></a>
+<span class="sourceLineNo">103</span> * using {@link #configureIncrementalLoad(Job, TableDescriptor, RegionLocator)}.<a name="line.103"></a>
+<span class="sourceLineNo">104</span> */<a name="line.104"></a>
+<span class="sourceLineNo">105</span>@InterfaceAudience.Public<a name="line.105"></a>
+<span class="sourceLineNo">106</span>public class HFileOutputFormat2<a name="line.106"></a>
+<span class="sourceLineNo">107</span>    extends FileOutputFormat&lt;ImmutableBytesWritable, Cell&gt; {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final Log LOG = LogFactory.getLog(HFileOutputFormat2.class);<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  static class TableInfo {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    private TableDescriptor tableDesctiptor;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    private RegionLocator regionLocator;<a name="line.111"></a>
+<span class="sourceLineNo">112</span><a name="line.112"></a>
+<span class="sourceLineNo">113</span>    public TableInfo(TableDescriptor tableDesctiptor, RegionLocator regionLocator) {<a name="line.113"></a>
+<span class="sourceLineNo">114</span>      this.tableDesctiptor = tableDesctiptor;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      this.regionLocator = regionLocator;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>    /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>     * The modification for the returned HTD doesn't affect the inner TD.<a name="line.119"></a>
+<span class="sourceLineNo">120</span>     * @return A clone of inner table descriptor<a name="line.120"></a>
+<span class="sourceLineNo">121</span>     * @deprecated use {@link #getTableDescriptor}<a name="line.121"></a>
+<span class="sourceLineNo">122</span>     */<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    @Deprecated<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    public HTableDescriptor getHTableDescriptor() {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      return new HTableDescriptor(tableDesctiptor);<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>    public TableDescriptor getTableDescriptor() {<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      return tableDesctiptor;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>    public RegionLocator getRegionLocator() {<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      return regionLocator;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    }<a name="line.134"></a>
+<span class="sourceLineNo">135</span>  }<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>  protected static final byte[] tableSeparator = ";".getBytes(StandardCharsets.UTF_8);<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  protected static byte[] combineTableNameSuffix(byte[] tableName,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>                                       byte[] suffix ) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    return Bytes.add(tableName, tableSeparator, suffix);<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>  // The following constants are private since these are used by<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  // HFileOutputFormat2 to internally transfer data between job setup and<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  // reducer run using conf.<a name="line.146"></a>
+<span class="sourceLineNo">147</span>  // These should not be changed by the client.<a name="line.147"></a>
+<span class="sourceLineNo">148</span>  static final String COMPRESSION_FAMILIES_CONF_KEY =<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      "hbase.hfileoutputformat.families.compression";<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  static final String BLOOM_TYPE_FAMILIES_CONF_KEY =<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      "hbase.hfileoutputformat.families.bloomtype";<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  static final String BLOCK_SIZE_FAMILIES_CONF_KEY =<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      "hbase.mapreduce.hfileoutputformat.blocksize";<a name="line.153"></a>
+<span class="sourceLineNo">154</span>  static final String DATABLOCK_ENCODING_FAMILIES_CONF_KEY =<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      "hbase.mapreduce.hfileoutputformat.families.datablock.encoding";<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  // This constant is public since the client can modify this when setting<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  // up their conf object and thus refer to this symbol.<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  // It is present for backwards compatibility reasons. Use it only to<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  // override the auto-detection of datablock encoding.<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  public static final String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY =<a name="line.161"></a>
+<span class="sourceLineNo">162</span>      "hbase.mapreduce.hfileoutputformat.datablock.encoding";<a name="line.162"></a>
+<span class="sourceLineNo">163</span><a name="line.163"></a>
+<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
+<span class="sourceLineNo">165</span>   * Keep locality while generating HFiles for bulkload. See HBASE-12596<a name="line.165"></a>
+<span class="sourceLineNo">166</span>   */<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  public static final String LOCALITY_SENSITIVE_CONF_KEY =<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      "hbase.bulkload.locality.sensitive.enabled";<a name="line.168"></a>
+<span class="sourceLineNo">169</span>  private static final boolean DEFAULT_LOCALITY_SENSITIVE = true;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  static final String OUTPUT_TABLE_NAME_CONF_KEY =<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      "hbase.mapreduce.hfileoutputformat.table.name";<a name="line.171"></a>
+<span class="sourceLineNo">172</span>  static final String MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY =<a name="line.172"></a>
+<span class="sourceLineNo">173</span>          "hbase.mapreduce.use.multi.table.hfileoutputformat";<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  public static final String STORAGE_POLICY_PROPERTY = "hbase.hstore.storagepolicy";<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  public static final String STORAGE_POLICY_PROPERTY_CF_PREFIX = STORAGE_POLICY_PROPERTY + ".";<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  @Override<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  public RecordWriter&lt;ImmutableBytesWritable, Cell&gt; getRecordWriter(<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      final TaskAttemptContext context) throws IOException, InterruptedException {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    return createRecordWriter(context, this.getOutputCommitter(context));<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>  protected static byte[] getTableNameSuffixedWithFamily(byte[] tableName, byte[] family) {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    return combineTableNameSuffix(tableName, family);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  static &lt;V extends Cell&gt; RecordWriter&lt;ImmutableBytesWritable, V&gt;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      createRecordWriter(final TaskAttemptContext context, final OutputCommitter committer)<a name="line.189"></a>
+<span class="sourceLineNo">190</span>          throws IOException {<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>    // Get the path of the temporary output file<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    final Path outputDir = ((FileOutputCommitter)committer).getWorkPath();<a name="line.193"></a>
 <span class="sourceLineNo">194</span>    final Configuration conf = context.getConfiguration();<a name="line.194"></a>
 <span class="sourceLineNo">195</span>    final boolean writeMultipleTables = conf.getBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, false) ;<a name="line.195"></a>
 <span class="sourceLineNo">196</span>    final String writeTableNames = conf.get(OUTPUT_TABLE_NAME_CONF_KEY);<a name="line.196"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/67deb422/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html
index bb1b9b6..2ff9932 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.html
@@ -26,195 +26,205 @@
 <span class="sourceLineNo">018</span><a name="line.18"></a>
 <span class="sourceLineNo">019</span>package org.apache.hadoop.hbase.mapreduce;<a name="line.19"></a>
 <span class="sourceLineNo">020</span><a name="line.20"></a>
-<span class="sourceLineNo">021</span>import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import org.apache.hadoop.fs.Path;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import org.apache.hadoop.hbase.HTableDescriptor;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.client.Result;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.client.metrics.ScanMetrics;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.io.Writable;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.mapreduce.Job;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.36"></a>
-<span class="sourceLineNo">037</span><a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.io.DataInput;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.io.DataOutput;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import java.io.IOException;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import java.lang.reflect.Method;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import java.util.ArrayList;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import java.util.List;<a name="line.43"></a>
+<span class="sourceLineNo">021</span>import java.io.DataInput;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.DataOutput;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.lang.reflect.Method;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.util.ArrayList;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import java.util.List;<a name="line.26"></a>
+<span class="sourceLineNo">027</span><a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.fs.Path;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HTableDescriptor;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.Result;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.metrics.ScanMetrics;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.io.Writable;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.mapreduce.Job;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.43"></a>
 <span class="sourceLineNo">044</span><a name="line.44"></a>
-<span class="sourceLineNo">045</span>/**<a name="line.45"></a>
-<span class="sourceLineNo">046</span> * TableSnapshotInputFormat allows a MapReduce job to run over a table snapshot. The job<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * bypasses HBase servers, and directly accesses the underlying files (hfile, recovered edits,<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * wals, etc) directly to provide maximum performance. The snapshot is not required to be<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * restored to the live cluster or cloned. This also allows to run the mapreduce job from an<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * online or offline hbase cluster. The snapshot files can be exported by using the<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * {@link org.apache.hadoop.hbase.snapshot.ExportSnapshot} tool, to a pure-hdfs cluster,<a name="line.51"></a>
-<span class="sourceLineNo">052</span> * and this InputFormat can be used to run the mapreduce job directly over the snapshot files.<a name="line.52"></a>
-<span class="sourceLineNo">053</span> * The snapshot should not be deleted while there are jobs reading from snapshot files.<a name="line.53"></a>
-<span class="sourceLineNo">054</span> * &lt;p&gt;<a name="line.54"></a>
-<span class="sourceLineNo">055</span> * Usage is similar to TableInputFormat, and<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * {@link TableMapReduceUtil#initTableSnapshotMapperJob(String, Scan, Class, Class, Class, Job, boolean, Path)}<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * can be used to configure the job.<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * &lt;pre&gt;{@code<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * Job job = new Job(conf);<a name="line.59"></a>
-<span class="sourceLineNo">060</span> * Scan scan = new Scan();<a name="line.60"></a>
-<span class="sourceLineNo">061</span> * TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,<a name="line.61"></a>
-<span class="sourceLineNo">062</span> *      scan, MyTableMapper.class, MyMapKeyOutput.class,<a name="line.62"></a>
-<span class="sourceLineNo">063</span> *      MyMapOutputValueWritable.class, job, true);<a name="line.63"></a>
-<span class="sourceLineNo">064</span> * }<a name="line.64"></a>
-<span class="sourceLineNo">065</span> * &lt;/pre&gt;<a name="line.65"></a>
-<span class="sourceLineNo">066</span> * &lt;p&gt;<a name="line.66"></a>
-<span class="sourceLineNo">067</span> * Internally, this input format restores the snapshot into the given tmp directory. Similar to<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * {@link TableInputFormat} an InputSplit is created per region. The region is opened for reading<a name="line.68"></a>
-<span class="sourceLineNo">069</span> * from each RecordReader. An internal RegionScanner is used to execute the<a name="line.69"></a>
-<span class="sourceLineNo">070</span> * {@link org.apache.hadoop.hbase.CellScanner} obtained from the user.<a name="line.70"></a>
-<span class="sourceLineNo">071</span> * &lt;p&gt;<a name="line.71"></a>
-<span class="sourceLineNo">072</span> * HBase owns all the data and snapshot files on the filesystem. Only the 'hbase' user can read from<a name="line.72"></a>
-<span class="sourceLineNo">073</span> * snapshot files and data files.<a name="line.73"></a>
-<span class="sourceLineNo">074</span> * To read from snapshot files directly from the file system, the user who is running the MR job<a name="line.74"></a>
-<span class="sourceLineNo">075</span> * must have sufficient permissions to access snapshot and reference files.<a name="line.75"></a>
-<span class="sourceLineNo">076</span> * This means that to run mapreduce over snapshot files, the MR job has to be run as the HBase<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * user or the user must have group or other privileges in the filesystem (See HBASE-8369).<a name="line.77"></a>
-<span class="sourceLineNo">078</span> * Note that, given other users access to read from snapshot/data files will completely circumvent<a name="line.78"></a>
-<span class="sourceLineNo">079</span> * the access control enforced by HBase.<a name="line.79"></a>
-<span class="sourceLineNo">080</span> * @see org.apache.hadoop.hbase.client.TableSnapshotScanner<a name="line.80"></a>
-<span class="sourceLineNo">081</span> */<a name="line.81"></a>
-<span class="sourceLineNo">082</span>@InterfaceAudience.Public<a name="line.82"></a>
-<span class="sourceLineNo">083</span>public class TableSnapshotInputFormat extends InputFormat&lt;ImmutableBytesWritable, Result&gt; {<a name="line.83"></a>
-<span class="sourceLineNo">084</span><a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public static class TableSnapshotRegionSplit extends InputSplit implements Writable {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    private TableSnapshotInputFormatImpl.InputSplit delegate;<a name="line.86"></a>
-<span class="sourceLineNo">087</span><a name="line.87"></a>
-<span class="sourceLineNo">088</span>    // constructor for mapreduce framework / Writable<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    public TableSnapshotRegionSplit() {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>      this.delegate = new TableSnapshotInputFormatImpl.InputSplit();<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    }<a name="line.91"></a>
-<span class="sourceLineNo">092</span><a name="line.92"></a>
-<span class="sourceLineNo">093</span>    public TableSnapshotRegionSplit(TableSnapshotInputFormatImpl.InputSplit delegate) {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>      this.delegate = delegate;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    }<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>    public TableSnapshotRegionSplit(HTableDescriptor htd, HRegionInfo regionInfo,<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        List&lt;String&gt; locations, Scan scan, Path restoreDir) {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      this.delegate =<a name="line.99"></a>
-<span class="sourceLineNo">100</span>          new TableSnapshotInputFormatImpl.InputSplit(htd, regionInfo, locations, scan, restoreDir);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    }<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>    @Override<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    public long getLength() throws IOException, InterruptedException {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      return delegate.getLength();<a name="line.105"></a>
-<span class="sourceLineNo">106</span>    }<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>    @Override<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    public String[] getLocations() throws IOException, InterruptedException {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      return delegate.getLocations();<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>    @Override<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    public void write(DataOutput out) throws IOException {<a name="line.114"></a>
-<span class="sourceLineNo">115</span>      delegate.write(out);<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    }<a name="line.116"></a>
-<span class="sourceLineNo">117</span><a name="line.117"></a>
-<span class="sourceLineNo">118</span>    @Override<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    public void readFields(DataInput in) throws IOException {<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      delegate.readFields(in);<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    }<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>    public HRegionInfo getRegionInfo() {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      return delegate.getRegionInfo();<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  }<a name="line.127"></a>
-<span class="sourceLineNo">128</span><a name="line.128"></a>
-<span class="sourceLineNo">129</span>  @VisibleForTesting<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  static class TableSnapshotRegionRecordReader extends<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      RecordReader&lt;ImmutableBytesWritable, Result&gt; {<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    private TableSnapshotInputFormatImpl.RecordReader delegate =<a name="line.132"></a>
-<span class="sourceLineNo">133</span>      new TableSnapshotInputFormatImpl.RecordReader();<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    private TaskAttemptContext context;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    private Method getCounter;<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>    @Override<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException,<a name="line.138"></a>
-<span class="sourceLineNo">139</span>        InterruptedException {<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      this.context = context;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      getCounter = TableRecordReaderImpl.retrieveGetCounterWithStringsParams(context);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      delegate.initialize(<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        ((TableSnapshotRegionSplit) split).delegate,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        context.getConfiguration());<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    }<a name="line.145"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;<a name="line.45"></a>
+<span class="sourceLineNo">046</span><a name="line.46"></a>
+<span class="sourceLineNo">047</span>/**<a name="line.47"></a>
+<span class="sourceLineNo">048</span> * TableSnapshotInputFormat allows a MapReduce job to run over a table snapshot. The job<a name="line.48"></a>
+<span class="sourceLineNo">049</span> * bypasses HBase servers, and directly accesses the underlying files (hfile, recovered edits,<a name="line.49"></a>
+<span class="sourceLineNo">050</span> * wals, etc) directly to provide maximum performance. The snapshot is not required to be<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * restored to the live cluster or cloned. This also allows to run the mapreduce job from an<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * online or offline hbase cluster. The snapshot files can be exported by using the<a name="line.52"></a>
+<span class="sourceLineNo">053</span> * {@link org.apache.hadoop.hbase.snapshot.ExportSnapshot} tool, to a pure-hdfs cluster,<a name="line.53"></a>
+<span class="sourceLineNo">054</span> * and this InputFormat can be used to run the mapreduce job directly over the snapshot files.<a name="line.54"></a>
+<span class="sourceLineNo">055</span> * The snapshot should not be deleted while there are jobs reading from snapshot files.<a name="line.55"></a>
+<span class="sourceLineNo">056</span> * &lt;p&gt;<a name="line.56"></a>
+<span class="sourceLineNo">057</span> * Usage is similar to TableInputFormat, and<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * {@link TableMapReduceUtil#initTableSnapshotMapperJob(String, Scan, Class, Class, Class, Job, boolean, Path)}<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * can be used to configure the job.<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * &lt;pre&gt;{@code<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * Job job = new Job(conf);<a name="line.61"></a>
+<span class="sourceLineNo">062</span> * Scan scan = new Scan();<a name="line.62"></a>
+<span class="sourceLineNo">063</span> * TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,<a name="line.63"></a>
+<span class="sourceLineNo">064</span> *      scan, MyTableMapper.class, MyMapKeyOutput.class,<a name="line.64"></a>
+<span class="sourceLineNo">065</span> *      MyMapOutputValueWritable.class, job, true);<a name="line.65"></a>
+<span class="sourceLineNo">066</span> * }<a name="line.66"></a>
+<span class="sourceLineNo">067</span> * &lt;/pre&gt;<a name="line.67"></a>
+<span class="sourceLineNo">068</span> * &lt;p&gt;<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * Internally, this input format restores the snapshot into the given tmp directory. Similar to<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * {@link TableInputFormat} an InputSplit is created per region. The region is opened for reading<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * from each RecordReader. An internal RegionScanner is used to execute the<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * {@link org.apache.hadoop.hbase.CellScanner} obtained from the user.<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * &lt;p&gt;<a name="line.73"></a>
+<span class="sourceLineNo">074</span> * HBase owns all the data and snapshot files on the filesystem. Only the 'hbase' user can read from<a name="line.74"></a>
+<span class="sourceLineNo">075</span> * snapshot files and data files.<a name="line.75"></a>
+<span class="sourceLineNo">076</span> * To read from snapshot files directly from the file system, the user who is running the MR job<a name="line.76"></a>
+<span class="sourceLineNo">077</span> * must have sufficient permissions to access snapshot and reference files.<a name="line.77"></a>
+<span class="sourceLineNo">078</span> * This means that to run mapreduce over snapshot files, the MR job has to be run as the HBase<a name="line.78"></a>
+<span class="sourceLineNo">079</span> * user or the user must have group or other privileges in the filesystem (See HBASE-8369).<a name="line.79"></a>
+<span class="sourceLineNo">080</span> * Note that, given other users access to read from snapshot/data files will completely circumvent<a name="line.80"></a>
+<span class="sourceLineNo">081</span> * the access control enforced by HBase.<a name="line.81"></a>
+<span class="sourceLineNo">082</span> * @see org.apache.hadoop.hbase.client.TableSnapshotScanner<a name="line.82"></a>
+<span class="sourceLineNo">083</span> */<a name="line.83"></a>
+<span class="sourceLineNo">084</span>@InterfaceAudience.Public<a name="line.84"></a>
+<span class="sourceLineNo">085</span>public class TableSnapshotInputFormat extends InputFormat&lt;ImmutableBytesWritable, Result&gt; {<a name="line.85"></a>
+<span class="sourceLineNo">086</span><a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public static class TableSnapshotRegionSplit extends InputSplit implements Writable {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    private TableSnapshotInputFormatImpl.InputSplit delegate;<a name="line.88"></a>
+<span class="sourceLineNo">089</span><a name="line.89"></a>
+<span class="sourceLineNo">090</span>    // constructor for mapreduce framework / Writable<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    public TableSnapshotRegionSplit() {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>      this.delegate = new TableSnapshotInputFormatImpl.InputSplit();<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    }<a name="line.93"></a>
+<span class="sourceLineNo">094</span><a name="line.94"></a>
+<span class="sourceLineNo">095</span>    public TableSnapshotRegionSplit(TableSnapshotInputFormatImpl.InputSplit delegate) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>      this.delegate = delegate;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>    public TableSnapshotRegionSplit(HTableDescriptor htd, HRegionInfo regionInfo,<a name="line.99"></a>
+<span class="sourceLineNo">100</span>        List&lt;String&gt; locations, Scan scan, Path restoreDir) {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      this.delegate =<a name="line.101"></a>
+<span class="sourceLineNo">102</span>          new TableSnapshotInputFormatImpl.InputSplit(htd, regionInfo, locations, scan, restoreDir);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    }<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>    @Override<a name="line.105"></a>
+<span class="sourceLineNo">106</span>    public long getLength() throws IOException, InterruptedException {<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      return delegate.getLength();<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    }<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>    @Override<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    public String[] getLocations() throws IOException, InterruptedException {<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      return delegate.getLocations();<a name="line.112"></a>
+<span class="sourceLineNo">113</span>    }<a name="line.113"></a>
+<span class="sourceLineNo">114</span><a name="line.114"></a>
+<span class="sourceLineNo">115</span>    @Override<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    public void write(DataOutput out) throws IOException {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      delegate.write(out);<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>    @Override<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    public void readFields(DataInput in) throws IOException {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>      delegate.readFields(in);<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    }<a name="line.123"></a>
+<span class="sourceLineNo">124</span><a name="line.124"></a>
+<span class="sourceLineNo">125</span>    /**<a name="line.125"></a>
+<span class="sourceLineNo">126</span>     * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0<a name="line.126"></a>
+<span class="sourceLineNo">127</span>     *             Use {@link #getRegion()}<a name="line.127"></a>
+<span class="sourceLineNo">128</span>     */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    @Deprecated<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    public HRegionInfo getRegionInfo() {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      return delegate.getRegionInfo();<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    }<a name="line.132"></a>
+<span class="sourceLineNo">133</span><a name="line.133"></a>
+<span class="sourceLineNo">134</span>    public RegionInfo getRegion() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      return delegate.getRegionInfo();<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  @VisibleForTesting<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  static class TableSnapshotRegionRecordReader extends<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      RecordReader&lt;ImmutableBytesWritable, Result&gt; {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    private TableSnapshotInputFormatImpl.RecordReader delegate =<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      new TableSnapshotInputFormatImpl.RecordReader();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    private TaskAttemptContext context;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    private Method getCounter;<a name="line.145"></a>
 <span class="sourceLineNo">146</span><a name="line.146"></a>
 <span class="sourceLineNo">147</span>    @Override<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    public boolean nextKeyValue() throws IOException, InterruptedException {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      boolean result = delegate.nextKeyValue();<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      if (result) {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        ScanMetrics scanMetrics = delegate.getScanner().getScanMetrics();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        if (scanMetrics != null &amp;&amp; context != null) {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>          TableRecordReaderImpl.updateCounters(scanMetrics, 0, getCounter, context, 0);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      }<a name="line.155"></a>
-<span class="sourceLineNo">156</span>      return result;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    }<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    @Override<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    public ImmutableBytesWritable getCurrentKey() throws IOException, InterruptedException {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      return delegate.getCurrentKey();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    }<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>    @Override<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    public Result getCurrentValue() throws IOException, InterruptedException {<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      return delegate.getCurrentValue();<a name="line.166"></a>
+<span class="sourceLineNo">148</span>    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException,<a name="line.148"></a>
+<span class="sourceLineNo">149</span>        InterruptedException {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      this.context = context;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      getCounter = TableRecordReaderImpl.retrieveGetCounterWithStringsParams(context);<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      delegate.initialize(<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        ((TableSnapshotRegionSplit) split).delegate,<a name="line.153"></a>
+<span class="sourceLineNo">154</span>        context.getConfiguration());<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    }<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>    @Override<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    public boolean nextKeyValue() throws IOException, InterruptedException {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      boolean result = delegate.nextKeyValue();<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      if (result) {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>        ScanMetrics scanMetrics = delegate.getScanner().getScanMetrics();<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        if (scanMetrics != null &amp;&amp; context != null) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>          TableRecordReaderImpl.updateCounters(scanMetrics, 0, getCounter, context, 0);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      return result;<a name="line.166"></a>
 <span class="sourceLineNo">167</span>    }<a name="line.167"></a>
 <span class="sourceLineNo">168</span><a name="line.168"></a>
 <span class="sourceLineNo">169</span>    @Override<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    public float getProgress() throws IOException, InterruptedException {<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      return delegate.getProgress();<a name="line.171"></a>
+<span class="sourceLineNo">170</span>    public ImmutableBytesWritable getCurrentKey() throws IOException, InterruptedException {<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      return delegate.getCurrentKey();<a name="line.171"></a>
 <span class="sourceLineNo">172</span>    }<a name="line.172"></a>
 <span class="sourceLineNo">173</span><a name="line.173"></a>
 <span class="sourceLineNo">174</span>    @Override<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    public void close() throws IOException {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>      delegate.close();<a name="line.176"></a>
+<span class="sourceLineNo">175</span>    public Result getCurrentValue() throws IOException, InterruptedException {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      return delegate.getCurrentValue();<a name="line.176"></a>
 <span class="sourceLineNo">177</span>    }<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  }<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>  @Override<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  public RecordReader&lt;ImmutableBytesWritable, Result&gt; createRecordReader(<a name="line.181"></a>
-<span class="sourceLineNo">182</span>      InputSplit split, TaskAttemptContext context) throws IOException {<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    return new TableSnapshotRegionRecordReader();<a name="line.183"></a>
-<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  @Override<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  public List&lt;InputSplit&gt; getSplits(JobContext job) throws IOException, InterruptedException {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    List&lt;InputSplit&gt; results = new ArrayList&lt;&gt;();<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    for (TableSnapshotInputFormatImpl.InputSplit split :<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        TableSnapshotInputFormatImpl.getSplits(job.getConfiguration())) {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      results.add(new TableSnapshotRegionSplit(split));<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    }<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    return results;<a name="line.193"></a>
+<span class="sourceLineNo">178</span><a name="line.178"></a>
+<span class="sourceLineNo">179</span>    @Override<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    public float getProgress() throws IOException, InterruptedException {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      return delegate.getProgress();<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>    @Override<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    public void close() throws IOException {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>      delegate.close();<a name="line.186"></a>
+<span class="sourceLineNo">187</span>    }<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span>  @Override<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  public RecordReader&lt;ImmutableBytesWritable, Result&gt; createRecordReader(<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      InputSplit split, TaskAttemptContext context) throws IOException {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    return new TableSnapshotRegionRecordReader();<a name="line.193"></a>
 <span class="sourceLineNo">194</span>  }<a name="line.194"></a>
 <span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  /**<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * Configures the job to use TableSnapshotInputFormat to read from a snapshot.<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * @param job the job to configure<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param snapshotName the name of the snapshot to read from<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param restoreDir a temporary directory to restore the snapshot into. Current user should<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * have write permissions to this directory, and this should not be a subdirectory of rootdir.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * After the job is finished, restoreDir can be deleted.<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @throws IOException if an error occurs<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  public static void setInput(Job job, String snapshotName, Path restoreDir)<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      throws IOException {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    TableSnapshotInputFormatImpl.setInput(job.getConfiguration(), snapshotName, restoreDir);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>  }<a name="line.208"></a>
-<span class="sourceLineNo">209</span>}<a name="line.209"></a>
+<span class="sourceLineNo">196</span>  @Override<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  public List&lt;InputSplit&gt; getSplits(JobContext job) throws IOException, InterruptedException {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    List&lt;InputSplit&gt; results = new ArrayList&lt;&gt;();<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    for (TableSnapshotInputFormatImpl.InputSplit split :<a name="line.199"></a>
+<span class="sourceLineNo">200</span>        TableSnapshotInputFormatImpl.getSplits(job.getConfiguration())) {<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      results.add(new TableSnapshotRegionSplit(split));<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    return results;<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * Configures the job to use TableSnapshotInputFormat to read from a snapshot.<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * @param job the job to configure<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   * @param snapshotName the name of the snapshot to read from<a name="line.209"></a>
+<span class="sourceLineNo">210</span>   * @param restoreDir a temporary directory to restore the snapshot into. Current user should<a name="line.210"></a>
+<span class="sourceLineNo">211</span>   * have write permissions to this directory, and this should not be a subdirectory of rootdir.<a name="line.211"></a>
+<span class="sourceLineNo">212</span>   * After the job is finished, restoreDir can be deleted.<a name="line.212"></a>
+<span class="sourceLineNo">213</span>   * @throws IOException if an error occurs<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>  public static void setInput(Job job, String snapshotName, Path restoreDir)<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      throws IOException {<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    TableSnapshotInputFormatImpl.setInput(job.getConfiguration(), snapshotName, restoreDir);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  }<a name="line.218"></a>
+<span class="sourceLineNo">219</span>}<a name="line.219"></a>