hbase-commits mailing list archives

From git-site-r...@apache.org
Subject [hbase-site] branch asf-site updated: Published site at 77229c79e36d72fb0f1a85a80b6814e2ece1e81c.
Date Fri, 31 Jan 2020 14:42:06 GMT
This is an automated email from the ASF dual-hosted git repository.

git-site-role pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hbase-site.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 439c2ae  Published site at 77229c79e36d72fb0f1a85a80b6814e2ece1e81c.
439c2ae is described below

commit 439c2ae36073aae164958bc50ac796f4953f8a24
Author: jenkins <builds@apache.org>
AuthorDate: Fri Jan 31 14:41:43 2020 +0000

    Published site at 77229c79e36d72fb0f1a85a80b6814e2ece1e81c.
---
 acid-semantics.html                                |    2 +-
 apache_hbase_reference_guide.pdf                   |    4 +-
 book.html                                          |    2 +-
 bulk-loads.html                                    |    2 +-
 checkstyle-aggregate.html                          |   18 +-
 coc.html                                           |    2 +-
 dependencies.html                                  |    2 +-
 dependency-convergence.html                        |    2 +-
 dependency-info.html                               |    2 +-
 dependency-management.html                         |    2 +-
 devapidocs/constant-values.html                    |   26 +-
 devapidocs/index-all.html                          |   21 +
 .../apache/hadoop/hbase/backup/package-tree.html   |    2 +-
 .../apache/hadoop/hbase/client/package-tree.html   |   20 +-
 .../apache/hadoop/hbase/filter/package-tree.html   |    6 +-
 .../hadoop/hbase/hbtop/terminal/package-tree.html  |    2 +-
 .../org/apache/hadoop/hbase/http/package-tree.html |    2 +-
 .../compress/class-use/Compression.Algorithm.html  |   12 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.html  |  259 +-
 .../apache/hadoop/hbase/io/hfile/package-tree.html |    4 +-
 .../org/apache/hadoop/hbase/ipc/package-tree.html  |    2 +-
 .../hadoop/hbase/mapreduce/package-tree.html       |    4 +-
 .../hbase/master/assignment/package-tree.html      |    2 +-
 .../apache/hadoop/hbase/master/package-tree.html   |    4 +-
 .../hbase/master/procedure/package-tree.html       |    2 +-
 .../org/apache/hadoop/hbase/package-tree.html      |   18 +-
 .../hadoop/hbase/procedure2/package-tree.html      |    4 +-
 .../apache/hadoop/hbase/quotas/package-tree.html   |    4 +-
 .../hadoop/hbase/regionserver/HMobStore.html       |    2 +-
 .../regionserver/HStore.StoreFlusherImpl.html      |   34 +-
 .../apache/hadoop/hbase/regionserver/HStore.html   |  490 +--
 .../regionserver/class-use/StoreFileWriter.html    |   12 +-
 .../compactions/Compactor.FileDetails.html         |   18 +-
 .../Compactor.InternalScannerFactory.html          |    6 +-
 .../hbase/regionserver/compactions/Compactor.html  |   26 +-
 .../hadoop/hbase/regionserver/package-tree.html    |   16 +-
 .../regionserver/querymatcher/package-tree.html    |    2 +-
 .../hbase/regionserver/wal/package-tree.html       |    2 +-
 .../hadoop/hbase/replication/package-tree.html     |    2 +-
 .../hadoop/hbase/rest/model/package-tree.html      |    2 +-
 .../hadoop/hbase/security/access/package-tree.html |    4 +-
 .../apache/hadoop/hbase/security/package-tree.html |    2 +-
 .../apache/hadoop/hbase/thrift/package-tree.html   |    4 +-
 .../org/apache/hadoop/hbase/util/package-tree.html |    6 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |    2 +-
 .../src-html/org/apache/hadoop/hbase/Version.html  |    4 +-
 .../apache/hadoop/hbase/io/hfile/CacheConfig.html  |  680 ++--
 .../regionserver/HStore.StoreFlusherImpl.html      | 3434 ++++++++++----------
 .../apache/hadoop/hbase/regionserver/HStore.html   | 3434 ++++++++++----------
 .../compactions/Compactor.CellSinkFactory.html     |  743 ++---
 .../compactions/Compactor.FileDetails.html         |  743 ++---
 .../Compactor.InternalScannerFactory.html          |  743 ++---
 .../hbase/regionserver/compactions/Compactor.html  |  743 ++---
 downloads.html                                     |    2 +-
 export_control.html                                |    2 +-
 index.html                                         |    2 +-
 issue-tracking.html                                |    2 +-
 mail-lists.html                                    |    2 +-
 metrics.html                                       |    2 +-
 old_news.html                                      |    2 +-
 plugin-management.html                             |    2 +-
 plugins.html                                       |    2 +-
 poweredbyhbase.html                                |    2 +-
 project-info.html                                  |    2 +-
 project-reports.html                               |    2 +-
 project-summary.html                               |    2 +-
 pseudo-distributed.html                            |    2 +-
 replication.html                                   |    2 +-
 resources.html                                     |    2 +-
 source-repository.html                             |    2 +-
 sponsors.html                                      |    2 +-
 supportingprojects.html                            |    2 +-
 team-list.html                                     |    2 +-
 testdevapidocs/constant-values.html                |   14 +
 testdevapidocs/index-all.html                      |    8 +-
 ...Fencing.BlockCompactionsInCompletionHStore.html |    2 +-
 .../hadoop/hbase/client/TestFromClientSide.html    |   60 +-
 .../TestFromClientSideScanExcpetion.MyHStore.html  |    2 +-
 ...sEndpoint.HStoreWithFaultyRefreshHFilesAPI.html |    2 +-
 .../hfile/TestCacheOnWrite.CacheOnWriteType.html   |   22 +-
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.html    |  102 +-
 .../apache/hadoop/hbase/io/hfile/package-tree.html |    2 +-
 .../org/apache/hadoop/hbase/package-tree.html      |   10 +-
 .../hadoop/hbase/procedure/package-tree.html       |    8 +-
 .../hadoop/hbase/procedure2/package-tree.html      |    4 +-
 .../regionserver/TestHRegion.HStoreForTesting.html |    2 +-
 .../hbase/regionserver/TestHStore.MyStore.html     |    2 +-
 .../compactions/TestDateTieredCompactor.html       |    8 +-
 .../TestStripeCompactionPolicy.Scanner.html        |   10 +-
 .../hadoop/hbase/regionserver/package-tree.html    |    4 +-
 .../org/apache/hadoop/hbase/test/package-tree.html |    2 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |    2 +-
 .../hadoop/hbase/client/TestFromClientSide.html    | 2926 ++++++++---------
 .../hfile/TestCacheOnWrite.CacheOnWriteType.html   |  853 ++---
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.html    |  853 ++---
 .../compactions/TestDateTieredCompactor.html       |  134 +-
 .../TestStripeCompactionPolicy.Scanner.html        |   83 +-
 .../compactions/TestStripeCompactionPolicy.html    |   83 +-
 .../compactions/TestStripeCompactor.html           |   40 +-
 99 files changed, 8671 insertions(+), 8193 deletions(-)
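
Among the regenerated docs above, this push picks up a new cache tuning knob in CacheConfig: CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY ("hbase.rs.cachecompactedblocksonwrite.threshold", default Long.MAX_VALUE), which caps the total size of compacted input files beyond which blocks are no longer cached on compaction, alongside the existing "hbase.rs.cachecompactedblocksonwrite" switch. The following is only a minimal sketch of how the pair might be set, not part of this commit; it assumes the public CacheConfig(Configuration) constructor and assumes the getCacheCompactedBlocksOnWriteThreshold() getter added in this push is publicly accessible, and the 512 MB figure is an arbitrary example value.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;

    public class CompactedBlockCacheThresholdSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Cache data blocks written while compacting files ...
        conf.setBoolean("hbase.rs.cachecompactedblocksonwrite", true);
        // ... but stop caching once the combined size of the files being
        // compacted exceeds this many bytes (512 MB here is an example
        // value; the shipped default is Long.MAX_VALUE, i.e. no limit).
        conf.setLong("hbase.rs.cachecompactedblocksonwrite.threshold",
            512L * 1024 * 1024);

        // CacheConfig reads both keys from the Configuration.
        CacheConfig cacheConf = new CacheConfig(conf);
        System.out.println("cacheCompactedBlocksOnWriteThreshold = "
            + cacheConf.getCacheCompactedBlocksOnWriteThreshold());
      }
    }
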

diff --git a/acid-semantics.html b/acid-semantics.html
index ab2205d..d3cd273 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -467,7 +467,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-30</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-31</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 93788b2..4a0ddf8 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.rc.2, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20200130143155+00'00')
-/CreationDate (D:20200130144245+00'00')
+/ModDate (D:20200131143022+00'00')
+/CreationDate (D:20200131143858+00'00')
 >>
 endobj
 2 0 obj
diff --git a/book.html b/book.html
index 7ccc3fb..572a59a 100644
--- a/book.html
+++ b/book.html
@@ -44444,7 +44444,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2020-01-30 14:31:55 UTC
+Last updated 2020-01-31 14:30:22 UTC
 </div>
 </div>
 </body>
diff --git a/bulk-loads.html b/bulk-loads.html
index 2cf2c83..2c14ce8 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -172,7 +172,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-30</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-31</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 6367ad8..345f0d7 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -19726,7 +19726,7 @@
 <td>coding</td>
 <td>InnerAssignment</td>
 <td>Inner assignments should be avoided.</td>
-<td>5502</td></tr></table></div>
+<td>5504</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.client.TestFromClientSide3.java">org/apache/hadoop/hbase/client/TestFromClientSide3.java</h3>
 <table border="0" class="table table-striped">
@@ -32674,7 +32674,7 @@
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>199</td></tr></table></div>
+<td>213</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.io.hfile.CacheStats.java">org/apache/hadoop/hbase/io/hfile/CacheStats.java</h3>
 <table border="0" class="table table-striped">
@@ -33883,13 +33883,13 @@
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 106).</td>
-<td>185</td></tr>
+<td>189</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization lcurly' has incorrect indentation level 8, expected level should be one of the following: 4, 6.</td>
-<td>193</td></tr></table></div>
+<td>197</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.io.hfile.TestCachedBlockQueue.java">org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java</h3>
 <table border="0" class="table table-striped">
@@ -64081,19 +64081,19 @@
 <td>coding</td>
 <td>InnerAssignment</td>
 <td>Inner assignments should be avoided.</td>
-<td>193</td></tr>
+<td>199</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 107).</td>
-<td>200</td></tr>
+<td>206</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>234</td></tr></table></div>
+<td>240</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.regionserver.compactions.CurrentHourProvider.java">org/apache/hadoop/hbase/regionserver/compactions/CurrentHourProvider.java</h3>
 <table border="0" class="table table-striped">
@@ -64462,7 +64462,7 @@
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>819</td></tr></table></div>
+<td>822</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler.java">org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java</h3>
 <table border="0" class="table table-striped">
@@ -84994,7 +84994,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-30</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-31</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/coc.html b/coc.html
index 1749513..d5d5ba8 100644
--- a/coc.html
+++ b/coc.html
@@ -241,7 +241,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-30</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-31</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependencies.html b/dependencies.html
index 8559c22..8bf3fa0 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -313,7 +313,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-30</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-31</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 6c48fff..a5067bb 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -558,7 +558,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-30</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-31</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-info.html b/dependency-info.html
index 9a2fefe..4cad6cb 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -194,7 +194,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-30</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-31</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-management.html b/dependency-management.html
index ea425c1..3cf7497 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -928,7 +928,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-30</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-31</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 382e389..f28f6f1 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -4109,14 +4109,14 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Thu Jan 30 14:39:19 UTC 2020"</code></td>
+<td class="colLast"><code>"Fri Jan 31 14:35:45 UTC 2020"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#revision">revision</a></code></td>
-<td class="colLast"><code>"8b00f9f0b160a3191889aa3a80478525c8faf4b3"</code></td>
+<td class="colLast"><code>"77229c79e36d72fb0f1a85a80b6814e2ece1e81c"</code></td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.srcChecksum">
@@ -10015,40 +10015,54 @@
 <td class="colLast"><code>"hbase.rs.cachecompactedblocksonwrite"</code></td>
 </tr>
 <tr class="rowColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.CacheConfig.CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td><code><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY">CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY</a></code></td>
+<td class="colLast"><code>"hbase.rs.cachecompactedblocksonwrite.threshold"</code></td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_DATA_BLOCKS_COMPRESSED_KEY">CACHE_DATA_BLOCKS_COMPRESSED_KEY</a></code></td>
 <td class="colLast"><code>"hbase.block.data.cachecompressed"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.CacheConfig.CACHE_DATA_ON_READ_KEY">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_DATA_ON_READ_KEY">CACHE_DATA_ON_READ_KEY</a></code></td>
 <td class="colLast"><code>"hbase.block.data.cacheonread"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_INDEX_BLOCKS_ON_WRITE_KEY">CACHE_INDEX_BLOCKS_ON_WRITE_KEY</a></code></td>
 <td class="colLast"><code>"hfile.block.index.cacheonwrite"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_BLOOMS_ON_WRITE">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;boolean</code></td>
 <td><code><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_BLOOMS_ON_WRITE">DEFAULT_CACHE_BLOOMS_ON_WRITE</a></code></td>
 <td class="colLast"><code>false</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;boolean</code></td>
 <td><code><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE</a></code></td>
 <td class="colLast"><code>false</code></td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;long</code></td>
+<td><code><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD</a></code></td>
+<td class="colLast"><code>9223372036854775807L</code></td>
+</tr>
 <tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_DATA_COMPRESSED">
 <!--   -->
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 6a7016d..c382b2e 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -8599,6 +8599,11 @@
 <dd>
 <div class="block">Configuration key to cache blocks when a compacted file is written</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY">CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
+<dd>
+<div class="block">Configuration key to determine total size in bytes of compacted files beyond which we do not
+ cache blocks on compaction</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_DATA_BLOCKS_COMPRESSED_KEY">CACHE_DATA_BLOCKS_COMPRESSED_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
 <dd>
 <div class="block">Configuration key to cache data blocks in compressed and/or encrypted format.</div>
@@ -8714,6 +8719,10 @@
 <dd>
 <div class="block">Whether data blocks should be cached when compacted file is written</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#cacheCompactedDataOnWriteThreshold">cacheCompactedDataOnWriteThreshold</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
+<dd>
+<div class="block">Determine threshold beyond which we do not cache blocks on compaction</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html#cacheConf">cacheConf</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html#cacheConf">cacheConf</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexWriter.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexWriter</a></dt>
@@ -22052,6 +22061,8 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HStore.html#createWriterInTmp-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-boolean-boolean-boolean-boolean-">createWriterInTmp(long, Compression.Algorithm, boolean, boolean, boolean, boolean)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HStore.html#createWriterInTmp-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-boolean-boolean-boolean-boolean-long-">createWriterInTmp(long, Compression.Algorithm, boolean, boolean, boolean, boolean, long)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/tool/CanaryTool.RegionMonitor.html#createWriteTable-int-">createWriteTable(int)</a></span> - Method in class org.apache.hadoop.hbase.tool.<a href="org/apache/hadoop/hbase/tool/CanaryTool.RegionMonitor.html" title="class in org.apache.hadoop.hbase.tool">CanaryTool.RegionMonitor</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/HBaseFsck.html#createZNodeRetryCounterFactory">createZNodeRetryCounterFactory</a></span> - Variable in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a></dt>
@@ -23640,6 +23651,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_DATA_COMPRESSED">DEFAULT_CACHE_DATA_COMPRESSED</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HColumnDescriptor.html#DEFAULT_CACHE_DATA_IN_L1">DEFAULT_CACHE_DATA_IN_L1</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HColumnDescriptor.html" title="class in org.apache.hadoop.hbase">HColumnDescriptor</a></dt>
@@ -37437,6 +37450,10 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rest/model/ScannerModel.html#getCacheBlocks--">getCacheBlocks()</a></span> - Method in class org.apache.hadoop.hbase.rest.model.<a href="org/apache/hadoop/hbase/rest/model/ScannerModel.html" title="class in org.apache.hadoop.hbase.rest.model">ScannerModel</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#getCacheCompactedBlocksOnWriteThreshold--">getCacheCompactedBlocksOnWriteThreshold()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#getCacheCompactedBlocksOnWriteThreshold-org.apache.hadoop.conf.Configuration-">getCacheCompactedBlocksOnWriteThreshold(Configuration)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HStoreFile.html#getCacheConf--">getCacheConf()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HStore.html#getCacheConfig--">getCacheConfig()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a></dt>
@@ -121392,6 +121409,10 @@ service.</div>
 <div class="block">Calculate the number of bytes required to store all the checksums
  for this block.</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html#totalCompactedFilesSize">totalCompactedFilesSize</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.compactions.<a href="org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compactor.FileDetails</a></dt>
+<dd>
+<div class="block">Total size of the compacted files</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html#totalCompactedSize">totalCompactedSize</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.compactions.<a href="org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionProgress</a></dt>
 <dd>
 <div class="block">the total size of data processed by the currently running compaction, in bytes</div>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index ddf4887..75749e1 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -168,8 +168,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 336c2ef..3c6f09f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -426,21 +426,21 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index e10a205..7298e17 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -190,12 +190,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
index b8f3866..007e90f 100644
--- a/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
@@ -107,8 +107,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/Color.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">Color</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/KeyPress.Type.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">KeyPress.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/Color.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">Color</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
index 12ea6aa..2dbe190 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
@@ -140,9 +140,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/HttpConfig.Policy.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">HttpConfig.Policy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html b/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html
index fa0cfd4..78fbe54 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/compress/class-use/Compression.Algorithm.html
@@ -638,6 +638,16 @@ the order they are declared.</div>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#createWriterInTmp-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-boolean-boolean-boolean-boolean-long-">createWriterInTmp</a></span>(long&nbsp;maxKeyCount,
+                 <a href="../../../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
+                 boolean&nbsp;isCompaction,
+                 boolean&nbsp;includeMVCCReadpoint,
+                 boolean&nbsp;includesTag,
+                 boolean&nbsp;shouldDropBehind,
+                 long&nbsp;totalCompactedFilesSize)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a></code></td>
 <td class="colLast"><span class="typeNameLabel">HMobStore.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/regionserver/HMobStore.html#createWriterInTmp-org.apache.hadoop.hbase.mob.MobFileName-org.apache.hadoop.fs.Path-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-boolean-">createWriterInTmp</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/mob/MobFileName.html" title="class in org.apache.hadoop.hbase.mob">Mob [...]
                  org.apache.hadoop.fs.Path&nbsp;basePath,
                  long&nbsp;maxKeyCount,
@@ -646,7 +656,7 @@ the order they are declared.</div>
 <div class="block">Creates the writer for the mob file in temp directory.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a></code></td>
 <td class="colLast"><span class="typeNameLabel">HMobStore.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/regionserver/HMobStore.html#createWriterInTmp-java.lang.String-org.apache.hadoop.fs.Path-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-byte:A-boolean-">createWriterInTmp</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a [...]
                  org.apache.hadoop.fs.Path&nbsp;basePath,
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
index 5031073..bc80f38 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -159,35 +159,48 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY">CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY</a></span></code>
+<div class="block">Configuration key to determine total size in bytes of compacted files beyond which we do not
+ cache blocks on compaction</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_DATA_BLOCKS_COMPRESSED_KEY">CACHE_DATA_BLOCKS_COMPRESSED_KEY</a></span></code>
 <div class="block">Configuration key to cache data blocks in compressed and/or encrypted format.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_DATA_ON_READ_KEY">CACHE_DATA_ON_READ_KEY</a></span></code>
 <div class="block">Configuration key to cache data blocks on read.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CACHE_INDEX_BLOCKS_ON_WRITE_KEY">CACHE_INDEX_BLOCKS_ON_WRITE_KEY</a></span></code>
 <div class="block">Configuration key to cache leaf and intermediate-level index blocks on
  write.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#cacheBloomsOnWrite">cacheBloomsOnWrite</a></span></code>
 <div class="block">Whether compound bloom filter blocks should be cached on write</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#cacheCompactedDataOnWrite">cacheCompactedDataOnWrite</a></span></code>
 <div class="block">Whether data blocks should be cached when compacted file is written</div>
 </td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><code>private long</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#cacheCompactedDataOnWriteThreshold">cacheCompactedDataOnWriteThreshold</a></span></code>
+<div class="block">Determine threshold beyond which we do not cache blocks on compaction</div>
+</td>
+</tr>
 <tr class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#cacheDataCompressed">cacheDataCompressed</a></span></code>
@@ -222,82 +235,86 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><code>static long</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD</a></span></code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_DATA_COMPRESSED">DEFAULT_CACHE_DATA_COMPRESSED</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_DATA_ON_READ">DEFAULT_CACHE_DATA_ON_READ</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_DATA_ON_WRITE">DEFAULT_CACHE_DATA_ON_WRITE</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_CACHE_INDEXES_ON_WRITE">DEFAULT_CACHE_INDEXES_ON_WRITE</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_EVICT_ON_CLOSE">DEFAULT_EVICT_ON_CLOSE</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_IN_MEMORY">DEFAULT_IN_MEMORY</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DEFAULT_PREFETCH_ON_OPEN">DEFAULT_PREFETCH_ON_OPEN</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DISABLED">DISABLED</a></span></code>
 <div class="block">Disabled cache configuration</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DROP_BEHIND_CACHE_COMPACTION_DEFAULT">DROP_BEHIND_CACHE_COMPACTION_DEFAULT</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#DROP_BEHIND_CACHE_COMPACTION_KEY">DROP_BEHIND_CACHE_COMPACTION_KEY</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#dropBehindCompaction">dropBehindCompaction</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#EVICT_BLOCKS_ON_CLOSE_KEY">EVICT_BLOCKS_ON_CLOSE_KEY</a></span></code>
 <div class="block">Configuration key to evict all blocks of a given file from the block cache
  when the file is closed.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#evictOnClose">evictOnClose</a></span></code>
 <div class="block">Whether blocks of a file should be evicted when the file is closed</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#inMemory">inMemory</a></span></code>
 <div class="block">Whether blocks should be flagged as in-memory when being cached</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#PREFETCH_BLOCKS_ON_OPEN_KEY">PREFETCH_BLOCKS_ON_OPEN_KEY</a></span></code>
 <div class="block">Configuration key to prefetch all blocks of a given file into the block cache
  when the file is opened.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#prefetchOnOpen">prefetchOnOpen</a></span></code>
 <div class="block">Whether data blocks should be prefetched into the cache</div>
@@ -385,24 +402,32 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#getByteBuffAllocator--">getByteBuffAllocator</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i3" class="rowColor">
+<td class="colFirst"><code>long</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#getCacheCompactedBlocksOnWriteThreshold--">getCacheCompactedBlocksOnWriteThreshold</a></span>()</code>&nbsp;</td>
+</tr>
+<tr id="i4" class="altColor">
+<td class="colFirst"><code>private long</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#getCacheCompactedBlocksOnWriteThreshold-org.apache.hadoop.conf.Configuration-">getCacheCompactedBlocksOnWriteThreshold</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
+</tr>
+<tr id="i5" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#isCombinedBlockCache--">isCombinedBlockCache</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i4" class="altColor">
+<tr id="i6" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#isInMemory--">isInMemory</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i5" class="rowColor">
+<tr id="i7" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#setCacheDataOnWrite-boolean-">setCacheDataOnWrite</a></span>(boolean&nbsp;cacheDataOnWrite)</code>&nbsp;</td>
 </tr>
-<tr id="i6" class="altColor">
+<tr id="i8" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#setEvictOnClose-boolean-">setEvictOnClose</a></span>(boolean&nbsp;evictOnClose)</code>
 <div class="block">Only used for testing.</div>
 </td>
 </tr>
-<tr id="i7" class="rowColor">
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldCacheBlockOnRead-org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory-">shouldCacheBlockOnRead</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</code>
 <div class="block">Should we cache a block of a particular category? We always cache
@@ -410,63 +435,63 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
  available.</div>
 </td>
 </tr>
-<tr id="i8" class="altColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldCacheBloomsOnWrite--">shouldCacheBloomsOnWrite</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldCacheCompactedBlocksOnWrite--">shouldCacheCompactedBlocksOnWrite</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldCacheCompressed-org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory-">shouldCacheCompressed</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</code>&nbsp;</td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldCacheDataCompressed--">shouldCacheDataCompressed</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldCacheDataOnRead--">shouldCacheDataOnRead</a></span>()</code>
 <div class="block">Returns whether the DATA blocks of this HFile should be cached on read or not (we always
  cache the meta blocks, the INDEX and BLOOM blocks).</div>
 </td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldCacheDataOnWrite--">shouldCacheDataOnWrite</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldCacheIndexesOnWrite--">shouldCacheIndexesOnWrite</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i15" class="rowColor">
+<tr id="i17" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldDropBehindCompaction--">shouldDropBehindCompaction</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i16" class="altColor">
+<tr id="i18" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldEvictOnClose--">shouldEvictOnClose</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i17" class="rowColor">
+<tr id="i19" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldLockOnCacheMiss-org.apache.hadoop.hbase.io.hfile.BlockType-">shouldLockOnCacheMiss</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>
 <div class="block">If we make sure the block could not be cached, we will not acquire the lock
  otherwise we will acquire lock</div>
 </td>
 </tr>
-<tr id="i18" class="altColor">
+<tr id="i20" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldPrefetchOnOpen--">shouldPrefetchOnOpen</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i19" class="rowColor">
+<tr id="i21" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#shouldReadBlockFromCache-org.apache.hadoop.hbase.io.hfile.BlockType-">shouldReadBlockFromCache</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</code>
 <div class="block">Return true if we may find this type of block in block cache.</div>
 </td>
 </tr>
-<tr id="i20" class="altColor">
+<tr id="i22" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#toString--">toString</a></span>()</code>&nbsp;</td>
 </tr>
@@ -628,13 +653,28 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
+<a name="CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY</h4>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.94">CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY</a></pre>
+<div class="block">Configuration key to determine total size in bytes of compacted files beyond which we do not
+ cache blocks on compaction</div>
+<dl>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY">Constant Field Values</a></dd>
+</dl>
+</li>
+</ul>
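As a usage sketch only (not part of this commit), the key above can be set on a Configuration before building a CacheConfig; the 512 MB value and the class name below are purely illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;

    public class SetCompactedCacheThreshold {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Illustrative value: stop caching compacted blocks once the total
        // size of the files being compacted exceeds 512 MB.
        conf.setLong(CacheConfig.CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY,
            512L * 1024 * 1024);
        CacheConfig cacheConf = new CacheConfig(conf);
        System.out.println("threshold = "
            + cacheConf.getCacheCompactedBlocksOnWriteThreshold());
      }
    }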
 <a name="DROP_BEHIND_CACHE_COMPACTION_KEY">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>DROP_BEHIND_CACHE_COMPACTION_KEY</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.90">DROP_BEHIND_CACHE_COMPACTION_KEY</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.97">DROP_BEHIND_CACHE_COMPACTION_KEY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DROP_BEHIND_CACHE_COMPACTION_KEY">Constant Field Values</a></dd>
@@ -647,7 +687,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_DATA_ON_READ</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.94">DEFAULT_CACHE_DATA_ON_READ</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.101">DEFAULT_CACHE_DATA_ON_READ</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_DATA_ON_READ">Constant Field Values</a></dd>
@@ -660,7 +700,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_DATA_ON_WRITE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.95">DEFAULT_CACHE_DATA_ON_WRITE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.102">DEFAULT_CACHE_DATA_ON_WRITE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_DATA_ON_WRITE">Constant Field Values</a></dd>
@@ -673,7 +713,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_IN_MEMORY</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.96">DEFAULT_IN_MEMORY</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.103">DEFAULT_IN_MEMORY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_IN_MEMORY">Constant Field Values</a></dd>
@@ -686,7 +726,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_INDEXES_ON_WRITE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.97">DEFAULT_CACHE_INDEXES_ON_WRITE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.104">DEFAULT_CACHE_INDEXES_ON_WRITE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_INDEXES_ON_WRITE">Constant Field Values</a></dd>
@@ -699,7 +739,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_BLOOMS_ON_WRITE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.98">DEFAULT_CACHE_BLOOMS_ON_WRITE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.105">DEFAULT_CACHE_BLOOMS_ON_WRITE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_BLOOMS_ON_WRITE">Constant Field Values</a></dd>
@@ -712,7 +752,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_EVICT_ON_CLOSE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.99">DEFAULT_EVICT_ON_CLOSE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.106">DEFAULT_EVICT_ON_CLOSE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_EVICT_ON_CLOSE">Constant Field Values</a></dd>
@@ -725,7 +765,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_DATA_COMPRESSED</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.100">DEFAULT_CACHE_DATA_COMPRESSED</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.107">DEFAULT_CACHE_DATA_COMPRESSED</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_DATA_COMPRESSED">Constant Field Values</a></dd>
@@ -738,7 +778,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_PREFETCH_ON_OPEN</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.101">DEFAULT_PREFETCH_ON_OPEN</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.108">DEFAULT_PREFETCH_ON_OPEN</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_PREFETCH_ON_OPEN">Constant Field Values</a></dd>
@@ -751,7 +791,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.102">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.109">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE">Constant Field Values</a></dd>
@@ -764,20 +804,33 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DROP_BEHIND_CACHE_COMPACTION_DEFAULT</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.103">DROP_BEHIND_CACHE_COMPACTION_DEFAULT</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.110">DROP_BEHIND_CACHE_COMPACTION_DEFAULT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DROP_BEHIND_CACHE_COMPACTION_DEFAULT">Constant Field Values</a></dd>
 </dl>
 </li>
 </ul>
+<a name="DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD</h4>
+<pre>public static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.111">DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD</a></pre>
+<dl>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD">Constant Field Values</a></dd>
+</dl>
+</li>
+</ul>
 <a name="cacheDataOnRead">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheDataOnRead</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.111">cacheDataOnRead</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.119">cacheDataOnRead</a></pre>
 <div class="block">Whether blocks should be cached on read (default is on if there is a
  cache but this can be turned off on a per-family or per-request basis).
  If off we will STILL cache meta blocks; i.e. INDEX and BLOOM types.
@@ -790,7 +843,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>inMemory</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.114">inMemory</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.122">inMemory</a></pre>
 <div class="block">Whether blocks should be flagged as in-memory when being cached</div>
 </li>
 </ul>
@@ -800,7 +853,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheDataOnWrite</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.117">cacheDataOnWrite</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.125">cacheDataOnWrite</a></pre>
 <div class="block">Whether data blocks should be cached when new files are written</div>
 </li>
 </ul>
@@ -810,7 +863,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheIndexesOnWrite</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.120">cacheIndexesOnWrite</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.128">cacheIndexesOnWrite</a></pre>
 <div class="block">Whether index blocks should be cached when new files are written</div>
 </li>
 </ul>
@@ -820,7 +873,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheBloomsOnWrite</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.123">cacheBloomsOnWrite</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.131">cacheBloomsOnWrite</a></pre>
 <div class="block">Whether compound bloom filter blocks should be cached on write</div>
 </li>
 </ul>
@@ -830,7 +883,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>evictOnClose</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.126">evictOnClose</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.134">evictOnClose</a></pre>
 <div class="block">Whether blocks of a file should be evicted when the file is closed</div>
 </li>
 </ul>
@@ -840,7 +893,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheDataCompressed</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.129">cacheDataCompressed</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.137">cacheDataCompressed</a></pre>
 <div class="block">Whether data blocks should be stored in compressed and/or encrypted form in the cache</div>
 </li>
 </ul>
@@ -850,7 +903,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>prefetchOnOpen</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.132">prefetchOnOpen</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.140">prefetchOnOpen</a></pre>
 <div class="block">Whether data blocks should be prefetched into the cache</div>
 </li>
 </ul>
@@ -860,17 +913,27 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheCompactedDataOnWrite</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.137">cacheCompactedDataOnWrite</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.145">cacheCompactedDataOnWrite</a></pre>
 <div class="block">Whether data blocks should be cached when compacted file is written</div>
 </li>
 </ul>
+<a name="cacheCompactedDataOnWriteThreshold">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>cacheCompactedDataOnWriteThreshold</h4>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.150">cacheCompactedDataOnWriteThreshold</a></pre>
+<div class="block">Determine threshold beyond which we do not cache blocks on compaction</div>
+</li>
+</ul>
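To show how this field and the cacheCompactedDataOnWrite flag fit together, here is a hedged sketch inferred from the descriptions on this page (not the actual HStore or Compactor code); cacheBlocksForThisCompaction is a hypothetical helper name:

    // Hypothetical helper: cache blocks for a compaction only when the feature
    // is enabled and the total size of the files being compacted does not
    // exceed the configured threshold.
    static boolean cacheBlocksForThisCompaction(
        org.apache.hadoop.hbase.io.hfile.CacheConfig cacheConf,
        long totalCompactedFilesSize) {
      return cacheConf.shouldCacheCompactedBlocksOnWrite()
          && totalCompactedFilesSize <= cacheConf.getCacheCompactedBlocksOnWriteThreshold();
    }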
 <a name="dropBehindCompaction">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>dropBehindCompaction</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.139">dropBehindCompaction</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.152">dropBehindCompaction</a></pre>
 </li>
 </ul>
 <a name="blockCache">
@@ -879,7 +942,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>blockCache</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.142">blockCache</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.155">blockCache</a></pre>
 </li>
 </ul>
 <a name="byteBuffAllocator">
@@ -888,7 +951,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>byteBuffAllocator</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteBuffAllocator.html" title="class in org.apache.hadoop.hbase.io">ByteBuffAllocator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.144">byteBuffAllocator</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteBuffAllocator.html" title="class in org.apache.hadoop.hbase.io">ByteBuffAllocator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.157">byteBuffAllocator</a></pre>
 </li>
 </ul>
 </li>
@@ -905,7 +968,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.151">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.164">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Create a cache configuration using the specified configuration object and
 defaults for family-level settings. Use this only when there is no column family context.</div>
 <dl>
@@ -920,7 +983,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.155">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.168">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                    <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;blockCache)</pre>
 </li>
 </ul>
@@ -930,7 +993,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.165">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.178">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                    <a href="../../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family,
                    <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;blockCache,
                    <a href="../../../../../../org/apache/hadoop/hbase/io/ByteBuffAllocator.html" title="class in org.apache.hadoop.hbase.io">ByteBuffAllocator</a>&nbsp;byteBuffAllocator)</pre>
@@ -949,7 +1012,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.201">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.215">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
 <div class="block">Constructs a cache configuration copied from the specified configuration.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -963,7 +1026,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.216">CacheConfig</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.231">CacheConfig</a>()</pre>
 </li>
 </ul>
 </li>
@@ -980,7 +1043,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataOnRead</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.236">shouldCacheDataOnRead</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.251">shouldCacheDataOnRead</a>()</pre>
 <div class="block">Returns whether the DATA blocks of this HFile should be cached on read or not (we always
  cache the meta blocks, the INDEX and BLOOM blocks).</div>
 <dl>
@@ -995,7 +1058,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldDropBehindCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.240">shouldDropBehindCompaction</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.255">shouldDropBehindCompaction</a>()</pre>
 </li>
 </ul>
 <a name="shouldCacheBlockOnRead-org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory-">
@@ -1004,7 +1067,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheBlockOnRead</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.249">shouldCacheBlockOnRead</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.264">shouldCacheBlockOnRead</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
 <div class="block">Should we cache a block of a particular category? We always cache
  important blocks such as index blocks, as long as the block cache is
  available.</div>
@@ -1016,7 +1079,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isInMemory</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.257">isInMemory</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.272">isInMemory</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if blocks in this file should be flagged as in-memory</dd>
@@ -1029,7 +1092,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.265">shouldCacheDataOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.280">shouldCacheDataOnWrite</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if data blocks should be written to the cache when an HFile is
@@ -1043,7 +1106,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setCacheDataOnWrite</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.274">setCacheDataOnWrite</a>(boolean&nbsp;cacheDataOnWrite)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.289">setCacheDataOnWrite</a>(boolean&nbsp;cacheDataOnWrite)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>cacheDataOnWrite</code> - whether data blocks should be written to the cache
@@ -1057,7 +1120,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>enableCacheOnWrite</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.285">enableCacheOnWrite</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.299">enableCacheOnWrite</a>()</pre>
 <div class="block">Enable cache on write including:
  cacheDataOnWrite
  cacheIndexesOnWrite
@@ -1070,7 +1133,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheIndexesOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.296">shouldCacheIndexesOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.309">shouldCacheIndexesOnWrite</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if index blocks should be written to the cache when an HFile
@@ -1084,7 +1147,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheBloomsOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.304">shouldCacheBloomsOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.317">shouldCacheBloomsOnWrite</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if bloom blocks should be written to the cache when an HFile
@@ -1098,7 +1161,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldEvictOnClose</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.312">shouldEvictOnClose</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.325">shouldEvictOnClose</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if blocks should be evicted from the cache when an HFile
@@ -1112,7 +1175,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setEvictOnClose</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.322">setEvictOnClose</a>(boolean&nbsp;evictOnClose)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.335">setEvictOnClose</a>(boolean&nbsp;evictOnClose)</pre>
 <div class="block">Only used for testing.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1127,7 +1190,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataCompressed</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.329">shouldCacheDataCompressed</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.342">shouldCacheDataCompressed</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if data blocks should be compressed in the cache, false if not</dd>
@@ -1140,7 +1203,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheCompressed</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.336">shouldCacheCompressed</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.349">shouldCacheCompressed</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if this <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><code>BlockType.BlockCategory</code></a> should be compressed in blockcache, false otherwise</dd>
@@ -1153,7 +1216,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldPrefetchOnOpen</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.348">shouldPrefetchOnOpen</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.361">shouldPrefetchOnOpen</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if blocks should be prefetched into the cache on open, false if not</dd>
@@ -1166,20 +1229,33 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheCompactedBlocksOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.355">shouldCacheCompactedBlocksOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.368">shouldCacheCompactedBlocksOnWrite</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if blocks should be cached while writing during compaction, false if not</dd>
 </dl>
 </li>
 </ul>
+<a name="getCacheCompactedBlocksOnWriteThreshold--">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>getCacheCompactedBlocksOnWriteThreshold</h4>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.375">getCacheCompactedBlocksOnWriteThreshold</a>()</pre>
+<dl>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>the total-compacted-file-size threshold, in bytes, for caching blocks while writing during compaction</dd>
+</dl>
+</li>
+</ul>
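For a quick check of this accessor (illustrative only, assuming no override for the threshold key is present in the Configuration), the getter should report the shipped default:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;

    public class DefaultThresholdCheck {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        CacheConfig cacheConf = new CacheConfig(conf);
        // Assumption: with no override for the threshold key, the accessor
        // falls back to DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD.
        System.out.println(cacheConf.getCacheCompactedBlocksOnWriteThreshold()
            == CacheConfig.DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD);
      }
    }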
 <a name="shouldReadBlockFromCache-org.apache.hadoop.hbase.io.hfile.BlockType-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldReadBlockFromCache</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.367">shouldReadBlockFromCache</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.386">shouldReadBlockFromCache</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
 <div class="block">Return true if we may find this type of block in block cache.
  <p>
  TODO: today <code>family.isBlockCacheEnabled()</code> only means <code>cacheDataOnRead</code>, so here we
@@ -1194,7 +1270,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldLockOnCacheMiss</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.391">shouldLockOnCacheMiss</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.410">shouldLockOnCacheMiss</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
 <div class="block">If we make sure the block could not be cached, we will not acquire the lock
  otherwise we will acquire lock</div>
 </li>
@@ -1205,7 +1281,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockCache</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.403">getBlockCache</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.422">getBlockCache</a>()</pre>
 <div class="block">Returns the block cache.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1219,7 +1295,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isCombinedBlockCache</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.407">isCombinedBlockCache</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.426">isCombinedBlockCache</a>()</pre>
 </li>
 </ul>
 <a name="getByteBuffAllocator--">
@@ -1228,7 +1304,16 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getByteBuffAllocator</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteBuffAllocator.html" title="class in org.apache.hadoop.hbase.io">ByteBuffAllocator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.411">getByteBuffAllocator</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/ByteBuffAllocator.html" title="class in org.apache.hadoop.hbase.io">ByteBuffAllocator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.430">getByteBuffAllocator</a>()</pre>
+</li>
+</ul>
+<a name="getCacheCompactedBlocksOnWriteThreshold-org.apache.hadoop.conf.Configuration-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>getCacheCompactedBlocksOnWriteThreshold</h4>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.434">getCacheCompactedBlocksOnWriteThreshold</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
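The body of this private helper is not reproduced on this page; a minimal sketch, assuming it only resolves the key against the shipped default (the real method may add validation), would be:

    // Assumption: resolve the configured threshold, falling back to the
    // documented default when the key is not set.
    private long getCacheCompactedBlocksOnWriteThreshold(org.apache.hadoop.conf.Configuration conf) {
      return conf.getLong(CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY,
          DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD);
    }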
 <a name="toString--">
@@ -1237,7 +1322,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.416">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.450">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index e7c262a..4b0c04c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -305,12 +305,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/ReaderContext.ReaderType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">ReaderContext.ReaderType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockCacheFactory.ExternalBlockCaches</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockCacheFactory.ExternalBlockCaches</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 490a6d6..dbaf190 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -357,9 +357,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index a069ef9..d3e441d 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -296,10 +296,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
index 5c89e3f..f8add9f 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
@@ -151,8 +151,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.TransitionType.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">TransitRegionStateProcedure.TransitionType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/ServerState.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">ServerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.TransitionType.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">TransitRegionStateProcedure.TransitionType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 08a4a14..c2c215e 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -359,11 +359,11 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/ServerManager.ServerLiveState.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">ServerManager.ServerLiveState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index d4f80a3..b92915d 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -222,9 +222,9 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 043f791..05056bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -430,19 +430,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 839001b..9c8846e 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index d2e9d68..24b5494 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -241,11 +241,11 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
index 7e85199..84977c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
@@ -467,7 +467,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#add-org.apache.hadoop.hbase.Cell-org.apache.hadoop.hbase.regionserver.MemStoreSizing-">add</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#add-java.lang.Iterable-org.apache.hadoop.hbase.regionserver.MemStoreSizing-">add</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#addChangedReaderObserver-org.apache.hadoop.hbase.regionserver.ChangedReadersObserver- [...]
+<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#add-org.apache.hadoop.hbase.Cell-org.apache.hadoop.hbase.regionserver.MemStoreSizing-">add</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#add-java.lang.Iterable-org.apache.hadoop.hbase.regionserver.MemStoreSizing-">add</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#addChangedReaderObserver-org.apache.hadoop.hbase.regionserver.ChangedReadersObserver- [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
index 7fefce7..a25528a 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2337">HStore.StoreFlusherImpl</a>
+<pre>private final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2353">HStore.StoreFlusherImpl</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a></pre>
 </li>
@@ -279,7 +279,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>tracker</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver">FlushLifeCycleTracker</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2339">tracker</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver">FlushLifeCycleTracker</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2355">tracker</a></pre>
 </li>
 </ul>
 <a name="cacheFlushSeqNum">
@@ -288,7 +288,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheFlushSeqNum</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2340">cacheFlushSeqNum</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2356">cacheFlushSeqNum</a></pre>
 </li>
 </ul>
 <a name="snapshot">
@@ -297,7 +297,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>snapshot</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2341">snapshot</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2357">snapshot</a></pre>
 </li>
 </ul>
 <a name="tempFiles">
@@ -306,7 +306,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>tempFiles</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2342">tempFiles</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2358">tempFiles</a></pre>
 </li>
 </ul>
 <a name="committedFiles">
@@ -315,7 +315,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>committedFiles</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2343">committedFiles</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2359">committedFiles</a></pre>
 </li>
 </ul>
 <a name="cacheFlushCount">
@@ -324,7 +324,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheFlushCount</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2344">cacheFlushCount</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2360">cacheFlushCount</a></pre>
 </li>
 </ul>
 <a name="cacheFlushSize">
@@ -333,7 +333,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheFlushSize</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2345">cacheFlushSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2361">cacheFlushSize</a></pre>
 </li>
 </ul>
 <a name="outputFileSize">
@@ -342,7 +342,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>outputFileSize</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2346">outputFileSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2362">outputFileSize</a></pre>
 </li>
 </ul>
 </li>
@@ -359,7 +359,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>StoreFlusherImpl</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2348">StoreFlusherImpl</a>(long&nbsp;cacheFlushSeqNum,
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2364">StoreFlusherImpl</a>(long&nbsp;cacheFlushSeqNum,
                          <a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver">FlushLifeCycleTracker</a>&nbsp;tracker)</pre>
 </li>
 </ul>
@@ -377,7 +377,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>prepare</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2358">prepare</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2374">prepare</a>()</pre>
 <div class="block">This is not thread safe. The caller should have a lock on the region or the store.
  If necessary, the lock can be added with the patch provided in HBASE-10087</div>
 <dl>
@@ -394,7 +394,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>flushCache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2368">flushCache</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2384">flushCache</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#flushCache-org.apache.hadoop.hbase.monitoring.MonitoredTask-">StoreFlushContext</a></code></span></div>
 <div class="block">Flush the cache (create the new store file)
@@ -415,7 +415,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>commit</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2377">commit</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2393">commit</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#commit-org.apache.hadoop.hbase.monitoring.MonitoredTask-">StoreFlushContext</a></code></span></div>
 <div class="block">Commit the flush - add the store file to the store and clear the
@@ -440,7 +440,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>getOutputFileSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2420">getOutputFileSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2436">getOutputFileSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#getOutputFileSize--">getOutputFileSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a></code></dd>
@@ -455,7 +455,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>getCommittedFiles</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2425">getCommittedFiles</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2441">getCommittedFiles</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#getCommittedFiles--">StoreFlushContext</a></code></span></div>
 <div class="block">Returns the newly committed files from the flush. Called only if commit returns true</div>
 <dl>
@@ -472,7 +472,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>replayFlush</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2437">replayFlush</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;fileNames,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2453">replayFlush</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;fileNames,
                         boolean&nbsp;dropMemstoreSnapshot)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Similar to commit, but called in secondary region replicas for replaying the
@@ -495,7 +495,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>abort</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2468">abort</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2484">abort</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Abort the snapshot preparation. Drops the snapshot if any.</div>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index 5761b7c..cb43e5c 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":9,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":9,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":9,"i36":10,"i37":9,"i38":9,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54": [...]
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":9,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":9,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":9,"i37":10,"i38":9,"i39":9,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54": [...]
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -584,20 +584,30 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
                  boolean&nbsp;shouldDropBehind)</code>&nbsp;</td>
 </tr>
 <tr id="i27" class="rowColor">
+<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#createWriterInTmp-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-boolean-boolean-boolean-boolean-long-">createWriterInTmp</a></span>(long&nbsp;maxKeyCount,
+                 <a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
+                 boolean&nbsp;isCompaction,
+                 boolean&nbsp;includeMVCCReadpoint,
+                 boolean&nbsp;includesTag,
+                 boolean&nbsp;shouldDropBehind,
+                 long&nbsp;totalCompactedFilesSize)</code>&nbsp;</td>
+</tr>
+<tr id="i28" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#deleteChangedReaderObserver-org.apache.hadoop.hbase.regionserver.ChangedReadersObserver-">deleteChangedReaderObserver</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</code>&nbsp;</td>
 </tr>
-<tr id="i28" class="altColor">
+<tr id="i29" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#deregisterChildren-org.apache.hadoop.hbase.conf.ConfigurationManager-">deregisterChildren</a></span>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</code>
 <div class="block">Needs to be called to deregister the children from the manager.</div>
 </td>
 </tr>
-<tr id="i29" class="rowColor">
+<tr id="i30" class="altColor">
 <td class="colFirst"><code>static long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#determineTTLFromFamily-org.apache.hadoop.hbase.client.ColumnFamilyDescriptor-">determineTTLFromFamily</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family)</code>&nbsp;</td>
 </tr>
-<tr id="i30" class="altColor">
+<tr id="i31" class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#doCompaction-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl-java.util.Collection-org.apache.hadoop.hbase.security.User-long-java.util.List-">doCompaction</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compacti [...]
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToCompact,
@@ -605,11 +615,11 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
             long&nbsp;compactionStartTime,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;newFiles)</code>&nbsp;</td>
 </tr>
-<tr id="i31" class="rowColor">
+<tr id="i32" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#finishCompactionRequest-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl-">finishCompactionRequest</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr)</code>&nbsp;</td>
 </tr>
-<tr id="i32" class="altColor">
+<tr id="i33" class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#flushCache-long-org.apache.hadoop.hbase.regionserver.MemStoreSnapshot-org.apache.hadoop.hbase.monitoring.MonitoredTask-org.apache.hadoop.hbase.regionserver.throttle.ThroughputController-org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker-">flushCache</a></span>(long&nbsp;logCacheFlushId,
           <a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a>&nbsp;snapshot,
@@ -619,195 +629,195 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <div class="block">Write out current snapshot.</div>
 </td>
 </tr>
-<tr id="i33" class="rowColor">
+<tr id="i34" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalDouble.html?is-external=true" title="class or interface in java.util">OptionalDouble</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getAvgStoreFileAge--">getAvgStoreFileAge</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i34" class="altColor">
+<tr id="i35" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getBlockingFileCount--">getBlockingFileCount</a></span>()</code>
 <div class="block">The number of files required before flushes for this store will be blocked.</div>
 </td>
 </tr>
-<tr id="i35" class="rowColor">
+<tr id="i36" class="altColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getBytesPerChecksum-org.apache.hadoop.conf.Configuration-">getBytesPerChecksum</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
 <div class="block">Returns the configured bytesPerChecksum value.</div>
 </td>
 </tr>
-<tr id="i36" class="altColor">
+<tr id="i37" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCacheConfig--">getCacheConfig</a></span>()</code>
 <div class="block">Used for tests.</div>
 </td>
 </tr>
-<tr id="i37" class="rowColor">
+<tr id="i38" class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util">ChecksumType</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getChecksumType-org.apache.hadoop.conf.Configuration-">getChecksumType</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
 <div class="block">Returns the configured checksum algorithm.</div>
 </td>
 </tr>
-<tr id="i38" class="altColor">
+<tr id="i39" class="rowColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCloseCheckInterval--">getCloseCheckInterval</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i39" class="rowColor">
+<tr id="i40" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getColumnFamilyDescriptor--">getColumnFamilyDescriptor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i40" class="altColor">
+<tr id="i41" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getColumnFamilyName--">getColumnFamilyName</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i41" class="rowColor">
+<tr id="i42" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCompactedCellsCount--">getCompactedCellsCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i42" class="altColor">
+<tr id="i43" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCompactedCellsSize--">getCompactedCellsSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i43" class="rowColor">
+<tr id="i44" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCompactedFiles--">getCompactedFiles</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i44" class="altColor">
+<tr id="i45" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCompactedFilesCount--">getCompactedFilesCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i45" class="rowColor">
+<tr id="i46" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCompactionCheckMultiplier--">getCompactionCheckMultiplier</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i46" class="altColor">
+<tr id="i47" class="rowColor">
 <td class="colFirst"><code>double</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCompactionPressure--">getCompactionPressure</a></span>()</code>
 <div class="block">This value can represent the degree of emergency of compaction for this store.</div>
 </td>
 </tr>
-<tr id="i47" class="rowColor">
+<tr id="i48" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionProgress</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCompactionProgress--">getCompactionProgress</a></span>()</code>
 <div class="block">getter for CompactionProgress object</div>
 </td>
 </tr>
-<tr id="i48" class="altColor">
+<tr id="i49" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCompactPriority--">getCompactPriority</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i49" class="rowColor">
+<tr id="i50" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getComparator--">getComparator</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i50" class="altColor">
+<tr id="i51" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCoprocessorHost--">getCoprocessorHost</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i51" class="rowColor">
+<tr id="i52" class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getCurrentParallelPutCount--">getCurrentParallelPutCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i52" class="altColor">
+<tr id="i53" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getDataBlockEncoder--">getDataBlockEncoder</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i53" class="rowColor">
+<tr id="i54" class="altColor">
 <td class="colFirst"><code>org.apache.hadoop.fs.FileSystem</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getFileSystem--">getFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i54" class="altColor">
+<tr id="i55" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getFlushableSize--">getFlushableSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i55" class="rowColor">
+<tr id="i56" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getFlushedCellsCount--">getFlushedCellsCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i56" class="altColor">
+<tr id="i57" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getFlushedCellsSize--">getFlushedCellsSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i57" class="rowColor">
+<tr id="i58" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getFlushedOutputFileSize--">getFlushedOutputFileSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i58" class="altColor">
+<tr id="i59" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getHFilesSize--">getHFilesSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i59" class="rowColor">
+<tr id="i60" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getHRegion--">getHRegion</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i60" class="altColor">
+<tr id="i61" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getLastCompactSize--">getLastCompactSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i61" class="rowColor">
+<tr id="i62" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMajorCompactedCellsCount--">getMajorCompactedCellsCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i62" class="altColor">
+<tr id="i63" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMajorCompactedCellsSize--">getMajorCompactedCellsSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i63" class="rowColor">
+<tr id="i64" class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMaxCompactedStoreFileRefCount--">getMaxCompactedStoreFileRefCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i64" class="altColor">
+<tr id="i65" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMaxMemStoreTS--">getMaxMemStoreTS</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i65" class="rowColor">
+<tr id="i66" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMaxSequenceId--">getMaxSequenceId</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i66" class="altColor">
+<tr id="i67" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMaxStoreFileAge--">getMaxStoreFileAge</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i67" class="rowColor">
+<tr id="i68" class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStore.html" title="interface in org.apache.hadoop.hbase.regionserver">MemStore</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMemstore--">getMemstore</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i68" class="altColor">
+<tr id="i69" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMemStoreFlushSize--">getMemStoreFlushSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i69" class="rowColor">
+<tr id="i70" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMemStoreSize--">getMemStoreSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i70" class="altColor">
+<tr id="i71" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getMinStoreFileAge--">getMinStoreFileAge</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i71" class="rowColor">
+<tr id="i72" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getNumHFiles--">getNumHFiles</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i72" class="altColor">
+<tr id="i73" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getNumReferenceFiles--">getNumReferenceFiles</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i73" class="rowColor">
+<tr id="i74" class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getOffPeakHours--">getOffPeakHours</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i74" class="altColor">
+<tr id="i75" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getRegionFileSystem--">getRegionFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i75" class="rowColor">
+<tr id="i76" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getRegionInfo--">getRegionInfo</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i76" class="altColor">
+<tr id="i77" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getScanInfo--">getScanInfo</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i77" class="rowColor">
+<tr id="i78" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getScanner-org.apache.hadoop.hbase.client.Scan-java.util.NavigableSet-long-">getScanner</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;byte[]&gt;&nbsp;targetCols,
@@ -815,7 +825,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <div class="block">Return a scanner for both the memstore and the HStore files.</div>
 </td>
 </tr>
-<tr id="i78" class="altColor">
+<tr id="i79" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getScanners-boolean-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-byte:A-long-">getScanners</a></span>(boolean&nbsp;cacheBlocks,
            boolean&nbsp;isGet,
@@ -828,7 +838,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <div class="block">Get all scanners with no filtering based on TTL (that happens further down the line).</div>
 </td>
 </tr>
-<tr id="i79" class="rowColor">
+<tr id="i80" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getScanners-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-boolean-byte:A-boolean-long-">getScanners</a></span>(boolean&nbsp;cacheBlocks,
            boolean&nbsp;usePread,
@@ -842,7 +852,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <div class="block">Get all scanners with no filtering based on TTL (that happens further down the line).</div>
 </td>
 </tr>
-<tr id="i80" class="altColor">
+<tr id="i81" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getScanners-java.util.List-boolean-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-byte:A-long-boolean-">getScanners</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hba [...]
            boolean&nbsp;cacheBlocks,
@@ -858,7 +868,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
  (that happens further down the line).</div>
 </td>
 </tr>
-<tr id="i81" class="rowColor">
+<tr id="i82" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getScanners-java.util.List-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-boolean-byte:A-boolean-long-boolean-">getScanners</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/ha [...]
            boolean&nbsp;cacheBlocks,
@@ -875,75 +885,75 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
  (that happens further down the line).</div>
 </td>
 </tr>
-<tr id="i82" class="altColor">
+<tr id="i83" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getSize--">getSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i83" class="rowColor">
+<tr id="i84" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getSmallestReadPoint--">getSmallestReadPoint</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i84" class="altColor">
+<tr id="i85" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getSnapshotSize--">getSnapshotSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i85" class="rowColor">
+<tr id="i86" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getSplitPoint--">getSplitPoint</a></span>()</code>
 <div class="block">Determines if Store should be split.</div>
 </td>
 </tr>
-<tr id="i86" class="altColor">
+<tr id="i87" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStoreEngine--">getStoreEngine</a></span>()</code>
 <div class="block">Returns the StoreEngine that is backing this concrete implementation of Store.</div>
 </td>
 </tr>
-<tr id="i87" class="rowColor">
+<tr id="i88" class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/LongStream.html?is-external=true" title="class or interface in java.util.stream">LongStream</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStoreFileAgeStream--">getStoreFileAgeStream</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i88" class="altColor">
+<tr id="i89" class="rowColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefileFieldSize-org.apache.hadoop.hbase.regionserver.HStoreFile-java.util.function.ToLongFunction-">getStorefileFieldSize</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file,
                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/ToLongFunction.html?is-external=true" title="class or interface in java.util.function">ToLongFunction</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileReader</a>&gt;&nbsp;f)</code>&nbsp;</td>
 </tr>
-<tr id="i89" class="rowColor">
+<tr id="i90" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefiles--">getStorefiles</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i90" class="altColor">
+<tr id="i91" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefilesCount--">getStorefilesCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i91" class="rowColor">
+<tr id="i92" class="altColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefilesFieldSize-java.util.function.ToLongFunction-">getStorefilesFieldSize</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/ToLongFunction.html?is-external=true" title="class or interface in java.util.function">ToLongFunction</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="cl [...]
 </tr>
-<tr id="i92" class="altColor">
+<tr id="i93" class="rowColor">
 <td class="colFirst"><code>(package private) long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStoreFileSize-org.apache.hadoop.hbase.regionserver.HStoreFile-">getStoreFileSize</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file)</code>
 <div class="block">Computes the length of a store file without succumbing to any errors along the way.</div>
 </td>
 </tr>
-<tr id="i93" class="rowColor">
+<tr id="i94" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefilesRootLevelIndexSize--">getStorefilesRootLevelIndexSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i94" class="altColor">
+<tr id="i95" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefilesSize--">getStorefilesSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i95" class="rowColor">
+<tr id="i96" class="altColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStorefilesSize-java.util.Collection-java.util.function.Predicate-">getStorefilesSize</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop. [...]
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true" title="class or interface in java.util.function">Predicate</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;predicate)</code>&nbsp;</td>
 </tr>
-<tr id="i96" class="altColor">
+<tr id="i97" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStoreFileTtl--">getStoreFileTtl</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i97" class="rowColor">
+<tr id="i98" class="altColor">
 <td class="colFirst"><code>static org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStoreHomedir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-byte:A-">getStoreHomedir</a></span>(org.apache.hadoop.fs.Path&nbsp;tabledir,
                <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;hri,
@@ -953,7 +963,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 </div>
 </td>
 </tr>
-<tr id="i98" class="altColor">
+<tr id="i99" class="rowColor">
 <td class="colFirst"><code>static org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStoreHomedir-org.apache.hadoop.fs.Path-java.lang.String-byte:A-">getStoreHomedir</a></span>(org.apache.hadoop.fs.Path&nbsp;tabledir,
                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedName,
@@ -963,29 +973,29 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 </div>
 </td>
 </tr>
-<tr id="i99" class="rowColor">
+<tr id="i100" class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStoreRefCount--">getStoreRefCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i100" class="altColor">
+<tr id="i101" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getStoreSizeUncompressed--">getStoreSizeUncompressed</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i101" class="rowColor">
+<tr id="i102" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getTableName--">getTableName</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i102" class="altColor">
+<tr id="i103" class="rowColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getTotalSize-java.util.Collection-">getTotalSize</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;& [...]
 </tr>
-<tr id="i103" class="rowColor">
+<tr id="i104" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getTotalStaticBloomSize--">getTotalStaticBloomSize</a></span>()</code>
 <div class="block">Returns the total byte size of all Bloom filter bit arrays.</div>
 </td>
 </tr>
-<tr id="i104" class="altColor">
+<tr id="i105" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getTotalStaticIndexSize--">getTotalStaticIndexSize</a></span>()</code>
 <div class="block">Returns the total size of all index blocks in the data block indexes, including the root level,
@@ -993,38 +1003,38 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
  single-level indexes.</div>
 </td>
 </tr>
-<tr id="i105" class="rowColor">
+<tr id="i106" class="altColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#getTotalUncompressedBytes-java.util.List-">getTotalUncompressedBytes</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile< [...]
 </tr>
-<tr id="i106" class="altColor">
+<tr id="i107" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#hasReferences--">hasReferences</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i107" class="rowColor">
+<tr id="i108" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#hasTooManyStoreFiles--">hasTooManyStoreFiles</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i108" class="altColor">
+<tr id="i109" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#heapSize--">heapSize</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i109" class="rowColor">
+<tr id="i110" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#isPrimaryReplicaStore--">isPrimaryReplicaStore</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i110" class="altColor">
+<tr id="i111" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#isSloppyMemStore--">isSloppyMemStore</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i111" class="rowColor">
+<tr id="i112" class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#loadStoreFiles-boolean-">loadStoreFiles</a></span>(boolean&nbsp;warmup)</code>
 <div class="block">Creates an unsorted list of StoreFile loaded in parallel
  from the given directory.</div>
 </td>
 </tr>
-<tr id="i112" class="altColor">
+<tr id="i113" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#logCompactionEndMessage-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl-java.util.List-long-long-">logCompactionEndMessage</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr,
                        <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
@@ -1033,64 +1043,64 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <div class="block">Log a very elaborate compaction completion message.</div>
 </td>
 </tr>
-<tr id="i113" class="rowColor">
+<tr id="i114" class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#moveCompactedFilesIntoPlace-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl-java.util.List-org.apache.hadoop.hbase.security.User-">moveCompactedFilesIntoPlace</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Comp [...]
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;newFiles,
                            <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)</code>&nbsp;</td>
 </tr>
-<tr id="i114" class="altColor">
+<tr id="i115" class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#moveFileIntoPlace-org.apache.hadoop.fs.Path-">moveFileIntoPlace</a></span>(org.apache.hadoop.fs.Path&nbsp;newFile)</code>&nbsp;</td>
 </tr>
-<tr id="i115" class="rowColor">
+<tr id="i116" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#needsCompaction--">needsCompaction</a></span>()</code>
 <div class="block">See if there's too much store files in this store</div>
 </td>
 </tr>
-<tr id="i116" class="altColor">
+<tr id="i117" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#notifyChangedReadersObservers-java.util.List-">notifyChangedReadersObservers</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HSt [...]
 <div class="block">Notify all observers that set of Readers has changed.</div>
 </td>
 </tr>
-<tr id="i117" class="rowColor">
+<tr id="i118" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#onConfigurationChange-org.apache.hadoop.conf.Configuration-">onConfigurationChange</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
 <div class="block">This method would be called by the <a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf"><code>ConfigurationManager</code></a>
  object when the <code>Configuration</code> object is reloaded from disk.</div>
 </td>
 </tr>
-<tr id="i118" class="altColor">
+<tr id="i119" class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#openStoreFiles-java.util.Collection-boolean-">openStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">Stor [...]
               boolean&nbsp;warmup)</code>&nbsp;</td>
 </tr>
-<tr id="i119" class="rowColor">
+<tr id="i120" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#postSnapshotOperation--">postSnapshotOperation</a></span>()</code>
 <div class="block">Perform tasks needed after the completion of snapshot operation.</div>
 </td>
 </tr>
-<tr id="i120" class="altColor">
+<tr id="i121" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#preBulkLoadHFile-java.lang.String-long-">preBulkLoadHFile</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;srcPathStr,
                 long&nbsp;seqNum)</code>
 <div class="block">This method should only be called from Region.</div>
 </td>
 </tr>
-<tr id="i121" class="rowColor">
+<tr id="i122" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#preFlushSeqIDEstimation--">preFlushSeqIDEstimation</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i122" class="altColor">
+<tr id="i123" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#preSnapshotOperation--">preSnapshotOperation</a></span>()</code>
 <div class="block">Sets the store up for a region level snapshot operation.</div>
 </td>
 </tr>
-<tr id="i123" class="rowColor">
+<tr id="i124" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#recreateScanners-java.util.List-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-boolean-byte:A-boolean-long-boolean-">recreateScanners</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org [...]
                 boolean&nbsp;cacheBlocks,
@@ -1106,20 +1116,20 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <div class="block">Recreates the scanners on the current list of active store file scanners</div>
 </td>
 </tr>
-<tr id="i124" class="altColor">
+<tr id="i125" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#refreshStoreFiles--">refreshStoreFiles</a></span>()</code>
 <div class="block">Checks the underlying store files, and opens the files that have not been opened, and removes
  the store file readers for store files no longer available.</div>
 </td>
 </tr>
-<tr id="i125" class="rowColor">
+<tr id="i126" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#refreshStoreFiles-java.util.Collection-">refreshStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> [...]
 <div class="block">Replaces the store files that the store has with the given files.</div>
 </td>
 </tr>
-<tr id="i126" class="altColor">
+<tr id="i127" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#refreshStoreFilesInternal-java.util.Collection-">refreshStoreFilesInternal</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regi [...]
 <div class="block">Checks the underlying store files, and opens the files that  have not
@@ -1127,28 +1137,28 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
  available.</div>
 </td>
 </tr>
-<tr id="i127" class="rowColor">
+<tr id="i128" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#registerChildren-org.apache.hadoop.hbase.conf.ConfigurationManager-">registerChildren</a></span>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</code>
 <div class="block">Needs to be called to register the children to the manager.</div>
 </td>
 </tr>
-<tr id="i128" class="altColor">
+<tr id="i129" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#removeCompactedfiles-java.util.Collection-">removeCompactedfiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HSt [...]
 <div class="block">Archives and removes the compacted files</div>
 </td>
 </tr>
-<tr id="i129" class="rowColor">
+<tr id="i130" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#removeUnneededFiles--">removeUnneededFiles</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i130" class="altColor">
+<tr id="i131" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#replaceStoreFiles-java.util.Collection-java.util.Collection-">replaceStoreFiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.re [...]
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;result)</code>&nbsp;</td>
 </tr>
-<tr id="i131" class="rowColor">
+<tr id="i132" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#replayCompactionMarker-org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor-boolean-boolean-">replayCompactionMarker</a></span>(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor&nbsp;compaction,
                       boolean&nbsp;pickCompactionFiles,
@@ -1156,78 +1166,78 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <div class="block">Call to complete a compaction.</div>
 </td>
 </tr>
-<tr id="i132" class="altColor">
+<tr id="i133" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#reportArchivedFilesForQuota-java.util.List-java.util.List-">reportArchivedFilesForQuota</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="interface in org.apache.hadoop [...]
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;fileSizes)</code>&nbsp;</td>
 </tr>
-<tr id="i133" class="rowColor">
+<tr id="i134" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#requestCompaction--">requestCompaction</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i134" class="altColor">
+<tr id="i135" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#requestCompaction-int-org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker-org.apache.hadoop.hbase.security.User-">requestCompaction</a></span>(int&nbsp;priority,
                  <a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver.compactions">CompactionLifeCycleTracker</a>&nbsp;tracker,
                  <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)</code>&nbsp;</td>
 </tr>
-<tr id="i135" class="rowColor">
+<tr id="i136" class="altColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#setDataBlockEncoderInTest-org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder-">setDataBlockEncoderInTest</a></span>(<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;blockEncoder)</code>
 <div class="block">Should be used only in tests.</div>
 </td>
 </tr>
-<tr id="i136" class="altColor">
+<tr id="i137" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#setScanInfo-org.apache.hadoop.hbase.regionserver.ScanInfo-">setScanInfo</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo)</code>
 <div class="block">Set scan info, used by test</div>
 </td>
 </tr>
-<tr id="i137" class="rowColor">
+<tr id="i138" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#shouldPerformMajorCompaction--">shouldPerformMajorCompaction</a></span>()</code>
 <div class="block">Tests whether we should run a major compaction.</div>
 </td>
 </tr>
-<tr id="i138" class="altColor">
+<tr id="i139" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#snapshot--">snapshot</a></span>()</code>
 <div class="block">Snapshot this stores memstore.</div>
 </td>
 </tr>
-<tr id="i139" class="rowColor">
+<tr id="i140" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#startReplayingFromWAL--">startReplayingFromWAL</a></span>()</code>
 <div class="block">This message intends to inform the MemStore that next coming updates
  are going to be part of the replaying edits from WAL</div>
 </td>
 </tr>
-<tr id="i140" class="altColor">
+<tr id="i141" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#stopReplayingFromWAL--">stopReplayingFromWAL</a></span>()</code>
 <div class="block">This message intends to inform the MemStore that the replaying edits from WAL
  are done</div>
 </td>
 </tr>
-<tr id="i141" class="rowColor">
+<tr id="i142" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#throttleCompaction-long-">throttleCompaction</a></span>(long&nbsp;compactionSize)</code>&nbsp;</td>
 </tr>
-<tr id="i142" class="altColor">
+<tr id="i143" class="rowColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#timeOfOldestEdit--">timeOfOldestEdit</a></span>()</code>
 <div class="block">When was the last edit done in the memstore</div>
 </td>
 </tr>
-<tr id="i143" class="rowColor">
+<tr id="i144" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#toString--">toString</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i144" class="altColor">
+<tr id="i145" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#triggerMajorCompaction--">triggerMajorCompaction</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i145" class="rowColor">
+<tr id="i146" class="altColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#updateSpaceQuotaAfterFileReplacement-org.apache.hadoop.hbase.quotas.RegionSizeStore-org.apache.hadoop.hbase.client.RegionInfo-java.util.Collection-java.util.Collection-">updateSpaceQuotaAfterFileReplacement</a></span>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</ [...]
                                     <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
@@ -1237,14 +1247,14 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
  and adding in the size for new files.</div>
 </td>
 </tr>
-<tr id="i146" class="altColor">
+<tr id="i147" class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#updateStorefiles-java.util.List-long-">updateStorefiles</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
                 long&nbsp;snapshotId)</code>
 <div class="block">Change storeFiles adding into place the Reader produced by this new flush.</div>
 </td>
 </tr>
-<tr id="i147" class="rowColor">
+<tr id="i148" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#upsert-java.lang.Iterable-long-org.apache.hadoop.hbase.regionserver.MemStoreSizing-">upsert</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a> [...]
       long&nbsp;readpoint,
@@ -1252,17 +1262,17 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <div class="block">Adds or replaces the specified KeyValues.</div>
 </td>
 </tr>
-<tr id="i148" class="altColor">
+<tr id="i149" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#validateStoreFile-org.apache.hadoop.fs.Path-">validateStoreFile</a></span>(org.apache.hadoop.fs.Path&nbsp;path)</code>
 <div class="block">Validates a store file by opening and closing it.</div>
 </td>
 </tr>
-<tr id="i149" class="rowColor">
+<tr id="i150" class="altColor">
 <td class="colFirst"><code>(package private) int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#versionsToReturn-int-">versionsToReturn</a></span>(int&nbsp;wantedVersions)</code>&nbsp;</td>
 </tr>
-<tr id="i150" class="altColor">
+<tr id="i151" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#writeCompactionWalRecord-java.util.Collection-java.util.Collection-">writeCompactionWalRecord</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.h [...]
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;newFiles)</code>
@@ -1790,7 +1800,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>FIXED_OVERHEAD</h4>
-<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2497">FIXED_OVERHEAD</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2513">FIXED_OVERHEAD</a></pre>
 </li>
 </ul>
 <a name="DEEP_OVERHEAD">
@@ -1799,7 +1809,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DEEP_OVERHEAD</h4>
-<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2501">DEEP_OVERHEAD</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2517">DEEP_OVERHEAD</a></pre>
 </li>
 </ul>
 </li>
@@ -2561,7 +2571,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createWriterInTmp</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1122">createWriterInTmp</a>(long&nbsp;maxKeyCount,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1113">createWriterInTmp</a>(long&nbsp;maxKeyCount,
                                          <a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
                                          boolean&nbsp;isCompaction,
                                          boolean&nbsp;includeMVCCReadpoint,
@@ -2569,6 +2579,26 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
                                          boolean&nbsp;shouldDropBehind)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+</dl>
+</li>
+</ul>
+<a name="createWriterInTmp-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-boolean-boolean-boolean-boolean-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>createWriterInTmp</h4>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1129">createWriterInTmp</a>(long&nbsp;maxKeyCount,
+                                         <a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
+                                         boolean&nbsp;isCompaction,
+                                         boolean&nbsp;includeMVCCReadpoint,
+                                         boolean&nbsp;includesTag,
+                                         boolean&nbsp;shouldDropBehind,
+                                         long&nbsp;totalCompactedFilesSize)
+                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>compression</code> - Compression algorithm to use</dd>
 <dd><code>isCompaction</code> - whether we are creating a new file in a compaction</dd>
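The hunk above records the substantive API change in this part of the diff: a new createWriterInTmp overload on HStore that appends a trailing totalCompactedFilesSize parameter to the existing six-argument form. A minimal sketch of a call site, assuming code that already runs inside the region server with an HStore instance in hand (HStore is an internal class); argument values are purely illustrative:

    import java.io.IOException;
    import org.apache.hadoop.hbase.io.compress.Compression;
    import org.apache.hadoop.hbase.regionserver.HStore;
    import org.apache.hadoop.hbase.regionserver.StoreFileWriter;

    final class CreateWriterSketch {
      // Opens a temporary store file writer for a compaction output, passing the combined
      // size of the compaction inputs through the parameter added by this overload.
      static StoreFileWriter openCompactionWriter(HStore store, long maxKeyCount,
          long totalCompactedFilesSize) throws IOException {
        return store.createWriterInTmp(
            maxKeyCount,                  // estimated number of cells the writer will receive
            Compression.Algorithm.NONE,   // compression algorithm for the new file
            true,                         // isCompaction: writing a compaction output
            true,                         // includeMVCCReadpoint
            false,                        // includesTag
            true,                         // shouldDropBehind
            totalCompactedFilesSize);     // new trailing parameter shown in the hunk above
      }
    }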
@@ -2587,7 +2617,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileContext</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1177">createFileContext</a>(<a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1193">createFileContext</a>(<a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
                                        boolean&nbsp;includeMVCCReadpoint,
                                        boolean&nbsp;includesTag,
                                        <a href="../../../../../org/apache/hadoop/hbase/io/crypto/Encryption.Context.html" title="class in org.apache.hadoop.hbase.io.crypto">Encryption.Context</a>&nbsp;cryptoContext)</pre>
@@ -2599,7 +2629,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1203">getTotalSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs)</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1219">getTotalSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs)</pre>
 </li>
 </ul>
 <a name="updateStorefiles-java.util.List-long-">
@@ -2608,7 +2638,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>updateStorefiles</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1212">updateStorefiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1228">updateStorefiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
                                  long&nbsp;snapshotId)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Change storeFiles adding into place the Reader produced by this new flush.</div>
@@ -2628,7 +2658,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>notifyChangedReadersObservers</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1242">notifyChangedReadersObservers</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1258">notifyChangedReadersObservers</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Notify all observers that set of Readers has changed.</div>
 <dl>
@@ -2643,7 +2673,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1266">getScanners</a>(boolean&nbsp;cacheBlocks,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1282">getScanners</a>(boolean&nbsp;cacheBlocks,
                                          boolean&nbsp;isGet,
                                          boolean&nbsp;usePread,
                                          boolean&nbsp;isCompaction,
@@ -2675,7 +2705,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1286">getScanners</a>(boolean&nbsp;cacheBlocks,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1302">getScanners</a>(boolean&nbsp;cacheBlocks,
                                          boolean&nbsp;usePread,
                                          boolean&nbsp;isCompaction,
                                          <a href="../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.html" title="class in org.apache.hadoop.hbase.regionserver.querymatcher">ScanQueryMatcher</a>&nbsp;matcher,
@@ -2710,7 +2740,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>clearAndClose</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1320">clearAndClose</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;scanners)</pre>
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1336">clearAndClose</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;scanners)</pre>
 </li>
 </ul>
 <a name="getScanners-java.util.List-boolean-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-byte:A-long-boolean-">
@@ -2719,7 +2749,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1344">getScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1360">getScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java [...]
                                          boolean&nbsp;cacheBlocks,
                                          boolean&nbsp;isGet,
                                          boolean&nbsp;usePread,
@@ -2756,7 +2786,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1368">getScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1384">getScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java [...]
                                          boolean&nbsp;cacheBlocks,
                                          boolean&nbsp;usePread,
                                          boolean&nbsp;isCompaction,
@@ -2796,7 +2826,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>addChangedReaderObserver</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1401">addChangedReaderObserver</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1417">addChangedReaderObserver</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>o</code> - Observer who wants to know about changes in set of Readers</dd>
@@ -2809,7 +2839,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteChangedReaderObserver</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1408">deleteChangedReaderObserver</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1424">deleteChangedReaderObserver</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>o</code> - Observer no longer interested in changes in set of Readers.</dd>
@@ -2822,7 +2852,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>compact</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1459">compact</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/ [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1475">compact</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/ [...]
                                 <a href="../../../../../org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.html" title="interface in org.apache.hadoop.hbase.regionserver.throttle">ThroughputController</a>&nbsp;throughputController,
                                 <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2879,7 +2909,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>doCompaction</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1490">doCompaction</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/comp [...]
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1506">doCompaction</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/comp [...]
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToCompact,
                                         <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user,
                                         long&nbsp;compactionStartTime,
@@ -2897,7 +2927,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>moveCompactedFilesIntoPlace</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1523">moveCompactedFilesIntoPlace</a>(<a href="../../../../../org/apache/hadoop/hbase/regi [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1539">moveCompactedFilesIntoPlace</a>(<a href="../../../../../org/apache/hadoop/hbase/regi [...]
                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;newFiles,
                                                      <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2913,7 +2943,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>moveFileIntoPlace</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1539">moveFileIntoPlace</a>(org.apache.hadoop.fs.Path&nbsp;newFile)
+<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1555">moveFileIntoPlace</a>(org.apache.hadoop.fs.Path&nbsp;newFile)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2927,7 +2957,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>writeCompactionWalRecord</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1551">writeCompactionWalRecord</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacted,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1567">writeCompactionWalRecord</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacted,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;newFiles)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the compaction WAL record.</div>
@@ -2946,7 +2976,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>replaceStoreFiles</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1572">replaceStoreFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1588">replaceStoreFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles,
                        <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;result)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -2961,7 +2991,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>updateSpaceQuotaAfterFileReplacement</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1603">updateSpaceQuotaAfterFileReplacement</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;sizeStore,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1619">updateSpaceQuotaAfterFileReplacement</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;sizeStore,
                                           <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;oldFiles,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;newFiles)</pre>
@@ -2982,7 +3012,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>logCompactionEndMessage</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1630">logCompactionEndMessage</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1646">logCompactionEndMessage</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
                                      long&nbsp;now,
                                      long&nbsp;compactionStartTime)</pre>
@@ -3001,7 +3031,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>replayCompactionMarker</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1668">replayCompactionMarker</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor&nbsp;compaction,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1684">replayCompactionMarker</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor&nbsp;compaction,
                                    boolean&nbsp;pickCompactionFiles,
                                    boolean&nbsp;removeFiles)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -3020,7 +3050,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>compactRecentForTestingAssumingDefaultPolicy</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1733">compactRecentForTestingAssumingDefaultPolicy</a>(int&nbsp;N)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1749">compactRecentForTestingAssumingDefaultPolicy</a>(int&nbsp;N)
                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This method tries to compact N recent files for testing.
  Note that because compacting "recent" files only makes sense for some policies,
@@ -3040,7 +3070,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>hasReferences</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1785">hasReferences</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1801">hasReferences</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#hasReferences--">hasReferences</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3055,7 +3085,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactionProgress</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionProgress</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1803">getCompactionProgress</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionProgress</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1819">getCompactionProgress</a>()</pre>
 <div class="block">getter for CompactionProgress object</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -3069,7 +3099,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldPerformMajorCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1808">shouldPerformMajorCompaction</a>()
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1824">shouldPerformMajorCompaction</a>()
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#shouldPerformMajorCompaction--">Store</a></code></span></div>
 <div class="block">Tests whether we should run a major compaction. For example, if the configured major compaction
@@ -3090,7 +3120,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>requestCompaction</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1820">requestCompaction</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1836">requestCompaction</a>()
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3104,7 +3134,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>requestCompaction</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1824">requestCompaction</a>(int&nbsp;priority,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1840">requestCompaction</a>(int&nbsp;priority,
                                                      <a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver.compactions">CompactionLifeCycleTracker</a>&nbsp;tracker,
                                                      <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -3120,7 +3150,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>addToCompactingFiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1906">addToCompactingFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToAdd)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1922">addToCompactingFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToAdd)</pre>
 <div class="block">Adds the files to compacting files. filesCompacting must be locked.</div>
 </li>
 </ul>
@@ -3130,7 +3160,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>removeUnneededFiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1918">removeUnneededFiles</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1934">removeUnneededFiles</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3144,7 +3174,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>cancelRequestedCompaction</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1956">cancelRequestedCompaction</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&nbsp;compaction)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1972">cancelRequestedCompaction</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&nbsp;compaction)</pre>
 </li>
 </ul>
 <a name="finishCompactionRequest-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl-">
@@ -3153,7 +3183,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>finishCompactionRequest</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1960">finishCompactionRequest</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1976">finishCompactionRequest</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr)</pre>
 </li>
 </ul>
 <a name="validateStoreFile-org.apache.hadoop.fs.Path-">
@@ -3162,7 +3192,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>validateStoreFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1976">validateStoreFile</a>(org.apache.hadoop.fs.Path&nbsp;path)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1992">validateStoreFile</a>(org.apache.hadoop.fs.Path&nbsp;path)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Validates a store file by opening and closing it. In HFileV2 this should not be an expensive
  operation.</div>
@@ -3180,7 +3210,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>completeCompaction</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1995">completeCompaction</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2011">completeCompaction</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Update counts.</div>
 <dl>
@@ -3197,7 +3227,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>versionsToReturn</h4>
-<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2015">versionsToReturn</a>(int&nbsp;wantedVersions)</pre>
+<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2031">versionsToReturn</a>(int&nbsp;wantedVersions)</pre>
 </li>
 </ul>
 <a name="canSplit--">
@@ -3206,7 +3236,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>canSplit</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2025">canSplit</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2041">canSplit</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#canSplit--">Store</a></code></span></div>
 <div class="block">Returns whether this store is splittable, i.e., no reference file in this store.</div>
 <dl>
@@ -3221,7 +3251,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSplitPoint</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2042">getSplitPoint</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2058">getSplitPoint</a>()</pre>
 <div class="block">Determines if Store should be split.</div>
 </li>
 </ul>
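canSplit() and getSplitPoint() above are the two calls a split decision needs from the store: the first reports whether reference files still block a split, the second proposes a split row. A small hedged sketch, same internal-API caveat:

    import java.util.Optional;
    import org.apache.hadoop.hbase.regionserver.HStore;

    final class SplitCheckSketch {
      // Returns the proposed split row for the store, or null when the store cannot be split
      // (for example because it still contains reference files from an earlier split).
      static byte[] proposeSplitRow(HStore store) {
        if (!store.canSplit()) {
          return null;
        }
        Optional<byte[]> splitPoint = store.getSplitPoint();
        return splitPoint.orElse(null);
      }
    }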
@@ -3231,7 +3261,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastCompactSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2062">getLastCompactSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2078">getLastCompactSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getLastCompactSize--">getLastCompactSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3246,7 +3276,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2067">getSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2083">getSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getSize--">getSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3261,7 +3291,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>triggerMajorCompaction</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2071">triggerMajorCompaction</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2087">triggerMajorCompaction</a>()</pre>
 </li>
 </ul>
 <a name="getScanner-org.apache.hadoop.hbase.client.Scan-java.util.NavigableSet-long-">
@@ -3270,7 +3300,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2087">getScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2103">getScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;byte[]&gt;&nbsp;targetCols,
                                   long&nbsp;readPt)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
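getScanner(Scan, NavigableSet&lt;byte[]&gt;, long) above returns a store-level KeyValueScanner for one column family. A hedged sketch of obtaining and closing one; the row range, qualifier and read point are made-up values, and seek/next iteration is omitted:

    import java.io.IOException;
    import java.util.NavigableSet;
    import java.util.TreeSet;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.regionserver.HStore;
    import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
    import org.apache.hadoop.hbase.util.Bytes;

    final class StoreScannerSketch {
      // Opens a scanner over the store for a single qualifier and closes it again.
      static void openAndClose(HStore store, long readPt) throws IOException {
        Scan scan = new Scan().withStartRow(Bytes.toBytes("row-a"))
            .withStopRow(Bytes.toBytes("row-z"));
        NavigableSet<byte[]> targetCols = new TreeSet<>(Bytes.BYTES_COMPARATOR);
        targetCols.add(Bytes.toBytes("q1"));          // qualifiers to request from this store
        KeyValueScanner scanner = store.getScanner(scan, targetCols, readPt);
        try {
          // seek/next iteration intentionally omitted in this sketch
        } finally {
          scanner.close();
        }
      }
    }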
@@ -3293,7 +3323,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createScanner</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2104">createScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2120">createScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
                                         <a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo,
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;byte[]&gt;&nbsp;targetCols,
                                         long&nbsp;readPt)
@@ -3310,7 +3340,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>recreateScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2125">recreateScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2141">recreateScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api [...]
                                               boolean&nbsp;cacheBlocks,
                                               boolean&nbsp;usePread,
                                               boolean&nbsp;isCompaction,
@@ -3349,7 +3379,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2159">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2175">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -3362,7 +3392,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2164">getStorefilesCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2180">getStorefilesCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesCount--">getStorefilesCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3377,7 +3407,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedFilesCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2169">getCompactedFilesCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2185">getCompactedFilesCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedFilesCount--">getCompactedFilesCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3392,7 +3422,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreFileAgeStream</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/LongStream.html?is-external=true" title="class or interface in java.util.stream">LongStream</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2173">getStoreFileAgeStream</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/LongStream.html?is-external=true" title="class or interface in java.util.stream">LongStream</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2189">getStoreFileAgeStream</a>()</pre>
 </li>
 </ul>
 <a name="getMaxStoreFileAge--">
@@ -3401,7 +3431,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaxStoreFileAge</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2186">getMaxStoreFileAge</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2202">getMaxStoreFileAge</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMaxStoreFileAge--">getMaxStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3416,7 +3446,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMinStoreFileAge</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2191">getMinStoreFileAge</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2207">getMinStoreFileAge</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMinStoreFileAge--">getMinStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3431,7 +3461,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getAvgStoreFileAge</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalDouble.html?is-external=true" title="class or interface in java.util">OptionalDouble</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2196">getAvgStoreFileAge</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalDouble.html?is-external=true" title="class or interface in java.util">OptionalDouble</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2212">getAvgStoreFileAge</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getAvgStoreFileAge--">getAvgStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3446,7 +3476,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumReferenceFiles</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2201">getNumReferenceFiles</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2217">getNumReferenceFiles</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getNumReferenceFiles--">getNumReferenceFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3461,7 +3491,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumHFiles</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2207">getNumHFiles</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2223">getNumHFiles</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getNumHFiles--">getNumHFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3476,7 +3506,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreSizeUncompressed</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2213">getStoreSizeUncompressed</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2229">getStoreSizeUncompressed</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStoreSizeUncompressed--">getStoreSizeUncompressed</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3491,7 +3521,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2218">getStorefilesSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2234">getStorefilesSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesSize--">getStorefilesSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3506,7 +3536,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getHFilesSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2224">getHFilesSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2240">getHFilesSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getHFilesSize--">getHFilesSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3521,7 +3551,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalUncompressedBytes</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2230">getTotalUncompressedBytes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;files)</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2246">getTotalUncompressedBytes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;files)</pre>
 </li>
 </ul>
 <a name="getStorefilesSize-java.util.Collection-java.util.function.Predicate-">
@@ -3530,7 +3560,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2236">getStorefilesSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;files,
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2252">getStorefilesSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;files,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true" title="class or interface in java.util.function">Predicate</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;predicate)</pre>
 </li>
 </ul>
@@ -3540,7 +3570,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefileFieldSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2241">getStorefileFieldSize</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file,
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2257">getStorefileFieldSize</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file,
                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/ToLongFunction.html?is-external=true" title="class or interface in java.util.function">ToLongFunction</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileReader</a>&gt;&nbsp;f)</pre>
 </li>
 </ul>
@@ -3550,7 +3580,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesFieldSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2252">getStorefilesFieldSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/ToLongFunction.html?is-external=true" title="class or interface in java.util.function">ToLongFunction</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileReader</a>&gt;&nbsp [...]
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2268">getStorefilesFieldSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/ToLongFunction.html?is-external=true" title="class or interface in java.util.function">ToLongFunction</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileReader</a>&gt;&nbsp [...]
 </li>
 </ul>
 <a name="getStorefilesRootLevelIndexSize--">
@@ -3559,7 +3589,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesRootLevelIndexSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2258">getStorefilesRootLevelIndexSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2274">getStorefilesRootLevelIndexSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesRootLevelIndexSize--">getStorefilesRootLevelIndexSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3574,7 +3604,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalStaticIndexSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2263">getTotalStaticIndexSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2279">getTotalStaticIndexSize</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getTotalStaticIndexSize--">Store</a></code></span></div>
 <div class="block">Returns the total size of all index blocks in the data block indexes, including the root level,
  intermediate levels, and the leaf level for multi-level indexes, or just the root level for
@@ -3593,7 +3623,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalStaticBloomSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2268">getTotalStaticBloomSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2284">getTotalStaticBloomSize</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getTotalStaticBloomSize--">Store</a></code></span></div>
 <div class="block">Returns the total byte size of all Bloom filter bit arrays. For compound Bloom filters even the
  Bloom blocks currently not loaded into the block cache are counted.</div>
@@ -3611,7 +3641,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMemStoreSize</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2273">getMemStoreSize</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2289">getMemStoreSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMemStoreSize--">getMemStoreSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3626,7 +3656,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactPriority</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2278">getCompactPriority</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2294">getCompactPriority</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactPriority--">getCompactPriority</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3639,7 +3669,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>throttleCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2286">throttleCompaction</a>(long&nbsp;compactionSize)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2302">throttleCompaction</a>(long&nbsp;compactionSize)</pre>
 </li>
 </ul>
 <a name="getHRegion--">
@@ -3648,7 +3678,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getHRegion</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2290">getHRegion</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2306">getHRegion</a>()</pre>
 </li>
 </ul>
 <a name="getCoprocessorHost--">
@@ -3657,7 +3687,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCoprocessorHost</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2294">getCoprocessorHost</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2310">getCoprocessorHost</a>()</pre>
 </li>
 </ul>
 <a name="getRegionInfo--">
@@ -3666,7 +3696,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionInfo</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2299">getRegionInfo</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2315">getRegionInfo</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getRegionInfo--">getRegionInfo</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3681,7 +3711,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>areWritesEnabled</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2304">areWritesEnabled</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2320">areWritesEnabled</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#areWritesEnabled--">areWritesEnabled</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3694,7 +3724,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSmallestReadPoint</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2309">getSmallestReadPoint</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2325">getSmallestReadPoint</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getSmallestReadPoint--">getSmallestReadPoint</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3711,7 +3741,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>upsert</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2323">upsert</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;cells,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2339">upsert</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;cells,
                    long&nbsp;readpoint,
                    <a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSizing.html" title="interface in org.apache.hadoop.hbase.regionserver">MemStoreSizing</a>&nbsp;memstoreSizing)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -3736,7 +3766,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createFlushContext</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2333">createFlushContext</a>(long&nbsp;cacheFlushId,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2349">createFlushContext</a>(long&nbsp;cacheFlushId,
                                             <a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver">FlushLifeCycleTracker</a>&nbsp;tracker)</pre>
 </li>
 </ul>
@@ -3746,7 +3776,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>needsCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2480">needsCompaction</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2496">needsCompaction</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#needsCompaction--">Store</a></code></span></div>
 <div class="block">See if there's too much store files in this store</div>
 <dl>
@@ -3764,7 +3794,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2493">getCacheConfig</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2509">getCacheConfig</a>()</pre>
 <div class="block">Used for tests.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -3778,7 +3808,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2508">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2524">heapSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize--">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -3794,7 +3824,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getComparator</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2514">getComparator</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2530">getComparator</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getComparator--">getComparator</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3807,7 +3837,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanInfo</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2518">getScanInfo</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2534">getScanInfo</a>()</pre>
 </li>
 </ul>
 <a name="setScanInfo-org.apache.hadoop.hbase.regionserver.ScanInfo-">
@@ -3816,7 +3846,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>setScanInfo</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2526">setScanInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2542">setScanInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo)</pre>
 <div class="block">Set scan info, used by test</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -3830,7 +3860,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>hasTooManyStoreFiles</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2531">hasTooManyStoreFiles</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2547">hasTooManyStoreFiles</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#hasTooManyStoreFiles--">hasTooManyStoreFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3845,7 +3875,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2536">getFlushedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2552">getFlushedCellsCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushedCellsCount--">getFlushedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3860,7 +3890,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2541">getFlushedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2557">getFlushedCellsSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushedCellsSize--">getFlushedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3875,7 +3905,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushedOutputFileSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2546">getFlushedOutputFileSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2562">getFlushedOutputFileSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushedOutputFileSize--">getFlushedOutputFileSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3890,7 +3920,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2551">getCompactedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2567">getCompactedCellsCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedCellsCount--">getCompactedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3905,7 +3935,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2556">getCompactedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2572">getCompactedCellsSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedCellsSize--">getCompactedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3920,7 +3950,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMajorCompactedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2561">getMajorCompactedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2577">getMajorCompactedCellsCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMajorCompactedCellsCount--">getMajorCompactedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3935,7 +3965,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMajorCompactedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2566">getMajorCompactedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2582">getMajorCompactedCellsSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMajorCompactedCellsSize--">getMajorCompactedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3950,7 +3980,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreEngine</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2575">getStoreEngine</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2591">getStoreEngine</a>()</pre>
 <div class="block">Returns the StoreEngine that is backing this concrete implementation of Store.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -3964,7 +3994,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getOffPeakHours</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2579">getOffPeakHours</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2595">getOffPeakHours</a>()</pre>
 </li>
 </ul>
 <a name="onConfigurationChange-org.apache.hadoop.conf.Configuration-">
@@ -3973,7 +4003,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>onConfigurationChange</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2587">onConfigurationChange</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2603">onConfigurationChange</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">This method would be called by the <a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf"><code>ConfigurationManager</code></a>
  object when the <code>Configuration</code> object is reloaded from disk.</div>
 <dl>
@@ -3988,7 +4018,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>registerChildren</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2599">registerChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2615">registerChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
 <div class="block">Needs to be called to register the children to the manager.</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -4004,7 +4034,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>deregisterChildren</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2607">deregisterChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2623">deregisterChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
 <div class="block">Needs to be called to deregister the children from the manager.</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -4020,7 +4050,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactionPressure</h4>
-<pre>public&nbsp;double&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2612">getCompactionPressure</a>()</pre>
+<pre>public&nbsp;double&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2628">getCompactionPressure</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactionPressure--">Store</a></code></span></div>
 <div class="block">This value can represent the degree of emergency of compaction for this store. It should be
  greater than or equal to 0.0, any value greater than 1.0 means we have too many store files.
@@ -4047,7 +4077,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>isPrimaryReplicaStore</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2617">isPrimaryReplicaStore</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2633">isPrimaryReplicaStore</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#isPrimaryReplicaStore--">isPrimaryReplicaStore</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -4060,7 +4090,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>preSnapshotOperation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2625">preSnapshotOperation</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2641">preSnapshotOperation</a>()</pre>
 <div class="block">Sets the store up for a region level snapshot operation.</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -4074,7 +4104,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>postSnapshotOperation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2633">postSnapshotOperation</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2649">postSnapshotOperation</a>()</pre>
 <div class="block">Perform tasks needed after the completion of snapshot operation.</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -4088,7 +4118,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>closeAndArchiveCompactedFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2640">closeAndArchiveCompactedFiles</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2656">closeAndArchiveCompactedFiles</a>()
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Closes and archives the compacted files under this store</div>
 <dl>
@@ -4103,7 +4133,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>removeCompactedfiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2670">removeCompactedfiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedfiles)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2686">removeCompactedfiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedfiles)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Archives and removes the compacted files</div>
 <dl>
@@ -4120,7 +4150,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreFileSize</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2758">getStoreFileSize</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file)</pre>
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2774">getStoreFileSize</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file)</pre>
 <div class="block">Computes the length of a store file without succumbing to any errors along the way. If an
  error is encountered, the implementation returns <code>0</code> instead of the actual size.</div>
 <dl>
@@ -4137,7 +4167,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>preFlushSeqIDEstimation</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2776">preFlushSeqIDEstimation</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2792">preFlushSeqIDEstimation</a>()</pre>
 </li>
 </ul>
 <a name="isSloppyMemStore--">
@@ -4146,7 +4176,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>isSloppyMemStore</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2781">isSloppyMemStore</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2797">isSloppyMemStore</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#isSloppyMemStore--">isSloppyMemStore</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -4161,7 +4191,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>clearCompactedfiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2785">clearCompactedfiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToRemove)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2801">clearCompactedfiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToRemove)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -4175,7 +4205,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>reportArchivedFilesForQuota</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2795">reportArchivedFilesForQuota</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFile</a>&gt;&nbsp;archivedFiles,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2811">reportArchivedFilesForQuota</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFile</a>&gt;&nbsp;archivedFiles,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;fileSizes)</pre>
 </li>
 </ul>
@@ -4185,7 +4215,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCurrentParallelPutCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2823">getCurrentParallelPutCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2839">getCurrentParallelPutCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCurrentParallelPutCount--">getCurrentParallelPutCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -4198,7 +4228,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreRefCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2827">getStoreRefCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2843">getStoreRefCount</a>()</pre>
 </li>
 </ul>
 <a name="getMaxCompactedStoreFileRefCount--">
@@ -4207,7 +4237,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMaxCompactedStoreFileRefCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2837">getMaxCompactedStoreFileRefCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2853">getMaxCompactedStoreFileRefCount</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>get maximum ref count of storeFile among all compacted HStore Files
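[Editor's note, not part of the diff] The HStore.html hunks above only shift source-line anchors, but they also catalogue the store-level metric accessors (getMaxStoreFileAge, getAvgStoreFileAge, getNumReferenceFiles, getStorefilesSize, getStorefilesRootLevelIndexSize, getTotalStaticIndexSize, getTotalStaticBloomSize, getCompactionPressure, hasTooManyStoreFiles, and so on) that HStore implements from the Store interface. As a minimal sketch of reading those metrics, assuming the caller already holds a Store reference (for example from a coprocessor or a test harness) and using only the accessor names and return types shown in the generated docs above:

    // Illustrative helper only; StoreMetricsSummary is not part of HBase.
    import java.util.OptionalDouble;
    import java.util.OptionalLong;

    import org.apache.hadoop.hbase.regionserver.Store;

    public final class StoreMetricsSummary {

      private StoreMetricsSummary() {
      }

      /** Builds a one-line, human-readable summary of a store's file-level metrics. */
      public static String summarize(Store store) {
        OptionalLong maxAge = store.getMaxStoreFileAge();   // millis; empty if no store files
        OptionalLong minAge = store.getMinStoreFileAge();
        OptionalDouble avgAge = store.getAvgStoreFileAge();
        StringBuilder sb = new StringBuilder();
        sb.append("hfiles=").append(store.getNumHFiles())
          .append(", referenceFiles=").append(store.getNumReferenceFiles())
          .append(", storefilesSize=").append(store.getStorefilesSize())
          .append(", rootIndexSize=").append(store.getStorefilesRootLevelIndexSize())
          .append(", staticIndexSize=").append(store.getTotalStaticIndexSize())
          .append(", staticBloomSize=").append(store.getTotalStaticBloomSize())
          .append(", compactionPressure=").append(store.getCompactionPressure())
          .append(", tooManyStoreFiles=").append(store.hasTooManyStoreFiles());
        maxAge.ifPresent(v -> sb.append(", maxStoreFileAgeMs=").append(v));
        minAge.ifPresent(v -> sb.append(", minStoreFileAgeMs=").append(v));
        avgAge.ifPresent(v -> sb.append(", avgStoreFileAgeMs=").append(v));
        return sb.toString();
      }
    }

The empty-Optional checks matter because the age accessors return OptionalLong/OptionalDouble rather than a sentinel value when a store has no HFiles yet.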
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
index 7e41407..6b5beae 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFileWriter.html
@@ -387,6 +387,16 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#createWriterInTmp-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-boolean-boolean-boolean-boolean-long-">createWriterInTmp</a></span>(long&nbsp;maxKeyCount,
+                 <a href="../../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
+                 boolean&nbsp;isCompaction,
+                 boolean&nbsp;includeMVCCReadpoint,
+                 boolean&nbsp;includesTag,
+                 boolean&nbsp;shouldDropBehind,
+                 long&nbsp;totalCompactedFilesSize)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a></code></td>
 <td class="colLast"><span class="typeNameLabel">HMobStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HMobStore.html#createWriterInTmp-org.apache.hadoop.hbase.mob.MobFileName-org.apache.hadoop.fs.Path-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-boolean-">createWriterInTmp</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/mob/MobFileName.html" title="class in org.apache.hadoop.hbase.mob">MobFileNa [...]
                  org.apache.hadoop.fs.Path&nbsp;basePath,
                  long&nbsp;maxKeyCount,
@@ -395,7 +405,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 <div class="block">Creates the writer for the mob file in temp directory.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a></code></td>
 <td class="colLast"><span class="typeNameLabel">HMobStore.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HMobStore.html#createWriterInTmp-java.lang.String-org.apache.hadoop.fs.Path-long-org.apache.hadoop.hbase.io.compress.Compression.Algorithm-byte:A-boolean-">createWriterInTmp</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&n [...]
                  org.apache.hadoop.fs.Path&nbsp;basePath,
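[Editor's note, not part of the diff] The class-use page above now lists a createWriterInTmp overload on HStore that carries the total size of the files being compacted as an extra argument. A hedged sketch of invoking it, taking the parameter order from the signature shown above and assuming the overload is reachable from compaction code and declares IOException like the pre-existing createWriterInTmp variants:

    // Illustrative only; CompactionWriterSketch is not part of HBase.
    import java.io.IOException;

    import org.apache.hadoop.hbase.io.compress.Compression;
    import org.apache.hadoop.hbase.regionserver.HStore;
    import org.apache.hadoop.hbase.regionserver.StoreFileWriter;

    final class CompactionWriterSketch {

      /** Opens a temporary writer for one compaction output file. */
      static StoreFileWriter openCompactionWriter(HStore store, long maxKeyCount,
          long totalCompactedFilesSize) throws IOException {
        return store.createWriterInTmp(
          maxKeyCount,                  // upper bound on cells to be written
          Compression.Algorithm.NONE,   // compression codec for the output file
          true,                         // isCompaction
          true,                         // includeMVCCReadpoint
          false,                        // includesTag
          false,                        // shouldDropBehind
          totalCompactedFilesSize);     // new parameter: combined size of the inputs
      }
    }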
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html b/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html
index 7eec95f..a72bd35 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html
@@ -170,6 +170,12 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Min SeqId to keep during a major compaction</div>
 </td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><code>private long</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html#totalCompactedFilesSize">totalCompactedFilesSize</a></span></code>
+<div class="block">Total size of the compacted files</div>
+</td>
+</tr>
 </table>
 </li>
 </ul>
@@ -282,13 +288,23 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <a name="minSeqIdToKeep">
 <!--   -->
 </a>
-<ul class="blockListLast">
+<ul class="blockList">
 <li class="blockList">
 <h4>minSeqIdToKeep</h4>
 <pre>public&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html#line.132">minSeqIdToKeep</a></pre>
 <div class="block">Min SeqId to keep during a major compaction</div>
 </li>
 </ul>
+<a name="totalCompactedFilesSize">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>totalCompactedFilesSize</h4>
+<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html#line.134">totalCompactedFilesSize</a></pre>
+<div class="block">Total size of the compacted files</div>
+</li>
+</ul>
 </li>
 </ul>
 <!-- ========= CONSTRUCTOR DETAIL ======== -->
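Compactor.FileDetails gains a private totalCompactedFilesSize field, described above as the total size of the compacted files. As a hedged illustration of how it could be populated while the other per-file details are gathered, the snippet below sums the on-disk length of every store file selected for compaction; the filesToCompact collection, the fd variable, and the getReader().length() accessor chain are assumptions for the example rather than code taken from this diff.

    // Sketch under stated assumptions: accumulate the combined size of the compaction inputs.
    long totalCompactedFilesSize = 0L;
    for (HStoreFile file : filesToCompact) {            // hypothetical input collection
      totalCompactedFilesSize += file.getReader().length();
    }
    // The field is private, so an assignment like this has to happen inside Compactor itself.
    fd.totalCompactedFilesSize = totalCompactedFilesSize;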
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html b/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html
index b904c05..b209a0d 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>protected static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.230">Compactor.InternalScannerFactory</a></pre>
+<pre>protected static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.236">Compactor.InternalScannerFactory</a></pre>
 </li>
 </ul>
 </div>
@@ -161,7 +161,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanType</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver">ScanType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html#line.232">getScanType</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestI [...]
+<pre><a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver">ScanType</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html#line.238">getScanType</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestI [...]
 </li>
 </ul>
 <a name="createScanner-org.apache.hadoop.hbase.regionserver.ScanInfo-java.util.List-org.apache.hadoop.hbase.regionserver.ScanType-org.apache.hadoop.hbase.regionserver.compactions.Compactor.FileDetails-long-">
@@ -170,7 +170,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createScanner</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html#line.234">createScanner</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html#line.240">createScanner</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo,
                               <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>&gt;&nbsp;scanners,
                               <a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver">ScanType</a>&nbsp;scanType,
                               <a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compactor.FileDetails</a>&nbsp;fd,
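Compactor.InternalScannerFactory only has its source-line anchors shifted by this commit, but both of its methods are visible above, so a short sketch of an implementation may be useful. The name of the trailing long parameter (smallestReadPoint), the isAllFiles() choice of scan type, and the delegation to the protected Compactor#createScanner documented in the next file are assumptions layered on top of the signatures shown; the sketch is assumed to live inside Compactor, where the store field is accessible.

    // Hypothetical factory, mirroring the shape of the defaultScannerFactory field below.
    final InternalScannerFactory scannerFactory = new InternalScannerFactory() {
      @Override
      public ScanType getScanType(CompactionRequestImpl request) {
        // Assumed convention: only an all-files (major) compaction may drop deletes.
        return request.isAllFiles() ? ScanType.COMPACT_DROP_DELETES
            : ScanType.COMPACT_RETAIN_DELETES;
      }

      @Override
      public InternalScanner createScanner(ScanInfo scanInfo, List<StoreFileScanner> scanners,
          ScanType scanType, FileDetails fd, long smallestReadPoint) {
        // The trailing arguments of the protected helper are truncated in this diff,
        // so smallestReadPoint and fd.earliestPutTs are assumptions here.
        return Compactor.this.createScanner(store, scanInfo, scanners, scanType,
            smallestReadPoint, fd.earliestPutTs);
      }
    };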
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html b/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html
index ab8da48..9378188 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html
@@ -496,7 +496,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>defaultScannerFactory</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html" title="interface in org.apache.hadoop.hbase.regionserver.compactions">Compactor.InternalScannerFactory</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.238">defaultScannerFactory</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html" title="interface in org.apache.hadoop.hbase.regionserver.compactions">Compactor.InternalScannerFactory</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.244">defaultScannerFactory</a></pre>
 </li>
 </ul>
 </li>
@@ -540,7 +540,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileDetails</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compactor.FileDetails</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.141">getFileDetails</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collectio [...]
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compactor.FileDetails</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.143">getFileDetails</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collectio [...]
                                              boolean&nbsp;allFiles)
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Extracts some details about the files to compact that are commonly needed by compactors.</div>
@@ -561,7 +561,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileScanners</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.220">createFileScanners</a>(<a href="https://docs.oracle. [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.226">createFileScanners</a>(<a href="https://docs.oracle. [...]
                                                   long&nbsp;smallestReadPoint,
                                                   boolean&nbsp;useDropBehind)
                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -582,7 +582,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getSmallestReadPoint</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.226">getSmallestReadPoint</a>()</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.232">getSmallestReadPoint</a>()</pre>
 </li>
 </ul>
 <a name="createTmpWriter-org.apache.hadoop.hbase.regionserver.compactions.Compactor.FileDetails-boolean-">
@@ -591,7 +591,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createTmpWriter</h4>
-<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.259">createTmpWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">C [...]
+<pre>protected final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.265">createTmpWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">C [...]
                                                 boolean&nbsp;shouldDropBehind)
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Creates a writer for a new file in a temporary directory.</div>
@@ -611,7 +611,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>preCompactScannerOpen</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.267">preCompactScannerOpen</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequest [...]
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.274">preCompactScannerOpen</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequest [...]
                                        <a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver">ScanType</a>&nbsp;scanType,
                                        <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -627,7 +627,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>postCompactScannerOpen</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.283">postCompactScannerOpen</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions [...]
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.290">postCompactScannerOpen</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions [...]
                                                <a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver">ScanType</a>&nbsp;scanType,
                                                <a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;scanner,
                                                <a href="../../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
@@ -651,7 +651,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>compact</h4>
-<pre>protected final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.292">compact</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions [...]
+<pre>protected final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.299">compact</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions [...]
                                                         <a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.InternalScannerFactory.html" title="interface in org.apache.hadoop.hbase.regionserver.compactions">Compactor.InternalScannerFactory</a>&nbsp;scannerFactory,
                                                         <a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.CellSinkFactory.html" title="interface in org.apache.hadoop.hbase.regionserver.compactions">Compactor.CellSinkFactory</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.html" title="type parameter in Compactor">T</a>&gt;&nbsp;sinkFactory,
                                                         <a href="../../../../../../org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.html" title="interface in org.apache.hadoop.hbase.regionserver.throttle">ThroughputController</a>&nbsp;throughputController,
@@ -671,7 +671,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>commitWriter</h4>
-<pre>protected abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.343">commitWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.html" title="type parameter in Compactor">T</a>&nbsp;writer,
+<pre>protected abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.350">commitWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.html" title="type parameter in Compactor">T</a>&nbsp;writer,
                                                                 <a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compactor.FileDetails</a>&nbsp;fd,
                                                                 <a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;request)
                                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -689,7 +689,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>abortWriter</h4>
-<pre>protected abstract&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.346">abortWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.html" title="type parameter in Compactor">T</a>&nbsp;writer)
+<pre>protected abstract&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.353">abortWriter</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.html" title="type parameter in Compactor">T</a>&nbsp;writer)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -703,7 +703,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>performCompaction</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.360">performCompaction</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compactor.FileDetails</a>&nbsp;fd,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.367">performCompaction</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/compactions/Compactor.FileDetails.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compactor.FileDetails</a>&nbsp;fd,
                                     <a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;scanner,
                                     <a href="../../../../../../org/apache/hadoop/hbase/regionserver/CellSink.html" title="interface in org.apache.hadoop.hbase.regionserver">CellSink</a>&nbsp;writer,
                                     long&nbsp;smallestReadPoint,
@@ -736,7 +736,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createScanner</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.484">createScanner</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a>&nbsp;store,
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.491">createScanner</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a>&nbsp;store,
                                         <a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo,
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>&gt;&nbsp;scanners,
                                         <a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver">ScanType</a>&nbsp;scanType,
@@ -763,7 +763,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createScanner</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.499">createScanner</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a>&nbsp;store,
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/InternalScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">InternalScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#line.506">createScanner</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a>&nbsp;store,
                                         <a href="../../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo,
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/StoreFileScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileScanner</a>&gt;&nbsp;scanners,
                                         long&nbsp;smallestReadPoint,
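The createTmpWriter entry above ("Creates a writer for a new file in a temporary directory.") is the natural point where FileDetails#totalCompactedFilesSize would be handed to the HStore#createWriterInTmp overload from earlier in this diff. The method body itself is not part of the diff, so the following is only a plausible reconstruction under stated assumptions: the compactionCompression field and the fd.maxKeyCount / fd.maxMVCCReadpoint / fd.maxTagsLength details are not confirmed by the Javadoc shown here.

    // Plausible reconstruction only -- the real method body is not shown in this diff.
    protected final StoreFileWriter createTmpWriter(FileDetails fd, boolean shouldDropBehind)
        throws IOException {
      // Thread the combined input size through so the cache layer can apply the
      // hbase.rs.cachecompactedblocksonwrite.threshold key introduced in CacheConfig below.
      return store.createWriterInTmp(fd.maxKeyCount, this.compactionCompression,
          true /* isCompaction */, fd.maxMVCCReadpoint > 0, fd.maxTagsLength > 0,
          shouldDropBehind, fd.totalCompactedFilesSize);
    }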
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 5e326cb..5b3ac5a 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -735,20 +735,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 5b2745a..bd7348f 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -131,8 +131,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index 0c730b9..01364b3 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -249,8 +249,8 @@
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
index 732dfb2..581ff15 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
@@ -166,8 +166,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/ReplicationPeer.PeerState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">ReplicationPeer.PeerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/SyncReplicationState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">SyncReplicationState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/ReplicationPeer.PeerState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">ReplicationPeer.PeerState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
index 795500d..b47c8cc 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
@@ -110,8 +110,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index 23e6ea9..74d2bbb 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -162,11 +162,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index c060011..062071e 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -190,8 +190,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 84c399e..e069dc4 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -211,9 +211,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ImplType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ImplType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 0f2eb8b..f013445 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -560,13 +560,13 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HbckErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HbckErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index d78cc8b..f2c94bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -199,8 +199,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">RegionGroupingProvider.Strategies</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 1f09cb7..8dcff58 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -18,9 +18,9 @@
 <span class="sourceLineNo">010</span>  justification="Intentional; to be modified in test")<a name="line.10"></a>
 <span class="sourceLineNo">011</span>public class Version {<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String version = new String("3.0.0-SNAPSHOT");<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String revision = "8b00f9f0b160a3191889aa3a80478525c8faf4b3";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String revision = "77229c79e36d72fb0f1a85a80b6814e2ece1e81c";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String user = "jenkins";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String date = "Thu Jan 30 14:39:19 UTC 2020";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String date = "Fri Jan 31 14:35:45 UTC 2020";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>  public static final String url = "git://jenkins-websites-he-de.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.16"></a>
 <span class="sourceLineNo">017</span>  public static final String srcChecksum = "(stdin)=";<a name="line.17"></a>
 <span class="sourceLineNo">018</span>}<a name="line.18"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
index f468c37..d76e5d7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
@@ -95,340 +95,374 @@
 <span class="sourceLineNo">087</span>  public static final String CACHE_COMPACTED_BLOCKS_ON_WRITE_KEY =<a name="line.87"></a>
 <span class="sourceLineNo">088</span>      "hbase.rs.cachecompactedblocksonwrite";<a name="line.88"></a>
 <span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public static final String DROP_BEHIND_CACHE_COMPACTION_KEY =<a name="line.90"></a>
-<span class="sourceLineNo">091</span>      "hbase.hfile.drop.behind.compaction";<a name="line.91"></a>
-<span class="sourceLineNo">092</span><a name="line.92"></a>
-<span class="sourceLineNo">093</span>  // Defaults<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  public static final boolean DEFAULT_CACHE_DATA_ON_READ = true;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public static final boolean DEFAULT_CACHE_DATA_ON_WRITE = false;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public static final boolean DEFAULT_IN_MEMORY = false;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  public static final boolean DEFAULT_CACHE_INDEXES_ON_WRITE = false;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public static final boolean DEFAULT_CACHE_BLOOMS_ON_WRITE = false;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public static final boolean DEFAULT_EVICT_ON_CLOSE = false;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  public static final boolean DEFAULT_CACHE_DATA_COMPRESSED = false;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  public static final boolean DEFAULT_PREFETCH_ON_OPEN = false;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public static final boolean DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE = false;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  public static final boolean DROP_BEHIND_CACHE_COMPACTION_DEFAULT = true;<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  /**<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   * Whether blocks should be cached on read (default is on if there is a<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * cache but this can be turned off on a per-family or per-request basis).<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * If off we will STILL cache meta blocks; i.e. INDEX and BLOOM types.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * This cannot be disabled.<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   */<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  private final boolean cacheDataOnRead;<a name="line.111"></a>
+<span class="sourceLineNo">090</span>  /**<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * Configuration key to determine total size in bytes of compacted files beyond which we do not<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * cache blocks on compaction<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   */<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  public static final String CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY =<a name="line.94"></a>
+<span class="sourceLineNo">095</span>      "hbase.rs.cachecompactedblocksonwrite.threshold";<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String DROP_BEHIND_CACHE_COMPACTION_KEY =<a name="line.97"></a>
+<span class="sourceLineNo">098</span>      "hbase.hfile.drop.behind.compaction";<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>  // Defaults<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  public static final boolean DEFAULT_CACHE_DATA_ON_READ = true;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  public static final boolean DEFAULT_CACHE_DATA_ON_WRITE = false;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  public static final boolean DEFAULT_IN_MEMORY = false;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  public static final boolean DEFAULT_CACHE_INDEXES_ON_WRITE = false;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  public static final boolean DEFAULT_CACHE_BLOOMS_ON_WRITE = false;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public static final boolean DEFAULT_EVICT_ON_CLOSE = false;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  public static final boolean DEFAULT_CACHE_DATA_COMPRESSED = false;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  public static final boolean DEFAULT_PREFETCH_ON_OPEN = false;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  public static final boolean DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE = false;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  public static final boolean DROP_BEHIND_CACHE_COMPACTION_DEFAULT = true;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  public static final long DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD = Long.MAX_VALUE;<a name="line.111"></a>
 <span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  /** Whether blocks should be flagged as in-memory when being cached */<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  private final boolean inMemory;<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /** Whether data blocks should be cached when new files are written */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  private boolean cacheDataOnWrite;<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  /** Whether index blocks should be cached when new files are written */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  private boolean cacheIndexesOnWrite;<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  /** Whether compound bloom filter blocks should be cached on write */<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  private boolean cacheBloomsOnWrite;<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>  /** Whether blocks of a file should be evicted when the file is closed */<a name="line.125"></a>
-<span class="sourceLineNo">126</span>  private boolean evictOnClose;<a name="line.126"></a>
-<span class="sourceLineNo">127</span><a name="line.127"></a>
-<span class="sourceLineNo">128</span>  /** Whether data blocks should be stored in compressed and/or encrypted form in the cache */<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  private final boolean cacheDataCompressed;<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>  /** Whether data blocks should be prefetched into the cache */<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  private final boolean prefetchOnOpen;<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  /**<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * Whether data blocks should be cached when compacted file is written<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   */<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  private final boolean cacheCompactedDataOnWrite;<a name="line.137"></a>
+<span class="sourceLineNo">113</span>  /**<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   * Whether blocks should be cached on read (default is on if there is a<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   * cache but this can be turned off on a per-family or per-request basis).<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * If off we will STILL cache meta blocks; i.e. INDEX and BLOOM types.<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   * This cannot be disabled.<a name="line.117"></a>
+<span class="sourceLineNo">118</span>   */<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  private final boolean cacheDataOnRead;<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  /** Whether blocks should be flagged as in-memory when being cached */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  private final boolean inMemory;<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>  /** Whether data blocks should be cached when new files are written */<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  private boolean cacheDataOnWrite;<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>  /** Whether index blocks should be cached when new files are written */<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  private boolean cacheIndexesOnWrite;<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>  /** Whether compound bloom filter blocks should be cached on write */<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  private boolean cacheBloomsOnWrite;<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  /** Whether blocks of a file should be evicted when the file is closed */<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  private boolean evictOnClose;<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>  /** Whether data blocks should be stored in compressed and/or encrypted form in the cache */<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  private final boolean cacheDataCompressed;<a name="line.137"></a>
 <span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  private final boolean dropBehindCompaction;<a name="line.139"></a>
-<span class="sourceLineNo">140</span><a name="line.140"></a>
-<span class="sourceLineNo">141</span>  // Local reference to the block cache<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  private final BlockCache blockCache;<a name="line.142"></a>
-<span class="sourceLineNo">143</span><a name="line.143"></a>
-<span class="sourceLineNo">144</span>  private final ByteBuffAllocator byteBuffAllocator;<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>  /**<a name="line.146"></a>
-<span class="sourceLineNo">147</span>   * Create a cache configuration using the specified configuration object and<a name="line.147"></a>
-<span class="sourceLineNo">148</span>   * defaults for family level settings. Only use if no column family context.<a name="line.148"></a>
-<span class="sourceLineNo">149</span>   * @param conf hbase configuration<a name="line.149"></a>
-<span class="sourceLineNo">150</span>   */<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  public CacheConfig(Configuration conf) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    this(conf, null);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  public CacheConfig(Configuration conf, BlockCache blockCache) {<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    this(conf, null, blockCache, ByteBuffAllocator.HEAP);<a name="line.156"></a>
-<span class="sourceLineNo">157</span>  }<a name="line.157"></a>
+<span class="sourceLineNo">139</span>  /** Whether data blocks should be prefetched into the cache */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  private final boolean prefetchOnOpen;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>  /**<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   * Whether data blocks should be cached when compacted file is written<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   */<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  private final boolean cacheCompactedDataOnWrite;<a name="line.145"></a>
+<span class="sourceLineNo">146</span><a name="line.146"></a>
+<span class="sourceLineNo">147</span>  /**<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   * Determine threshold beyond which we do not cache blocks on compaction<a name="line.148"></a>
+<span class="sourceLineNo">149</span>   */<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  private long cacheCompactedDataOnWriteThreshold;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>  private final boolean dropBehindCompaction;<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>  // Local reference to the block cache<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  private final BlockCache blockCache;<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  private final ByteBuffAllocator byteBuffAllocator;<a name="line.157"></a>
 <span class="sourceLineNo">158</span><a name="line.158"></a>
 <span class="sourceLineNo">159</span>  /**<a name="line.159"></a>
 <span class="sourceLineNo">160</span>   * Create a cache configuration using the specified configuration object and<a name="line.160"></a>
-<span class="sourceLineNo">161</span>   * family descriptor.<a name="line.161"></a>
+<span class="sourceLineNo">161</span>   * defaults for family level settings. Only use if no column family context.<a name="line.161"></a>
 <span class="sourceLineNo">162</span>   * @param conf hbase configuration<a name="line.162"></a>
-<span class="sourceLineNo">163</span>   * @param family column family configuration<a name="line.163"></a>
-<span class="sourceLineNo">164</span>   */<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  public CacheConfig(Configuration conf, ColumnFamilyDescriptor family, BlockCache blockCache,<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      ByteBuffAllocator byteBuffAllocator) {<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    this.cacheDataOnRead = conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ) &amp;&amp;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        (family == null ? true : family.isBlockCacheEnabled());<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    this.inMemory = family == null ? DEFAULT_IN_MEMORY : family.isInMemory();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    this.cacheDataCompressed =<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    this.dropBehindCompaction =<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY, DROP_BEHIND_CACHE_COMPACTION_DEFAULT);<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    // For the following flags we enable them regardless of per-schema settings<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    // if they are enabled in the global configuration.<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    this.cacheDataOnWrite =<a name="line.176"></a>
-<span class="sourceLineNo">177</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE) ||<a name="line.177"></a>
-<span class="sourceLineNo">178</span>            (family == null ? false : family.isCacheDataOnWrite());<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    this.cacheIndexesOnWrite =<a name="line.179"></a>
-<span class="sourceLineNo">180</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE) ||<a name="line.180"></a>
-<span class="sourceLineNo">181</span>            (family == null ? false : family.isCacheIndexesOnWrite());<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    this.cacheBloomsOnWrite =<a name="line.182"></a>
-<span class="sourceLineNo">183</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE) ||<a name="line.183"></a>
-<span class="sourceLineNo">184</span>            (family == null ? false : family.isCacheBloomsOnWrite());<a name="line.184"></a>
-<span class="sourceLineNo">185</span>    this.evictOnClose = conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE) ||<a name="line.185"></a>
-<span class="sourceLineNo">186</span>        (family == null ? false : family.isEvictBlocksOnClose());<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    this.prefetchOnOpen = conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN) ||<a name="line.187"></a>
-<span class="sourceLineNo">188</span>        (family == null ? false : family.isPrefetchBlocksOnOpen());<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    this.cacheCompactedDataOnWrite = conf.getBoolean(CACHE_COMPACTED_BLOCKS_ON_WRITE_KEY,<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    this.blockCache = blockCache;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    this.byteBuffAllocator = byteBuffAllocator;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    LOG.info("Created cacheConfig: " + this + (family == null ? "" : " for family " + family) +<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        " with blockCache=" + blockCache);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param cacheConf<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   */<a name="line.200"></a>
-<span class="sourceLineNo">201</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    this.cacheDataOnRead = cacheConf.cacheDataOnRead;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    this.inMemory = cacheConf.inMemory;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    this.cacheDataOnWrite = cacheConf.cacheDataOnWrite;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    this.cacheIndexesOnWrite = cacheConf.cacheIndexesOnWrite;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    this.cacheBloomsOnWrite = cacheConf.cacheBloomsOnWrite;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    this.evictOnClose = cacheConf.evictOnClose;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    this.cacheDataCompressed = cacheConf.cacheDataCompressed;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    this.prefetchOnOpen = cacheConf.prefetchOnOpen;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>    this.cacheCompactedDataOnWrite = cacheConf.cacheCompactedDataOnWrite;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>    this.dropBehindCompaction = cacheConf.dropBehindCompaction;<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    this.blockCache = cacheConf.blockCache;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    this.byteBuffAllocator = cacheConf.byteBuffAllocator;<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  }<a name="line.214"></a>
-<span class="sourceLineNo">215</span><a name="line.215"></a>
-<span class="sourceLineNo">216</span>  private CacheConfig() {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    this.cacheDataOnRead = false;<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    this.inMemory = false;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    this.cacheDataOnWrite = false;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    this.cacheIndexesOnWrite = false;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    this.cacheBloomsOnWrite = false;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    this.evictOnClose = false;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    this.cacheDataCompressed = false;<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    this.prefetchOnOpen = false;<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    this.cacheCompactedDataOnWrite = false;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    this.dropBehindCompaction = false;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    this.blockCache = null;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    this.byteBuffAllocator = ByteBuffAllocator.HEAP;<a name="line.228"></a>
+<span class="sourceLineNo">163</span>   */<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  public CacheConfig(Configuration conf) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    this(conf, null);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>  }<a name="line.166"></a>
+<span class="sourceLineNo">167</span><a name="line.167"></a>
+<span class="sourceLineNo">168</span>  public CacheConfig(Configuration conf, BlockCache blockCache) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    this(conf, null, blockCache, ByteBuffAllocator.HEAP);<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /**<a name="line.172"></a>
+<span class="sourceLineNo">173</span>   * Create a cache configuration using the specified configuration object and<a name="line.173"></a>
+<span class="sourceLineNo">174</span>   * family descriptor.<a name="line.174"></a>
+<span class="sourceLineNo">175</span>   * @param conf hbase configuration<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * @param family column family configuration<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   */<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  public CacheConfig(Configuration conf, ColumnFamilyDescriptor family, BlockCache blockCache,<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      ByteBuffAllocator byteBuffAllocator) {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    this.cacheDataOnRead = conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ) &amp;&amp;<a name="line.180"></a>
+<span class="sourceLineNo">181</span>        (family == null ? true : family.isBlockCacheEnabled());<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    this.inMemory = family == null ? DEFAULT_IN_MEMORY : family.isInMemory();<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    this.cacheDataCompressed =<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    this.dropBehindCompaction =<a name="line.185"></a>
+<span class="sourceLineNo">186</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY, DROP_BEHIND_CACHE_COMPACTION_DEFAULT);<a name="line.186"></a>
+<span class="sourceLineNo">187</span>    // For the following flags we enable them regardless of per-schema settings<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    // if they are enabled in the global configuration.<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    this.cacheDataOnWrite =<a name="line.189"></a>
+<span class="sourceLineNo">190</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE) ||<a name="line.190"></a>
+<span class="sourceLineNo">191</span>            (family == null ? false : family.isCacheDataOnWrite());<a name="line.191"></a>
+<span class="sourceLineNo">192</span>    this.cacheIndexesOnWrite =<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE) ||<a name="line.193"></a>
+<span class="sourceLineNo">194</span>            (family == null ? false : family.isCacheIndexesOnWrite());<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    this.cacheBloomsOnWrite =<a name="line.195"></a>
+<span class="sourceLineNo">196</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE) ||<a name="line.196"></a>
+<span class="sourceLineNo">197</span>            (family == null ? false : family.isCacheBloomsOnWrite());<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    this.evictOnClose = conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE) ||<a name="line.198"></a>
+<span class="sourceLineNo">199</span>        (family == null ? false : family.isEvictBlocksOnClose());<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    this.prefetchOnOpen = conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN) ||<a name="line.200"></a>
+<span class="sourceLineNo">201</span>        (family == null ? false : family.isPrefetchBlocksOnOpen());<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    this.cacheCompactedDataOnWrite = conf.getBoolean(CACHE_COMPACTED_BLOCKS_ON_WRITE_KEY,<a name="line.202"></a>
+<span class="sourceLineNo">203</span>      DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    this.cacheCompactedDataOnWriteThreshold = getCacheCompactedBlocksOnWriteThreshold(conf);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    this.blockCache = blockCache;<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    this.byteBuffAllocator = byteBuffAllocator;<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    LOG.info("Created cacheConfig: " + this + (family == null ? "" : " for family " + family) +<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        " with blockCache=" + blockCache);<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  }<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>  /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.212"></a>
+<span class="sourceLineNo">213</span>   * @param cacheConf<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    this.cacheDataOnRead = cacheConf.cacheDataOnRead;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    this.inMemory = cacheConf.inMemory;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    this.cacheDataOnWrite = cacheConf.cacheDataOnWrite;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    this.cacheIndexesOnWrite = cacheConf.cacheIndexesOnWrite;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    this.cacheBloomsOnWrite = cacheConf.cacheBloomsOnWrite;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    this.evictOnClose = cacheConf.evictOnClose;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    this.cacheDataCompressed = cacheConf.cacheDataCompressed;<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    this.prefetchOnOpen = cacheConf.prefetchOnOpen;<a name="line.223"></a>
+<span class="sourceLineNo">224</span>    this.cacheCompactedDataOnWrite = cacheConf.cacheCompactedDataOnWrite;<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    this.cacheCompactedDataOnWriteThreshold = cacheConf.cacheCompactedDataOnWriteThreshold;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    this.dropBehindCompaction = cacheConf.dropBehindCompaction;<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    this.blockCache = cacheConf.blockCache;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    this.byteBuffAllocator = cacheConf.byteBuffAllocator;<a name="line.228"></a>
 <span class="sourceLineNo">229</span>  }<a name="line.229"></a>
 <span class="sourceLineNo">230</span><a name="line.230"></a>
-<span class="sourceLineNo">231</span>  /**<a name="line.231"></a>
-<span class="sourceLineNo">232</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.232"></a>
-<span class="sourceLineNo">233</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.233"></a>
-<span class="sourceLineNo">234</span>   * @return true if blocks should be cached on read, false if not<a name="line.234"></a>
-<span class="sourceLineNo">235</span>   */<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  public boolean shouldCacheDataOnRead() {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    return cacheDataOnRead;<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  }<a name="line.238"></a>
-<span class="sourceLineNo">239</span><a name="line.239"></a>
-<span class="sourceLineNo">240</span>  public boolean shouldDropBehindCompaction() {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    return dropBehindCompaction;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>  }<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>  /**<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * Should we cache a block of a particular category? We always cache<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.246"></a>
-<span class="sourceLineNo">247</span>   * available.<a name="line.247"></a>
-<span class="sourceLineNo">248</span>   */<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    return cacheDataOnRead || category == BlockCategory.INDEX || category == BlockCategory.BLOOM ||<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        (prefetchOnOpen &amp;&amp; (category != BlockCategory.META &amp;&amp; category != BlockCategory.UNKNOWN));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  }<a name="line.252"></a>
-<span class="sourceLineNo">253</span><a name="line.253"></a>
-<span class="sourceLineNo">254</span>  /**<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.255"></a>
-<span class="sourceLineNo">256</span>   */<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  public boolean isInMemory() {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    return this.inMemory;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>  /**<a name="line.261"></a>
-<span class="sourceLineNo">262</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.262"></a>
-<span class="sourceLineNo">263</span>   *         written, false if not<a name="line.263"></a>
-<span class="sourceLineNo">264</span>   */<a name="line.264"></a>
-<span class="sourceLineNo">265</span>  public boolean shouldCacheDataOnWrite() {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    return this.cacheDataOnWrite;<a name="line.266"></a>
+<span class="sourceLineNo">231</span>  private CacheConfig() {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    this.cacheDataOnRead = false;<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    this.inMemory = false;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    this.cacheDataOnWrite = false;<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    this.cacheIndexesOnWrite = false;<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    this.cacheBloomsOnWrite = false;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    this.evictOnClose = false;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    this.cacheDataCompressed = false;<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    this.prefetchOnOpen = false;<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    this.cacheCompactedDataOnWrite = false;<a name="line.240"></a>
+<span class="sourceLineNo">241</span>    this.dropBehindCompaction = false;<a name="line.241"></a>
+<span class="sourceLineNo">242</span>    this.blockCache = null;<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    this.byteBuffAllocator = ByteBuffAllocator.HEAP;<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>  /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.247"></a>
+<span class="sourceLineNo">248</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.248"></a>
+<span class="sourceLineNo">249</span>   * @return true if blocks should be cached on read, false if not<a name="line.249"></a>
+<span class="sourceLineNo">250</span>   */<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  public boolean shouldCacheDataOnRead() {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    return cacheDataOnRead;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  }<a name="line.253"></a>
+<span class="sourceLineNo">254</span><a name="line.254"></a>
+<span class="sourceLineNo">255</span>  public boolean shouldDropBehindCompaction() {<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    return dropBehindCompaction;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  }<a name="line.257"></a>
+<span class="sourceLineNo">258</span><a name="line.258"></a>
+<span class="sourceLineNo">259</span>  /**<a name="line.259"></a>
+<span class="sourceLineNo">260</span>   * Should we cache a block of a particular category? We always cache<a name="line.260"></a>
+<span class="sourceLineNo">261</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.261"></a>
+<span class="sourceLineNo">262</span>   * available.<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   */<a name="line.263"></a>
+<span class="sourceLineNo">264</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    return cacheDataOnRead || category == BlockCategory.INDEX || category == BlockCategory.BLOOM ||<a name="line.265"></a>
+<span class="sourceLineNo">266</span>        (prefetchOnOpen &amp;&amp; (category != BlockCategory.META &amp;&amp; category != BlockCategory.UNKNOWN));<a name="line.266"></a>
 <span class="sourceLineNo">267</span>  }<a name="line.267"></a>
 <span class="sourceLineNo">268</span><a name="line.268"></a>
 <span class="sourceLineNo">269</span>  /**<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.270"></a>
-<span class="sourceLineNo">271</span>   *                         when an HFile is written<a name="line.271"></a>
-<span class="sourceLineNo">272</span>   */<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  @VisibleForTesting<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  }<a name="line.276"></a>
-<span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>  /**<a name="line.279"></a>
-<span class="sourceLineNo">280</span>   * Enable cache on write including:<a name="line.280"></a>
-<span class="sourceLineNo">281</span>   * cacheDataOnWrite<a name="line.281"></a>
-<span class="sourceLineNo">282</span>   * cacheIndexesOnWrite<a name="line.282"></a>
-<span class="sourceLineNo">283</span>   * cacheBloomsOnWrite<a name="line.283"></a>
-<span class="sourceLineNo">284</span>   */<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  public void enableCacheOnWrite() {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    this.cacheDataOnWrite = true;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    this.cacheIndexesOnWrite = true;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    this.cacheBloomsOnWrite = true;<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  }<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span><a name="line.291"></a>
-<span class="sourceLineNo">292</span>  /**<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   *         is written, false if not<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   */<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    return this.cacheIndexesOnWrite;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  }<a name="line.298"></a>
-<span class="sourceLineNo">299</span><a name="line.299"></a>
-<span class="sourceLineNo">300</span>  /**<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.301"></a>
-<span class="sourceLineNo">302</span>   *         is written, false if not<a name="line.302"></a>
-<span class="sourceLineNo">303</span>   */<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    return this.cacheBloomsOnWrite;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  /**<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   *         reader is closed, false if not<a name="line.310"></a>
-<span class="sourceLineNo">311</span>   */<a name="line.311"></a>
-<span class="sourceLineNo">312</span>  public boolean shouldEvictOnClose() {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    return this.evictOnClose;<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
-<span class="sourceLineNo">315</span><a name="line.315"></a>
-<span class="sourceLineNo">316</span>  /**<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   * Only used for testing.<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   *                     HFile reader is closed<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   */<a name="line.320"></a>
-<span class="sourceLineNo">321</span>  @VisibleForTesting<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    this.evictOnClose = evictOnClose;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  }<a name="line.324"></a>
-<span class="sourceLineNo">325</span><a name="line.325"></a>
-<span class="sourceLineNo">326</span>  /**<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  public boolean shouldCacheDataCompressed() {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    return this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  }<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
-<span class="sourceLineNo">334</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
-<span class="sourceLineNo">336</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    switch (category) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>      case DATA:<a name="line.338"></a>
-<span class="sourceLineNo">339</span>        return this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      default:<a name="line.340"></a>
-<span class="sourceLineNo">341</span>        return false;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  }<a name="line.343"></a>
-<span class="sourceLineNo">344</span><a name="line.344"></a>
-<span class="sourceLineNo">345</span>  /**<a name="line.345"></a>
-<span class="sourceLineNo">346</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   */<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  public boolean shouldPrefetchOnOpen() {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    return this.prefetchOnOpen;<a name="line.349"></a>
-<span class="sourceLineNo">350</span>  }<a name="line.350"></a>
-<span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>  /**<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @return true if blocks should be cached while writing during compaction, false if not<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   */<a name="line.354"></a>
-<span class="sourceLineNo">355</span>  public boolean shouldCacheCompactedBlocksOnWrite() {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    return this.cacheCompactedDataOnWrite;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  /**<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * Return true if we may find this type of block in block cache.<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * &lt;p&gt;<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * configuration.<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    if (cacheDataOnRead) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      return true;<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    }<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    if (prefetchOnOpen) {<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      return true;<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    }<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    if (cacheDataOnWrite) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return true;<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    }<a name="line.376"></a>
-<span class="sourceLineNo">377</span>    if (blockType == null) {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      return true;<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    if (blockType.getCategory() == BlockCategory.BLOOM ||<a name="line.380"></a>
-<span class="sourceLineNo">381</span>        blockType.getCategory() == BlockCategory.INDEX) {<a name="line.381"></a>
-<span class="sourceLineNo">382</span>      return true;<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    }<a name="line.383"></a>
-<span class="sourceLineNo">384</span>    return false;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>  }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>  /**<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   * If we make sure the block could not be cached, we will not acquire the lock<a name="line.388"></a>
-<span class="sourceLineNo">389</span>   * otherwise we will acquire lock<a name="line.389"></a>
-<span class="sourceLineNo">390</span>   */<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  public boolean shouldLockOnCacheMiss(BlockType blockType) {<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    if (blockType == null) {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      return true;<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    return shouldCacheBlockOnRead(blockType.getCategory());<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * Returns the block cache.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   *<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @return the block cache, or null if caching is completely disabled<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public Optional&lt;BlockCache&gt; getBlockCache() {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>    return Optional.ofNullable(this.blockCache);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  }<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>  public boolean isCombinedBlockCache() {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    return blockCache instanceof CombinedBlockCache;<a name="line.408"></a>
-<span class="sourceLineNo">409</span>  }<a name="line.409"></a>
-<span class="sourceLineNo">410</span><a name="line.410"></a>
-<span class="sourceLineNo">411</span>  public ByteBuffAllocator getByteBuffAllocator() {<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    return this.byteBuffAllocator;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  }<a name="line.413"></a>
-<span class="sourceLineNo">414</span><a name="line.414"></a>
-<span class="sourceLineNo">415</span>  @Override<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  public String toString() {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    return "cacheDataOnRead=" + shouldCacheDataOnRead() + ", cacheDataOnWrite="<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        + shouldCacheDataOnWrite() + ", cacheIndexesOnWrite=" + shouldCacheIndexesOnWrite()<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        + ", cacheBloomsOnWrite=" + shouldCacheBloomsOnWrite() + ", cacheEvictOnClose="<a name="line.419"></a>
-<span class="sourceLineNo">420</span>        + shouldEvictOnClose() + ", cacheDataCompressed=" + shouldCacheDataCompressed()<a name="line.420"></a>
-<span class="sourceLineNo">421</span>        + ", prefetchOnOpen=" + shouldPrefetchOnOpen();<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>}<a name="line.423"></a>
+<span class="sourceLineNo">270</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.270"></a>
+<span class="sourceLineNo">271</span>   */<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  public boolean isInMemory() {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    return this.inMemory;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  }<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /**<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   *         written, false if not<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  public boolean shouldCacheDataOnWrite() {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    return this.cacheDataOnWrite;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   *                         when an HFile is written<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  @VisibleForTesting<a name="line.288"></a>
+<span class="sourceLineNo">289</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>  /**<a name="line.293"></a>
+<span class="sourceLineNo">294</span>   * Enable cache on write including:<a name="line.294"></a>
+<span class="sourceLineNo">295</span>   * cacheDataOnWrite<a name="line.295"></a>
+<span class="sourceLineNo">296</span>   * cacheIndexesOnWrite<a name="line.296"></a>
+<span class="sourceLineNo">297</span>   * cacheBloomsOnWrite<a name="line.297"></a>
+<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
+<span class="sourceLineNo">299</span>  public void enableCacheOnWrite() {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    this.cacheDataOnWrite = true;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    this.cacheIndexesOnWrite = true;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    this.cacheBloomsOnWrite = true;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  }<a name="line.303"></a>
+<span class="sourceLineNo">304</span><a name="line.304"></a>
+<span class="sourceLineNo">305</span>  /**<a name="line.305"></a>
+<span class="sourceLineNo">306</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   *         is written, false if not<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   */<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    return this.cacheIndexesOnWrite;<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   *         is written, false if not<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    return this.cacheBloomsOnWrite;<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  }<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   *         reader is closed, false if not<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   */<a name="line.324"></a>
+<span class="sourceLineNo">325</span>  public boolean shouldEvictOnClose() {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    return this.evictOnClose;<a name="line.326"></a>
+<span class="sourceLineNo">327</span>  }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>  /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * Only used for testing.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   *                     HFile reader is closed<a name="line.332"></a>
+<span class="sourceLineNo">333</span>   */<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  @VisibleForTesting<a name="line.334"></a>
+<span class="sourceLineNo">335</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    this.evictOnClose = evictOnClose;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
+<span class="sourceLineNo">338</span><a name="line.338"></a>
+<span class="sourceLineNo">339</span>  /**<a name="line.339"></a>
+<span class="sourceLineNo">340</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   */<a name="line.341"></a>
+<span class="sourceLineNo">342</span>  public boolean shouldCacheDataCompressed() {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    return this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
+<span class="sourceLineNo">345</span><a name="line.345"></a>
+<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
+<span class="sourceLineNo">347</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.347"></a>
+<span class="sourceLineNo">348</span>   */<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    switch (category) {<a name="line.350"></a>
+<span class="sourceLineNo">351</span>      case DATA:<a name="line.351"></a>
+<span class="sourceLineNo">352</span>        return this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      default:<a name="line.353"></a>
+<span class="sourceLineNo">354</span>        return false;<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  }<a name="line.356"></a>
+<span class="sourceLineNo">357</span><a name="line.357"></a>
+<span class="sourceLineNo">358</span>  /**<a name="line.358"></a>
+<span class="sourceLineNo">359</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.359"></a>
+<span class="sourceLineNo">360</span>   */<a name="line.360"></a>
+<span class="sourceLineNo">361</span>  public boolean shouldPrefetchOnOpen() {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return this.prefetchOnOpen;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * @return true if blocks should be cached while writing during compaction, false if not<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   */<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  public boolean shouldCacheCompactedBlocksOnWrite() {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    return this.cacheCompactedDataOnWrite;<a name="line.369"></a>
+<span class="sourceLineNo">370</span>  }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>  /**<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   * @return total file size in bytes threshold for caching while writing during compaction<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public long getCacheCompactedBlocksOnWriteThreshold() {<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    return this.cacheCompactedDataOnWriteThreshold;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>  }<a name="line.377"></a>
+<span class="sourceLineNo">378</span>  /**<a name="line.378"></a>
+<span class="sourceLineNo">379</span>   * Return true if we may find this type of block in block cache.<a name="line.379"></a>
+<span class="sourceLineNo">380</span>   * &lt;p&gt;<a name="line.380"></a>
+<span class="sourceLineNo">381</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.381"></a>
+<span class="sourceLineNo">382</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * configuration.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   */<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    if (cacheDataOnRead) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      return true;<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    }<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    if (prefetchOnOpen) {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>      return true;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    if (cacheDataOnWrite) {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>      return true;<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    if (blockType == null) {<a name="line.396"></a>
+<span class="sourceLineNo">397</span>      return true;<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    }<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    if (blockType.getCategory() == BlockCategory.BLOOM ||<a name="line.399"></a>
+<span class="sourceLineNo">400</span>        blockType.getCategory() == BlockCategory.INDEX) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>      return true;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    }<a name="line.402"></a>
+<span class="sourceLineNo">403</span>    return false;<a name="line.403"></a>
+<span class="sourceLineNo">404</span>  }<a name="line.404"></a>
+<span class="sourceLineNo">405</span><a name="line.405"></a>
+<span class="sourceLineNo">406</span>  /**<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   * If we make sure the block could not be cached, we will not acquire the lock<a name="line.407"></a>
+<span class="sourceLineNo">408</span>   * otherwise we will acquire lock<a name="line.408"></a>
+<span class="sourceLineNo">409</span>   */<a name="line.409"></a>
+<span class="sourceLineNo">410</span>  public boolean shouldLockOnCacheMiss(BlockType blockType) {<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    if (blockType == null) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>      return true;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    }<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return shouldCacheBlockOnRead(blockType.getCategory());<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  /**<a name="line.417"></a>
+<span class="sourceLineNo">418</span>   * Returns the block cache.<a name="line.418"></a>
+<span class="sourceLineNo">419</span>   *<a name="line.419"></a>
+<span class="sourceLineNo">420</span>   * @return the block cache, or null if caching is completely disabled<a name="line.420"></a>
+<span class="sourceLineNo">421</span>   */<a name="line.421"></a>
+<span class="sourceLineNo">422</span>  public Optional&lt;BlockCache&gt; getBlockCache() {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>    return Optional.ofNullable(this.blockCache);<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  }<a name="line.424"></a>
+<span class="sourceLineNo">425</span><a name="line.425"></a>
+<span class="sourceLineNo">426</span>  public boolean isCombinedBlockCache() {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    return blockCache instanceof CombinedBlockCache;<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
+<span class="sourceLineNo">429</span><a name="line.429"></a>
+<span class="sourceLineNo">430</span>  public ByteBuffAllocator getByteBuffAllocator() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    return this.byteBuffAllocator;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>  private long getCacheCompactedBlocksOnWriteThreshold(Configuration conf) {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    long cacheCompactedBlocksOnWriteThreshold = conf<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      .getLong(CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD_KEY,<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD);<a name="line.437"></a>
+<span class="sourceLineNo">438</span><a name="line.438"></a>
+<span class="sourceLineNo">439</span>    if (cacheCompactedBlocksOnWriteThreshold &lt; 0) {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      LOG.warn(<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        "cacheCompactedBlocksOnWriteThreshold value : {} is less than 0, resetting it to: {}",<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        cacheCompactedBlocksOnWriteThreshold, DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>      cacheCompactedBlocksOnWriteThreshold = DEFAULT_CACHE_COMPACTED_BLOCKS_ON_WRITE_THRESHOLD;<a name="line.443"></a>
+<span class="sourceLineNo">444</span>    }<a name="line.444"></a>
+<span class="sourceLineNo">445</span><a name="line.445"></a>
+<span class="sourceLineNo">446</span>    return cacheCompactedBlocksOnWriteThreshold;<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  }<a name="line.447"></a>
+<span class="sourceLineNo">448</span><a name="line.448"></a>
+<span class="sourceLineNo">449</span>  @Override<a name="line.449"></a>
+<span class="sourceLineNo">450</span>  public String toString() {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    return "cacheDataOnRead=" + shouldCacheDataOnRead() + ", cacheDataOnWrite="<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        + shouldCacheDataOnWrite() + ", cacheIndexesOnWrite=" + shouldCacheIndexesOnWrite()<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        + ", cacheBloomsOnWrite=" + shouldCacheBloomsOnWrite() + ", cacheEvictOnClose="<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        + shouldEvictOnClose() + ", cacheDataCompressed=" + shouldCacheDataCompressed()<a name="line.454"></a>
+<span class="sourceLineNo">455</span>        + ", prefetchOnOpen=" + shouldPrefetchOnOpen();<a name="line.455"></a>
+<span class="sourceLineNo">456</span>  }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>}<a name="line.457"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
index b73c964..aae2eca 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -1118,1743 +1118,1759 @@
 <span class="sourceLineNo">1110</span>    return sf;<a name="line.1110"></a>
 <span class="sourceLineNo">1111</span>  }<a name="line.1111"></a>
 <span class="sourceLineNo">1112</span><a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>  /**<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span>   * @param compression Compression algorithm to use<a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>   * @param isCompaction whether we are creating a new file in a compaction<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>   * @param includeMVCCReadpoint - whether to include MVCC or not<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>   * @param includesTag - includesTag or not<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>   * @return Writer for a new StoreFile in the tmp dir.<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>   */<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>  // TODO : allow the Writer factory to create Writers of ShipperListener type only in case of<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>  // compaction<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span>  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,<a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>      boolean shouldDropBehind) throws IOException {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>    final CacheConfig writerCacheConf;<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>    if (isCompaction) {<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      // Don't cache data on write on compactions, unless specifically configured to do so<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      writerCacheConf = new CacheConfig(cacheConf);<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      final boolean cacheCompactedBlocksOnWrite =<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>        cacheConf.shouldCacheCompactedBlocksOnWrite();<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span>      // if data blocks are to be cached on write<a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>      // during compaction, we should forcefully<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>      // cache index and bloom blocks as well<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span>      if (cacheCompactedBlocksOnWrite) {<a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>        if (!cacheOnWriteLogged) {<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>          LOG.info("For Store {} , cacheCompactedBlocksOnWrite is true, hence enabled " +<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>              "cacheOnWrite for Data blocks, Index blocks and Bloom filter blocks",<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>            getColumnFamilyName());<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>          cacheOnWriteLogged = true;<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>        }<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>      } else {<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>        writerCacheConf.setCacheDataOnWrite(false);<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>      }<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    } else {<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>      writerCacheConf = cacheConf;<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>      final boolean shouldCacheDataOnWrite = cacheConf.shouldCacheDataOnWrite();<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>      if (shouldCacheDataOnWrite) {<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>        if (!cacheOnWriteLogged) {<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>          LOG.info("For Store {} , cacheDataOnWrite is true, hence enabled cacheOnWrite for " +<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>            "Index blocks and Bloom filter blocks", getColumnFamilyName());<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>          cacheOnWriteLogged = true;<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>        }<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      }<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span>    }<a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>    InetSocketAddress[] favoredNodes = null;<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>    if (region.getRegionServerServices() != null) {<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>      favoredNodes = region.getRegionServerServices().getFavoredNodesForRegion(<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>          region.getRegionInfo().getEncodedName());<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span>    }<a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>    HFileContext hFileContext = createFileContext(compression, includeMVCCReadpoint, includesTag,<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      cryptoContext);<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>    Path familyTempDir = new Path(fs.getTempDir(), family.getNameAsString());<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span>    StoreFileWriter.Builder builder = new StoreFileWriter.Builder(conf, writerCacheConf,<a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>        this.getFileSystem())<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>            .withOutputDir(familyTempDir)<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>            .withBloomType(family.getBloomFilterType())<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>            .withMaxKeyCount(maxKeyCount)<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>            .withFavoredNodes(favoredNodes)<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>            .withFileContext(hFileContext)<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>            .withShouldDropCacheBehind(shouldDropBehind)<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>            .withCompactedFilesSupplier(this::getCompactedFiles);<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>    return builder.build();<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>  }<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span><a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>  private HFileContext createFileContext(Compression.Algorithm compression,<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>      boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>    if (compression == null) {<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>      compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span>    }<a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>    HFileContext hFileContext = new HFileContextBuilder()<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>                                .withIncludesMvcc(includeMVCCReadpoint)<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span>                                .withIncludesTags(includesTag)<a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>                                .withCompression(compression)<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>                                .withCompressTags(family.isCompressTags())<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>                                .withChecksumType(checksumType)<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>                                .withBytesPerCheckSum(bytesPerChecksum)<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>                                .withBlockSize(blocksize)<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>                                .withHBaseCheckSum(true)<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span>                                .withDataBlockEncoding(family.getDataBlockEncoding())<a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>                                .withEncryptionContext(cryptoContext)<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>                                .withCreateTime(EnvironmentEdgeManager.currentTime())<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>                                .withColumnFamily(family.getName())<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>                                .withTableName(region.getTableDescriptor()<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>                                    .getTableName().getName())<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>                                .withCellComparator(this.comparator)<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>                                .build();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    return hFileContext;<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>  }<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span><a name="line.1201"></a>
-<span class="sourceLineNo">1202</span><a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>  private long getTotalSize(Collection&lt;HStoreFile&gt; sfs) {<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    return sfs.stream().mapToLong(sf -&gt; sf.getReader().length()).sum();<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span>  }<a name="line.1205"></a>
-<span class="sourceLineNo">1206</span><a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>  /**<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>   * Change storeFiles adding into place the Reader produced by this new flush.<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>   * @param sfs Store files<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>   * @return Whether compaction is required.<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span>   */<a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>  private boolean updateStorefiles(List&lt;HStoreFile&gt; sfs, long snapshotId) throws IOException {<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>    this.lock.writeLock().lock();<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>    try {<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span>      this.storeEngine.getStoreFileManager().insertNewFiles(sfs);<a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      if (snapshotId &gt; 0) {<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>        this.memstore.clearSnapshot(snapshotId);<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span>      }<a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>    } finally {<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // We need the lock, as long as we are updating the storeFiles<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // or changing the memstore. Let us release it before calling<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      // notifyChangeReadersObservers. See HBASE-4485 for a possible<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span>      // deadlock scenario that could have happened if continue to hold<a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      // the lock.<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>      this.lock.writeLock().unlock();<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>    }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span>    // notify to be called here - only in case of flushes<a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>    notifyChangedReadersObservers(sfs);<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>    if (LOG.isTraceEnabled()) {<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      long totalSize = getTotalSize(sfs);<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      String traceMessage = "FLUSH time,count,size,store size,store files ["<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>          + EnvironmentEdgeManager.currentTime() + "," + sfs.size() + "," + totalSize<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>          + "," + storeSize + "," + storeEngine.getStoreFileManager().getStorefileCount() + "]";<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>      LOG.trace(traceMessage);<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>    }<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>    return needsCompaction();<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span>  }<a name="line.1237"></a>
-<span class="sourceLineNo">1238</span><a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>  /**<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>   * Notify all observers that set of Readers has changed.<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>   */<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  private void notifyChangedReadersObservers(List&lt;HStoreFile&gt; sfs) throws IOException {<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span>    for (ChangedReadersObserver o : this.changedReaderObservers) {<a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>      List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>      this.lock.readLock().lock();<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>      try {<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>        memStoreScanners = this.memstore.getScanners(o.getReadPoint());<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>      } finally {<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>        this.lock.readLock().unlock();<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>      }<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>      o.updateReaders(sfs, memStoreScanners);<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>    }<a name="line.1252"></a>
+<span class="sourceLineNo">1113</span>  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,<a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    boolean shouldDropBehind) throws IOException {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>    return createWriterInTmp(maxKeyCount, compression, isCompaction, includeMVCCReadpoint,<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      includesTag, shouldDropBehind, -1);<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>  }<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span><a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>  /**<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span>   * @param compression Compression algorithm to use<a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>   * @param isCompaction whether we are creating a new file in a compaction<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>   * @param includeMVCCReadpoint - whether to include MVCC or not<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>   * @param includesTag - includesTag or not<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>   * @return Writer for a new StoreFile in the tmp dir.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>   */<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>  // TODO : allow the Writer factory to create Writers of ShipperListener type only in case of<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>  // compaction<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span>      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,<a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>      boolean shouldDropBehind, long totalCompactedFilesSize) throws IOException {<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>    // creating new cache config for each new writer<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span>    final CacheConfig writerCacheConf = new CacheConfig(cacheConf);<a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>    if (isCompaction) {<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>      // Don't cache data on write on compactions, unless specifically configured to do so<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>      // Cache only when total file size remains lower than configured threshold<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>      final boolean cacheCompactedBlocksOnWrite =<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>        cacheConf.shouldCacheCompactedBlocksOnWrite();<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>      // if data blocks are to be cached on write<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>      // during compaction, we should forcefully<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>      // cache index and bloom blocks as well<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>      if (cacheCompactedBlocksOnWrite &amp;&amp; totalCompactedFilesSize &lt;= cacheConf<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>        .getCacheCompactedBlocksOnWriteThreshold()) {<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>        if (!cacheOnWriteLogged) {<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>          LOG.info("For Store {} , cacheCompactedBlocksOnWrite is true, hence enabled " +<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>              "cacheOnWrite for Data blocks, Index blocks and Bloom filter blocks",<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>            getColumnFamilyName());<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>          cacheOnWriteLogged = true;<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>        }<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>      } else {<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>        writerCacheConf.setCacheDataOnWrite(false);<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>        if (totalCompactedFilesSize &gt; cacheConf.getCacheCompactedBlocksOnWriteThreshold()) {<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>          // checking condition once again for logging<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span>          LOG.debug(<a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>            "For Store {}, setting cacheCompactedBlocksOnWrite as false as total size of compacted "<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>              + "files - {}, is greater than cacheCompactedBlocksOnWriteThreshold - {}",<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>            getColumnFamilyName(), totalCompactedFilesSize,<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>            cacheConf.getCacheCompactedBlocksOnWriteThreshold());<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span>        }<a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      }<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>    } else {<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      final boolean shouldCacheDataOnWrite = cacheConf.shouldCacheDataOnWrite();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span>      if (shouldCacheDataOnWrite) {<a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>        if (!cacheOnWriteLogged) {<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>          LOG.info("For Store {} , cacheDataOnWrite is true, hence enabled cacheOnWrite for " +<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>            "Index blocks and Bloom filter blocks", getColumnFamilyName());<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>          cacheOnWriteLogged = true;<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>        }<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>      }<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>    }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>    InetSocketAddress[] favoredNodes = null;<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>    if (region.getRegionServerServices() != null) {<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>      favoredNodes = region.getRegionServerServices().getFavoredNodesForRegion(<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>          region.getRegionInfo().getEncodedName());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>    }<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>    HFileContext hFileContext = createFileContext(compression, includeMVCCReadpoint, includesTag,<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>      cryptoContext);<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span>    Path familyTempDir = new Path(fs.getTempDir(), family.getNameAsString());<a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>    StoreFileWriter.Builder builder = new StoreFileWriter.Builder(conf, writerCacheConf,<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        this.getFileSystem())<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span>            .withOutputDir(familyTempDir)<a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>            .withBloomType(family.getBloomFilterType())<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>            .withMaxKeyCount(maxKeyCount)<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>            .withFavoredNodes(favoredNodes)<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>            .withFileContext(hFileContext)<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>            .withShouldDropCacheBehind(shouldDropBehind)<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>            .withCompactedFilesSupplier(this::getCompactedFiles);<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span>    return builder.build();<a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  }<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span><a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>  private HFileContext createFileContext(Compression.Algorithm compression,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>      boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>    if (compression == null) {<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>      compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    }<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    HFileContext hFileContext = new HFileContextBuilder()<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>                                .withIncludesMvcc(includeMVCCReadpoint)<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>                                .withIncludesTags(includesTag)<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>                                .withCompression(compression)<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>                                .withCompressTags(family.isCompressTags())<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>                                .withChecksumType(checksumType)<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span>                                .withBytesPerCheckSum(bytesPerChecksum)<a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>                                .withBlockSize(blocksize)<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>                                .withHBaseCheckSum(true)<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>                                .withDataBlockEncoding(family.getDataBlockEncoding())<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>                                .withEncryptionContext(cryptoContext)<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>                                .withCreateTime(EnvironmentEdgeManager.currentTime())<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span>                                .withColumnFamily(family.getName())<a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>                                .withTableName(region.getTableDescriptor()<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>                                    .getTableName().getName())<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>                                .withCellComparator(this.comparator)<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span>                                .build();<a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>    return hFileContext;<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>  }<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span><a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>  private long getTotalSize(Collection&lt;HStoreFile&gt; sfs) {<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>    return sfs.stream().mapToLong(sf -&gt; sf.getReader().length()).sum();<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>  }<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>  /**<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>   * Change storeFiles adding into place the Reader produced by this new flush.<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>   * @param sfs Store files<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span>   * @return Whether compaction is required.<a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>   */<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>  private boolean updateStorefiles(List&lt;HStoreFile&gt; sfs, long snapshotId) throws IOException {<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>    this.lock.writeLock().lock();<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>    try {<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>      this.storeEngine.getStoreFileManager().insertNewFiles(sfs);<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>      if (snapshotId &gt; 0) {<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>        this.memstore.clearSnapshot(snapshotId);<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>      }<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>    } finally {<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span>      // We need the lock, as long as we are updating the storeFiles<a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      // or changing the memstore. Let us release it before calling<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>      // notifyChangeReadersObservers. See HBASE-4485 for a possible<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      // deadlock scenario that could have happened if continue to hold<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>      // the lock.<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>      this.lock.writeLock().unlock();<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span>    }<a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>    // notify to be called here - only in case of flushes<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    notifyChangedReadersObservers(sfs);<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (LOG.isTraceEnabled()) {<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>      long totalSize = getTotalSize(sfs);<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>      String traceMessage = "FLUSH time,count,size,store size,store files ["<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>          + EnvironmentEdgeManager.currentTime() + "," + sfs.size() + "," + totalSize<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>          + "," + storeSize + "," + storeEngine.getStoreFileManager().getStorefileCount() + "]";<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>      LOG.trace(traceMessage);<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>    }<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>    return needsCompaction();<a name="line.1252"></a>
 <span class="sourceLineNo">1253</span>  }<a name="line.1253"></a>
 <span class="sourceLineNo">1254</span><a name="line.1254"></a>
 <span class="sourceLineNo">1255</span>  /**<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>   * @param cacheBlocks cache the blocks or not<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span>   * @param usePread true to use pread, false if not<a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * @param matcher the scan query matcher<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   * @param startRow the start row<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>   * @param stopRow the stop row<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>   * @param readPt the read point of the current scan<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>   * @return all scanners for this store<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>   */<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean isGet, boolean usePread,<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, byte[] stopRow, long readPt)<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      throws IOException {<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    return getScanners(cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow, false,<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>      readPt);<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span>  }<a name="line.1271"></a>
-<span class="sourceLineNo">1272</span><a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>  /**<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>   * @param cacheBlocks cache the blocks or not<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>   * @param usePread true to use pread, false if not<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>   * @param matcher the scan query matcher<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>   * @param startRow the start row<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>   * @param includeStartRow true to include start row, false if not<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>   * @param stopRow the stop row<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>   * @param readPt the read point of the current scan<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>   * @return all scanners for this store<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>   */<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean usePread,<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, boolean includeStartRow,<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      byte[] stopRow, boolean includeStopRow, long readPt) throws IOException {<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    Collection&lt;HStoreFile&gt; storeFilesToScan;<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>    List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span>    this.lock.readLock().lock();<a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>    try {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>      storeFilesToScan = this.storeEngine.getStoreFileManager().getFilesForScan(startRow,<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>        includeStartRow, stopRow, includeStopRow);<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span>      memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>    } finally {<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>      this.lock.readLock().unlock();<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>    }<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span><a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>    try {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>      // First the store file scanners<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span><a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      // TODO this used to get the store files in descending order,<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>      // but now we get them in ascending order, which I think is<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>      // actually more correct, since memstore get put at the end.<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>        .getScannersForStoreFiles(storeFilesToScan, cacheBlocks, usePread, isCompaction, false,<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>          matcher, readPt);<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>      scanners.addAll(sfScanners);<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // Then the memstore scanners<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      scanners.addAll(memStoreScanners);<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>      return scanners;<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span>    } catch (Throwable t) {<a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>      clearAndClose(memStoreScanners);<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    }<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span><a name="line.1319"></a>
-<span class="sourceLineNo">1320</span>  private static void clearAndClose(List&lt;KeyValueScanner&gt; scanners) {<a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>    if (scanners == null) {<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>      return;<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>    }<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>    for (KeyValueScanner s : scanners) {<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>      s.close();<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    }<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    scanners.clear();<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>  }<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span><a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>  /**<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>   * (that happens further down the line).<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span>   * @param files the list of files on which the scanners has to be created<a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>   * @param cacheBlocks cache the blocks or not<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * @param usePread true to use pread, false if not<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>   * @param matcher the scan query matcher<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>   * @param startRow the start row<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>   * @param stopRow the stop row<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>   * @param readPt the read point of the current scan<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>   */<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>      boolean isGet, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span>      byte[] startRow, byte[] stopRow, long readPt, boolean includeMemstoreScanner)<a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>      throws IOException {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>    return getScanners(files, cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow,<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      false, readPt, includeMemstoreScanner);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>  }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>  /**<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span>   * (that happens further down the line).<a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>   * @param files the list of files on which the scanners has to be created<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>   * @param cacheBlocks ache the blocks or not<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>   * @param usePread true to use pread, false if not<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>   * @param matcher the scan query matcher<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>   * @param startRow the start row<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>   * @param includeStartRow true to include start row, false if not<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>   * @param stopRow the stop row<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>   * @param readPt the read point of the current scan<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>   */<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>      boolean usePread, boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow,<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span>      boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>    List&lt;KeyValueScanner&gt; memStoreScanners = null;<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>    if (includeMemstoreScanner) {<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      this.lock.readLock().lock();<a name="line.1374"></a>
-<span class="sourceLineNo">1375</span>      try {<a name="line.1375"></a>
-<span class="sourceLineNo">1376</span>        memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      } finally {<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>        this.lock.readLock().unlock();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      }<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>    }<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>    try {<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>        .getScannersForStoreFiles(files, cacheBlocks, usePread, isCompaction, false, matcher,<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span>          readPt);<a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      scanners.addAll(sfScanners);<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>      // Then the memstore scanners<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>      if (memStoreScanners != null) {<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        scanners.addAll(memStoreScanners);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>      }<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>      return scanners;<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>    } catch (Throwable t) {<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>      clearAndClose(memStoreScanners);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>    }<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>  }<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span><a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>  /**<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>   * @param o Observer who wants to know about changes in set of Readers<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>   */<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>  public void addChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>    this.changedReaderObservers.add(o);<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>  }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span><a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>  /**<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>   * @param o Observer no longer interested in changes in set of Readers.<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>   */<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>  public void deleteChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span>    // We don't check if observer present; it may not be (legitimately)<a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    this.changedReaderObservers.remove(o);<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>  }<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  // Compaction<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.1415"></a>
-<span class="sourceLineNo">1416</span><a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>  /**<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * Compact the StoreFiles.  This method may take some time, so the calling<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * thread must be able to block for long periods.<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   *<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * &lt;p&gt;During this time, the Store can work as usual, getting values from<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   * StoreFiles and writing new StoreFiles from the memstore.<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>   *<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>   * Existing StoreFiles are not destroyed until the new compacted StoreFile is<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>   * completely written-out to disk.<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>   *<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>   * &lt;p&gt;The compactLock prevents multiple simultaneous compactions.<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>   * The structureLock prevents us from interfering with other write operations.<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>   *<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>   * &lt;p&gt;We don't want to hold the structureLock for the whole time, as a compact()<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>   * can be lengthy and we want to allow cache-flushes during this period.<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>   *<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span>   * &lt;p&gt; Compaction event should be idempotent, since there is no IO Fencing for<a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>   * the region directory in hdfs. A region server might still try to complete the<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * compaction after it lost the region. That is why the following events are carefully<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * ordered for a compaction:<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   *  1. Compaction writes new files under region/.tmp directory (compaction output)<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   *  2. Compaction atomically moves the temporary file under region directory<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   *  3. Compaction appends a WAL edit containing the compaction input and output files.<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   *  Forces sync on WAL.<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>   *  4. Compaction deletes the input files from the region directory.<a name="line.1441"></a>
+<span class="sourceLineNo">1256</span>   * Notify all observers that set of Readers has changed.<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span>   */<a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  private void notifyChangedReadersObservers(List&lt;HStoreFile&gt; sfs) throws IOException {<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>    for (ChangedReadersObserver o : this.changedReaderObservers) {<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>      List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>      this.lock.readLock().lock();<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>      try {<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>        memStoreScanners = this.memstore.getScanners(o.getReadPoint());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>      } finally {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>        this.lock.readLock().unlock();<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>      }<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      o.updateReaders(sfs, memStoreScanners);<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   * @param cacheBlocks cache the blocks or not<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>   * @param usePread true to use pread, false if not<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>   * @param matcher the scan query matcher<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>   * @param startRow the start row<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>   * @param stopRow the stop row<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>   * @param readPt the read point of the current scan<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>   * @return all scanners for this store<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>   */<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean isGet, boolean usePread,<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, byte[] stopRow, long readPt)<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>      throws IOException {<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>    return getScanners(cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow, false,<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>      readPt);<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>  }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span><a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  /**<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>   * @param cacheBlocks cache the blocks or not<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>   * @param usePread true to use pread, false if not<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span>   * @param matcher the scan query matcher<a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>   * @param startRow the start row<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * @param includeStartRow true to include start row, false if not<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * @param stopRow the stop row<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>   * @param readPt the read point of the current scan<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>   * @return all scanners for this store<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>   */<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean usePread,<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, boolean includeStartRow,<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>      byte[] stopRow, boolean includeStopRow, long readPt) throws IOException {<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>    Collection&lt;HStoreFile&gt; storeFilesToScan;<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>    List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    this.lock.readLock().lock();<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span>    try {<a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>      storeFilesToScan = this.storeEngine.getStoreFileManager().getFilesForScan(startRow,<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>        includeStartRow, stopRow, includeStopRow);<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    } finally {<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span>      this.lock.readLock().unlock();<a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    }<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span><a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    try {<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>      // First the store file scanners<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span><a name="line.1318"></a>
+<span class="sourceLineNo">1319</span>      // TODO this used to get the store files in descending order,<a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>      // but now we get them in ascending order, which I think is<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>      // actually more correct, since memstore get put at the end.<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>        .getScannersForStoreFiles(storeFilesToScan, cacheBlocks, usePread, isCompaction, false,<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>          matcher, readPt);<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>      scanners.addAll(sfScanners);<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>      // Then the memstore scanners<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      scanners.addAll(memStoreScanners);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      return scanners;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    } catch (Throwable t) {<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>      clearAndClose(memStoreScanners);<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>    }<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>  }<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span><a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private static void clearAndClose(List&lt;KeyValueScanner&gt; scanners) {<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>    if (scanners == null) {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>      return;<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    }<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    for (KeyValueScanner s : scanners) {<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span>      s.close();<a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    }<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    scanners.clear();<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>  }<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>  /**<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>   * (that happens further down the line).<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>   * @param files the list of files on which the scanners has to be created<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span>   * @param cacheBlocks cache the blocks or not<a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>   * @param usePread true to use pread, false if not<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span>   * @param matcher the scan query matcher<a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>   * @param startRow the start row<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>   * @param stopRow the stop row<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>   * @param readPt the read point of the current scan<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>   */<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>      boolean isGet, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      byte[] startRow, byte[] stopRow, long readPt, boolean includeMemstoreScanner)<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>      throws IOException {<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span>    return getScanners(files, cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow,<a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>      false, readPt, includeMemstoreScanner);<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>  }<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span><a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>  /**<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>   * (that happens further down the line).<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>   * @param files the list of files on which the scanners has to be created<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>   * @param cacheBlocks ache the blocks or not<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>   * @param usePread true to use pread, false if not<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1374"></a>
+<span class="sourceLineNo">1375</span>   * @param matcher the scan query matcher<a name="line.1375"></a>
+<span class="sourceLineNo">1376</span>   * @param startRow the start row<a name="line.1376"></a>
+<span class="sourceLineNo">1377</span>   * @param includeStartRow true to include start row, false if not<a name="line.1377"></a>
+<span class="sourceLineNo">1378</span>   * @param stopRow the stop row<a name="line.1378"></a>
+<span class="sourceLineNo">1379</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1379"></a>
+<span class="sourceLineNo">1380</span>   * @param readPt the read point of the current scan<a name="line.1380"></a>
+<span class="sourceLineNo">1381</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1381"></a>
+<span class="sourceLineNo">1382</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1382"></a>
+<span class="sourceLineNo">1383</span>   */<a name="line.1383"></a>
+<span class="sourceLineNo">1384</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1384"></a>
+<span class="sourceLineNo">1385</span>      boolean usePread, boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow,<a name="line.1385"></a>
+<span class="sourceLineNo">1386</span>      boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.1386"></a>
+<span class="sourceLineNo">1387</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.1387"></a>
+<span class="sourceLineNo">1388</span>    List&lt;KeyValueScanner&gt; memStoreScanners = null;<a name="line.1388"></a>
+<span class="sourceLineNo">1389</span>    if (includeMemstoreScanner) {<a name="line.1389"></a>
+<span class="sourceLineNo">1390</span>      this.lock.readLock().lock();<a name="line.1390"></a>
+<span class="sourceLineNo">1391</span>      try {<a name="line.1391"></a>
+<span class="sourceLineNo">1392</span>        memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1392"></a>
+<span class="sourceLineNo">1393</span>      } finally {<a name="line.1393"></a>
+<span class="sourceLineNo">1394</span>        this.lock.readLock().unlock();<a name="line.1394"></a>
+<span class="sourceLineNo">1395</span>      }<a name="line.1395"></a>
+<span class="sourceLineNo">1396</span>    }<a name="line.1396"></a>
+<span class="sourceLineNo">1397</span>    try {<a name="line.1397"></a>
+<span class="sourceLineNo">1398</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1398"></a>
+<span class="sourceLineNo">1399</span>        .getScannersForStoreFiles(files, cacheBlocks, usePread, isCompaction, false, matcher,<a name="line.1399"></a>
+<span class="sourceLineNo">1400</span>          readPt);<a name="line.1400"></a>
+<span class="sourceLineNo">1401</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1401"></a>
+<span class="sourceLineNo">1402</span>      scanners.addAll(sfScanners);<a name="line.1402"></a>
+<span class="sourceLineNo">1403</span>      // Then the memstore scanners<a name="line.1403"></a>
+<span class="sourceLineNo">1404</span>      if (memStoreScanners != null) {<a name="line.1404"></a>
+<span class="sourceLineNo">1405</span>        scanners.addAll(memStoreScanners);<a name="line.1405"></a>
+<span class="sourceLineNo">1406</span>      }<a name="line.1406"></a>
+<span class="sourceLineNo">1407</span>      return scanners;<a name="line.1407"></a>
+<span class="sourceLineNo">1408</span>    } catch (Throwable t) {<a name="line.1408"></a>
+<span class="sourceLineNo">1409</span>      clearAndClose(memStoreScanners);<a name="line.1409"></a>
+<span class="sourceLineNo">1410</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1410"></a>
+<span class="sourceLineNo">1411</span>    }<a name="line.1411"></a>
+<span class="sourceLineNo">1412</span>  }<a name="line.1412"></a>
+<span class="sourceLineNo">1413</span><a name="line.1413"></a>
+<span class="sourceLineNo">1414</span>  /**<a name="line.1414"></a>
+<span class="sourceLineNo">1415</span>   * @param o Observer who wants to know about changes in set of Readers<a name="line.1415"></a>
+<span class="sourceLineNo">1416</span>   */<a name="line.1416"></a>
+<span class="sourceLineNo">1417</span>  public void addChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1417"></a>
+<span class="sourceLineNo">1418</span>    this.changedReaderObservers.add(o);<a name="line.1418"></a>
+<span class="sourceLineNo">1419</span>  }<a name="line.1419"></a>
+<span class="sourceLineNo">1420</span><a name="line.1420"></a>
+<span class="sourceLineNo">1421</span>  /**<a name="line.1421"></a>
+<span class="sourceLineNo">1422</span>   * @param o Observer no longer interested in changes in set of Readers.<a name="line.1422"></a>
+<span class="sourceLineNo">1423</span>   */<a name="line.1423"></a>
+<span class="sourceLineNo">1424</span>  public void deleteChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1424"></a>
+<span class="sourceLineNo">1425</span>    // We don't check if observer present; it may not be (legitimately)<a name="line.1425"></a>
+<span class="sourceLineNo">1426</span>    this.changedReaderObservers.remove(o);<a name="line.1426"></a>
+<span class="sourceLineNo">1427</span>  }<a name="line.1427"></a>
+<span class="sourceLineNo">1428</span><a name="line.1428"></a>
+<span class="sourceLineNo">1429</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.1429"></a>
+<span class="sourceLineNo">1430</span>  // Compaction<a name="line.1430"></a>
+<span class="sourceLineNo">1431</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.1431"></a>
+<span class="sourceLineNo">1432</span><a name="line.1432"></a>
+<span class="sourceLineNo">1433</span>  /**<a name="line.1433"></a>
+<span class="sourceLineNo">1434</span>   * Compact the StoreFiles.  This method may take some time, so the calling<a name="line.1434"></a>
+<span class="sourceLineNo">1435</span>   * thread must be able to block for long periods.<a name="line.1435"></a>
+<span class="sourceLineNo">1436</span>   *<a name="line.1436"></a>
+<span class="sourceLineNo">1437</span>   * &lt;p&gt;During this time, the Store can work as usual, getting values from<a name="line.1437"></a>
+<span class="sourceLineNo">1438</span>   * StoreFiles and writing new StoreFiles from the memstore.<a name="line.1438"></a>
+<span class="sourceLineNo">1439</span>   *<a name="line.1439"></a>
+<span class="sourceLineNo">1440</span>   * Existing StoreFiles are not destroyed until the new compacted StoreFile is<a name="line.1440"></a>
+<span class="sourceLineNo">1441</span>   * completely written-out to disk.<a name="line.1441"></a>
 <span class="sourceLineNo">1442</span>   *<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>   * Failure conditions are handled like this:<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>   *  - If RS fails before 2, compaction wont complete. Even if RS lives on and finishes<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>   *  the compaction later, it will only write the new data file to the region directory.<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>   *  Since we already have this data, this will be idempotent but we will have a redundant<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>   *  copy of the data.<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>   *  - If RS fails between 2 and 3, the region will have a redundant copy of the data. The<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>   *  RS that failed won't be able to finish snyc() for WAL because of lease recovery in WAL.<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>   *  - If RS fails after 3, the region region server who opens the region will pick up the<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span>   *  the compaction marker from the WAL and replay it by removing the compaction input files.<a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>   *  Failed RS can also attempt to delete those files, but the operation will be idempotent<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   *<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * See HBASE-2231 for details.<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   *<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>   * @param compaction compaction details obtained from requestCompaction()<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>   * @return Storefile we compacted into or null if we failed or opted out early.<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>   */<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>  public List&lt;HStoreFile&gt; compact(CompactionContext compaction,<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>    ThroughputController throughputController, User user) throws IOException {<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>    assert compaction != null;<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>    CompactionRequestImpl cr = compaction.getRequest();<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>    try {<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>      // Do all sanity checking in here if we have a valid CompactionRequestImpl<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>      // because we need to clean up after it on the way out in a finally<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span>      // block below<a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>      long compactionStartTime = EnvironmentEdgeManager.currentTime();<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>      assert compaction.hasSelection();<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>      Collection&lt;HStoreFile&gt; filesToCompact = cr.getFiles();<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>      assert !filesToCompact.isEmpty();<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>      synchronized (filesCompacting) {<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>        // sanity check: we're compacting files that this store knows about<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>        // TODO: change this to LOG.error() after more debugging<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>        Preconditions.checkArgument(filesCompacting.containsAll(filesToCompact));<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>      }<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span><a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>      // Ready to go. Have list of files to compact.<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span>      LOG.info("Starting compaction of " + filesToCompact +<a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>        " into tmpdir=" + fs.getTempDir() + ", totalSize=" +<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>          TraditionalBinaryPrefix.long2String(cr.getSize(), "", 1));<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span><a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      return doCompaction(cr, filesToCompact, user, compactionStartTime,<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          compaction.compact(throughputController, user));<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>    } finally {<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      finishCompactionRequest(cr);<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>    }<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>  }<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span><a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>  @VisibleForTesting<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>  protected List&lt;HStoreFile&gt; doCompaction(CompactionRequestImpl cr,<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>      Collection&lt;HStoreFile&gt; filesToCompact, User user, long compactionStartTime,<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>      List&lt;Path&gt; newFiles) throws IOException {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>    // Do the steps necessary to complete the compaction.<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>    List&lt;HStoreFile&gt; sfs = moveCompactedFilesIntoPlace(cr, newFiles, user);<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>    writeCompactionWalRecord(filesToCompact, sfs);<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>    replaceStoreFiles(filesToCompact, sfs);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>    if (cr.isMajor()) {<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>      majorCompactedCellsCount.addAndGet(getCompactionProgress().getTotalCompactingKVs());<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>      majorCompactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>    } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>      compactedCellsCount.addAndGet(getCompactionProgress().getTotalCompactingKVs());<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>      compactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>    }<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>    long outputBytes = getTotalSize(sfs);<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span><a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>    // At this point the store will use new files for all new scanners.<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>    completeCompaction(filesToCompact); // update store size.<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span><a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>    long now = EnvironmentEdgeManager.currentTime();<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>    if (region.getRegionServerServices() != null<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>        &amp;&amp; region.getRegionServerServices().getMetrics() != null) {<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span>      region.getRegionServerServices().getMetrics().updateCompaction(<a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>          region.getTableDescriptor().getTableName().getNameAsString(),<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>          cr.isMajor(), now - compactionStartTime, cr.getFiles().size(),<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>          newFiles.size(), cr.getSize(), outputBytes);<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span><a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>    }<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span><a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>    logCompactionEndMessage(cr, sfs, now, compactionStartTime);<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>    return sfs;<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>  }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>  private List&lt;HStoreFile&gt; moveCompactedFilesIntoPlace(CompactionRequestImpl cr,<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>      List&lt;Path&gt; newFiles, User user) throws IOException {<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    List&lt;HStoreFile&gt; sfs = new ArrayList&lt;&gt;(newFiles.size());<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span>    for (Path newFile : newFiles) {<a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>      assert newFile != null;<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span>      HStoreFile sf = moveFileIntoPlace(newFile);<a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>      if (this.getCoprocessorHost() != null) {<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>        getCoprocessorHost().postCompact(this, sf, cr.getTracker(), cr, user);<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>      }<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>      assert sf != null;<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>      sfs.add(sf);<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>    }<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>    return sfs;<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  }<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span><a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>  // Package-visible for tests<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>  HStoreFile moveFileIntoPlace(Path newFile) throws IOException {<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    validateStoreFile(newFile);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    // Move the file into the right spot<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    Path destPath = fs.commitStoreFile(getColumnFamilyName(), newFile);<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    return createStoreFileAndReader(destPath);<a name="line.1543"></a>
-<span class="sourceLineNo">1544</span>  }<a name="line.1544"></a>
-<span class="sourceLineNo">1545</span><a name="line.1545"></a>
-<span class="sourceLineNo">1546</span>  /**<a name="line.1546"></a>
-<span class="sourceLineNo">1547</span>   * Writes the compaction WAL record.<a name="line.1547"></a>
-<span class="sourceLineNo">1548</span>   * @param filesCompacted Files compacted (input).<a name="line.1548"></a>
-<span class="sourceLineNo">1549</span>   * @param newFiles Files from compaction.<a name="line.1549"></a>
-<span class="sourceLineNo">1550</span>   */<a name="line.1550"></a>
-<span class="sourceLineNo">1551</span>  private void writeCompactionWalRecord(Collection&lt;HStoreFile&gt; filesCompacted,<a name="line.1551"></a>
-<span class="sourceLineNo">1552</span>      Collection&lt;HStoreFile&gt; newFiles) throws IOException {<a name="line.1552"></a>
-<span class="sourceLineNo">1553</span>    if (region.getWAL() == null) {<a name="line.1553"></a>
-<span class="sourceLineNo">1554</span>      return;<a name="line.1554"></a>
-<span class="sourceLineNo">1555</span>    }<a name="line.1555"></a>
-<span class="sourceLineNo">1556</span>    List&lt;Path&gt; inputPaths =<a name="line.1556"></a>
-<span class="sourceLineNo">1557</span>        filesCompacted.stream().map(HStoreFile::getPath).collect(Collectors.toList());<a name="line.1557"></a>
-<span class="sourceLineNo">1558</span>    List&lt;Path&gt; outputPaths =<a name="line.1558"></a>
-<span class="sourceLineNo">1559</span>        newFiles.stream().map(HStoreFile::getPath).collect(Collectors.toList());<a name="line.1559"></a>
-<span class="sourceLineNo">1560</span>    RegionInfo info = this.region.getRegionInfo();<a name="line.1560"></a>
-<span class="sourceLineNo">1561</span>    CompactionDescriptor compactionDescriptor = ProtobufUtil.toCompactionDescriptor(info,<a name="line.1561"></a>
-<span class="sourceLineNo">1562</span>        family.getName(), inputPaths, outputPaths,<a name="line.1562"></a>
-<span class="sourceLineNo">1563</span>      fs.getStoreDir(getColumnFamilyDescriptor().getNameAsString()));<a name="line.1563"></a>
-<span class="sourceLineNo">1564</span>    // Fix reaching into Region to get the maxWaitForSeqId.<a name="line.1564"></a>
-<span class="sourceLineNo">1565</span>    // Does this method belong in Region altogether given it is making so many references up there?<a name="line.1565"></a>
-<span class="sourceLineNo">1566</span>    // Could be Region#writeCompactionMarker(compactionDescriptor);<a name="line.1566"></a>
-<span class="sourceLineNo">1567</span>    WALUtil.writeCompactionMarker(this.region.getWAL(), this.region.getReplicationScope(),<a name="line.1567"></a>
-<span class="sourceLineNo">1568</span>        this.region.getRegionInfo(), compactionDescriptor, this.region.getMVCC());<a name="line.1568"></a>
-<span class="sourceLineNo">1569</span>  }<a name="line.1569"></a>
-<span class="sourceLineNo">1570</span><a name="line.1570"></a>
-<span class="sourceLineNo">1571</span>  @VisibleForTesting<a name="line.1571"></a>
-<span class="sourceLineNo">1572</span>  void replaceStoreFiles(Collection&lt;HStoreFile&gt; compactedFiles, Collection&lt;HStoreFile&gt; result)<a name="line.1572"></a>
-<span class="sourceLineNo">1573</span>      throws IOException {<a name="line.1573"></a>
-<span class="sourceLineNo">1574</span>    this.lock.writeLock().lock();<a name="line.1574"></a>
-<span class="sourceLineNo">1575</span>    try {<a name="line.1575"></a>
-<span class="sourceLineNo">1576</span>      this.storeEngine.getStoreFileManager().addCompactionResults(compactedFiles, result);<a name="line.1576"></a>
-<span class="sourceLineNo">1577</span>      synchronized (filesCompacting) {<a name="line.1577"></a>
-<span class="sourceLineNo">1578</span>        filesCompacting.removeAll(compactedFiles);<a name="line.1578"></a>
-<span class="sourceLineNo">1579</span>      }<a name="line.1579"></a>
-<span class="sourceLineNo">1580</span><a name="line.1580"></a>
-<span class="sourceLineNo">1581</span>      // These may be null when the RS is shutting down. The space quota Chores will fix the Region<a name="line.1581"></a>
-<span class="sourceLineNo">1582</span>      // sizes later so it's not super-critical if we miss these.<a name="line.1582"></a>
-<span class="sourceLineNo">1583</span>      RegionServerServices rsServices = region.getRegionServerServices();<a name="line.1583"></a>
-<span class="sourceLineNo">1584</span>      if (rsServices != null &amp;&amp; rsServices.getRegionServerSpaceQuotaManager() != null) {<a name="line.1584"></a>
-<span class="sourceLineNo">1585</span>        updateSpaceQuotaAfterFileReplacement(<a name="line.1585"></a>
-<span class="sourceLineNo">1586</span>            rsServices.getRegionServerSpaceQuotaManager().getRegionSizeStore(), getRegionInfo(),<a name="line.1586"></a>
-<span class="sourceLineNo">1587</span>            compactedFiles, result);<a name="line.1587"></a>
-<span class="sourceLineNo">1588</span>      }<a name="line.1588"></a>
-<span class="sourceLineNo">1589</span>    } finally {<a name="line.1589"></a>
-<span class="sourceLineNo">1590</span>      this.lock.writeLock().unlock();<a name="line.1590"></a>
-<span class="sourceLineNo">1591</span>    }<a name="line.1591"></a>
-<span class="sourceLineNo">1592</span>  }<a name="line.1592"></a>
-<span class="sourceLineNo">1593</span><a name="line.1593"></a>
-<span class="sourceLineNo">1594</span>  /**<a name="line.1594"></a>
-<span class="sourceLineNo">1595</span>   * Updates the space quota usage for this region, removing the size for files compacted away<a name="line.1595"></a>
-<span class="sourceLineNo">1596</span>   * and adding in the size for new files.<a name="line.1596"></a>
-<span class="sourceLineNo">1597</span>   *<a name="line.1597"></a>
-<span class="sourceLineNo">1598</span>   * @param sizeStore The object tracking changes in region size for space quotas.<a name="line.1598"></a>
-<span class="sourceLineNo">1599</span>   * @param regionInfo The identifier for the region whose size is being updated.<a name="line.1599"></a>
-<span class="sourceLineNo">1600</span>   * @param oldFiles Files removed from this store's region.<a name="line.1600"></a>
-<span class="sourceLineNo">1601</span>   * @param newFiles Files added to this store's region.<a name="line.1601"></a>
-<span class="sourceLineNo">1602</span>   */<a name="line.1602"></a>
-<span class="sourceLineNo">1603</span>  void updateSpaceQuotaAfterFileReplacement(<a name="line.1603"></a>
-<span class="sourceLineNo">1604</span>      RegionSizeStore sizeStore, RegionInfo regionInfo, Collection&lt;HStoreFile&gt; oldFiles,<a name="line.1604"></a>
-<span class="sourceLineNo">1605</span>      Collection&lt;HStoreFile&gt; newFiles) {<a name="line.1605"></a>
-<span class="sourceLineNo">1606</span>    long delta = 0;<a name="line.1606"></a>
-<span class="sourceLineNo">1607</span>    if (oldFiles != null) {<a name="line.1607"></a>
-<span class="sourceLineNo">1608</span>      for (HStoreFile compactedFile : oldFiles) {<a name="line.1608"></a>
-<span class="sourceLineNo">1609</span>        if (compactedFile.isHFile()) {<a name="line.1609"></a>
-<span class="sourceLineNo">1610</span>          delta -= compactedFile.getReader().length();<a name="line.1610"></a>
-<span class="sourceLineNo">1611</span>        }<a name="line.1611"></a>
-<span class="sourceLineNo">1612</span>      }<a name="line.1612"></a>
-<span class="sourceLineNo">1613</span>    }<a name="line.1613"></a>
-<span class="sourceLineNo">1614</span>    if (newFiles != null) {<a name="line.1614"></a>
-<span class="sourceLineNo">1615</span>      for (HStoreFile newFile : newFiles) {<a name="line.1615"></a>
-<span class="sourceLineNo">1616</span>        if (newFile.isHFile()) {<a name="line.1616"></a>
-<span class="sourceLineNo">1617</span>          delta += newFile.getReader().length();<a name="line.1617"></a>
-<span class="sourceLineNo">1618</span>        }<a name="line.1618"></a>
-<span class="sourceLineNo">1619</span>      }<a name="line.1619"></a>
-<span class="sourceLineNo">1620</span>    }<a name="line.1620"></a>
-<span class="sourceLineNo">1621</span>    sizeStore.incrementRegionSize(regionInfo, delta);<a name="line.1621"></a>
-<span class="sourceLineNo">1622</span>  }<a name="line.1622"></a>
-<span class="sourceLineNo">1623</span><a name="line.1623"></a>
-<span class="sourceLineNo">1624</span>  /**<a name="line.1624"></a>
-<span class="sourceLineNo">1625</span>   * Log a very elaborate compaction completion message.<a name="line.1625"></a>
-<span class="sourceLineNo">1626</span>   * @param cr Request.<a name="line.1626"></a>
-<span class="sourceLineNo">1627</span>   * @param sfs Resulting files.<a name="line.1627"></a>
-<span class="sourceLineNo">1628</span>   * @param compactionStartTime Start time.<a name="line.1628"></a>
-<span class="sourceLineNo">1629</span>   */<a name="line.1629"></a>
-<span class="sourceLineNo">1630</span>  private void logCompactionEndMessage(<a name="line.1630"></a>
-<span class="sourceLineNo">1631</span>      CompactionRequestImpl cr, List&lt;HStoreFile&gt; sfs, long now, long compactionStartTime) {<a name="line.1631"></a>
-<span class="sourceLineNo">1632</span>    StringBuilder message = new StringBuilder(<a name="line.1632"></a>
-<span class="sourceLineNo">1633</span>      "Completed" + (cr.isMajor() ? " major" : "") + " compaction of "<a name="line.1633"></a>
-<span class="sourceLineNo">1634</span>      + cr.getFiles().size() + (cr.isAllFiles() ? " (all)" : "") + " file(s) in "<a name="line.1634"></a>
-<span class="sourceLineNo">1635</span>      + this + " of " + this.getRegionInfo().getShortNameToLog() + " into ");<a name="line.1635"></a>
-<span class="sourceLineNo">1636</span>    if (sfs.isEmpty()) {<a name="line.1636"></a>
-<span class="sourceLineNo">1637</span>      message.append("none, ");<a name="line.1637"></a>
-<span class="sourceLineNo">1638</span>    } else {<a name="line.1638"></a>
-<span class="sourceLineNo">1639</span>      for (HStoreFile sf: sfs) {<a name="line.1639"></a>
-<span class="sourceLineNo">1640</span>        message.append(sf.getPath().getName());<a name="line.1640"></a>
-<span class="sourceLineNo">1641</span>        message.append("(size=");<a name="line.1641"></a>
-<span class="sourceLineNo">1642</span>        message.append(TraditionalBinaryPrefix.long2String(sf.getReader().length(), "", 1));<a name="line.1642"></a>
-<span class="sourceLineNo">1643</span>        message.append("), ");<a name="line.1643"></a>
-<span class="sourceLineNo">1644</span>      }<a name="line.1644"></a>
-<span class="sourceLineNo">1645</span>    }<a name="line.1645"></a>
-<span class="sourceLineNo">1646</span>    message.append("total size for store is ")<a name="line.1646"></a>
-<span class="sourceLineNo">1647</span>      .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1))<a name="line.1647"></a>
-<span class="sourceLineNo">1648</span>      .append(". This selection was in queue for ")<a name="line.1648"></a>
-<span class="sourceLineNo">1649</span>      .append(StringUtils.formatTimeDiff(compactionStartTime, cr.getSelectionTime()))<a name="line.1649"></a>
-<span class="sourceLineNo">1650</span>      .append(", and took ").append(StringUtils.formatTimeDiff(now, compactionStartTime))<a name="line.1650"></a>
-<span class="sourceLineNo">1651</span>      .append(" to execute.");<a name="line.1651"></a>
-<span class="sourceLineNo">1652</span>    LOG.info(message.toString());<a name="line.1652"></a>
-<span class="sourceLineNo">1653</span>    if (LOG.isTraceEnabled()) {<a name="line.1653"></a>
-<span class="sourceLineNo">1654</span>      int fileCount = storeEngine.getStoreFileManager().getStorefileCount();<a name="line.1654"></a>
-<span class="sourceLineNo">1655</span>      long resultSize = getTotalSize(sfs);<a name="line.1655"></a>
-<span class="sourceLineNo">1656</span>      String traceMessage = "COMPACTION start,end,size out,files in,files out,store size,"<a name="line.1656"></a>
-<span class="sourceLineNo">1657</span>        + "store files [" + compactionStartTime + "," + now + "," + resultSize + ","<a name="line.1657"></a>
-<span class="sourceLineNo">1658</span>          + cr.getFiles().size() + "," + sfs.size() + "," +  storeSize + "," + fileCount + "]";<a name="line.1658"></a>
-<span class="sourceLineNo">1659</span>      LOG.trace(traceMessage);<a name="line.1659"></a>
-<span class="sourceLineNo">1660</span>    }<a name="line.1660"></a>
-<span class="sourceLineNo">1661</span>  }<a name="line.1661"></a>
-<span class="sourceLineNo">1662</span><a name="line.1662"></a>
-<span class="sourceLineNo">1663</span>  /**<a name="line.1663"></a>
-<span class="sourceLineNo">1664</span>   * Call to complete a compaction. Its for the case where we find in the WAL a compaction<a name="line.1664"></a>
-<span class="sourceLineNo">1665</span>   * that was not finished.  We could find one recovering a WAL after a regionserver crash.<a name="line.1665"></a>
-<span class="sourceLineNo">1666</span>   * See HBASE-2231.<a name="line.1666"></a>
-<span class="sourceLineNo">1667</span>   */<a name="line.1667"></a>
-<span class="sourceLineNo">1668</span>  public void replayCompactionMarker(CompactionDescriptor compaction, boolean pickCompactionFiles,<a name="line.1668"></a>
-<span class="sourceLineNo">1669</span>      boolean removeFiles) throws IOException {<a name="line.1669"></a>
-<span class="sourceLineNo">1670</span>    LOG.debug("Completing compaction from the WAL marker");<a name="line.1670"></a>
-<span class="sourceLineNo">1671</span>    List&lt;String&gt; compactionInputs = compaction.getCompactionInputList();<a name="line.1671"></a>
-<span class="sourceLineNo">1672</span>    List&lt;String&gt; compactionOutputs = Lists.newArrayList(compaction.getCompactionOutputList());<a name="line.1672"></a>
-<span class="sourceLineNo">1673</span><a name="line.1673"></a>
-<span class="sourceLineNo">1674</span>    // The Compaction Marker is written after the compaction is completed,<a name="line.1674"></a>
-<span class="sourceLineNo">1675</span>    // and the files moved into the region/family folder.<a name="line.1675"></a>
-<span class="sourceLineNo">1676</span>    //<a name="line.1676"></a>
-<span class="sourceLineNo">1677</span>    // If we crash after the entry is written, we may not have removed the<a name="line.1677"></a>
-<span class="sourceLineNo">1678</span>    // input files, but the output file is present.<a name="line.1678"></a>
-<span class="sourceLineNo">1679</span>    // (The unremoved input files will be removed by this function)<a name="line.1679"></a>
-<span class="sourceLineNo">1680</span>    //<a name="line.1680"></a>
-<span class="sourceLineNo">1681</span>    // If we scan the directory and the file is not present, it can mean that:<a name="line.1681"></a>
-<span class="sourceLineNo">1682</span>    //   - The file was manually removed by the user<a name="line.1682"></a>
-<span class="sourceLineNo">1683</span>    //   - The file was removed as consequence of subsequent compaction<a name="line.1683"></a>
-<span class="sourceLineNo">1684</span>    // so, we can't do anything with the "compaction output list" because those<a name="line.1684"></a>
-<span class="sourceLineNo">1685</span>    // files have already been loaded when opening the region (by virtue of<a name="line.1685"></a>
-<span class="sourceLineNo">1686</span>    // being in the store's folder) or they may be missing due to a compaction.<a name="line.1686"></a>
-<span class="sourceLineNo">1687</span><a name="line.1687"></a>
-<span class="sourceLineNo">1688</span>    String familyName = this.getColumnFamilyName();<a name="line.1688"></a>
-<span class="sourceLineNo">1689</span>    Set&lt;String&gt; inputFiles = new HashSet&lt;&gt;();<a name="line.1689"></a>
-<span class="sourceLineNo">1690</span>    for (String compactionInput : compactionInputs) {<a name="line.1690"></a>
-<span class="sourceLineNo">1691</span>      Path inputPath = fs.getStoreFilePath(familyName, compactionInput);<a name="line.1691"></a>
-<span class="sourceLineNo">1692</span>      inputFiles.add(inputPath.getName());<a name="line.1692"></a>
-<span class="sourceLineNo">1693</span>    }<a name="line.1693"></a>
-<span class="sourceLineNo">1694</span><a name="line.1694"></a>
-<span class="sourceLineNo">1695</span>    //some of the input files might already be deleted<a name="line.1695"></a>
-<span class="sourceLineNo">1696</span>    List&lt;HStoreFile&gt; inputStoreFiles = new ArrayList&lt;&gt;(compactionInputs.size());<a name="line.1696"></a>
-<span class="sourceLineNo">1697</span>    for (HStoreFile sf : this.getStorefiles()) {<a name="line.1697"></a>
-<span class="sourceLineNo">1698</span>      if (inputFiles.contains(sf.getPath().getName())) {<a name="line.1698"></a>
-<span class="sourceLineNo">1699</span>        inputStoreFiles.add(sf);<a name="line.1699"></a>
-<span class="sourceLineNo">1700</span>      }<a name="line.1700"></a>
-<span class="sourceLineNo">1701</span>    }<a name="line.1701"></a>
-<span class="sourceLineNo">1702</span><a name="line.1702"></a>
-<span class="sourceLineNo">1703</span>    // check whether we need to pick up the new files<a name="line.1703"></a>
-<span class="sourceLineNo">1704</span>    List&lt;HStoreFile&gt; outputStoreFiles = new ArrayList&lt;&gt;(compactionOutputs.size());<a name="line.1704"></a>
-<span class="sourceLineNo">1705</span><a name="line.1705"></a>
-<span class="sourceLineNo">1706</span>    if (pickCompactionFiles) {<a name="line.1706"></a>
-<span class="sourceLineNo">1707</span>      for (HStoreFile sf : this.getStorefiles()) {<a name="line.1707"></a>
-<span class="sourceLineNo">1708</span>        compactionOutputs.remove(sf.getPath().getName());<a name="line.1708"></a>
-<span class="sourceLineNo">1709</span>      }<a name="line.1709"></a>
-<span class="sourceLineNo">1710</span>      for (String compactionOutput : compactionOutputs) {<a name="line.1710"></a>
-<span class="sourceLineNo">1711</span>        StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), compactionOutput);<a name="line.1711"></a>
-<span class="sourceLineNo">1712</span>        HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);<a name="line.1712"></a>
-<span class="sourceLineNo">1713</span>        outputStoreFiles.add(storeFile);<a name="line.1713"></a>
-<span class="sourceLineNo">1714</span>      }<a name="line.1714"></a>
-<span class="sourceLineNo">1715</span>    }<a name="line.1715"></a>
-<span class="sourceLineNo">1716</span><a name="line.1716"></a>
-<span class="sourceLineNo">1717</span>    if (!inputStoreFiles.isEmpty() || !outputStoreFiles.isEmpty()) {<a name="line.1717"></a>
-<span class="sourceLineNo">1718</span>      LOG.info("Replaying compaction marker, replacing input files: " +<a name="line.1718"></a>
-<span class="sourceLineNo">1719</span>          inputStoreFiles + " with output files : " + outputStoreFiles);<a name="line.1719"></a>
-<span class="sourceLineNo">1720</span>      this.replaceStoreFiles(inputStoreFiles, outputStoreFiles);<a name="line.1720"></a>
-<span class="sourceLineNo">1721</span>      this.completeCompaction(inputStoreFiles);<a name="line.1721"></a>
-<span class="sourceLineNo">1722</span>    }<a name="line.1722"></a>
-<span class="sourceLineNo">1723</span>  }<a name="line.1723"></a>
-<span class="sourceLineNo">1724</span><a name="line.1724"></a>
-<span class="sourceLineNo">1725</span>  /**<a name="line.1725"></a>
-<span class="sourceLineNo">1726</span>   * This method tries to compact N recent files for testing.<a name="line.1726"></a>
-<span class="sourceLineNo">1727</span>   * Note that because compacting "recent" files only makes sense for some policies,<a name="line.1727"></a>
-<span class="sourceLineNo">1728</span>   * e.g. the default one, it assumes default policy is used. It doesn't use policy,<a name="line.1728"></a>
-<span class="sourceLineNo">1729</span>   * but instead makes a compaction candidate list by itself.<a name="line.1729"></a>
-<span class="sourceLineNo">1730</span>   * @param N Number of files.<a name="line.1730"></a>
-<span class="sourceLineNo">1731</span>   */<a name="line.1731"></a>
-<span class="sourceLineNo">1732</span>  @VisibleForTesting<a name="line.1732"></a>
-<span class="sourceLineNo">1733</span>  public void compactRecentForTestingAssumingDefaultPolicy(int N) throws IOException {<a name="line.1733"></a>
-<span class="sourceLineNo">1734</span>    List&lt;HStoreFile&gt; filesToCompact;<a name="line.1734"></a>
-<span class="sourceLineNo">1735</span>    boolean isMajor;<a name="line.1735"></a>
-<span class="sourceLineNo">1736</span><a name="line.1736"></a>
-<span class="sourceLineNo">1737</span>    this.lock.readLock().lock();<a name="line.1737"></a>
-<span class="sourceLineNo">1738</span>    try {<a name="line.1738"></a>
-<span class="sourceLineNo">1739</span>      synchronized (filesCompacting) {<a name="line.1739"></a>
-<span class="sourceLineNo">1740</span>        filesToCompact = Lists.newArrayList(storeEngine.getStoreFileManager().getStorefiles());<a name="line.1740"></a>
-<span class="sourceLineNo">1741</span>        if (!filesCompacting.isEmpty()) {<a name="line.1741"></a>
-<span class="sourceLineNo">1742</span>          // exclude all files older than the newest file we're currently<a name="line.1742"></a>
-<span class="sourceLineNo">1743</span>          // compacting. this allows us to preserve contiguity (HBASE-2856)<a name="line.1743"></a>
-<span class="sourceLineNo">1744</span>          HStoreFile last = filesCompacting.get(filesCompacting.size() - 1);<a name="line.1744"></a>
-<span class="sourceLineNo">1745</span>          int idx = filesToCompact.indexOf(last);<a name="line.1745"></a>
-<span class="sourceLineNo">1746</span>          Preconditions.checkArgument(idx != -1);<a name="line.1746"></a>
-<span class="sourceLineNo">1747</span>          filesToCompact.subList(0, idx + 1).clear();<a name="line.1747"></a>
-<span class="sourceLineNo">1748</span>        }<a name="line.1748"></a>
-<span class="sourceLineNo">1749</span>        int count = filesToCompact.size();<a name="line.1749"></a>
-<span class="sourceLineNo">1750</span>        if (N &gt; count) {<a name="line.1750"></a>
-<span class="sourceLineNo">1751</span>          throw new RuntimeException("Not enough files");<a name="line.1751"></a>
-<span class="sourceLineNo">1752</span>        }<a name="line.1752"></a>
-<span class="sourceLineNo">1753</span><a name="line.1753"></a>
-<span class="sourceLineNo">1754</span>        filesToCompact = filesToCompact.subList(count - N, count);<a name="line.1754"></a>
-<span class="sourceLineNo">1755</span>        isMajor = (filesToCompact.size() == storeEngine.getStoreFileManager().getStorefileCount());<a name="line.1755"></a>
-<span class="sourceLineNo">1756</span>        filesCompacting.addAll(filesToCompact);<a name="line.1756"></a>
-<span class="sourceLineNo">1757</span>        Collections.sort(filesCompacting, storeEngine.getStoreFileManager()<a name="line.1757"></a>
-<span class="sourceLineNo">1758</span>            .getStoreFileComparator());<a name="line.1758"></a>
-<span class="sourceLineNo">1759</span>      }<a name="line.1759"></a>
-<span class="sourceLineNo">1760</span>    } finally {<a name="line.1760"></a>
-<span class="sourceLineNo">1761</span>      this.lock.readLock().unlock();<a name="line.1761"></a>
-<span class="sourceLineNo">1762</span>    }<a name="line.1762"></a>
-<span class="sourceLineNo">1763</span><a name="line.1763"></a>
-<span class="sourceLineNo">1764</span>    try {<a name="line.1764"></a>
-<span class="sourceLineNo">1765</span>      // Ready to go. Have list of files to compact.<a name="line.1765"></a>
-<span class="sourceLineNo">1766</span>      List&lt;Path&gt; newFiles = ((DefaultCompactor)this.storeEngine.getCompactor())<a name="line.1766"></a>
-<span class="sourceLineNo">1767</span>          .compactForTesting(filesToCompact, isMajor);<a name="line.1767"></a>
-<span class="sourceLineNo">1768</span>      for (Path newFile: newFiles) {<a name="line.1768"></a>
-<span class="sourceLineNo">1769</span>        // Move the compaction into place.<a name="line.1769"></a>
-<span class="sourceLineNo">1770</span>        HStoreFile sf = moveFileIntoPlace(newFile);<a name="line.1770"></a>
-<span class="sourceLineNo">1771</span>        if (this.getCoprocessorHost() != null) {<a name="line.1771"></a>
-<span class="sourceLineNo">1772</span>          this.getCoprocessorHost().postCompact(this, sf, null, null, null);<a name="line.1772"></a>
-<span class="sourceLineNo">1773</span>        }<a name="line.1773"></a>
-<span class="sourceLineNo">1774</span>        replaceStoreFiles(filesToCompact, Collections.singletonList(sf));<a name="line.1774"></a>
-<span class="sourceLineNo">1775</span>        completeCompaction(filesToCompact);<a name="line.1775"></a>
-<span class="sourceLineNo">1776</span>      }<a name="line.1776"></a>
-<span class="sourceLineNo">1777</span>    } finally {<a name="line.1777"></a>
-<span class="sourceLineNo">1778</span>      synchronized (filesCompacting) {<a name="line.1778"></a>
-<span class="sourceLineNo">1779</span>        filesCompacting.removeAll(filesToCompact);<a name="line.1779"></a>
-<span class="sourceLineNo">1780</span>      }<a name="line.1780"></a>
-<span class="sourceLineNo">1781</span>    }<a name="line.1781"></a>
-<span class="sourceLineNo">1782</span>  }<a name="line.1782"></a>
-<span class="sourceLineNo">1783</span><a name="line.1783"></a>
-<span class="sourceLineNo">1784</span>  @Override<a name="line.1784"></a>
-<span class="sourceLineNo">1785</span>  public boolean hasReferences() {<a name="line.1785"></a>
-<span class="sourceLineNo">1786</span>    // Grab the read lock here, because we need to ensure that: only when the atomic<a name="line.1786"></a>
-<span class="sourceLineNo">1787</span>    // replaceStoreFiles(..) finished, we can get all the complete store file list.<a name="line.1787"></a>
-<span class="sourceLineNo">1788</span>    this.lock.readLock().lock();<a name="line.1788"></a>
-<span class="sourceLineNo">1789</span>    try {<a name="line.1789"></a>
-<span class="sourceLineNo">1790</span>      // Merge the current store files with compacted files here due to HBASE-20940.<a name="line.1790"></a>
-<span class="sourceLineNo">1791</span>      Collection&lt;HStoreFile&gt; allStoreFiles = new ArrayList&lt;&gt;(getStorefiles());<a name="line.1791"></a>
-<span class="sourceLineNo">1792</span>      allStoreFiles.addAll(getCompactedFiles());<a name="line.1792"></a>
-<span class="sourceLineNo">1793</span>      return StoreUtils.hasReferences(allStoreFiles);<a name="line.1793"></a>
-<span class="sourceLineNo">1794</span>    } finally {<a name="line.1794"></a>
-<span class="sourceLineNo">1795</span>      this.lock.readLock().unlock();<a name="line.1795"></a>
-<span class="sourceLineNo">1796</span>    }<a name="line.1796"></a>
-<span class="sourceLineNo">1797</span>  }<a name="line.1797"></a>
-<span class="sourceLineNo">1798</span><a name="line.1798"></a>
-<span class="sourceLineNo">1799</span>  /**<a name="line.1799"></a>
-<span class="sourceLineNo">1800</span>   * getter for CompactionProgress object<a name="line.1800"></a>
-<span class="sourceLineNo">1801</span>   * @return CompactionProgress object; can be null<a name="line.1801"></a>
-<span class="sourceLineNo">1802</span>   */<a name="line.1802"></a>
-<span class="sourceLineNo">1803</span>  public CompactionProgress getCompactionProgress() {<a name="line.1803"></a>
-<span class="sourceLineNo">1804</span>    return this.storeEngine.getCompactor().getProgress();<a name="line.1804"></a>
-<span class="sourceLineNo">1805</span>  }<a name="line.1805"></a>
-<span class="sourceLineNo">1806</span><a name="line.1806"></a>
-<span class="sourceLineNo">1807</span>  @Override<a name="line.1807"></a>
-<span class="sourceLineNo">1808</span>  public boolean shouldPerformMajorCompaction() throws IOException {<a name="line.1808"></a>
-<span class="sourceLineNo">1809</span>    for (HStoreFile sf : this.storeEngine.getStoreFileManager().getStorefiles()) {<a name="line.1809"></a>
-<span class="sourceLineNo">1810</span>      // TODO: what are these reader checks all over the place?<a name="line.1810"></a>
-<span class="sourceLineNo">1811</span>      if (sf.getReader() == null) {<a name="line.1811"></a>
-<span class="sourceLineNo">1812</span>        LOG.debug("StoreFile {} has null Reader", sf);<a name="line.1812"></a>
-<span class="sourceLineNo">1813</span>        return false;<a name="line.1813"></a>
-<span class="sourceLineNo">1814</span>      }<a name="line.1814"></a>
-<span class="sourceLineNo">1815</span>    }<a name="line.1815"></a>
-<span class="sourceLineNo">1816</span>    return storeEngine.getCompactionPolicy().shouldPerformMajorCompaction(<a name="line.1816"></a>
-<span class="sourceLineNo">1817</span>        this.storeEngine.getStoreFileManager().getStorefiles());<a name="line.1817"></a>
-<span class="sourceLineNo">1818</span>  }<a name="line.1818"></a>
-<span class="sourceLineNo">1819</span><a name="line.1819"></a>
-<span class="sourceLineNo">1820</span>  public Optional&lt;CompactionContext&gt; requestCompaction() throws IOException {<a name="line.1820"></a>
-<span class="sourceLineNo">1821</span>    return requestCompaction(NO_PRIORITY, CompactionLifeCycleTracker.DUMMY, null);<a name="line.1821"></a>
-<span class="sourceLineNo">1822</span>  }<a name="line.1822"></a>
-<span class="sourceLineNo">1823</span><a name="line.1823"></a>
-<span class="sourceLineNo">1824</span>  public Optional&lt;CompactionContext&gt; requestCompaction(int priority,<a name="line.1824"></a>
-<span class="sourceLineNo">1825</span>      CompactionLifeCycleTracker tracker, User user) throws IOException {<a name="line.1825"></a>
-<span class="sourceLineNo">1826</span>    // don't even select for compaction if writes are disabled<a name="line.1826"></a>
-<span class="sourceLineNo">1827</span>    if (!this.areWritesEnabled()) {<a name="line.1827"></a>
-<span class="sourceLineNo">1828</span>      return Optional.empty();<a name="line.1828"></a>
-<span class="sourceLineNo">1829</span>    }<a name="line.1829"></a>
-<span class="sourceLineNo">1830</span>    // Before we do compaction, try to get rid of unneeded files to simplify things.<a name="line.1830"></a>
-<span class="sourceLineNo">1831</span>    removeUnneededFiles();<a name="line.1831"></a>
-<span class="sourceLineNo">1832</span><a name="line.1832"></a>
-<span class="sourceLineNo">1833</span>    final CompactionContext compaction = storeEngine.createCompaction();<a name="line.1833"></a>
-<span class="sourceLineNo">1834</span>    CompactionRequestImpl request = null;<a name="line.1834"></a>
-<span class="sourceLineNo">1835</span>    this.lock.readLock().lock();<a name="line.1835"></a>
-<span class="sourceLineNo">1836</span>    try {<a name="line.1836"></a>
-<span class="sourceLineNo">1837</span>      synchronized (filesCompacting) {<a name="line.1837"></a>
-<span class="sourceLineNo">1838</span>        // First, see if coprocessor would want to override selection.<a name="line.1838"></a>
-<span class="sourceLineNo">1839</span>        if (this.getCoprocessorHost() != null) {<a name="line.1839"></a>
-<span class="sourceLineNo">1840</span>          final List&lt;HStoreFile&gt; candidatesForCoproc = compaction.preSelect(this.filesCompacting);<a name="line.1840"></a>
-<span class="sourceLineNo">1841</span>          boolean override = getCoprocessorHost().preCompactSelection(this,<a name="line.1841"></a>
-<span class="sourceLineNo">1842</span>              candidatesForCoproc, tracker, user);<a name="line.1842"></a>
-<span class="sourceLineNo">1843</span>          if (override) {<a name="line.1843"></a>
-<span class="sourceLineNo">1844</span>            // Coprocessor is overriding normal file selection.<a name="line.1844"></a>
-<span class="sourceLineNo">1845</span>            compaction.forceSelect(new CompactionRequestImpl(candidatesForCoproc));<a name="line.1845"></a>
-<span class="sourceLineNo">1846</span>          }<a name="line.1846"></a>
-<span class="sourceLineNo">1847</span>        }<a name="line.1847"></a>
+<span class="sourceLineNo">1443</span>   * &lt;p&gt;The compactLock prevents multiple simultaneous compactions.<a name="line.1443"></a>
+<span class="sourceLineNo">1444</span>   * The structureLock prevents us from interfering with other write operations.<a name="line.1444"></a>
+<span class="sourceLineNo">1445</span>   *<a name="line.1445"></a>
+<span class="sourceLineNo">1446</span>   * &lt;p&gt;We don't want to hold the structureLock for the whole time, as a compact()<a name="line.1446"></a>
+<span class="sourceLineNo">1447</span>   * can be lengthy and we want to allow cache-flushes during this period.<a name="line.1447"></a>
+<span class="sourceLineNo">1448</span>   *<a name="line.1448"></a>
+<span class="sourceLineNo">1449</span>   * &lt;p&gt; Compaction event should be idempotent, since there is no IO Fencing for<a name="line.1449"></a>
+<span class="sourceLineNo">1450</span>   * the region directory in hdfs. A region server might still try to complete the<a name="line.1450"></a>
+<span class="sourceLineNo">1451</span>   * compaction after it lost the region. That is why the following events are carefully<a name="line.1451"></a>
+<span class="sourceLineNo">1452</span>   * ordered for a compaction:<a name="line.1452"></a>
+<span class="sourceLineNo">1453</span>   *  1. Compaction writes new files under region/.tmp directory (compaction output)<a name="line.1453"></a>
+<span class="sourceLineNo">1454</span>   *  2. Compaction atomically moves the temporary file under region directory<a name="line.1454"></a>
+<span class="sourceLineNo">1455</span>   *  3. Compaction appends a WAL edit containing the compaction input and output files.<a name="line.1455"></a>
+<span class="sourceLineNo">1456</span>   *  Forces sync on WAL.<a name="line.1456"></a>
+<span class="sourceLineNo">1457</span>   *  4. Compaction deletes the input files from the region directory.<a name="line.1457"></a>
+<span class="sourceLineNo">1458</span>   *<a name="line.1458"></a>
+<span class="sourceLineNo">1459</span>   * Failure conditions are handled like this:<a name="line.1459"></a>
+<span class="sourceLineNo">1460</span>   *  - If RS fails before 2, compaction wont complete. Even if RS lives on and finishes<a name="line.1460"></a>
+<span class="sourceLineNo">1461</span>   *  the compaction later, it will only write the new data file to the region directory.<a name="line.1461"></a>
+<span class="sourceLineNo">1462</span>   *  Since we already have this data, this will be idempotent but we will have a redundant<a name="line.1462"></a>
+<span class="sourceLineNo">1463</span>   *  copy of the data.<a name="line.1463"></a>
+<span class="sourceLineNo">1464</span>   *  - If RS fails between 2 and 3, the region will have a redundant copy of the data. The<a name="line.1464"></a>
+<span class="sourceLineNo">1465</span>   *  RS that failed won't be able to finish snyc() for WAL because of lease recovery in WAL.<a name="line.1465"></a>
+<span class="sourceLineNo">1466</span>   *  - If RS fails after 3, the region region server who opens the region will pick up the<a name="line.1466"></a>
+<span class="sourceLineNo">1467</span>   *  the compaction marker from the WAL and replay it by removing the compaction input files.<a name="line.1467"></a>
+<span class="sourceLineNo">1468</span>   *  Failed RS can also attempt to delete those files, but the operation will be idempotent<a name="line.1468"></a>
+<span class="sourceLineNo">1469</span>   *<a name="line.1469"></a>
+<span class="sourceLineNo">1470</span>   * See HBASE-2231 for details.<a name="line.1470"></a>
+<span class="sourceLineNo">1471</span>   *<a name="line.1471"></a>
+<span class="sourceLineNo">1472</span>   * @param compaction compaction details obtained from requestCompaction()<a name="line.1472"></a>
+<span class="sourceLineNo">1473</span>   * @return Storefile we compacted into or null if we failed or opted out early.<a name="line.1473"></a>
+<span class="sourceLineNo">1474</span>   */<a name="line.1474"></a>
+<span class="sourceLineNo">1475</span>  public List&lt;HStoreFile&gt; compact(CompactionContext compaction,<a name="line.1475"></a>
+<span class="sourceLineNo">1476</span>    ThroughputController throughputController, User user) throws IOException {<a name="line.1476"></a>
+<span class="sourceLineNo">1477</span>    assert compaction != null;<a name="line.1477"></a>
+<span class="sourceLineNo">1478</span>    CompactionRequestImpl cr = compaction.getRequest();<a name="line.1478"></a>
+<span class="sourceLineNo">1479</span>    try {<a name="line.1479"></a>
+<span class="sourceLineNo">1480</span>      // Do all sanity checking in here if we have a valid CompactionRequestImpl<a name="line.1480"></a>
+<span class="sourceLineNo">1481</span>      // because we need to clean up after it on the way out in a finally<a name="line.1481"></a>
+<span class="sourceLineNo">1482</span>      // block below<a name="line.1482"></a>
+<span class="sourceLineNo">1483</span>      long compactionStartTime = EnvironmentEdgeManager.currentTime();<a name="line.1483"></a>
+<span class="sourceLineNo">1484</span>      assert compaction.hasSelection();<a name="line.1484"></a>
+<span class="sourceLineNo">1485</span>      Collection&lt;HStoreFile&gt; filesToCompact = cr.getFiles();<a name="line.1485"></a>
+<span class="sourceLineNo">1486</span>      assert !filesToCompact.isEmpty();<a name="line.1486"></a>
+<span class="sourceLineNo">1487</span>      synchronized (filesCompacting) {<a name="line.1487"></a>
+<span class="sourceLineNo">1488</span>        // sanity check: we're compacting files that this store knows about<a name="line.1488"></a>
+<span class="sourceLineNo">1489</span>        // TODO: change this to LOG.error() after more debugging<a name="line.1489"></a>
+<span class="sourceLineNo">1490</span>        Preconditions.checkArgument(filesCompacting.containsAll(filesToCompact));<a name="line.1490"></a>
+<span class="sourceLineNo">1491</span>      }<a name="line.1491"></a>
+<span class="sourceLineNo">1492</span><a name="line.1492"></a>
+<span class="sourceLineNo">1493</span>      // Ready to go. Have list of files to compact.<a name="line.1493"></a>
+<span class="sourceLineNo">1494</span>      LOG.info("Starting compaction of " + filesToCompact +<a name="line.1494"></a>
+<span class="sourceLineNo">1495</span>        " into tmpdir=" + fs.getTempDir() + ", totalSize=" +<a name="line.1495"></a>
+<span class="sourceLineNo">1496</span>          TraditionalBinaryPrefix.long2String(cr.getSize(), "", 1));<a name="line.1496"></a>
+<span class="sourceLineNo">1497</span><a name="line.1497"></a>
+<span class="sourceLineNo">1498</span>      return doCompaction(cr, filesToCompact, user, compactionStartTime,<a name="line.1498"></a>
+<span class="sourceLineNo">1499</span>          compaction.compact(throughputController, user));<a name="line.1499"></a>
+<span class="sourceLineNo">1500</span>    } finally {<a name="line.1500"></a>
+<span class="sourceLineNo">1501</span>      finishCompactionRequest(cr);<a name="line.1501"></a>
+<span class="sourceLineNo">1502</span>    }<a name="line.1502"></a>
+<span class="sourceLineNo">1503</span>  }<a name="line.1503"></a>
+<span class="sourceLineNo">1504</span><a name="line.1504"></a>
+<span class="sourceLineNo">1505</span>  @VisibleForTesting<a name="line.1505"></a>
+<span class="sourceLineNo">1506</span>  protected List&lt;HStoreFile&gt; doCompaction(CompactionRequestImpl cr,<a name="line.1506"></a>
+<span class="sourceLineNo">1507</span>      Collection&lt;HStoreFile&gt; filesToCompact, User user, long compactionStartTime,<a name="line.1507"></a>
+<span class="sourceLineNo">1508</span>      List&lt;Path&gt; newFiles) throws IOException {<a name="line.1508"></a>
+<span class="sourceLineNo">1509</span>    // Do the steps necessary to complete the compaction.<a name="line.1509"></a>
+<span class="sourceLineNo">1510</span>    List&lt;HStoreFile&gt; sfs = moveCompactedFilesIntoPlace(cr, newFiles, user);<a name="line.1510"></a>
+<span class="sourceLineNo">1511</span>    writeCompactionWalRecord(filesToCompact, sfs);<a name="line.1511"></a>
+<span class="sourceLineNo">1512</span>    replaceStoreFiles(filesToCompact, sfs);<a name="line.1512"></a>
+<span class="sourceLineNo">1513</span>    if (cr.isMajor()) {<a name="line.1513"></a>
+<span class="sourceLineNo">1514</span>      majorCompactedCellsCount.addAndGet(getCompactionProgress().getTotalCompactingKVs());<a name="line.1514"></a>
+<span class="sourceLineNo">1515</span>      majorCompactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);<a name="line.1515"></a>
+<span class="sourceLineNo">1516</span>    } else {<a name="line.1516"></a>
+<span class="sourceLineNo">1517</span>      compactedCellsCount.addAndGet(getCompactionProgress().getTotalCompactingKVs());<a name="line.1517"></a>
+<span class="sourceLineNo">1518</span>      compactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);<a name="line.1518"></a>
+<span class="sourceLineNo">1519</span>    }<a name="line.1519"></a>
+<span class="sourceLineNo">1520</span>    long outputBytes = getTotalSize(sfs);<a name="line.1520"></a>
+<span class="sourceLineNo">1521</span><a name="line.1521"></a>
+<span class="sourceLineNo">1522</span>    // At this point the store will use new files for all new scanners.<a name="line.1522"></a>
+<span class="sourceLineNo">1523</span>    completeCompaction(filesToCompact); // update store size.<a name="line.1523"></a>
+<span class="sourceLineNo">1524</span><a name="line.1524"></a>
+<span class="sourceLineNo">1525</span>    long now = EnvironmentEdgeManager.currentTime();<a name="line.1525"></a>
+<span class="sourceLineNo">1526</span>    if (region.getRegionServerServices() != null<a name="line.1526"></a>
+<span class="sourceLineNo">1527</span>        &amp;&amp; region.getRegionServerServices().getMetrics() != null) {<a name="line.1527"></a>
+<span class="sourceLineNo">1528</span>      region.getRegionServerServices().getMetrics().updateCompaction(<a name="line.1528"></a>
+<span class="sourceLineNo">1529</span>          region.getTableDescriptor().getTableName().getNameAsString(),<a name="line.1529"></a>
+<span class="sourceLineNo">1530</span>          cr.isMajor(), now - compactionStartTime, cr.getFiles().size(),<a name="line.1530"></a>
+<span class="sourceLineNo">1531</span>          newFiles.size(), cr.getSize(), outputBytes);<a name="line.1531"></a>
+<span class="sourceLineNo">1532</span><a name="line.1532"></a>
+<span class="sourceLineNo">1533</span>    }<a name="line.1533"></a>
+<span class="sourceLineNo">1534</span><a name="line.1534"></a>
+<span class="sourceLineNo">1535</span>    logCompactionEndMessage(cr, sfs, now, compactionStartTime);<a name="line.1535"></a>
+<span class="sourceLineNo">1536</span>    return sfs;<a name="line.1536"></a>
+<span class="sourceLineNo">1537</span>  }<a name="line.1537"></a>
+<span class="sourceLineNo">1538</span><a name="line.1538"></a>
+<span class="sourceLineNo">1539</span>  private List&lt;HStoreFile&gt; moveCompactedFilesIntoPlace(CompactionRequestImpl cr,<a name="line.1539"></a>
+<span class="sourceLineNo">1540</span>      List&lt;Path&gt; newFiles, User user) throws IOException {<a name="line.1540"></a>
+<span class="sourceLineNo">1541</span>    List&lt;HStoreFile&gt; sfs = new ArrayList&lt;&gt;(newFiles.size());<a name="line.1541"></a>
+<span class="sourceLineNo">1542</span>    for (Path newFile : newFiles) {<a name="line.1542"></a>
+<span class="sourceLineNo">1543</span>      assert newFile != null;<a name="line.1543"></a>
+<span class="sourceLineNo">1544</span>      HStoreFile sf = moveFileIntoPlace(newFile);<a name="line.1544"></a>
+<span class="sourceLineNo">1545</span>      if (this.getCoprocessorHost() != null) {<a name="line.1545"></a>
+<span class="sourceLineNo">1546</span>        getCoprocessorHost().postCompact(this, sf, cr.getTracker(), cr, user);<a name="line.1546"></a>
+<span class="sourceLineNo">1547</span>      }<a name="line.1547"></a>
+<span class="sourceLineNo">1548</span>      assert sf != null;<a name="line.1548"></a>
+<span class="sourceLineNo">1549</span>      sfs.add(sf);<a name="line.1549"></a>
+<span class="sourceLineNo">1550</span>    }<a name="line.1550"></a>
+<span class="sourceLineNo">1551</span>    return sfs;<a name="line.1551"></a>
+<span class="sourceLineNo">1552</span>  }<a name="line.1552"></a>
+<span class="sourceLineNo">1553</span><a name="line.1553"></a>
+<span class="sourceLineNo">1554</span>  // Package-visible for tests<a name="line.1554"></a>
+<span class="sourceLineNo">1555</span>  HStoreFile moveFileIntoPlace(Path newFile) throws IOException {<a name="line.1555"></a>
+<span class="sourceLineNo">1556</span>    validateStoreFile(newFile);<a name="line.1556"></a>
+<span class="sourceLineNo">1557</span>    // Move the file into the right spot<a name="line.1557"></a>
+<span class="sourceLineNo">1558</span>    Path destPath = fs.commitStoreFile(getColumnFamilyName(), newFile);<a name="line.1558"></a>
+<span class="sourceLineNo">1559</span>    return createStoreFileAndReader(destPath);<a name="line.1559"></a>
+<span class="sourceLineNo">1560</span>  }<a name="line.1560"></a>
+<span class="sourceLineNo">1561</span><a name="line.1561"></a>
+<span class="sourceLineNo">1562</span>  /**<a name="line.1562"></a>
+<span class="sourceLineNo">1563</span>   * Writes the compaction WAL record.<a name="line.1563"></a>
+<span class="sourceLineNo">1564</span>   * @param filesCompacted Files compacted (input).<a name="line.1564"></a>
+<span class="sourceLineNo">1565</span>   * @param newFiles Files from compaction.<a name="line.1565"></a>
+<span class="sourceLineNo">1566</span>   */<a name="line.1566"></a>
+<span class="sourceLineNo">1567</span>  private void writeCompactionWalRecord(Collection&lt;HStoreFile&gt; filesCompacted,<a name="line.1567"></a>
+<span class="sourceLineNo">1568</span>      Collection&lt;HStoreFile&gt; newFiles) throws IOException {<a name="line.1568"></a>
+<span class="sourceLineNo">1569</span>    if (region.getWAL() == null) {<a name="line.1569"></a>
+<span class="sourceLineNo">1570</span>      return;<a name="line.1570"></a>
+<span class="sourceLineNo">1571</span>    }<a name="line.1571"></a>
+<span class="sourceLineNo">1572</span>    List&lt;Path&gt; inputPaths =<a name="line.1572"></a>
+<span class="sourceLineNo">1573</span>        filesCompacted.stream().map(HStoreFile::getPath).collect(Collectors.toList());<a name="line.1573"></a>
+<span class="sourceLineNo">1574</span>    List&lt;Path&gt; outputPaths =<a name="line.1574"></a>
+<span class="sourceLineNo">1575</span>        newFiles.stream().map(HStoreFile::getPath).collect(Collectors.toList());<a name="line.1575"></a>
+<span class="sourceLineNo">1576</span>    RegionInfo info = this.region.getRegionInfo();<a name="line.1576"></a>
+<span class="sourceLineNo">1577</span>    CompactionDescriptor compactionDescriptor = ProtobufUtil.toCompactionDescriptor(info,<a name="line.1577"></a>
+<span class="sourceLineNo">1578</span>        family.getName(), inputPaths, outputPaths,<a name="line.1578"></a>
+<span class="sourceLineNo">1579</span>      fs.getStoreDir(getColumnFamilyDescriptor().getNameAsString()));<a name="line.1579"></a>
+<span class="sourceLineNo">1580</span>    // Fix reaching into Region to get the maxWaitForSeqId.<a name="line.1580"></a>
+<span class="sourceLineNo">1581</span>    // Does this method belong in Region altogether given it is making so many references up there?<a name="line.1581"></a>
+<span class="sourceLineNo">1582</span>    // Could be Region#writeCompactionMarker(compactionDescriptor);<a name="line.1582"></a>
+<span class="sourceLineNo">1583</span>    WALUtil.writeCompactionMarker(this.region.getWAL(), this.region.getReplicationScope(),<a name="line.1583"></a>
+<span class="sourceLineNo">1584</span>        this.region.getRegionInfo(), compactionDescriptor, this.region.getMVCC());<a name="line.1584"></a>
+<span class="sourceLineNo">1585</span>  }<a name="line.1585"></a>
+<span class="sourceLineNo">1586</span><a name="line.1586"></a>
+<span class="sourceLineNo">1587</span>  @VisibleForTesting<a name="line.1587"></a>
+<span class="sourceLineNo">1588</span>  void replaceStoreFiles(Collection&lt;HStoreFile&gt; compactedFiles, Collection&lt;HStoreFile&gt; result)<a name="line.1588"></a>
+<span class="sourceLineNo">1589</span>      throws IOException {<a name="line.1589"></a>
+<span class="sourceLineNo">1590</span>    this.lock.writeLock().lock();<a name="line.1590"></a>
+<span class="sourceLineNo">1591</span>    try {<a name="line.1591"></a>
+<span class="sourceLineNo">1592</span>      this.storeEngine.getStoreFileManager().addCompactionResults(compactedFiles, result);<a name="line.1592"></a>
+<span class="sourceLineNo">1593</span>      synchronized (filesCompacting) {<a name="line.1593"></a>
+<span class="sourceLineNo">1594</span>        filesCompacting.removeAll(compactedFiles);<a name="line.1594"></a>
+<span class="sourceLineNo">1595</span>      }<a name="line.1595"></a>
+<span class="sourceLineNo">1596</span><a name="line.1596"></a>
+<span class="sourceLineNo">1597</span>      // These may be null when the RS is shutting down. The space quota Chores will fix the Region<a name="line.1597"></a>
+<span class="sourceLineNo">1598</span>      // sizes later so it's not super-critical if we miss these.<a name="line.1598"></a>
+<span class="sourceLineNo">1599</span>      RegionServerServices rsServices = region.getRegionServerServices();<a name="line.1599"></a>
+<span class="sourceLineNo">1600</span>      if (rsServices != null &amp;&amp; rsServices.getRegionServerSpaceQuotaManager() != null) {<a name="line.1600"></a>
+<span class="sourceLineNo">1601</span>        updateSpaceQuotaAfterFileReplacement(<a name="line.1601"></a>
+<span class="sourceLineNo">1602</span>            rsServices.getRegionServerSpaceQuotaManager().getRegionSizeStore(), getRegionInfo(),<a name="line.1602"></a>
+<span class="sourceLineNo">1603</span>            compactedFiles, result);<a name="line.1603"></a>
+<span class="sourceLineNo">1604</span>      }<a name="line.1604"></a>
+<span class="sourceLineNo">1605</span>    } finally {<a name="line.1605"></a>
+<span class="sourceLineNo">1606</span>      this.lock.writeLock().unlock();<a name="line.1606"></a>
+<span class="sourceLineNo">1607</span>    }<a name="line.1607"></a>
+<span class="sourceLineNo">1608</span>  }<a name="line.1608"></a>
+<span class="sourceLineNo">1609</span><a name="line.1609"></a>
+<span class="sourceLineNo">1610</span>  /**<a name="line.1610"></a>
+<span class="sourceLineNo">1611</span>   * Updates the space quota usage for this region, removing the size for files compacted away<a name="line.1611"></a>
+<span class="sourceLineNo">1612</span>   * and adding in the size for new files.<a name="line.1612"></a>
+<span class="sourceLineNo">1613</span>   *<a name="line.1613"></a>
+<span class="sourceLineNo">1614</span>   * @param sizeStore The object tracking changes in region size for space quotas.<a name="line.1614"></a>
+<span class="sourceLineNo">1615</span>   * @param regionInfo The identifier for the region whose size is being updated.<a name="line.1615"></a>
+<span class="sourceLineNo">1616</span>   * @param oldFiles Files removed from this store's region.<a name="line.1616"></a>
+<span class="sourceLineNo">1617</span>   * @param newFiles Files added to this store's region.<a name="line.1617"></a>
+<span class="sourceLineNo">1618</span>   */<a name="line.1618"></a>
+<span class="sourceLineNo">1619</span>  void updateSpaceQuotaAfterFileReplacement(<a name="line.1619"></a>
+<span class="sourceLineNo">1620</span>      RegionSizeStore sizeStore, RegionInfo regionInfo, Collection&lt;HStoreFile&gt; oldFiles,<a name="line.1620"></a>
+<span class="sourceLineNo">1621</span>      Collection&lt;HStoreFile&gt; newFiles) {<a name="line.1621"></a>
+<span class="sourceLineNo">1622</span>    long delta = 0;<a name="line.1622"></a>
+<span class="sourceLineNo">1623</span>    if (oldFiles != null) {<a name="line.1623"></a>
+<span class="sourceLineNo">1624</span>      for (HStoreFile compactedFile : oldFiles) {<a name="line.1624"></a>
+<span class="sourceLineNo">1625</span>        if (compactedFile.isHFile()) {<a name="line.1625"></a>
+<span class="sourceLineNo">1626</span>          delta -= compactedFile.getReader().length();<a name="line.1626"></a>
+<span class="sourceLineNo">1627</span>        }<a name="line.1627"></a>
+<span class="sourceLineNo">1628</span>      }<a name="line.1628"></a>
+<span class="sourceLineNo">1629</span>    }<a name="line.1629"></a>
+<span class="sourceLineNo">1630</span>    if (newFiles != null) {<a name="line.1630"></a>
+<span class="sourceLineNo">1631</span>      for (HStoreFile newFile : newFiles) {<a name="line.1631"></a>
+<span class="sourceLineNo">1632</span>        if (newFile.isHFile()) {<a name="line.1632"></a>
+<span class="sourceLineNo">1633</span>          delta += newFile.getReader().length();<a name="line.1633"></a>
+<span class="sourceLineNo">1634</span>        }<a name="line.1634"></a>
+<span class="sourceLineNo">1635</span>      }<a name="line.1635"></a>
+<span class="sourceLineNo">1636</span>    }<a name="line.1636"></a>
+<span class="sourceLineNo">1637</span>    sizeStore.incrementRegionSize(regionInfo, delta);<a name="line.1637"></a>
+<span class="sourceLineNo">1638</span>  }<a name="line.1638"></a>
+<span class="sourceLineNo">1639</span><a name="line.1639"></a>
+<span class="sourceLineNo">1640</span>  /**<a name="line.1640"></a>
+<span class="sourceLineNo">1641</span>   * Log a very elaborate compaction completion message.<a name="line.1641"></a>
+<span class="sourceLineNo">1642</span>   * @param cr Request.<a name="line.1642"></a>
+<span class="sourceLineNo">1643</span>   * @param sfs Resulting files.<a name="line.1643"></a>
+<span class="sourceLineNo">1644</span>   * @param compactionStartTime Start time.<a name="line.1644"></a>
+<span class="sourceLineNo">1645</span>   */<a name="line.1645"></a>
+<span class="sourceLineNo">1646</span>  private void logCompactionEndMessage(<a name="line.1646"></a>
+<span class="sourceLineNo">1647</span>      CompactionRequestImpl cr, List&lt;HStoreFile&gt; sfs, long now, long compactionStartTime) {<a name="line.1647"></a>
+<span class="sourceLineNo">1648</span>    StringBuilder message = new StringBuilder(<a name="line.1648"></a>
+<span class="sourceLineNo">1649</span>      "Completed" + (cr.isMajor() ? " major" : "") + " compaction of "<a name="line.1649"></a>
+<span class="sourceLineNo">1650</span>      + cr.getFiles().size() + (cr.isAllFiles() ? " (all)" : "") + " file(s) in "<a name="line.1650"></a>
+<span class="sourceLineNo">1651</span>      + this + " of " + this.getRegionInfo().getShortNameToLog() + " into ");<a name="line.1651"></a>
+<span class="sourceLineNo">1652</span>    if (sfs.isEmpty()) {<a name="line.1652"></a>
+<span class="sourceLineNo">1653</span>      message.append("none, ");<a name="line.1653"></a>
+<span class="sourceLineNo">1654</span>    } else {<a name="line.1654"></a>
+<span class="sourceLineNo">1655</span>      for (HStoreFile sf: sfs) {<a name="line.1655"></a>
+<span class="sourceLineNo">1656</span>        message.append(sf.getPath().getName());<a name="line.1656"></a>
+<span class="sourceLineNo">1657</span>        message.append("(size=");<a name="line.1657"></a>
+<span class="sourceLineNo">1658</span>        message.append(TraditionalBinaryPrefix.long2String(sf.getReader().length(), "", 1));<a name="line.1658"></a>
+<span class="sourceLineNo">1659</span>        message.append("), ");<a name="line.1659"></a>
+<span class="sourceLineNo">1660</span>      }<a name="line.1660"></a>
+<span class="sourceLineNo">1661</span>    }<a name="line.1661"></a>
+<span class="sourceLineNo">1662</span>    message.append("total size for store is ")<a name="line.1662"></a>
+<span class="sourceLineNo">1663</span>      .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1))<a name="line.1663"></a>
+<span class="sourceLineNo">1664</span>      .append(". This selection was in queue for ")<a name="line.1664"></a>
+<span class="sourceLineNo">1665</span>      .append(StringUtils.formatTimeDiff(compactionStartTime, cr.getSelectionTime()))<a name="line.1665"></a>
+<span class="sourceLineNo">1666</span>      .append(", and took ").append(StringUtils.formatTimeDiff(now, compactionStartTime))<a name="line.1666"></a>
+<span class="sourceLineNo">1667</span>      .append(" to execute.");<a name="line.1667"></a>
+<span class="sourceLineNo">1668</span>    LOG.info(message.toString());<a name="line.1668"></a>
+<span class="sourceLineNo">1669</span>    if (LOG.isTraceEnabled()) {<a name="line.1669"></a>
+<span class="sourceLineNo">1670</span>      int fileCount = storeEngine.getStoreFileManager().getStorefileCount();<a name="line.1670"></a>
+<span class="sourceLineNo">1671</span>      long resultSize = getTotalSize(sfs);<a name="line.1671"></a>
+<span class="sourceLineNo">1672</span>      String traceMessage = "COMPACTION start,end,size out,files in,files out,store size,"<a name="line.1672"></a>
+<span class="sourceLineNo">1673</span>        + "store files [" + compactionStartTime + "," + now + "," + resultSize + ","<a name="line.1673"></a>
+<span class="sourceLineNo">1674</span>          + cr.getFiles().size() + "," + sfs.size() + "," +  storeSize + "," + fileCount + "]";<a name="line.1674"></a>
+<span class="sourceLineNo">1675</span>      LOG.trace(traceMessage);<a name="line.1675"></a>
+<span class="sourceLineNo">1676</span>    }<a name="line.1676"></a>
+<span class="sourceLineNo">1677</span>  }<a name="line.1677"></a>
+<span class="sourceLineNo">1678</span><a name="line.1678"></a>
+<span class="sourceLineNo">1679</span>  /**<a name="line.1679"></a>
+<span class="sourceLineNo">1680</span>   * Call to complete a compaction. Its for the case where we find in the WAL a compaction<a name="line.1680"></a>
+<span class="sourceLineNo">1681</span>   * that was not finished.  We could find one recovering a WAL after a regionserver crash.<a name="line.1681"></a>
+<span class="sourceLineNo">1682</span>   * See HBASE-2231.<a name="line.1682"></a>
+<span class="sourceLineNo">1683</span>   */<a name="line.1683"></a>
+<span class="sourceLineNo">1684</span>  public void replayCompactionMarker(CompactionDescriptor compaction, boolean pickCompactionFiles,<a name="line.1684"></a>
+<span class="sourceLineNo">1685</span>      boolean removeFiles) throws IOException {<a name="line.1685"></a>
+<span class="sourceLineNo">1686</span>    LOG.debug("Completing compaction from the WAL marker");<a name="line.1686"></a>
+<span class="sourceLineNo">1687</span>    List&lt;String&gt; compactionInputs = compaction.getCompactionInputList();<a name="line.1687"></a>
+<span class="sourceLineNo">1688</span>    List&lt;String&gt; compactionOutputs = Lists.newArrayList(compaction.getCompactionOutputList());<a name="line.1688"></a>
+<span class="sourceLineNo">1689</span><a name="line.1689"></a>
+<span class="sourceLineNo">1690</span>    // The Compaction Marker is written after the compaction is completed,<a name="line.1690"></a>
+<span class="sourceLineNo">1691</span>    // and the files moved into the region/family folder.<a name="line.1691"></a>
+<span class="sourceLineNo">1692</span>    //<a name="line.1692"></a>
+<span class="sourceLineNo">1693</span>    // If we crash after the entry is written, we may not have removed the<a name="line.1693"></a>
+<span class="sourceLineNo">1694</span>    // input files, but the output file is present.<a name="line.1694"></a>
+<span class="sourceLineNo">1695</span>    // (The unremoved input files will be removed by this function)<a name="line.1695"></a>
+<span class="sourceLineNo">1696</span>    //<a name="line.1696"></a>
+<span class="sourceLineNo">1697</span>    // If we scan the directory and the file is not present, it can mean that:<a name="line.1697"></a>
+<span class="sourceLineNo">1698</span>    //   - The file was manually removed by the user<a name="line.1698"></a>
+<span class="sourceLineNo">1699</span>    //   - The file was removed as consequence of subsequent compaction<a name="line.1699"></a>
+<span class="sourceLineNo">1700</span>    // so, we can't do anything with the "compaction output list" because those<a name="line.1700"></a>
+<span class="sourceLineNo">1701</span>    // files have already been loaded when opening the region (by virtue of<a name="line.1701"></a>
+<span class="sourceLineNo">1702</span>    // being in the store's folder) or they may be missing due to a compaction.<a name="line.1702"></a>
+<span class="sourceLineNo">1703</span><a name="line.1703"></a>
+<span class="sourceLineNo">1704</span>    String familyName = this.getColumnFamilyName();<a name="line.1704"></a>
+<span class="sourceLineNo">1705</span>    Set&lt;String&gt; inputFiles = new HashSet&lt;&gt;();<a name="line.1705"></a>
+<span class="sourceLineNo">1706</span>    for (String compactionInput : compactionInputs) {<a name="line.1706"></a>
+<span class="sourceLineNo">1707</span>      Path inputPath = fs.getStoreFilePath(familyName, compactionInput);<a name="line.1707"></a>
+<span class="sourceLineNo">1708</span>      inputFiles.add(inputPath.getName());<a name="line.1708"></a>
+<span class="sourceLineNo">1709</span>    }<a name="line.1709"></a>
+<span class="sourceLineNo">1710</span><a name="line.1710"></a>
+<span class="sourceLineNo">1711</span>    //some of the input files might already be deleted<a name="line.1711"></a>
+<span class="sourceLineNo">1712</span>    List&lt;HStoreFile&gt; inputStoreFiles = new ArrayList&lt;&gt;(compactionInputs.size());<a name="line.1712"></a>
+<span class="sourceLineNo">1713</span>    for (HStoreFile sf : this.getStorefiles()) {<a name="line.1713"></a>
+<span class="sourceLineNo">1714</span>      if (inputFiles.contains(sf.getPath().getName())) {<a name="line.1714"></a>
+<span class="sourceLineNo">1715</span>        inputStoreFiles.add(sf);<a name="line.1715"></a>
+<span class="sourceLineNo">1716</span>      }<a name="line.1716"></a>
+<span class="sourceLineNo">1717</span>    }<a name="line.1717"></a>
+<span class="sourceLineNo">1718</span><a name="line.1718"></a>
+<span class="sourceLineNo">1719</span>    // check whether we need to pick up the new files<a name="line.1719"></a>
+<span class="sourceLineNo">1720</span>    List&lt;HStoreFile&gt; outputStoreFiles = new ArrayList&lt;&gt;(compactionOutputs.size());<a name="line.1720"></a>
+<span class="sourceLineNo">1721</span><a name="line.1721"></a>
+<span class="sourceLineNo">1722</span>    if (pickCompactionFiles) {<a name="line.1722"></a>
+<span class="sourceLineNo">1723</span>      for (HStoreFile sf : this.getStorefiles()) {<a name="line.1723"></a>
+<span class="sourceLineNo">1724</span>        compactionOutputs.remove(sf.getPath().getName());<a name="line.1724"></a>
+<span class="sourceLineNo">1725</span>      }<a name="line.1725"></a>
+<span class="sourceLineNo">1726</span>      for (String compactionOutput : compactionOutputs) {<a name="line.1726"></a>
+<span class="sourceLineNo">1727</span>        StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), compactionOutput);<a name="line.1727"></a>
+<span class="sourceLineNo">1728</span>        HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);<a name="line.1728"></a>
+<span class="sourceLineNo">1729</span>        outputStoreFiles.add(storeFile);<a name="line.1729"></a>
+<span class="sourceLineNo">1730</span>      }<a name="line.1730"></a>
+<span class="sourceLineNo">1731</span>    }<a name="line.1731"></a>
+<span class="sourceLineNo">1732</span><a name="line.1732"></a>
+<span class="sourceLineNo">1733</span>    if (!inputStoreFiles.isEmpty() || !outputStoreFiles.isEmpty()) {<a name="line.1733"></a>
+<span class="sourceLineNo">1734</span>      LOG.info("Replaying compaction marker, replacing input files: " +<a name="line.1734"></a>
+<span class="sourceLineNo">1735</span>          inputStoreFiles + " with output files : " + outputStoreFiles);<a name="line.1735"></a>
+<span class="sourceLineNo">1736</span>      this.replaceStoreFiles(inputStoreFiles, outputStoreFiles);<a name="line.1736"></a>
+<span class="sourceLineNo">1737</span>      this.completeCompaction(inputStoreFiles);<a name="line.1737"></a>
+<span class="sourceLineNo">1738</span>    }<a name="line.1738"></a>
+<span class="sourceLineNo">1739</span>  }<a name="line.1739"></a>
+<span class="sourceLineNo">1740</span><a name="line.1740"></a>
+<span class="sourceLineNo">1741</span>  /**<a name="line.1741"></a>
+<span class="sourceLineNo">1742</span>   * This method tries to compact N recent files for testing.<a name="line.1742"></a>
+<span class="sourceLineNo">1743</span>   * Note that because compacting "recent" files only makes sense for some policies,<a name="line.1743"></a>
+<span class="sourceLineNo">1744</span>   * e.g. the default one, it assumes default policy is used. It doesn't use policy,<a name="line.1744"></a>
+<span class="sourceLineNo">1745</span>   * but instead makes a compaction candidate list by itself.<a name="line.1745"></a>
+<span class="sourceLineNo">1746</span>   * @param N Number of files.<a name="line.1746"></a>
+<span class="sourceLineNo">1747</span>   */<a name="line.1747"></a>
+<span class="sourceLineNo">1748</span>  @VisibleForTesting<a name="line.1748"></a>
+<span class="sourceLineNo">1749</span>  public void compactRecentForTestingAssumingDefaultPolicy(int N) throws IOException {<a name="line.1749"></a>
+<span class="sourceLineNo">1750</span>    List&lt;HStoreFile&gt; filesToCompact;<a name="line.1750"></a>
+<span class="sourceLineNo">1751</span>    boolean isMajor;<a name="line.1751"></a>
+<span class="sourceLineNo">1752</span><a name="line.1752"></a>
+<span class="sourceLineNo">1753</span>    this.lock.readLock().lock();<a name="line.1753"></a>
+<span class="sourceLineNo">1754</span>    try {<a name="line.1754"></a>
+<span class="sourceLineNo">1755</span>      synchronized (filesCompacting) {<a name="line.1755"></a>
+<span class="sourceLineNo">1756</span>        filesToCompact = Lists.newArrayList(storeEngine.getStoreFileManager().getStorefiles());<a name="line.1756"></a>
+<span class="sourceLineNo">1757</span>        if (!filesCompacting.isEmpty()) {<a name="line.1757"></a>
+<span class="sourceLineNo">1758</span>          // exclude all files older than the newest file we're currently<a name="line.1758"></a>
+<span class="sourceLineNo">1759</span>          // compacting. this allows us to preserve contiguity (HBASE-2856)<a name="line.1759"></a>
+<span class="sourceLineNo">1760</span>          HStoreFile last = filesCompacting.get(filesCompacting.size() - 1);<a name="line.1760"></a>
+<span class="sourceLineNo">1761</span>          int idx = filesToCompact.indexOf(last);<a name="line.1761"></a>
+<span class="sourceLineNo">1762</span>          Preconditions.checkArgument(idx != -1);<a name="line.1762"></a>
+<span class="sourceLineNo">1763</span>          filesToCompact.subList(0, idx + 1).clear();<a name="line.1763"></a>
+<span class="sourceLineNo">1764</span>        }<a name="line.1764"></a>
+<span class="sourceLineNo">1765</span>        int count = filesToCompact.size();<a name="line.1765"></a>
+<span class="sourceLineNo">1766</span>        if (N &gt; count) {<a name="line.1766"></a>
+<span class="sourceLineNo">1767</span>          throw new RuntimeException("Not enough files");<a name="line.1767"></a>
+<span class="sourceLineNo">1768</span>        }<a name="line.1768"></a>
+<span class="sourceLineNo">1769</span><a name="line.1769"></a>
+<span class="sourceLineNo">1770</span>        filesToCompact = filesToCompact.subList(count - N, count);<a name="line.1770"></a>
+<span class="sourceLineNo">1771</span>        isMajor = (filesToCompact.size() == storeEngine.getStoreFileManager().getStorefileCount());<a name="line.1771"></a>
+<span class="sourceLineNo">1772</span>        filesCompacting.addAll(filesToCompact);<a name="line.1772"></a>
+<span class="sourceLineNo">1773</span>        Collections.sort(filesCompacting, storeEngine.getStoreFileManager()<a name="line.1773"></a>
+<span class="sourceLineNo">1774</span>            .getStoreFileComparator());<a name="line.1774"></a>
+<span class="sourceLineNo">1775</span>      }<a name="line.1775"></a>
+<span class="sourceLineNo">1776</span>    } finally {<a name="line.1776"></a>
+<span class="sourceLineNo">1777</span>      this.lock.readLock().unlock();<a name="line.1777"></a>
+<span class="sourceLineNo">1778</span>    }<a name="line.1778"></a>
+<span class="sourceLineNo">1779</span><a name="line.1779"></a>
+<span class="sourceLineNo">1780</span>    try {<a name="line.1780"></a>
+<span class="sourceLineNo">1781</span>      // Ready to go. Have list of files to compact.<a name="line.1781"></a>
+<span class="sourceLineNo">1782</span>      List&lt;Path&gt; newFiles = ((DefaultCompactor)this.storeEngine.getCompactor())<a name="line.1782"></a>
+<span class="sourceLineNo">1783</span>          .compactForTesting(filesToCompact, isMajor);<a name="line.1783"></a>
+<span class="sourceLineNo">1784</span>      for (Path newFile: newFiles) {<a name="line.1784"></a>
+<span class="sourceLineNo">1785</span>        // Move the compaction into place.<a name="line.1785"></a>
+<span class="sourceLineNo">1786</span>        HStoreFile sf = moveFileIntoPlace(newFile);<a name="line.1786"></a>
+<span class="sourceLineNo">1787</span>        if (this.getCoprocessorHost() != null) {<a name="line.1787"></a>
+<span class="sourceLineNo">1788</span>          this.getCoprocessorHost().postCompact(this, sf, null, null, null);<a name="line.1788"></a>
+<span class="sourceLineNo">1789</span>        }<a name="line.1789"></a>
+<span class="sourceLineNo">1790</span>        replaceStoreFiles(filesToCompact, Collections.singletonList(sf));<a name="line.1790"></a>
+<span class="sourceLineNo">1791</span>        completeCompaction(filesToCompact);<a name="line.1791"></a>
+<span class="sourceLineNo">1792</span>      }<a name="line.1792"></a>
+<span class="sourceLineNo">1793</span>    } finally {<a name="line.1793"></a>
+<span class="sourceLineNo">1794</span>      synchronized (filesCompacting) {<a name="line.1794"></a>
+<span class="sourceLineNo">1795</span>        filesCompacting.removeAll(filesToCompact);<a name="line.1795"></a>
+<span class="sourceLineNo">1796</span>      }<a name="line.1796"></a>
+<span class="sourceLineNo">1797</span>    }<a name="line.1797"></a>
+<span class="sourceLineNo">1798</span>  }<a name="line.1798"></a>
+<span class="sourceLineNo">1799</span><a name="line.1799"></a>
+<span class="sourceLineNo">1800</span>  @Override<a name="line.1800"></a>
+<span class="sourceLineNo">1801</span>  public boolean hasReferences() {<a name="line.1801"></a>
+<span class="sourceLineNo">1802</span>    // Grab the read lock here, because we need to ensure that: only when the atomic<a name="line.1802"></a>
+<span class="sourceLineNo">1803</span>    // replaceStoreFiles(..) finished, we can get all the complete store file list.<a name="line.1803"></a>
+<span class="sourceLineNo">1804</span>    this.lock.readLock().lock();<a name="line.1804"></a>
+<span class="sourceLineNo">1805</span>    try {<a name="line.1805"></a>
+<span class="sourceLineNo">1806</span>      // Merge the current store files with compacted files here due to HBASE-20940.<a name="line.1806"></a>
+<span class="sourceLineNo">1807</span>      Collection&lt;HStoreFile&gt; allStoreFiles = new ArrayList&lt;&gt;(getStorefiles());<a name="line.1807"></a>
+<span class="sourceLineNo">1808</span>      allStoreFiles.addAll(getCompactedFiles());<a name="line.1808"></a>
+<span class="sourceLineNo">1809</span>      return StoreUtils.hasReferences(allStoreFiles);<a name="line.1809"></a>
+<span class="sourceLineNo">1810</span>    } finally {<a name="line.1810"></a>
+<span class="sourceLineNo">1811</span>      this.lock.readLock().unlock();<a name="line.1811"></a>
+<span class="sourceLineNo">1812</span>    }<a name="line.1812"></a>
+<span class="sourceLineNo">1813</span>  }<a name="line.1813"></a>
+<span class="sourceLineNo">1814</span><a name="line.1814"></a>
+<span class="sourceLineNo">1815</span>  /**<a name="line.1815"></a>
+<span class="sourceLineNo">1816</span>   * getter for CompactionProgress object<a name="line.1816"></a>
+<span class="sourceLineNo">1817</span>   * @return CompactionProgress object; can be null<a name="line.1817"></a>
+<span class="sourceLineNo">1818</span>   */<a name="line.1818"></a>
+<span class="sourceLineNo">1819</span>  public CompactionProgress getCompactionProgress() {<a name="line.1819"></a>
+<span class="sourceLineNo">1820</span>    return this.storeEngine.getCompactor().getProgress();<a name="line.1820"></a>
+<span class="sourceLineNo">1821</span>  }<a name="line.1821"></a>
+<span class="sourceLineNo">1822</span><a name="line.1822"></a>
+<span class="sourceLineNo">1823</span>  @Override<a name="line.1823"></a>
+<span class="sourceLineNo">1824</span>  public boolean shouldPerformMajorCompaction() throws IOException {<a name="line.1824"></a>
+<span class="sourceLineNo">1825</span>    for (HStoreFile sf : this.storeEngine.getStoreFileManager().getStorefiles()) {<a name="line.1825"></a>
+<span class="sourceLineNo">1826</span>      // TODO: what are these reader checks all over the place?<a name="line.1826"></a>
+<span class="sourceLineNo">1827</span>      if (sf.getReader() == null) {<a name="line.1827"></a>
+<span class="sourceLineNo">1828</span>        LOG.debug("StoreFile {} has null Reader", sf);<a name="line.1828"></a>
+<span class="sourceLineNo">1829</span>        return false;<a name="line.1829"></a>
+<span class="sourceLineNo">1830</span>      }<a name="line.1830"></a>
+<span class="sourceLineNo">1831</span>    }<a name="line.1831"></a>
+<span class="sourceLineNo">1832</span>    return storeEngine.getCompactionPolicy().shouldPerformMajorCompaction(<a name="line.1832"></a>
+<span class="sourceLineNo">1833</span>        this.storeEngine.getStoreFileManager().getStorefiles());<a name="line.1833"></a>
+<span class="sourceLineNo">1834</span>  }<a name="line.1834"></a>
+<span class="sourceLineNo">1835</span><a name="line.1835"></a>
+<span class="sourceLineNo">1836</span>  public Optional&lt;CompactionContext&gt; requestCompaction() throws IOException {<a name="line.1836"></a>
+<span class="sourceLineNo">1837</span>    return requestCompaction(NO_PRIORITY, CompactionLifeCycleTracker.DUMMY, null);<a name="line.1837"></a>
+<span class="sourceLineNo">1838</span>  }<a name="line.1838"></a>
+<span class="sourceLineNo">1839</span><a name="line.1839"></a>
+<span class="sourceLineNo">1840</span>  public Optional&lt;CompactionContext&gt; requestCompaction(int priority,<a name="line.1840"></a>
+<span class="sourceLineNo">1841</span>      CompactionLifeCycleTracker tracker, User user) throws IOException {<a name="line.1841"></a>
+<span class="sourceLineNo">1842</span>    // don't even select for compaction if writes are disabled<a name="line.1842"></a>
+<span class="sourceLineNo">1843</span>    if (!this.areWritesEnabled()) {<a name="line.1843"></a>
+<span class="sourceLineNo">1844</span>      return Optional.empty();<a name="line.1844"></a>
+<span class="sourceLineNo">1845</span>    }<a name="line.1845"></a>
+<span class="sourceLineNo">1846</span>    // Before we do compaction, try to get rid of unneeded files to simplify things.<a name="line.1846"></a>
+<span class="sourceLineNo">1847</span>    removeUnneededFiles();<a name="line.1847"></a>
 <span class="sourceLineNo">1848</span><a name="line.1848"></a>
-<span class="sourceLineNo">1849</span>        // Normal case - coprocessor is not overriding file selection.<a name="line.1849"></a>
-<span class="sourceLineNo">1850</span>        if (!compaction.hasSelection()) {<a name="line.1850"></a>
-<span class="sourceLineNo">1851</span>          boolean isUserCompaction = priority == Store.PRIORITY_USER;<a name="line.1851"></a>
-<span class="sourceLineNo">1852</span>          boolean mayUseOffPeak = offPeakHours.isOffPeakHour() &amp;&amp;<a name="line.1852"></a>
-<span class="sourceLineNo">1853</span>              offPeakCompactionTracker.compareAndSet(false, true);<a name="line.1853"></a>
-<span class="sourceLineNo">1854</span>          try {<a name="line.1854"></a>
-<span class="sourceLineNo">1855</span>            compaction.select(this.filesCompacting, isUserCompaction,<a name="line.1855"></a>
-<span class="sourceLineNo">1856</span>              mayUseOffPeak, forceMajor &amp;&amp; filesCompacting.isEmpty());<a name="line.1856"></a>
-<span class="sourceLineNo">1857</span>          } catch (IOException e) {<a name="line.1857"></a>
-<span class="sourceLineNo">1858</span>            if (mayUseOffPeak) {<a name="line.1858"></a>
-<span class="sourceLineNo">1859</span>              offPeakCompactionTracker.set(false);<a name="line.1859"></a>
-<span class="sourceLineNo">1860</span>            }<a name="line.1860"></a>
-<span class="sourceLineNo">1861</span>            throw e;<a name="line.1861"></a>
+<span class="sourceLineNo">1849</span>    final CompactionContext compaction = storeEngine.createCompaction();<a name="line.1849"></a>
+<span class="sourceLineNo">1850</span>    CompactionRequestImpl request = null;<a name="line.1850"></a>
+<span class="sourceLineNo">1851</span>    this.lock.readLock().lock();<a name="line.1851"></a>
+<span class="sourceLineNo">1852</span>    try {<a name="line.1852"></a>
+<span class="sourceLineNo">1853</span>      synchronized (filesCompacting) {<a name="line.1853"></a>
+<span class="sourceLineNo">1854</span>        // First, see if coprocessor would want to override selection.<a name="line.1854"></a>
+<span class="sourceLineNo">1855</span>        if (this.getCoprocessorHost() != null) {<a name="line.1855"></a>
+<span class="sourceLineNo">1856</span>          final List&lt;HStoreFile&gt; candidatesForCoproc = compaction.preSelect(this.filesCompacting);<a name="line.1856"></a>
+<span class="sourceLineNo">1857</span>          boolean override = getCoprocessorHost().preCompactSelection(this,<a name="line.1857"></a>
+<span class="sourceLineNo">1858</span>              candidatesForCoproc, tracker, user);<a name="line.1858"></a>
+<span class="sourceLineNo">1859</span>          if (override) {<a name="line.1859"></a>
+<span class="sourceLineNo">1860</span>            // Coprocessor is overriding normal file selection.<a name="line.1860"></a>
+<span class="sourceLineNo">1861</span>            compaction.forceSelect(new CompactionRequestImpl(candidatesForCoproc));<a name="line.1861"></a>
 <span class="sourceLineNo">1862</span>          }<a name="line.1862"></a>
-<span class="sourceLineNo">1863</span>          assert compaction.hasSelection();<a name="line.1863"></a>
-<span class="sourceLineNo">1864</span>          if (mayUseOffPeak &amp;&amp; !compaction.getRequest().isOffPeak()) {<a name="line.1864"></a>
-<span class="sourceLineNo">1865</span>            // Compaction policy doesn't want to take advantage of off-peak.<a name="line.1865"></a>
-<span class="sourceLineNo">1866</span>            offPeakCompactionTracker.set(false);<a name="line.1866"></a>
-<span class="sourceLineNo">1867</span>          }<a name="line.1867"></a>
-<span class="sourceLineNo">1868</span>        }<a name="line.1868"></a>
-<span class="sourceLineNo">1869</span>        if (this.getCoprocessorHost() != null) {<a name="line.1869"></a>
-<span class="sourceLineNo">1870</span>          this.getCoprocessorHost().postCompactSelection(<a name="line.1870"></a>
-<span class="sourceLineNo">1871</span>              this, ImmutableList.copyOf(compaction.getRequest().getFiles()), tracker,<a name="line.1871"></a>
-<span class="sourceLineNo">1872</span>              compaction.getRequest(), user);<a name="line.1872"></a>
-<span class="sourceLineNo">1873</span>        }<a name="line.1873"></a>
-<span class="sourceLineNo">1874</span>        // Finally, we have the resulting files list. Check if we have any files at all.<a name="line.1874"></a>
-<span class="sourceLineNo">1875</span>        request = compaction.getRequest();<a name="line.1875"></a>
-<span class="sourceLineNo">1876</span>        Collection&lt;HStoreFile&gt; selectedFiles = request.getFiles();<a name="line.1876"></a>
-<span class="sourceLineNo">1877</span>        if (selectedFiles.isEmpty()) {<a name="line.1877"></a>
-<span class="sourceLineNo">1878</span>          return Optional.empty();<a name="line.1878"></a>
-<span class="sourceLineNo">1879</span>        }<a name="line.1879"></a>
-<span class="sourceLineNo">1880</span><a name="line.1880"></a>
-<span class="sourceLineNo">1881</span>        addToCompactingFiles(selectedFiles);<a name="line.1881"></a>
-<span class="sourceLineNo">1882</span><a name="line.1882"></a>
-<span class="sourceLineNo">1883</span>        // If we're enqueuing a major, clear the force flag.<a name="line.1883"></a>
-<span class="sourceLineNo">1884</span>        this.forceMajor = this.forceMajor &amp;&amp; !request.isMajor();<a name="line.1884"></a>
-<span class="sourceLineNo">1885</span><a name="line.1885"></a>
-<span class="sourceLineNo">1886</span>        // Set common request properties.<a name="line.1886"></a>
-<span class="sourceLineNo">1887</span>        // Set priority, either override value supplied by caller or from store.<a name="line.1887"></a>
-<span class="sourceLineNo">1888</span>        request.setPriority((priority != Store.NO_PRIORITY) ? priority : getCompactPriority());<a name="line.1888"></a>
-<span class="sourceLineNo">1889</span>        request.setDescription(getRegionInfo().getRegionNameAsString(), getColumnFamilyName());<a name="line.1889"></a>
-<span class="sourceLineNo">1890</span>        request.setTracker(tracker);<a name="line.1890"></a>
-<span class="sourceLineNo">1891</span>      }<a name="line.1891"></a>
-<span class="sourceLineNo">1892</span>    } finally {<a name="line.1892"></a>
-<span class="sourceLineNo">1893</span>      this.lock.readLock().unlock();<a name="line.1893"></a>
-<span class="sourceLineNo">1894</span>    }<a name="line.1894"></a>
-<span class="sourceLineNo">1895</span><a name="line.1895"></a>
-<span class="sourceLineNo">1896</span>    if (LOG.isDebugEnabled()) {<a name="line.1896"></a>
-<span class="sourceLineNo">1897</span>      LOG.debug(getRegionInfo().getEncodedName() + " - " + getColumnFamilyName()<a name="line.1897"></a>
-<span class="sourceLineNo">1898</span>          + ": Initiating " + (request.isMajor() ? "major" : "minor") + " compaction"<a name="line.1898"></a>
-<span class="sourceLineNo">1899</span>          + (request.isAllFiles() ? " (all files)" : ""));<a name="line.1899"></a>
-<span class="sourceLineNo">1900</span>    }<a name="line.1900"></a>
-<span class="sourceLineNo">1901</span>    this.region.reportCompactionRequestStart(request.isMajor());<a name="line.1901"></a>
-<span class="sourceLineNo">1902</span>    return Optional.of(compaction);<a name="line.1902"></a>
-<span class="sourceLineNo">1903</span>  }<a name="line.1903"></a>
-<span class="sourceLineNo">1904</span><a name="line.1904"></a>
-<span class="sourceLineNo">1905</span>  /** Adds the files to compacting files. filesCompacting must be locked. */<a name="line.1905"></a>
-<span class="sourceLineNo">1906</span>  private void addToCompactingFiles(Collection&lt;HStoreFile&gt; filesToAdd) {<a name="line.1906"></a>
-<span class="sourceLineNo">1907</span>    if (CollectionUtils.isEmpty(filesToAdd)) {<a name="line.1907"></a>
-<span class="sourceLineNo">1908</span>      return;<a name="line.1908"></a>
-<span class="sourceLineNo">1909</span>    }<a name="line.1909"></a>
-<span class="sourceLineNo">1910</span>    // Check that we do not try to compact the same StoreFile twice.<a name="line.1910"></a>
-<span class="sourceLineNo">1911</span>    if (!Collections.disjoint(filesCompacting, filesToAdd)) {<a name="line.1911"></a>
-<span class="sourceLineNo">1912</span>      Preconditions.checkArgument(false, "%s overlaps with %s", filesToAdd, filesCompacting);<a name="line.1912"></a>
-<span class="sourceLineNo">1913</span>    }<a name="line.1913"></a>
-<span class="sourceLineNo">1914</span>    filesCompacting.addAll(filesToAdd);<a name="line.1914"></a>
-<span class="sourceLineNo">1915</span>    Collections.sort(filesCompacting, storeEngine.getStoreFileManager().getStoreFileComparator());<a name="line.1915"></a>
-<span class="sourceLineNo">1916</span>  }<a name="line.1916"></a>
-<span class="sourceLineNo">1917</span><a name="line.1917"></a>
-<span class="sourceLineNo">1918</span>  private void removeUnneededFiles() throws IOException {<a name="line.1918"></a>
-<span class="sourceLineNo">1919</span>    if (!conf.getBoolean("hbase.store.delete.expired.storefile", true)) {<a name="line.1919"></a>
-<span class="sourceLineNo">1920</span>      return;<a name="line.1920"></a>
-<span class="sourceLineNo">1921</span>    }<a name="line.1921"></a>
-<span class="sourceLineNo">1922</span>    if (getColumnFamilyDescriptor().getMinVersions() &gt; 0) {<a name="line.1922"></a>
-<span class="sourceLineNo">1923</span>      LOG.debug("Skipping expired store file removal due to min version being {}",<a name="line.1923"></a>
-<span class="sourceLineNo">1924</span>          getColumnFamilyDescriptor().getMinVersions());<a name="line.1924"></a>
-<span class="sourceLineNo">1925</span>      return;<a name="line.1925"></a>
-<span class="sourceLineNo">1926</span>    }<a name="line.1926"></a>
-<span class="sourceLineNo">1927</span>    this.lock.readLock().lock();<a name="line.1927"></a>
-<span class="sourceLineNo">1928</span>    Collection&lt;HStoreFile&gt; delSfs = null;<a name="line.1928"></a>
-<span class="sourceLineNo">1929</span>    try {<a name="line.1929"></a>
-<span class="sourceLineNo">1930</span>      synchronized (filesCompacting) {<a name="line.1930"></a>
-<span class="sourceLineNo">1931</span>        long cfTtl = getStoreFileTtl();<a name="line.1931"></a>
-<span class="sourceLineNo">1932</span>        if (cfTtl != Long.MAX_VALUE) {<a name="line.1932"></a>
-<span class="sourceLineNo">1933</span>          delSfs = storeEngine.getStoreFileManager().getUnneededFiles(<a name="line.1933"></a>
-<span class="sourceLineNo">1934</span>              EnvironmentEdgeManager.currentTime() - cfTtl, filesCompacting);<a name="line.1934"></a>
-<span class="sourceLineNo">1935</span>          addToCompactingFiles(delSfs);<a name="line.1935"></a>
-<span class="sourceLineNo">1936</span>        }<a name="line.1936"></a>
-<span class="sourceLineNo">1937</span>      }<a name="line.1937"></a>
-<span class="sourceLineNo">1938</span>    } finally {<a name="line.1938"></a>
-<span class="sourceLineNo">1939</span>      this.lock.readLock().unlock();<a name="line.1939"></a>
-<span class="sourceLineNo">1940</span>    }<a name="line.1940"></a>
-<span class="sourceLineNo">1941</span><a name="line.1941"></a>
-<span class="sourceLineNo">1942</span>    if (CollectionUtils.isEmpty(delSfs)) {<a name="line.1942"></a>
-<span class="sourceLineNo">1943</span>      return;<a name="line.1943"></a>
-<span class="sourceLineNo">1944</span>    }<a name="line.1944"></a>
-<span class="sourceLineNo">1945</span><a name="line.1945"></a>
-<span class="sourceLineNo">1946</span>    Collection&lt;HStoreFile&gt; newFiles = Collections.emptyList(); // No new files.<a name="line.1946"></a>
-<span class="sourceLineNo">1947</span>    writeCompactionWalRecord(delSfs, newFiles);<a name="line.1947"></a>
-<span class="sourceLineNo">1948</span>    replaceStoreFiles(delSfs, newFiles);<a name="line.1948"></a>
-<span class="sourceLineNo">1949</span>    completeCompaction(delSfs);<a name="line.1949"></a>
-<span class="sourceLineNo">1950</span>    LOG.info("Completed removal of " + delSfs.size() + " unnecessary (expired) file(s) in "<a name="line.1950"></a>
-<span class="sourceLineNo">1951</span>        + this + " of " + this.getRegionInfo().getRegionNameAsString()<a name="line.1951"></a>
-<span class="sourceLineNo">1952</span>        + "; total size for store is "<a name="line.1952"></a>
-<span class="sourceLineNo">1953</span>        + TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1));<a name="line.1953"></a>
-<span class="sourceLineNo">1954</span>  }<a name="line.1954"></a>
-<span class="sourceLineNo">1955</span><a name="line.1955"></a>
-<span class="sourceLineNo">1956</span>  public void cancelRequestedCompaction(CompactionContext compaction) {<a name="line.1956"></a>
-<span class="sourceLineNo">1957</span>    finishCompactionRequest(compaction.getRequest());<a name="line.1957"></a>
-<span class="sourceLineNo">1958</span>  }<a name="line.1958"></a>
-<span class="sourceLineNo">1959</span><a name="line.1959"></a>
-<span class="sourceLineNo">1960</span>  private void finishCompactionRequest(CompactionRequestImpl cr) {<a name="line.1960"></a>
-<span class="sourceLineNo">1961</span>    this.region.reportCompactionRequestEnd(cr.isMajor(), cr.getFiles().size(), cr.getSize());<a name="line.1961"></a>
-<span class="sourceLineNo">1962</span>    if (cr.isOffPeak()) {<a name="line.1962"></a>
-<span class="sourceLineNo">1963</span>      offPeakCompactionTracker.set(false);<a name="line.1963"></a>
-<span class="sourceLineNo">1964</span>      cr.setOffPeak(false);<a name="line.1964"></a>
-<span class="sourceLineNo">1965</span>    }<a name="line.1965"></a>
-<span class="sourceLineNo">1966</span>    synchronized (filesCompacting) {<a name="line.1966"></a>
-<span class="sourceLineNo">1967</span>      filesCompacting.removeAll(cr.getFiles());<a name="line.1967"></a>
-<span class="sourceLineNo">1968</span>    }<a name="line.1968"></a>
-<span class="sourceLineNo">1969</span>  }<a name="line.1969"></a>
-<span class="sourceLineNo">1970</span><a name="line.1970"></a>
-<span class="sourceLineNo">1971</span>  /**<a name="line.1971"></a>
-<span class="sourceLineNo">1972</span>   * Validates a store file by opening and closing it. In HFileV2 this should not be an expensive<a name="line.1972"></a>
-<span class="sourceLineNo">1973</span>   * operation.<a name="line.1973"></a>
-<span class="sourceLineNo">1974</span>   * @param path the path to the store file<a name="line.1974"></a>
-<span class="sourceLineNo">1975</span>   */<a name="line.1975"></a>
-<span class="sourceLineNo">1976</span>  private void validateStoreFile(Path path) throws IOException {<a name="line.1976"></a>
-<span class="sourceLineNo">1977</span>    HStoreFile storeFile = null;<a name="line.1977"></a>
-<span class="sourceLineNo">1978</span>    try {<a name="line.1978"></a>
-<span class="sourceLineNo">1979</span>      storeFile = createStoreFileAndReader(path);<a name="line.1979"></a>
-<span class="sourceLineNo">1980</span>    } catch (IOException e) {<a name="line.1980"></a>
-<span class="sourceLineNo">1981</span>      LOG.error("Failed to open store file : {}, keeping it in tmp location", path, e);<a name="line.1981"></a>
-<span class="sourceLineNo">1982</span>      throw e;<a name="line.1982"></a>
-<span class="sourceLineNo">1983</span>    } finally {<a name="line.1983"></a>
-<span class="sourceLineNo">1984</span>      if (storeFile != null) {<a name="line.1984"></a>
-<span class="sourceLineNo">1985</span>        storeFile.closeStoreFile(false);<a name="line.1985"></a>
-<span class="sourceLineNo">1986</span>      }<a name="line.1986"></a>
-<span class="sourceLineNo">1987</span>    }<a name="line.1987"></a>
-<span class="sourceLineNo">1988</span>  }<a name="line.1988"></a>
-<span class="sourceLineNo">1989</span><a name="line.1989"></a>
-<span class="sourceLineNo">1990</span>  /**<a name="line.1990"></a>
-<span class="sourceLineNo">1991</span>   * Update counts.<a name="line.1991"></a>
-<span class="sourceLineNo">1992</span>   * @param compactedFiles list of files that were compacted<a name="line.1992"></a>
-<span class="sourceLineNo">1993</span>   */<a name="line.1993"></a>
-<span class="sourceLineNo">1994</span>  @VisibleForTesting<a name="line.1994"></a>
-<span class="sourceLineNo">1995</span>  protected void completeCompaction(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.1995"></a>
-<span class="sourceLineNo">1996</span>  // Rename this method! TODO.<a name="line.1996"></a>
-<span class="sourceLineNo">1997</span>    throws IOException {<a name="line.1997"></a>
-<span class="sourceLineNo">1998</span>    this.storeSize.set(0L);<a name="line.1998"></a>
-<span class="sourceLineNo">1999</span>    this.totalUncompressedBytes.set(0L);<a name="line.1999"></a>
-<span class="sourceLineNo">2000</span>    for (HStoreFile hsf : this.storeEngine.getStoreFileManager().getStorefiles()) {<a name="line.2000"></a>
-<span class="sourceLineNo">2001</span>      StoreFileReader r = hsf.getReader();<a name="line.2001"></a>
-<span class="sourceLineNo">2002</span>      if (r == null) {<a name="line.2002"></a>
-<span class="sourceLineNo">2003</span>        LOG.warn("StoreFile {} has a null Reader", hsf);<a name="line.2003"></a>
-<span class="sourceLineNo">2004</span>        continue;<a name="line.2004"></a>
-<span class="sourceLineNo">2005</span>      }<a name="line.2005"></a>
-<span class="sourceLineNo">2006</span>      this.storeSize.addAndGet(r.length());<a name="line.2006"></a>
-<span class="sourceLineNo">2007</span>      this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());<a name="line.2007"></a>
-<span class="sourceLineNo">2008</span>    }<a name="line.2008"></a>
-<span class="sourceLineNo">2009</span>  }<a name="line.2009"></a>
-<span class="sourceLineNo">2010</span><a name="line.2010"></a>
-<span class="sourceLineNo">2011</span>  /*<a name="line.2011"></a>
-<span class="sourceLineNo">2012</span>   * @param wantedVersions How many versions were asked for.<a name="line.2012"></a>
-<span class="sourceLineNo">2013</span>   * @return wantedVersions or this families' {@link HConstants#VERSIONS}.<a name="line.2013"></a>
-<span class="sourceLineNo">2014</span>   */<a name="line.2014"></a>
-<span class="sourceLineNo">2015</span>  int versionsToReturn(final int wantedVersions) {<a name="line.2015"></a>
-<span class="sourceLineNo">2016</span>    if (wantedVersions &lt;= 0) {<a name="line.2016"></a>
-<span class="sourceLineNo">2017</span>      throw new IllegalArgumentException("Number of versions must be &gt; 0");<a name="line.2017"></a>
-<span class="sourceLineNo">2018</span>    }<a name="line.2018"></a>
-<span class="sourceLineNo">2019</span>    // Make sure we do not return more than maximum versions for this store.<a name="line.2019"></a>
-<span class="sourceLineNo">2020</span>    int maxVersions = this.family.getMaxVersions();<a name="line.2020"></a>
-<span class="sourceLineNo">2021</span>    return wantedVersions &gt; maxVersions ? maxVersions: wantedVersions;<a name="line.2021"></a>
-<span class="sourceLineNo">2022</span>  }<a name="line.2022"></a>
-<span class="sourceLineNo">2023</span><a name="line.2023"></a>
-<span class="sourceLineNo">2024</span>  @Override<a name="line.2024"></a>
-<span class="sourceLineNo">2025</span>  public boolean canSplit() {<a name="line.2025"></a>
-<span class="sourceLineNo">2026</span>    this.lock.readLock().lock();<a name="line.2026"></a>
-<span class="sourceLineNo">2027</span>    try {<a name="line.2027"></a>
-<span class="sourceLineNo">2028</span>      // Not split-able if we find a reference store file present in the store.<a name="line.2028"></a>
-<span class="sourceLineNo">2029</span>      boolean result = !hasReferences();<a name="line.2029"></a>
-<span class="sourceLineNo">2030</span>      if (!result) {<a name="line.2030"></a>
-<span class="sourceLineNo">2031</span>        LOG.trace("Not splittable; has references: {}", this);<a name="line.2031"></a>
-<span class="sourceLineNo">2032</span>      }<a name="line.2032"></a>
-<span class="sourceLineNo">2033</span>      return result;<a name="line.2033"></a>
-<span class="sourceLineNo">2034</span>    } finally {<a name="line.2034"></a>
-<span class="sourceLineNo">2035</span>      this.lock.readLock().unlock();<a name="line.2035"></a>
-<span class="sourceLineNo">2036</span>    }<a name="line.2036"></a>
-<span class="sourceLineNo">2037</span>  }<a name="line.2037"></a>
-<span class="sourceLineNo">2038</span><a name="line.2038"></a>
-<span class="sourceLineNo">2039</span>  /**<a name="line.2039"></a>
-<span class="sourceLineNo">2040</span>   * Determines if Store should be split.<a name="line.2040"></a>
-<span class="sourceLineNo">2041</span>   */<a name="line.2041"></a>
-<span class="sourceLineNo">2042</span>  public Optional&lt;byte[]&gt; getSplitPoint() {<a name="line.2042"></a>
-<span class="sourceLineNo">2043</span>    this.lock.readLock().lock();<a name="line.2043"></a>
-<span class="sourceLineNo">2044</span>    try {<a name="line.2044"></a>
-<span class="sourceLineNo">2045</span>      // Should already be enforced by the split policy!<a name="line.2045"></a>
-<span class="sourceLineNo">2046</span>      assert !this.getRegionInfo().isMetaRegion();<a name="line.2046"></a>
-<span class="sourceLineNo">2047</span>      // Not split-able if we find a reference store file present in the store.<a name="line.2047"></a>
-<span class="sourceLineNo">2048</span>      if (hasReferences()) {<a name="line.2048"></a>
-<span class="sourceLineNo">2049</span>        LOG.trace("Not splittable; has references: {}", this);<a name="line.2049"></a>
-<span class="sourceLineNo">2050</span>        return Optional.empty();<a name="line.2050"></a>
-<span class="sourceLineNo">2051</span>      }<a name="line.2051"></a>
-<span class="sourceLineNo">2052</span>      return this.storeEngine.getStoreFileManager().getSplitPoint();<a name="line.2052"></a>
-<span class="sourceLineNo">2053</span>    } catch(IOException e) {<a name="line.2053"></a>
-<span class="sourceLineNo">2054</span>      LOG.warn("Failed getting store size for {}", this, e);<a name="line.2054"></a>
-<span class="sourceLineNo">2055</span>    } finally {<a name="line.2055"></a>
-<span class="sourceLineNo">2056</span>      this.lock.readLock().unlock();<a name="line.2056"></a>
-<span class="sourceLineNo">2057</span>    }<a name="line.2057"></a>
-<span class="sourceLineNo">2058</span>    return Optional.empty();<a name="line.2058"></a>
-<span class="sourceLineNo">2059</span>  }<a name="line.2059"></a>
-<span class="sourceLineNo">2060</span><a name="line.2060"></a>
-<span class="sourceLineNo">2061</span>  @Override<a name="line.2061"></a>
-<span class="sourceLineNo">2062</span>  public long getLastCompactSize() {<a name="line.2062"></a>
-<span class="sourceLineNo">2063</span>    return this.lastCompactSize;<a name="line.2063"></a>
-<span class="sourceLineNo">2064</span>  }<a name="line.2064"></a>
-<span class="sourceLineNo">2065</span><a name="line.2065"></a>
-<span class="sourceLineNo">2066</span>  @Override<a name="line.2066"></a>
-<span class="sourceLineNo">2067</span>  public long getSize() {<a name="line.2067"></a>
-<span class="sourceLineNo">2068</span>    return storeSize.get();<a name="line.2068"></a>
-<span class="sourceLineNo">2069</span>  }<a name="line.2069"></a>
-<span class="sourceLineNo">2070</span><a name="line.2070"></a>
-<span class="sourceLineNo">2071</span>  public void triggerMajorCompaction() {<a name="line.2071"></a>
-<span class="sourceLineNo">2072</span>    this.forceMajor = true;<a name="line.2072"></a>
-<span class="sourceLineNo">2073</span>  }<a name="line.2073"></a>
-<span class="sourceLineNo">2074</span><a name="line.2074"></a>
-<span class="sourceLineNo">2075</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.2075"></a>
-<span class="sourceLineNo">2076</span>  // File administration<a name="line.2076"></a>
-<span class="sourceLineNo">2077</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.2077"></a>
-<span class="sourceLineNo">2078</span><a name="line.2078"></a>
-<span class="sourceLineNo">2079</span>  /**<a name="line.2079"></a>
-<span class="sourceLineNo">2080</span>   * Return a scanner for both the memstore and the HStore files. Assumes we are not in a<a name="line.2080"></a>
-<span class="sourceLineNo">2081</span>   * compaction.<a name="line.2081"></a>
-<span class="sourceLineNo">2082</span>   * @param scan Scan to apply when scanning the stores<a name="line.2082"></a>
-<span class="sourceLineNo">2083</span>   * @param targetCols columns to scan<a name="line.2083"></a>
-<span class="sourceLineNo">2084</span>   * @return a scanner over the current key values<a name="line.2084"></a>
-<span class="sourceLineNo">2085</span>   * @throws IOException on failure<a name="line.2085"></a>
-<span class="sourceLineNo">2086</span>   */<a name="line.2086"></a>
-<span class="sourceLineNo">2087</span>  public KeyValueScanner getScanner(Scan scan, final NavigableSet&lt;byte[]&gt; targetCols, long readPt)<a name="line.2087"></a>
-<span class="sourceLineNo">2088</span>      throws IOException {<a name="line.2088"></a>
-<span class="sourceLineNo">2089</span>    lock.readLock().lock();<a name="line.2089"></a>
-<span class="sourceLineNo">2090</span>    try {<a name="line.2090"></a>
-<span class="sourceLineNo">2091</span>      ScanInfo scanInfo;<a name="line.2091"></a>
-<span class="sourceLineNo">2092</span>      if (this.getCoprocessorHost() != null) {<a name="line.2092"></a>
-<span class="sourceLineNo">2093</span>        scanInfo = this.getCoprocessorHost().preStoreScannerOpen(this);<a name="line.2093"></a>
-<span class="sourceLineNo">2094</span>      } else {<a name="line.2094"></a>
-<span class="sourceLineNo">2095</span>        scanInfo = getScanInfo();<a name="line.2095"></a>
-<span class="sourceLineNo">2096</span>      }<a name="line.2096"></a>
-<span class="sourceLineNo">2097</span>      return createScanner(scan, scanInfo, targetCols, readPt);<a name="line.2097"></a>
-<span class="sourceLineNo">2098</span>    } finally {<a name="line.2098"></a>
-<span class="sourceLineNo">2099</span>      lock.readLock().unlock();<a name="line.2099"></a>
-<span class="sourceLineNo">2100</span>    }<a name="line.2100"></a>
-<span class="sourceLineNo">2101</span>  }<a name="line.2101"></a>
-<span class="sourceLineNo">2102</span><a name="line.2102"></a>
-<span class="sourceLineNo">2103</span>  // HMobStore will override this method to return its own implementation.<a name="line.2103"></a>
-<span class="sourceLineNo">2104</span>  protected KeyValueScanner createScanner(Scan scan, ScanInfo scanInfo,<a name="line.2104"></a>
-<span class="sourceLineNo">2105</span>      NavigableSet&lt;byte[]&gt; targetCols, long readPt) throws IOException {<a name="line.2105"></a>
-<span class="sourceLineNo">2106</span>    return scan.isReversed() ? new ReversedStoreScanner(this, scanInfo, scan, targetCols, readPt)<a name="line.2106"></a>
-<span class="sourceLineNo">2107</span>        : new StoreScanner(this, scanInfo, scan, targetCols, readPt);<a name="line.2107"></a>
-<span class="sourceLineNo">2108</span>  }<a name="line.2108"></a>
-<span class="sourceLineNo">2109</span><a name="line.2109"></a>
-<span class="sourceLineNo">2110</span>  /**<a name="line.2110"></a>
-<span class="sourceLineNo">2111</span>   * Recreates the scanners on the current list of active store file scanners<a name="line.2111"></a>
-<span class="sourceLineNo">2112</span>   * @param currentFileScanners the current set of active store file scanners<a name="line.2112"></a>
-<span class="sourceLineNo">2113</span>   * @param cacheBlocks cache the blocks or not<a name="line.2113"></a>
-<span class="sourceLineNo">2114</span>   * @param usePread use pread or not<a name="line.2114"></a>
-<span class="sourceLineNo">2115</span>   * @param isCompaction is the scanner for compaction<a name="line.2115"></a>
-<span class="sourceLineNo">2116</span>   * @param matcher the scan query matcher<a name="line.2116"></a>
-<span class="sourceLineNo">2117</span>   * @param startRow the scan's start row<a name="line.2117"></a>
-<span class="sourceLineNo">2118</span>   * @param includeStartRow should the scan include the start row<a name="line.2118"></a>
-<span class="sourceLineNo">2119</span>   * @param stopRow the scan's stop row<a name="line.2119"></a>
-<span class="sourceLineNo">2120</span>   * @param includeStopRow should the scan include the stop row<a name="line.2120"></a>
-<span class="sourceLineNo">2121</span>   * @param readPt the read point of the current scane<a name="line.2121"></a>
-<span class="sourceLineNo">2122</span>   * @param includeMemstoreScanner whether the current scanner should include memstorescanner<a name="line.2122"></a>
-<span class="sourceLineNo">2123</span>   * @return list of scanners recreated on the current Scanners<a name="line.2123"></a>
-<span class="sourceLineNo">2124</span>   */<a name="line.2124"></a>
-<span class="sourceLineNo">2125</span>  public List&lt;KeyValueScanner&gt; recreateScanners(List&lt;KeyValueScanner&gt; currentFileScanners,<a name="line.2125"></a>
-<span class="sourceLineNo">2126</span>      boolean cacheBlocks, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.2126"></a>
-<span class="sourceLineNo">2127</span>      byte[] startRow, boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.2127"></a>
-<span class="sourceLineNo">2128</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.2128"></a>
-<span class="sourceLineNo">2129</span>    this.lock.readLock().lock();<a name="line.2129"></a>
-<span class="sourceLineNo">2130</span>    try {<a name="line.2130"></a>
-<span class="sourceLineNo">2131</span>      Map&lt;String, HStoreFile&gt; name2File =<a name="line.2131"></a>
-<span class="sourceLineNo">2132</span>          new HashMap&lt;&gt;(getStorefilesCount() + getCompactedFilesCount());<a name="line.2132"></a>
-<span class="sourceLineNo">2133</span>      for (HStoreFile file : getStorefiles()) {<a name="line.2133"></a>
-<span class="sourceLineNo">2134</span>        name2File.put(file.getFileInfo().getActiveFileName(), file);<a name="line.2134"></a>
-<span class="sourceLineNo">2135</span>      }<a name="line.2135"></a>
-<span class="sourceLineNo">2136</span>      Collection&lt;HStoreFile&gt; compactedFiles = getCompactedFiles();<a name="line.2136"></a>
-<span class="sourceLineNo">2137</span>      for (HStoreFile file : IterableUtils.emptyIfNull(compactedFiles)) {<a name="line.2137"></a>
-<span class="sourceLineNo">2138</span>        name2File.put(file.getFileInfo().getActiveFileName(), file);<a name="line.2138"></a>
-<span class="sourceLineNo">2139</span>      }<a name="line.2139"></a>
-<span class="sourceLineNo">2140</span>      List&lt;HStoreFile&gt; filesToReopen = new ArrayList&lt;&gt;();<a name="line.2140"></a>
-<span class="sourceLineNo">2141</span>      for (KeyValueScanner kvs : currentFileScanners) {<a name="line.2141"></a>
-<span class="sourceLineNo">2142</span>        assert kvs.isFileScanner();<a name="line.2142"></a>
-<span class="sourceLineNo">2143</span>        if (kvs.peek() == null) {<a name="line.2143"></a>
-<span class="sourceLineNo">2144</span>          continue;<a name="line.2144"></a>
-<span class="sourceLineNo">2145</span>        }<a name="line.2145"></a>
-<span class="sourceLineNo">2146</span>        filesToReopen.add(name2File.get(kvs.getFilePath().getName()));<a name="line.2146"></a>
-<span class="sourceLineNo">2147</span>      }<a name="line.2147"></a>
-<span class="sourceLineNo">2148</span>      if (filesToReopen.isEmpty()) {<a name="line.2148"></a>
-<span class="sourceLineNo">2149</span>        return null;<a name="line.2149"></a>
-<span class="sourceLineNo">2150</span>      }<a name="line.2150"></a>
-<span class="sourceLineNo">2151</span>      return getScanners(filesToReopen, cacheBlocks, false, false, matcher, startRow,<a name="line.2151"></a>
-<span class="sourceLineNo">2152</span>        includeStartRow, stopRow, includeStopRow, readPt, false);<a name="line.2152"></a>
-<span class="sourceLineNo">2153</span>    } finally {<a name="line.2153"></a>
-<span class="sourceLineNo">2154</span>      this.lock.readLock().unlock();<a name="line.2154"></a>
-<span class="sourceLineNo">2155</span>    }<a name="line.2155"></a>
-<span class="sourceLineNo">2156</span>  }<a name="line.2156"></a>
-<span class="sourceLineNo">2157</span><a name="line.2157"></a>
-<span class="sourceLineNo">2158</span>  @Override<a name="line.2158"></a>
-<span class="sourceLineNo">2159</span>  public String toString() {<a name="line.2159"></a>
-<span class="sourceLineNo">2160</span>    return this.getColumnFamilyName();<a name="line.2160"></a>
-<span class="sourceLineNo">2161</span>  }<a name="line.2161"></a>
-<span class="sourceLineNo">2162</span><a name="line.2162"></a>
-<span class="sourceLineNo">2163</span>  @Override<a name="line.2163"></a>
-<span class="sourceLineNo">2164</span>  public int getStorefilesCount() {<a name="line.2164"></a>
-<span class="sourceLineNo">2165</span>    return this.storeEngine.getStoreFileManager().getStorefileCount();<a name="line.2165"></a>
-<span class="sourceLineNo">2166</span>  }<a name="line.2166"></a>
-<span class="sourceLineNo">2167</span><a name="line.2167"></a>
-<span class="sourceLineNo">2168</span>  @Override<a name="line.2168"></a>
-<span class="sourceLineNo">2169</span>  public int getCompactedFilesCount() {<a name="line.2169"></a>
-<span class="sourceLineNo">2170</span>    return this.storeEngine.getStoreFileManager().getCompactedFilesCount();<a name="line.2170"></a>
-<span class="sourceLineNo">2171</span>  }<a name="line.2171"></a>
-<span class="sourceLineNo">2172</span><a name="line.2172"></a>
-<span class="sourceLineNo">2173</span>  private LongStream getStoreFileAgeStream() {<a name="line.2173"></a>
-<span class="sourceLineNo">2174</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream().filter(sf -&gt; {<a name="line.2174"></a>
-<span class="sourceLineNo">2175</span>      if (sf.getReader() == null) {<a name="line.2175"></a>
-<span class="sourceLineNo">2176</span>        LOG.warn("StoreFile {} has a null Reader", sf);<a name="line.2176"></a>
-<span class="sourceLineNo">2177</span>        return false;<a name="line.2177"></a>
-<span class="sourceLineNo">2178</span>      } else {<a name="line.2178"></a>
-<span class="sourceLineNo">2179</span>        return true;<a name="line.2179"></a>
-<span class="sourceLineNo">2180</span>      }<a name="line.2180"></a>
-<span class="sourceLineNo">2181</span>    }).filter(HStoreFile::isHFile).mapToLong(sf -&gt; sf.getFileInfo().getCreatedTimestamp())<a name="line.2181"></a>
-<span class="sourceLineNo">2182</span>        .map(t -&gt; EnvironmentEdgeManager.currentTime() - t);<a name="line.2182"></a>
-<span class="sourceLineNo">2183</span>  }<a name="line.2183"></a>
-<span class="sourceLineNo">2184</span><a name="line.2184"></a>
-<span class="sourceLineNo">2185</span>  @Override<a name="line.2185"></a>
-<span class="sourceLineNo">2186</span>  public OptionalLong getMaxStoreFileAge() {<a name="line.2186"></a>
-<span class="sourceLineNo">2187</span>    return getStoreFileAgeStream().max();<a name="line.2187"></a>
-<span class="sourceLineNo">2188</span>  }<a name="line.2188"></a>
-<span class="sourceLineNo">2189</span><a name="line.2189"></a>
-<span class="sourceLineNo">2190</span>  @Override<a name="line.2190"></a>
-<span class="sourceLineNo">2191</span>  public OptionalLong getMinStoreFileAge() {<a name="line.2191"></a>
-<span class="sourceLineNo">2192</span>    return getStoreFileAgeStream().min();<a name="line.2192"></a>
-<span class="sourceLineNo">2193</span>  }<a name="line.2193"></a>
-<span class="sourceLineNo">2194</span><a name="line.2194"></a>
-<span class="sourceLineNo">2195</span>  @Override<a name="line.2195"></a>
-<span class="sourceLineNo">2196</span>  public OptionalDouble getAvgStoreFileAge() {<a name="line.2196"></a>
-<span class="sourceLineNo">2197</span>    return getStoreFileAgeStream().average();<a name="line.2197"></a>
-<span class="sourceLineNo">2198</span>  }<a name="line.2198"></a>
-<span class="sourceLineNo">2199</span><a name="line.2199"></a>
-<span class="sourceLineNo">2200</span>  @Override<a name="line.2200"></a>
-<span class="sourceLineNo">2201</span>  public long getNumReferenceFiles() {<a name="line.2201"></a>
-<span class="sourceLineNo">2202</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2202"></a>
-<span class="sourceLineNo">2203</span>        .filter(HStoreFile::isReference).count();<a name="line.2203"></a>
+<span class="sourceLineNo">1863</span>        }<a name="line.1863"></a>
+<span class="sourceLineNo">1864</span><a name="line.1864"></a>
+<span class="sourceLineNo">1865</span>        // Normal case - coprocessor is not overriding file selection.<a name="line.1865"></a>
+<span class="sourceLineNo">1866</span>        if (!compaction.hasSelection()) {<a name="line.1866"></a>
+<span class="sourceLineNo">1867</span>          boolean isUserCompaction = priority == Store.PRIORITY_USER;<a name="line.1867"></a>
+<span class="sourceLineNo">1868</span>          boolean mayUseOffPeak = offPeakHours.isOffPeakHour() &amp;&amp;<a name="line.1868"></a>
+<span class="sourceLineNo">1869</span>              offPeakCompactionTracker.compareAndSet(false, true);<a name="line.1869"></a>
+<span class="sourceLineNo">1870</span>          try {<a name="line.1870"></a>
+<span class="sourceLineNo">1871</span>            compaction.select(this.filesCompacting, isUserCompaction,<a name="line.1871"></a>
+<span class="sourceLineNo">1872</span>              mayUseOffPeak, forceMajor &amp;&amp; filesCompacting.isEmpty());<a name="line.1872"></a>
+<span class="sourceLineNo">1873</span>          } catch (IOException e) {<a name="line.1873"></a>
+<span class="sourceLineNo">1874</span>            if (mayUseOffPeak) {<a name="line.1874"></a>
+<span class="sourceLineNo">1875</span>              offPeakCompactionTracker.set(false);<a name="line.1875"></a>
+<span class="sourceLineNo">1876</span>            }<a name="line.1876"></a>
+<span class="sourceLineNo">1877</span>            throw e;<a name="line.1877"></a>
+<span class="sourceLineNo">1878</span>          }<a name="line.1878"></a>
+<span class="sourceLineNo">1879</span>          assert compaction.hasSelection();<a name="line.1879"></a>
+<span class="sourceLineNo">1880</span>          if (mayUseOffPeak &amp;&amp; !compaction.getRequest().isOffPeak()) {<a name="line.1880"></a>
+<span class="sourceLineNo">1881</span>            // Compaction policy doesn't want to take advantage of off-peak.<a name="line.1881"></a>
+<span class="sourceLineNo">1882</span>            offPeakCompactionTracker.set(false);<a name="line.1882"></a>
+<span class="sourceLineNo">1883</span>          }<a name="line.1883"></a>
+<span class="sourceLineNo">1884</span>        }<a name="line.1884"></a>
+<span class="sourceLineNo">1885</span>        if (this.getCoprocessorHost() != null) {<a name="line.1885"></a>
+<span class="sourceLineNo">1886</span>          this.getCoprocessorHost().postCompactSelection(<a name="line.1886"></a>
+<span class="sourceLineNo">1887</span>              this, ImmutableList.copyOf(compaction.getRequest().getFiles()), tracker,<a name="line.1887"></a>
+<span class="sourceLineNo">1888</span>              compaction.getRequest(), user);<a name="line.1888"></a>
+<span class="sourceLineNo">1889</span>        }<a name="line.1889"></a>
+<span class="sourceLineNo">1890</span>        // Finally, we have the resulting files list. Check if we have any files at all.<a name="line.1890"></a>
+<span class="sourceLineNo">1891</span>        request = compaction.getRequest();<a name="line.1891"></a>
+<span class="sourceLineNo">1892</span>        Collection&lt;HStoreFile&gt; selectedFiles = request.getFiles();<a name="line.1892"></a>
+<span class="sourceLineNo">1893</span>        if (selectedFiles.isEmpty()) {<a name="line.1893"></a>
+<span class="sourceLineNo">1894</span>          return Optional.empty();<a name="line.1894"></a>
+<span class="sourceLineNo">1895</span>        }<a name="line.1895"></a>
+<span class="sourceLineNo">1896</span><a name="line.1896"></a>
+<span class="sourceLineNo">1897</span>        addToCompactingFiles(selectedFiles);<a name="line.1897"></a>
+<span class="sourceLineNo">1898</span><a name="line.1898"></a>
+<span class="sourceLineNo">1899</span>        // If we're enqueuing a major, clear the force flag.<a name="line.1899"></a>
+<span class="sourceLineNo">1900</span>        this.forceMajor = this.forceMajor &amp;&amp; !request.isMajor();<a name="line.1900"></a>
+<span class="sourceLineNo">1901</span><a name="line.1901"></a>
+<span class="sourceLineNo">1902</span>        // Set common request properties.<a name="line.1902"></a>
+<span class="sourceLineNo">1903</span>        // Set priority, either override value supplied by caller or from store.<a name="line.1903"></a>
+<span class="sourceLineNo">1904</span>        request.setPriority((priority != Store.NO_PRIORITY) ? priority : getCompactPriority());<a name="line.1904"></a>
+<span class="sourceLineNo">1905</span>        request.setDescription(getRegionInfo().getRegionNameAsString(), getColumnFamilyName());<a name="line.1905"></a>
+<span class="sourceLineNo">1906</span>        request.setTracker(tracker);<a name="line.1906"></a>
+<span class="sourceLineNo">1907</span>      }<a name="line.1907"></a>
+<span class="sourceLineNo">1908</span>    } finally {<a name="line.1908"></a>
+<span class="sourceLineNo">1909</span>      this.lock.readLock().unlock();<a name="line.1909"></a>
+<span class="sourceLineNo">1910</span>    }<a name="line.1910"></a>
+<span class="sourceLineNo">1911</span><a name="line.1911"></a>
+<span class="sourceLineNo">1912</span>    if (LOG.isDebugEnabled()) {<a name="line.1912"></a>
+<span class="sourceLineNo">1913</span>      LOG.debug(getRegionInfo().getEncodedName() + " - " + getColumnFamilyName()<a name="line.1913"></a>
+<span class="sourceLineNo">1914</span>          + ": Initiating " + (request.isMajor() ? "major" : "minor") + " compaction"<a name="line.1914"></a>
+<span class="sourceLineNo">1915</span>          + (request.isAllFiles() ? " (all files)" : ""));<a name="line.1915"></a>
+<span class="sourceLineNo">1916</span>    }<a name="line.1916"></a>
+<span class="sourceLineNo">1917</span>    this.region.reportCompactionRequestStart(request.isMajor());<a name="line.1917"></a>
+<span class="sourceLineNo">1918</span>    return Optional.of(compaction);<a name="line.1918"></a>
+<span class="sourceLineNo">1919</span>  }<a name="line.1919"></a>
+<span class="sourceLineNo">1920</span><a name="line.1920"></a>
+<span class="sourceLineNo">1921</span>  /** Adds the files to compacting files. filesCompacting must be locked. */<a name="line.1921"></a>
+<span class="sourceLineNo">1922</span>  private void addToCompactingFiles(Collection&lt;HStoreFile&gt; filesToAdd) {<a name="line.1922"></a>
+<span class="sourceLineNo">1923</span>    if (CollectionUtils.isEmpty(filesToAdd)) {<a name="line.1923"></a>
+<span class="sourceLineNo">1924</span>      return;<a name="line.1924"></a>
+<span class="sourceLineNo">1925</span>    }<a name="line.1925"></a>
+<span class="sourceLineNo">1926</span>    // Check that we do not try to compact the same StoreFile twice.<a name="line.1926"></a>
+<span class="sourceLineNo">1927</span>    if (!Collections.disjoint(filesCompacting, filesToAdd)) {<a name="line.1927"></a>
+<span class="sourceLineNo">1928</span>      Preconditions.checkArgument(false, "%s overlaps with %s", filesToAdd, filesCompacting);<a name="line.1928"></a>
+<span class="sourceLineNo">1929</span>    }<a name="line.1929"></a>
+<span class="sourceLineNo">1930</span>    filesCompacting.addAll(filesToAdd);<a name="line.1930"></a>
+<span class="sourceLineNo">1931</span>    Collections.sort(filesCompacting, storeEngine.getStoreFileManager().getStoreFileComparator());<a name="line.1931"></a>
+<span class="sourceLineNo">1932</span>  }<a name="line.1932"></a>
+<span class="sourceLineNo">1933</span><a name="line.1933"></a>
+<span class="sourceLineNo">1934</span>  private void removeUnneededFiles() throws IOException {<a name="line.1934"></a>
+<span class="sourceLineNo">1935</span>    if (!conf.getBoolean("hbase.store.delete.expired.storefile", true)) {<a name="line.1935"></a>
+<span class="sourceLineNo">1936</span>      return;<a name="line.1936"></a>
+<span class="sourceLineNo">1937</span>    }<a name="line.1937"></a>
+<span class="sourceLineNo">1938</span>    if (getColumnFamilyDescriptor().getMinVersions() &gt; 0) {<a name="line.1938"></a>
+<span class="sourceLineNo">1939</span>      LOG.debug("Skipping expired store file removal due to min version being {}",<a name="line.1939"></a>
+<span class="sourceLineNo">1940</span>          getColumnFamilyDescriptor().getMinVersions());<a name="line.1940"></a>
+<span class="sourceLineNo">1941</span>      return;<a name="line.1941"></a>
+<span class="sourceLineNo">1942</span>    }<a name="line.1942"></a>
+<span class="sourceLineNo">1943</span>    this.lock.readLock().lock();<a name="line.1943"></a>
+<span class="sourceLineNo">1944</span>    Collection&lt;HStoreFile&gt; delSfs = null;<a name="line.1944"></a>
+<span class="sourceLineNo">1945</span>    try {<a name="line.1945"></a>
+<span class="sourceLineNo">1946</span>      synchronized (filesCompacting) {<a name="line.1946"></a>
+<span class="sourceLineNo">1947</span>        long cfTtl = getStoreFileTtl();<a name="line.1947"></a>
+<span class="sourceLineNo">1948</span>        if (cfTtl != Long.MAX_VALUE) {<a name="line.1948"></a>
+<span class="sourceLineNo">1949</span>          delSfs = storeEngine.getStoreFileManager().getUnneededFiles(<a name="line.1949"></a>
+<span class="sourceLineNo">1950</span>              EnvironmentEdgeManager.currentTime() - cfTtl, filesCompacting);<a name="line.1950"></a>
+<span class="sourceLineNo">1951</span>          addToCompactingFiles(delSfs);<a name="line.1951"></a>
+<span class="sourceLineNo">1952</span>        }<a name="line.1952"></a>
+<span class="sourceLineNo">1953</span>      }<a name="line.1953"></a>
+<span class="sourceLineNo">1954</span>    } finally {<a name="line.1954"></a>
+<span class="sourceLineNo">1955</span>      this.lock.readLock().unlock();<a name="line.1955"></a>
+<span class="sourceLineNo">1956</span>    }<a name="line.1956"></a>
+<span class="sourceLineNo">1957</span><a name="line.1957"></a>
+<span class="sourceLineNo">1958</span>    if (CollectionUtils.isEmpty(delSfs)) {<a name="line.1958"></a>
+<span class="sourceLineNo">1959</span>      return;<a name="line.1959"></a>
+<span class="sourceLineNo">1960</span>    }<a name="line.1960"></a>
+<span class="sourceLineNo">1961</span><a name="line.1961"></a>
+<span class="sourceLineNo">1962</span>    Collection&lt;HStoreFile&gt; newFiles = Collections.emptyList(); // No new files.<a name="line.1962"></a>
+<span class="sourceLineNo">1963</span>    writeCompactionWalRecord(delSfs, newFiles);<a name="line.1963"></a>
+<span class="sourceLineNo">1964</span>    replaceStoreFiles(delSfs, newFiles);<a name="line.1964"></a>
+<span class="sourceLineNo">1965</span>    completeCompaction(delSfs);<a name="line.1965"></a>
+<span class="sourceLineNo">1966</span>    LOG.info("Completed removal of " + delSfs.size() + " unnecessary (expired) file(s) in "<a name="line.1966"></a>
+<span class="sourceLineNo">1967</span>        + this + " of " + this.getRegionInfo().getRegionNameAsString()<a name="line.1967"></a>
+<span class="sourceLineNo">1968</span>        + "; total size for store is "<a name="line.1968"></a>
+<span class="sourceLineNo">1969</span>        + TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1));<a name="line.1969"></a>
+<span class="sourceLineNo">1970</span>  }<a name="line.1970"></a>
+<span class="sourceLineNo">1971</span><a name="line.1971"></a>
+<span class="sourceLineNo">1972</span>  public void cancelRequestedCompaction(CompactionContext compaction) {<a name="line.1972"></a>
+<span class="sourceLineNo">1973</span>    finishCompactionRequest(compaction.getRequest());<a name="line.1973"></a>
+<span class="sourceLineNo">1974</span>  }<a name="line.1974"></a>
+<span class="sourceLineNo">1975</span><a name="line.1975"></a>
+<span class="sourceLineNo">1976</span>  private void finishCompactionRequest(CompactionRequestImpl cr) {<a name="line.1976"></a>
+<span class="sourceLineNo">1977</span>    this.region.reportCompactionRequestEnd(cr.isMajor(), cr.getFiles().size(), cr.getSize());<a name="line.1977"></a>
+<span class="sourceLineNo">1978</span>    if (cr.isOffPeak()) {<a name="line.1978"></a>
+<span class="sourceLineNo">1979</span>      offPeakCompactionTracker.set(false);<a name="line.1979"></a>
+<span class="sourceLineNo">1980</span>      cr.setOffPeak(false);<a name="line.1980"></a>
+<span class="sourceLineNo">1981</span>    }<a name="line.1981"></a>
+<span class="sourceLineNo">1982</span>    synchronized (filesCompacting) {<a name="line.1982"></a>
+<span class="sourceLineNo">1983</span>      filesCompacting.removeAll(cr.getFiles());<a name="line.1983"></a>
+<span class="sourceLineNo">1984</span>    }<a name="line.1984"></a>
+<span class="sourceLineNo">1985</span>  }<a name="line.1985"></a>
+<span class="sourceLineNo">1986</span><a name="line.1986"></a>
+<span class="sourceLineNo">1987</span>  /**<a name="line.1987"></a>
+<span class="sourceLineNo">1988</span>   * Validates a store file by opening and closing it. In HFileV2 this should not be an expensive<a name="line.1988"></a>
+<span class="sourceLineNo">1989</span>   * operation.<a name="line.1989"></a>
+<span class="sourceLineNo">1990</span>   * @param path the path to the store file<a name="line.1990"></a>
+<span class="sourceLineNo">1991</span>   */<a name="line.1991"></a>
+<span class="sourceLineNo">1992</span>  private void validateStoreFile(Path path) throws IOException {<a name="line.1992"></a>
+<span class="sourceLineNo">1993</span>    HStoreFile storeFile = null;<a name="line.1993"></a>
+<span class="sourceLineNo">1994</span>    try {<a name="line.1994"></a>
+<span class="sourceLineNo">1995</span>      storeFile = createStoreFileAndReader(path);<a name="line.1995"></a>
+<span class="sourceLineNo">1996</span>    } catch (IOException e) {<a name="line.1996"></a>
+<span class="sourceLineNo">1997</span>      LOG.error("Failed to open store file : {}, keeping it in tmp location", path, e);<a name="line.1997"></a>
+<span class="sourceLineNo">1998</span>      throw e;<a name="line.1998"></a>
+<span class="sourceLineNo">1999</span>    } finally {<a name="line.1999"></a>
+<span class="sourceLineNo">2000</span>      if (storeFile != null) {<a name="line.2000"></a>
+<span class="sourceLineNo">2001</span>        storeFile.closeStoreFile(false);<a name="line.2001"></a>
+<span class="sourceLineNo">2002</span>      }<a name="line.2002"></a>
+<span class="sourceLineNo">2003</span>    }<a name="line.2003"></a>
+<span class="sourceLineNo">2004</span>  }<a name="line.2004"></a>
+<span class="sourceLineNo">2005</span><a name="line.2005"></a>
+<span class="sourceLineNo">2006</span>  /**<a name="line.2006"></a>
+<span class="sourceLineNo">2007</span>   * Update counts.<a name="line.2007"></a>
+<span class="sourceLineNo">2008</span>   * @param compactedFiles list of files that were compacted<a name="line.2008"></a>
+<span class="sourceLineNo">2009</span>   */<a name="line.2009"></a>
+<span class="sourceLineNo">2010</span>  @VisibleForTesting<a name="line.2010"></a>
+<span class="sourceLineNo">2011</span>  protected void completeCompaction(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.2011"></a>
+<span class="sourceLineNo">2012</span>  // Rename this method! TODO.<a name="line.2012"></a>
+<span class="sourceLineNo">2013</span>    throws IOException {<a name="line.2013"></a>
+<span class="sourceLineNo">2014</span>    this.storeSize.set(0L);<a name="line.2014"></a>
+<span class="sourceLineNo">2015</span>    this.totalUncompressedBytes.set(0L);<a name="line.2015"></a>
+<span class="sourceLineNo">2016</span>    for (HStoreFile hsf : this.storeEngine.getStoreFileManager().getStorefiles()) {<a name="line.2016"></a>
+<span class="sourceLineNo">2017</span>      StoreFileReader r = hsf.getReader();<a name="line.2017"></a>
+<span class="sourceLineNo">2018</span>      if (r == null) {<a name="line.2018"></a>
+<span class="sourceLineNo">2019</span>        LOG.warn("StoreFile {} has a null Reader", hsf);<a name="line.2019"></a>
+<span class="sourceLineNo">2020</span>        continue;<a name="line.2020"></a>
+<span class="sourceLineNo">2021</span>      }<a name="line.2021"></a>
+<span class="sourceLineNo">2022</span>      this.storeSize.addAndGet(r.length());<a name="line.2022"></a>
+<span class="sourceLineNo">2023</span>      this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());<a name="line.2023"></a>
+<span class="sourceLineNo">2024</span>    }<a name="line.2024"></a>
+<span class="sourceLineNo">2025</span>  }<a name="line.2025"></a>
+<span class="sourceLineNo">2026</span><a name="line.2026"></a>
+<span class="sourceLineNo">2027</span>  /*<a name="line.2027"></a>
+<span class="sourceLineNo">2028</span>   * @param wantedVersions How many versions were asked for.<a name="line.2028"></a>
+<span class="sourceLineNo">2029</span>   * @return wantedVersions or this families' {@link HConstants#VERSIONS}.<a name="line.2029"></a>
+<span class="sourceLineNo">2030</span>   */<a name="line.2030"></a>
+<span class="sourceLineNo">2031</span>  int versionsToReturn(final int wantedVersions) {<a name="line.2031"></a>
+<span class="sourceLineNo">2032</span>    if (wantedVersions &lt;= 0) {<a name="line.2032"></a>
+<span class="sourceLineNo">2033</span>      throw new IllegalArgumentException("Number of versions must be &gt; 0");<a name="line.2033"></a>
+<span class="sourceLineNo">2034</span>    }<a name="line.2034"></a>
+<span class="sourceLineNo">2035</span>    // Make sure we do not return more than maximum versions for this store.<a name="line.2035"></a>
+<span class="sourceLineNo">2036</span>    int maxVersions = this.family.getMaxVersions();<a name="line.2036"></a>
+<span class="sourceLineNo">2037</span>    return wantedVersions &gt; maxVersions ? maxVersions: wantedVersions;<a name="line.2037"></a>
+<span class="sourceLineNo">2038</span>  }<a name="line.2038"></a>
+<span class="sourceLineNo">2039</span><a name="line.2039"></a>
+<span class="sourceLineNo">2040</span>  @Override<a name="line.2040"></a>
+<span class="sourceLineNo">2041</span>  public boolean canSplit() {<a name="line.2041"></a>
+<span class="sourceLineNo">2042</span>    this.lock.readLock().lock();<a name="line.2042"></a>
+<span class="sourceLineNo">2043</span>    try {<a name="line.2043"></a>
+<span class="sourceLineNo">2044</span>      // Not split-able if we find a reference store file present in the store.<a name="line.2044"></a>
+<span class="sourceLineNo">2045</span>      boolean result = !hasReferences();<a name="line.2045"></a>
+<span class="sourceLineNo">2046</span>      if (!result) {<a name="line.2046"></a>
+<span class="sourceLineNo">2047</span>        LOG.trace("Not splittable; has references: {}", this);<a name="line.2047"></a>
+<span class="sourceLineNo">2048</span>      }<a name="line.2048"></a>
+<span class="sourceLineNo">2049</span>      return result;<a name="line.2049"></a>
+<span class="sourceLineNo">2050</span>    } finally {<a name="line.2050"></a>
+<span class="sourceLineNo">2051</span>      this.lock.readLock().unlock();<a name="line.2051"></a>
+<span class="sourceLineNo">2052</span>    }<a name="line.2052"></a>
+<span class="sourceLineNo">2053</span>  }<a name="line.2053"></a>
+<span class="sourceLineNo">2054</span><a name="line.2054"></a>
+<span class="sourceLineNo">2055</span>  /**<a name="line.2055"></a>
+<span class="sourceLineNo">2056</span>   * Determines if Store should be split.<a name="line.2056"></a>
+<span class="sourceLineNo">2057</span>   */<a name="line.2057"></a>
+<span class="sourceLineNo">2058</span>  public Optional&lt;byte[]&gt; getSplitPoint() {<a name="line.2058"></a>
+<span class="sourceLineNo">2059</span>    this.lock.readLock().lock();<a name="line.2059"></a>
+<span class="sourceLineNo">2060</span>    try {<a name="line.2060"></a>
+<span class="sourceLineNo">2061</span>      // Should already be enforced by the split policy!<a name="line.2061"></a>
+<span class="sourceLineNo">2062</span>      assert !this.getRegionInfo().isMetaRegion();<a name="line.2062"></a>
+<span class="sourceLineNo">2063</span>      // Not split-able if we find a reference store file present in the store.<a name="line.2063"></a>
+<span class="sourceLineNo">2064</span>      if (hasReferences()) {<a name="line.2064"></a>
+<span class="sourceLineNo">2065</span>        LOG.trace("Not splittable; has references: {}", this);<a name="line.2065"></a>
+<span class="sourceLineNo">2066</span>        return Optional.empty();<a name="line.2066"></a>
+<span class="sourceLineNo">2067</span>      }<a name="line.2067"></a>
+<span class="sourceLineNo">2068</span>      return this.storeEngine.getStoreFileManager().getSplitPoint();<a name="line.2068"></a>
+<span class="sourceLineNo">2069</span>    } catch(IOException e) {<a name="line.2069"></a>
+<span class="sourceLineNo">2070</span>      LOG.warn("Failed getting store size for {}", this, e);<a name="line.2070"></a>
+<span class="sourceLineNo">2071</span>    } finally {<a name="line.2071"></a>
+<span class="sourceLineNo">2072</span>      this.lock.readLock().unlock();<a name="line.2072"></a>
+<span class="sourceLineNo">2073</span>    }<a name="line.2073"></a>
+<span class="sourceLineNo">2074</span>    return Optional.empty();<a name="line.2074"></a>
+<span class="sourceLineNo">2075</span>  }<a name="line.2075"></a>
+<span class="sourceLineNo">2076</span><a name="line.2076"></a>
+<span class="sourceLineNo">2077</span>  @Override<a name="line.2077"></a>
+<span class="sourceLineNo">2078</span>  public long getLastCompactSize() {<a name="line.2078"></a>
+<span class="sourceLineNo">2079</span>    return this.lastCompactSize;<a name="line.2079"></a>
+<span class="sourceLineNo">2080</span>  }<a name="line.2080"></a>
+<span class="sourceLineNo">2081</span><a name="line.2081"></a>
+<span class="sourceLineNo">2082</span>  @Override<a name="line.2082"></a>
+<span class="sourceLineNo">2083</span>  public long getSize() {<a name="line.2083"></a>
+<span class="sourceLineNo">2084</span>    return storeSize.get();<a name="line.2084"></a>
+<span class="sourceLineNo">2085</span>  }<a name="line.2085"></a>
+<span class="sourceLineNo">2086</span><a name="line.2086"></a>
+<span class="sourceLineNo">2087</span>  public void triggerMajorCompaction() {<a name="line.2087"></a>
+<span class="sourceLineNo">2088</span>    this.forceMajor = true;<a name="line.2088"></a>
+<span class="sourceLineNo">2089</span>  }<a name="line.2089"></a>
+<span class="sourceLineNo">2090</span><a name="line.2090"></a>
+<span class="sourceLineNo">2091</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.2091"></a>
+<span class="sourceLineNo">2092</span>  // File administration<a name="line.2092"></a>
+<span class="sourceLineNo">2093</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.2093"></a>
+<span class="sourceLineNo">2094</span><a name="line.2094"></a>
+<span class="sourceLineNo">2095</span>  /**<a name="line.2095"></a>
+<span class="sourceLineNo">2096</span>   * Return a scanner for both the memstore and the HStore files. Assumes we are not in a<a name="line.2096"></a>
+<span class="sourceLineNo">2097</span>   * compaction.<a name="line.2097"></a>
+<span class="sourceLineNo">2098</span>   * @param scan Scan to apply when scanning the stores<a name="line.2098"></a>
+<span class="sourceLineNo">2099</span>   * @param targetCols columns to scan<a name="line.2099"></a>
+<span class="sourceLineNo">2100</span>   * @return a scanner over the current key values<a name="line.2100"></a>
+<span class="sourceLineNo">2101</span>   * @throws IOException on failure<a name="line.2101"></a>
+<span class="sourceLineNo">2102</span>   */<a name="line.2102"></a>
+<span class="sourceLineNo">2103</span>  public KeyValueScanner getScanner(Scan scan, final NavigableSet&lt;byte[]&gt; targetCols, long readPt)<a name="line.2103"></a>
+<span class="sourceLineNo">2104</span>      throws IOException {<a name="line.2104"></a>
+<span class="sourceLineNo">2105</span>    lock.readLock().lock();<a name="line.2105"></a>
+<span class="sourceLineNo">2106</span>    try {<a name="line.2106"></a>
+<span class="sourceLineNo">2107</span>      ScanInfo scanInfo;<a name="line.2107"></a>
+<span class="sourceLineNo">2108</span>      if (this.getCoprocessorHost() != null) {<a name="line.2108"></a>
+<span class="sourceLineNo">2109</span>        scanInfo = this.getCoprocessorHost().preStoreScannerOpen(this);<a name="line.2109"></a>
+<span class="sourceLineNo">2110</span>      } else {<a name="line.2110"></a>
+<span class="sourceLineNo">2111</span>        scanInfo = getScanInfo();<a name="line.2111"></a>
+<span class="sourceLineNo">2112</span>      }<a name="line.2112"></a>
+<span class="sourceLineNo">2113</span>      return createScanner(scan, scanInfo, targetCols, readPt);<a name="line.2113"></a>
+<span class="sourceLineNo">2114</span>    } finally {<a name="line.2114"></a>
+<span class="sourceLineNo">2115</span>      lock.readLock().unlock();<a name="line.2115"></a>
+<span class="sourceLineNo">2116</span>    }<a name="line.2116"></a>
+<span class="sourceLineNo">2117</span>  }<a name="line.2117"></a>
+<span class="sourceLineNo">2118</span><a name="line.2118"></a>
+<span class="sourceLineNo">2119</span>  // HMobStore will override this method to return its own implementation.<a name="line.2119"></a>
+<span class="sourceLineNo">2120</span>  protected KeyValueScanner createScanner(Scan scan, ScanInfo scanInfo,<a name="line.2120"></a>
+<span class="sourceLineNo">2121</span>      NavigableSet&lt;byte[]&gt; targetCols, long readPt) throws IOException {<a name="line.2121"></a>
+<span class="sourceLineNo">2122</span>    return scan.isReversed() ? new ReversedStoreScanner(this, scanInfo, scan, targetCols, readPt)<a name="line.2122"></a>
+<span class="sourceLineNo">2123</span>        : new StoreScanner(this, scanInfo, scan, targetCols, readPt);<a name="line.2123"></a>
+<span class="sourceLineNo">2124</span>  }<a name="line.2124"></a>
+<span class="sourceLineNo">2125</span><a name="line.2125"></a>
+<span class="sourceLineNo">2126</span>  /**<a name="line.2126"></a>
+<span class="sourceLineNo">2127</span>   * Recreates the scanners on the current list of active store file scanners<a name="line.2127"></a>
+<span class="sourceLineNo">2128</span>   * @param currentFileScanners the current set of active store file scanners<a name="line.2128"></a>
+<span class="sourceLineNo">2129</span>   * @param cacheBlocks cache the blocks or not<a name="line.2129"></a>
+<span class="sourceLineNo">2130</span>   * @param usePread use pread or not<a name="line.2130"></a>
+<span class="sourceLineNo">2131</span>   * @param isCompaction is the scanner for compaction<a name="line.2131"></a>
+<span class="sourceLineNo">2132</span>   * @param matcher the scan query matcher<a name="line.2132"></a>
+<span class="sourceLineNo">2133</span>   * @param startRow the scan's start row<a name="line.2133"></a>
+<span class="sourceLineNo">2134</span>   * @param includeStartRow should the scan include the start row<a name="line.2134"></a>
+<span class="sourceLineNo">2135</span>   * @param stopRow the scan's stop row<a name="line.2135"></a>
+<span class="sourceLineNo">2136</span>   * @param includeStopRow should the scan include the stop row<a name="line.2136"></a>
+<span class="sourceLineNo">2137</span>   * @param readPt the read point of the current scane<a name="line.2137"></a>
+<span class="sourceLineNo">2138</span>   * @param includeMemstoreScanner whether the current scanner should include memstorescanner<a name="line.2138"></a>
+<span class="sourceLineNo">2139</span>   * @return list of scanners recreated on the current Scanners<a name="line.2139"></a>
+<span class="sourceLineNo">2140</span>   */<a name="line.2140"></a>
+<span class="sourceLineNo">2141</span>  public List&lt;KeyValueScanner&gt; recreateScanners(List&lt;KeyValueScanner&gt; currentFileScanners,<a name="line.2141"></a>
+<span class="sourceLineNo">2142</span>      boolean cacheBlocks, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.2142"></a>
+<span class="sourceLineNo">2143</span>      byte[] startRow, boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.2143"></a>
+<span class="sourceLineNo">2144</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.2144"></a>
+<span class="sourceLineNo">2145</span>    this.lock.readLock().lock();<a name="line.2145"></a>
+<span class="sourceLineNo">2146</span>    try {<a name="line.2146"></a>
+<span class="sourceLineNo">2147</span>      Map&lt;String, HStoreFile&gt; name2File =<a name="line.2147"></a>
+<span class="sourceLineNo">2148</span>          new HashMap&lt;&gt;(getStorefilesCount() + getCompactedFilesCount());<a name="line.2148"></a>
+<span class="sourceLineNo">2149</span>      for (HStoreFile file : getStorefiles()) {<a name="line.2149"></a>
+<span class="sourceLineNo">2150</span>        name2File.put(file.getFileInfo().getActiveFileName(), file);<a name="line.2150"></a>
+<span class="sourceLineNo">2151</span>      }<a name="line.2151"></a>
+<span class="sourceLineNo">2152</span>      Collection&lt;HStoreFile&gt; compactedFiles = getCompactedFiles();<a name="line.2152"></a>
+<span class="sourceLineNo">2153</span>      for (HStoreFile file : IterableUtils.emptyIfNull(compactedFiles)) {<a name="line.2153"></a>
+<span class="sourceLineNo">2154</span>        name2File.put(file.getFileInfo().getActiveFileName(), file);<a name="line.2154"></a>
+<span class="sourceLineNo">2155</span>      }<a name="line.2155"></a>
+<span class="sourceLineNo">2156</span>      List&lt;HStoreFile&gt; filesToReopen = new ArrayList&lt;&gt;();<a name="line.2156"></a>
+<span class="sourceLineNo">2157</span>      for (KeyValueScanner kvs : currentFileScanners) {<a name="line.2157"></a>
+<span class="sourceLineNo">2158</span>        assert kvs.isFileScanner();<a name="line.2158"></a>
+<span class="sourceLineNo">2159</span>        if (kvs.peek() == null) {<a name="line.2159"></a>
+<span class="sourceLineNo">2160</span>          continue;<a name="line.2160"></a>
+<span class="sourceLineNo">2161</span>        }<a name="line.2161"></a>
+<span class="sourceLineNo">2162</span>        filesToReopen.add(name2File.get(kvs.getFilePath().getName()));<a name="line.2162"></a>
+<span class="sourceLineNo">2163</span>      }<a name="line.2163"></a>
+<span class="sourceLineNo">2164</span>      if (filesToReopen.isEmpty()) {<a name="line.2164"></a>
+<span class="sourceLineNo">2165</span>        return null;<a name="line.2165"></a>
+<span class="sourceLineNo">2166</span>      }<a name="line.2166"></a>
+<span class="sourceLineNo">2167</span>      return getScanners(filesToReopen, cacheBlocks, false, false, matcher, startRow,<a name="line.2167"></a>
+<span class="sourceLineNo">2168</span>        includeStartRow, stopRow, includeStopRow, readPt, false);<a name="line.2168"></a>
+<span class="sourceLineNo">2169</span>    } finally {<a name="line.2169"></a>
+<span class="sourceLineNo">2170</span>      this.lock.readLock().unlock();<a name="line.2170"></a>
+<span class="sourceLineNo">2171</span>    }<a name="line.2171"></a>
+<span class="sourceLineNo">2172</span>  }<a name="line.2172"></a>
+<span class="sourceLineNo">2173</span><a name="line.2173"></a>
+<span class="sourceLineNo">2174</span>  @Override<a name="line.2174"></a>
+<span class="sourceLineNo">2175</span>  public String toString() {<a name="line.2175"></a>
+<span class="sourceLineNo">2176</span>    return this.getColumnFamilyName();<a name="line.2176"></a>
+<span class="sourceLineNo">2177</span>  }<a name="line.2177"></a>
+<span class="sourceLineNo">2178</span><a name="line.2178"></a>
+<span class="sourceLineNo">2179</span>  @Override<a name="line.2179"></a>
+<span class="sourceLineNo">2180</span>  public int getStorefilesCount() {<a name="line.2180"></a>
+<span class="sourceLineNo">2181</span>    return this.storeEngine.getStoreFileManager().getStorefileCount();<a name="line.2181"></a>
+<span class="sourceLineNo">2182</span>  }<a name="line.2182"></a>
+<span class="sourceLineNo">2183</span><a name="line.2183"></a>
+<span class="sourceLineNo">2184</span>  @Override<a name="line.2184"></a>
+<span class="sourceLineNo">2185</span>  public int getCompactedFilesCount() {<a name="line.2185"></a>
+<span class="sourceLineNo">2186</span>    return this.storeEngine.getStoreFileManager().getCompactedFilesCount();<a name="line.2186"></a>
+<span class="sourceLineNo">2187</span>  }<a name="line.2187"></a>
+<span class="sourceLineNo">2188</span><a name="line.2188"></a>
+<span class="sourceLineNo">2189</span>  private LongStream getStoreFileAgeStream() {<a name="line.2189"></a>
+<span class="sourceLineNo">2190</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream().filter(sf -&gt; {<a name="line.2190"></a>
+<span class="sourceLineNo">2191</span>      if (sf.getReader() == null) {<a name="line.2191"></a>
+<span class="sourceLineNo">2192</span>        LOG.warn("StoreFile {} has a null Reader", sf);<a name="line.2192"></a>
+<span class="sourceLineNo">2193</span>        return false;<a name="line.2193"></a>
+<span class="sourceLineNo">2194</span>      } else {<a name="line.2194"></a>
+<span class="sourceLineNo">2195</span>        return true;<a name="line.2195"></a>
+<span class="sourceLineNo">2196</span>      }<a name="line.2196"></a>
+<span class="sourceLineNo">2197</span>    }).filter(HStoreFile::isHFile).mapToLong(sf -&gt; sf.getFileInfo().getCreatedTimestamp())<a name="line.2197"></a>
+<span class="sourceLineNo">2198</span>        .map(t -&gt; EnvironmentEdgeManager.currentTime() - t);<a name="line.2198"></a>
+<span class="sourceLineNo">2199</span>  }<a name="line.2199"></a>
+<span class="sourceLineNo">2200</span><a name="line.2200"></a>
+<span class="sourceLineNo">2201</span>  @Override<a name="line.2201"></a>
+<span class="sourceLineNo">2202</span>  public OptionalLong getMaxStoreFileAge() {<a name="line.2202"></a>
+<span class="sourceLineNo">2203</span>    return getStoreFileAgeStream().max();<a name="line.2203"></a>
 <span class="sourceLineNo">2204</span>  }<a name="line.2204"></a>
 <span class="sourceLineNo">2205</span><a name="line.2205"></a>
 <span class="sourceLineNo">2206</span>  @Override<a name="line.2206"></a>
-<span class="sourceLineNo">2207</span>  public long getNumHFiles() {<a name="line.2207"></a>
-<span class="sourceLineNo">2208</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2208"></a>
-<span class="sourceLineNo">2209</span>        .filter(HStoreFile::isHFile).count();<a name="line.2209"></a>
-<span class="sourceLineNo">2210</span>  }<a name="line.2210"></a>
-<span class="sourceLineNo">2211</span><a name="line.2211"></a>
-<span class="sourceLineNo">2212</span>  @Override<a name="line.2212"></a>
-<span class="sourceLineNo">2213</span>  public long getStoreSizeUncompressed() {<a name="line.2213"></a>
-<span class="sourceLineNo">2214</span>    return this.totalUncompressedBytes.get();<a name="line.2214"></a>
-<span class="sourceLineNo">2215</span>  }<a name="line.2215"></a>
-<span class="sourceLineNo">2216</span><a name="line.2216"></a>
-<span class="sourceLineNo">2217</span>  @Override<a name="line.2217"></a>
-<span class="sourceLineNo">2218</span>  public long getStorefilesSize() {<a name="line.2218"></a>
-<span class="sourceLineNo">2219</span>    // Include all StoreFiles<a name="line.2219"></a>
-<span class="sourceLineNo">2220</span>    return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(), sf -&gt; true);<a name="line.2220"></a>
-<span class="sourceLineNo">2221</span>  }<a name="line.2221"></a>
-<span class="sourceLineNo">2222</span><a name="line.2222"></a>
-<span class="sourceLineNo">2223</span>  @Override<a name="line.2223"></a>
-<span class="sourceLineNo">2224</span>  public long getHFilesSize() {<a name="line.2224"></a>
-<span class="sourceLineNo">2225</span>    // Include only StoreFiles which are HFiles<a name="line.2225"></a>
-<span class="sourceLineNo">2226</span>    return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(),<a name="line.2226"></a>
-<span class="sourceLineNo">2227</span>      HStoreFile::isHFile);<a name="line.2227"></a>
-<span class="sourceLineNo">2228</span>  }<a name="line.2228"></a>
-<span class="sourceLineNo">2229</span><a name="line.2229"></a>
-<span class="sourceLineNo">2230</span>  private long getTotalUncompressedBytes(List&lt;HStoreFile&gt; files) {<a name="line.2230"></a>
-<span class="sourceLineNo">2231</span>    return files.stream()<a name="line.2231"></a>
-<span class="sourceLineNo">2232</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, StoreFileReader::getTotalUncompressedBytes))<a name="line.2232"></a>
-<span class="sourceLineNo">2233</span>      .sum();<a name="line.2233"></a>
-<span class="sourceLineNo">2234</span>  }<a name="line.2234"></a>
-<span class="sourceLineNo">2235</span><a name="line.2235"></a>
-<span class="sourceLineNo">2236</span>  private long getStorefilesSize(Collection&lt;HStoreFile&gt; files, Predicate&lt;HStoreFile&gt; predicate) {<a name="line.2236"></a>
-<span class="sourceLineNo">2237</span>    return files.stream().filter(predicate)<a name="line.2237"></a>
-<span class="sourceLineNo">2238</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, StoreFileReader::length)).sum();<a name="line.2238"></a>
-<span class="sourceLineNo">2239</span>  }<a name="line.2239"></a>
-<span class="sourceLineNo">2240</span><a name="line.2240"></a>
-<span class="sourceLineNo">2241</span>  private long getStorefileFieldSize(HStoreFile file, ToLongFunction&lt;StoreFileReader&gt; f) {<a name="line.2241"></a>
-<span class="sourceLineNo">2242</span>    if (file == null) {<a name="line.2242"></a>
-<span class="sourceLineNo">2243</span>      return 0L;<a name="line.2243"></a>
-<span class="sourceLineNo">2244</span>    }<a name="line.2244"></a>
-<span class="sourceLineNo">2245</span>    StoreFileReader reader = file.getReader();<a name="line.2245"></a>
-<span class="sourceLineNo">2246</span>    if (reader == null) {<a name="line.2246"></a>
-<span class="sourceLineNo">2247</span>      return 0L;<a name="line.2247"></a>
-<span class="sourceLineNo">2248</span>    }<a name="line.2248"></a>
-<span class="sourceLineNo">2249</span>    return f.applyAsLong(reader);<a name="line.2249"></a>
+<span class="sourceLineNo">2207</span>  public OptionalLong getMinStoreFileAge() {<a name="line.2207"></a>
+<span class="sourceLineNo">2208</span>    return getStoreFileAgeStream().min();<a name="line.2208"></a>
+<span class="sourceLineNo">2209</span>  }<a name="line.2209"></a>
+<span class="sourceLineNo">2210</span><a name="line.2210"></a>
+<span class="sourceLineNo">2211</span>  @Override<a name="line.2211"></a>
+<span class="sourceLineNo">2212</span>  public OptionalDouble getAvgStoreFileAge() {<a name="line.2212"></a>
+<span class="sourceLineNo">2213</span>    return getStoreFileAgeStream().average();<a name="line.2213"></a>
+<span class="sourceLineNo">2214</span>  }<a name="line.2214"></a>
+<span class="sourceLineNo">2215</span><a name="line.2215"></a>
+<span class="sourceLineNo">2216</span>  @Override<a name="line.2216"></a>
+<span class="sourceLineNo">2217</span>  public long getNumReferenceFiles() {<a name="line.2217"></a>
+<span class="sourceLineNo">2218</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2218"></a>
+<span class="sourceLineNo">2219</span>        .filter(HStoreFile::isReference).count();<a name="line.2219"></a>
+<span class="sourceLineNo">2220</span>  }<a name="line.2220"></a>
+<span class="sourceLineNo">2221</span><a name="line.2221"></a>
+<span class="sourceLineNo">2222</span>  @Override<a name="line.2222"></a>
+<span class="sourceLineNo">2223</span>  public long getNumHFiles() {<a name="line.2223"></a>
+<span class="sourceLineNo">2224</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2224"></a>
+<span class="sourceLineNo">2225</span>        .filter(HStoreFile::isHFile).count();<a name="line.2225"></a>
+<span class="sourceLineNo">2226</span>  }<a name="line.2226"></a>
+<span class="sourceLineNo">2227</span><a name="line.2227"></a>
+<span class="sourceLineNo">2228</span>  @Override<a name="line.2228"></a>
+<span class="sourceLineNo">2229</span>  public long getStoreSizeUncompressed() {<a name="line.2229"></a>
+<span class="sourceLineNo">2230</span>    return this.totalUncompressedBytes.get();<a name="line.2230"></a>
+<span class="sourceLineNo">2231</span>  }<a name="line.2231"></a>
+<span class="sourceLineNo">2232</span><a name="line.2232"></a>
+<span class="sourceLineNo">2233</span>  @Override<a name="line.2233"></a>
+<span class="sourceLineNo">2234</span>  public long getStorefilesSize() {<a name="line.2234"></a>
+<span class="sourceLineNo">2235</span>    // Include all StoreFiles<a name="line.2235"></a>
+<span class="sourceLineNo">2236</span>    return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(), sf -&gt; true);<a name="line.2236"></a>
+<span class="sourceLineNo">2237</span>  }<a name="line.2237"></a>
+<span class="sourceLineNo">2238</span><a name="line.2238"></a>
+<span class="sourceLineNo">2239</span>  @Override<a name="line.2239"></a>
+<span class="sourceLineNo">2240</span>  public long getHFilesSize() {<a name="line.2240"></a>
+<span class="sourceLineNo">2241</span>    // Include only StoreFiles which are HFiles<a name="line.2241"></a>
+<span class="sourceLineNo">2242</span>    return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(),<a name="line.2242"></a>
+<span class="sourceLineNo">2243</span>      HStoreFile::isHFile);<a name="line.2243"></a>
+<span class="sourceLineNo">2244</span>  }<a name="line.2244"></a>
+<span class="sourceLineNo">2245</span><a name="line.2245"></a>
+<span class="sourceLineNo">2246</span>  private long getTotalUncompressedBytes(List&lt;HStoreFile&gt; files) {<a name="line.2246"></a>
+<span class="sourceLineNo">2247</span>    return files.stream()<a name="line.2247"></a>
+<span class="sourceLineNo">2248</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, StoreFileReader::getTotalUncompressedBytes))<a name="line.2248"></a>
+<span class="sourceLineNo">2249</span>      .sum();<a name="line.2249"></a>
 <span class="sourceLineNo">2250</span>  }<a name="line.2250"></a>
 <span class="sourceLineNo">2251</span><a name="line.2251"></a>
-<span class="sourceLineNo">2252</span>  private long getStorefilesFieldSize(ToLongFunction&lt;StoreFileReader&gt; f) {<a name="line.2252"></a>
-<span class="sourceLineNo">2253</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2253"></a>
-<span class="sourceLineNo">2254</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, f)).sum();<a name="line.2254"></a>
+<span class="sourceLineNo">2252</span>  private long getStorefilesSize(Collection&lt;HStoreFile&gt; files, Predicate&lt;HStoreFile&gt; predicate) {<a name="line.2252"></a>
+<span class="sourceLineNo">2253</span>    return files.stream().filter(predicate)<a name="line.2253"></a>
+<span class="sourceLineNo">2254</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, StoreFileReader::length)).sum();<a name="line.2254"></a>
 <span class="sourceLineNo">2255</span>  }<a name="line.2255"></a>
 <span class="sourceLineNo">2256</span><a name="line.2256"></a>
-<span class="sourceLineNo">2257</span>  @Override<a name="line.2257"></a>
-<span class="sourceLineNo">2258</span>  public long getStorefilesRootLevelIndexSize() {<a name="line.2258"></a>
-<span class="sourceLineNo">2259</span>    return getStorefilesFieldSize(StoreFileReader::indexSize);<a name="line.2259"></a>
-<span class="sourceLineNo">2260</span>  }<a name="line.2260"></a>
-<span class="sourceLineNo">2261</span><a name="line.2261"></a>
-<span class="sourceLineNo">2262</span>  @Override<a name="line.2262"></a>
-<span class="sourceLineNo">2263</span>  public long getTotalStaticIndexSize() {<a name="line.2263"></a>
-<span class="sourceLineNo">2264</span>    return getStorefilesFieldSize(StoreFileReader::getUncompressedDataIndexSize);<a name="line.2264"></a>
-<span class="sourceLineNo">2265</span>  }<a name="line.2265"></a>
-<span class="sourceLineNo">2266</span><a name="line.2266"></a>
-<span class="sourceLineNo">2267</span>  @Override<a name="line.2267"></a>
-<span class="sourceLineNo">2268</span>  public long getTotalStaticBloomSize() {<a name="line.2268"></a>
-<span class="sourceLineNo">2269</span>    return getStorefilesFieldSize(StoreFileReader::getTotalBloomSize);<a name="line.2269"></a>
-<span class="sourceLineNo">2270</span>  }<a name="line.2270"></a>
-<span class="sourceLineNo">2271</span><a name="line.2271"></a>
-<span class="sourceLineNo">2272</span>  @Override<a name="line.2272"></a>
-<span class="sourceLineNo">2273</span>  public MemStoreSize getMemStoreSize() {<a name="line.2273"></a>
-<span class="sourceLineNo">2274</span>    return this.memstore.size();<a name="line.2274"></a>
-<span class="sourceLineNo">2275</span>  }<a name="line.2275"></a>
-<span class="sourceLineNo">2276</span><a name="line.2276"></a>
-<span class="sourceLineNo">2277</span>  @Override<a name="line.2277"></a>
-<span class="sourceLineNo">2278</span>  public int getCompactPriority() {<a name="line.2278"></a>
-<span class="sourceLineNo">2279</span>    int priority = this.storeEngine.getStoreFileManager().getStoreCompactionPriority();<a name="line.2279"></a>
-<span class="sourceLineNo">2280</span>    if (priority == PRIORITY_USER) {<a name="line.2280"></a>
-<span class="sourceLineNo">2281</span>      LOG.warn("Compaction priority is USER despite there being no user compaction");<a name="line.2281"></a>
-<span class="sourceLineNo">2282</span>    }<a name="line.2282"></a>
-<span class="sourceLineNo">2283</span>    return priority;<a name="line.2283"></a>
-<span class="sourceLineNo">2284</span>  }<a name="line.2284"></a>
-<span class="sourceLineNo">2285</span><a name="line.2285"></a>
-<span class="sourceLineNo">2286</span>  public boolean throttleCompaction(long compactionSize) {<a name="line.2286"></a>
-<span class="sourceLineNo">2287</span>    return storeEngine.getCompactionPolicy().throttleCompaction(compactionSize);<a name="line.2287"></a>
-<span class="sourceLineNo">2288</span>  }<a name="line.2288"></a>
-<span class="sourceLineNo">2289</span><a name="line.2289"></a>
-<span class="sourceLineNo">2290</span>  public HRegion getHRegion() {<a name="line.2290"></a>
-<span class="sourceLineNo">2291</span>    return this.region;<a name="line.2291"></a>
-<span class="sourceLineNo">2292</span>  }<a name="line.2292"></a>
-<span class="sourceLineNo">2293</span><a name="line.2293"></a>
-<span class="sourceLineNo">2294</span>  public RegionCoprocessorHost getCoprocessorHost() {<a name="line.2294"></a>
-<span class="sourceLineNo">2295</span>    return this.region.getCoprocessorHost();<a name="line.2295"></a>
-<span class="sourceLineNo">2296</span>  }<a name="line.2296"></a>
-<span class="sourceLineNo">2297</span><a name="line.2297"></a>
-<span class="sourceLineNo">2298</span>  @Override<a name="line.2298"></a>
-<span class="sourceLineNo">2299</span>  public RegionInfo getRegionInfo() {<a name="line.2299"></a>
-<span class="sourceLineNo">2300</span>    return this.fs.getRegionInfo();<a name="line.2300"></a>
-<span class="sourceLineNo">2301</span>  }<a name="line.2301"></a>
-<span class="sourceLineNo">2302</span><a name="line.2302"></a>
-<span class="sourceLineNo">2303</span>  @Override<a name="line.2303"></a>
-<span class="sourceLineNo">2304</span>  public boolean areWritesEnabled() {<a name="line.2304"></a>
-<span class="sourceLineNo">2305</span>    return this.region.areWritesEnabled();<a name="line.2305"></a>
-<span class="sourceLineNo">2306</span>  }<a name="line.2306"></a>
-<span class="sourceLineNo">2307</span><a name="line.2307"></a>
-<span class="sourceLineNo">2308</span>  @Override<a name="line.2308"></a>
-<span class="sourceLineNo">2309</span>  public long getSmallestReadPoint() {<a name="line.2309"></a>
-<span class="sourceLineNo">2310</span>    return this.region.getSmallestReadPoint();<a name="line.2310"></a>
-<span class="sourceLineNo">2311</span>  }<a name="line.2311"></a>
-<span class="sourceLineNo">2312</span><a name="line.2312"></a>
-<span class="sourceLineNo">2313</span>  /**<a name="line.2313"></a>
-<span class="sourceLineNo">2314</span>   * Adds or replaces the specified KeyValues.<a name="line.2314"></a>
-<span class="sourceLineNo">2315</span>   * &lt;p&gt;<a name="line.2315"></a>
-<span class="sourceLineNo">2316</span>   * For each KeyValue specified, if a cell with the same row, family, and qualifier exists in<a name="line.2316"></a>
-<span class="sourceLineNo">2317</span>   * MemStore, it will be replaced. Otherwise, it will just be inserted to MemStore.<a name="line.2317"></a>
-<span class="sourceLineNo">2318</span>   * &lt;p&gt;<a name="line.2318"></a>
-<span class="sourceLineNo">2319</span>   * This operation is atomic on each KeyValue (row/family/qualifier) but not necessarily atomic<a name="line.2319"></a>
-<span class="sourceLineNo">2320</span>   * across all of them.<a name="line.2320"></a>
-<span class="sourceLineNo">2321</span>   * @param readpoint readpoint below which we can safely remove duplicate KVs<a name="line.2321"></a>
-<span class="sourceLineNo">2322</span>   */<a name="line.2322"></a>
-<span class="sourceLineNo">2323</span>  public void upsert(Iterable&lt;Cell&gt; cells, long readpoint, MemStoreSizing memstoreSizing)<a name="line.2323"></a>
-<span class="sourceLineNo">2324</span>      throws IOException {<a name="line.2324"></a>
-<span class="sourceLineNo">2325</span>    this.lock.readLock().lock();<a name="line.2325"></a>
-<span class="sourceLineNo">2326</span>    try {<a name="line.2326"></a>
-<span class="sourceLineNo">2327</span>      this.memstore.upsert(cells, readpoint, memstoreSizing);<a name="line.2327"></a>
-<span class="sourceLineNo">2328</span>    } finally {<a name="line.2328"></a>
-<span class="sourceLineNo">2329</span>      this.lock.readLock().unlock();<a name="line.2329"></a>
-<span class="sourceLineNo">2330</span>    }<a name="line.2330"></a>
-<span class="sourceLineNo">2331</span>  }<a name="line.2331"></a>
-<span class="sourceLineNo">2332</span><a name="line.2332"></a>
-<span class="sourceLineNo">2333</span>  public StoreFlushContext createFlushContext(long cacheFlushId, FlushLifeCycleTracker tracker) {<a name="line.2333"></a>
-<span class="sourceLineNo">2334</span>    return new StoreFlusherImpl(cacheFlushId, tracker);<a name="line.2334"></a>
-<span class="sourceLineNo">2335</span>  }<a name="line.2335"></a>
-<span class="sourceLineNo">2336</span><a name="line.2336"></a>
-<span class="sourceLineNo">2337</span>  private final class StoreFlusherImpl implements StoreFlushContext {<a name="line.2337"></a>
-<span class="sourceLineNo">2338</span><a name="line.2338"></a>
-<span class="sourceLineNo">2339</span>    private final FlushLifeCycleTracker tracker;<a name="line.2339"></a>
-<span class="sourceLineNo">2340</span>    private final long cacheFlushSeqNum;<a name="line.2340"></a>
-<span class="sourceLineNo">2341</span>    private MemStoreSnapshot snapshot;<a name="line.2341"></a>
-<span class="sourceLineNo">2342</span>    private List&lt;Path&gt; tempFiles;<a name="line.2342"></a>
-<span class="sourceLineNo">2343</span>    private List&lt;Path&gt; committedFiles;<a name="line.2343"></a>
-<span class="sourceLineNo">2344</span>    private long cacheFlushCount;<a name="line.2344"></a>
-<span class="sourceLineNo">2345</span>    private long cacheFlushSize;<a name="line.2345"></a>
-<span class="sourceLineNo">2346</span>    private long outputFileSize;<a name="line.2346"></a>
-<span class="sourceLineNo">2347</span><a name="line.2347"></a>
-<span class="sourceLineNo">2348</span>    private StoreFlusherImpl(long cacheFlushSeqNum, FlushLifeCycleTracker tracker) {<a name="line.2348"></a>
-<span class="sourceLineNo">2349</span>      this.cacheFlushSeqNum = cacheFlushSeqNum;<a name="line.2349"></a>
-<span class="sourceLineNo">2350</span>      this.tracker = tracker;<a name="line.2350"></a>
-<span class="sourceLineNo">2351</span>    }<a name="line.2351"></a>
+<span class="sourceLineNo">2257</span>  private long getStorefileFieldSize(HStoreFile file, ToLongFunction&lt;StoreFileReader&gt; f) {<a name="line.2257"></a>
+<span class="sourceLineNo">2258</span>    if (file == null) {<a name="line.2258"></a>
+<span class="sourceLineNo">2259</span>      return 0L;<a name="line.2259"></a>
+<span class="sourceLineNo">2260</span>    }<a name="line.2260"></a>
+<span class="sourceLineNo">2261</span>    StoreFileReader reader = file.getReader();<a name="line.2261"></a>
+<span class="sourceLineNo">2262</span>    if (reader == null) {<a name="line.2262"></a>
+<span class="sourceLineNo">2263</span>      return 0L;<a name="line.2263"></a>
+<span class="sourceLineNo">2264</span>    }<a name="line.2264"></a>
+<span class="sourceLineNo">2265</span>    return f.applyAsLong(reader);<a name="line.2265"></a>
+<span class="sourceLineNo">2266</span>  }<a name="line.2266"></a>
+<span class="sourceLineNo">2267</span><a name="line.2267"></a>
+<span class="sourceLineNo">2268</span>  private long getStorefilesFieldSize(ToLongFunction&lt;StoreFileReader&gt; f) {<a name="line.2268"></a>
+<span class="sourceLineNo">2269</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2269"></a>
+<span class="sourceLineNo">2270</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, f)).sum();<a name="line.2270"></a>
+<span class="sourceLineNo">2271</span>  }<a name="line.2271"></a>
+<span class="sourceLineNo">2272</span><a name="line.2272"></a>
+<span class="sourceLineNo">2273</span>  @Override<a name="line.2273"></a>
+<span class="sourceLineNo">2274</span>  public long getStorefilesRootLevelIndexSize() {<a name="line.2274"></a>
+<span class="sourceLineNo">2275</span>    return getStorefilesFieldSize(StoreFileReader::indexSize);<a name="line.2275"></a>
+<span class="sourceLineNo">2276</span>  }<a name="line.2276"></a>
+<span class="sourceLineNo">2277</span><a name="line.2277"></a>
+<span class="sourceLineNo">2278</span>  @Override<a name="line.2278"></a>
+<span class="sourceLineNo">2279</span>  public long getTotalStaticIndexSize() {<a name="line.2279"></a>
+<span class="sourceLineNo">2280</span>    return getStorefilesFieldSize(StoreFileReader::getUncompressedDataIndexSize);<a name="line.2280"></a>
+<span class="sourceLineNo">2281</span>  }<a name="line.2281"></a>
+<span class="sourceLineNo">2282</span><a name="line.2282"></a>
+<span class="sourceLineNo">2283</span>  @Override<a name="line.2283"></a>
+<span class="sourceLineNo">2284</span>  public long getTotalStaticBloomSize() {<a name="line.2284"></a>
+<span class="sourceLineNo">2285</span>    return getStorefilesFieldSize(StoreFileReader::getTotalBloomSize);<a name="line.2285"></a>
+<span class="sourceLineNo">2286</span>  }<a name="line.2286"></a>
+<span class="sourceLineNo">2287</span><a name="line.2287"></a>
+<span class="sourceLineNo">2288</span>  @Override<a name="line.2288"></a>
+<span class="sourceLineNo">2289</span>  public MemStoreSize getMemStoreSize() {<a name="line.2289"></a>
+<span class="sourceLineNo">2290</span>    return this.memstore.size();<a name="line.2290"></a>
+<span class="sourceLineNo">2291</span>  }<a name="line.2291"></a>
+<span class="sourceLineNo">2292</span><a name="line.2292"></a>
+<span class="sourceLineNo">2293</span>  @Override<a name="line.2293"></a>
+<span class="sourceLineNo">2294</span>  public int getCompactPriority() {<a name="line.2294"></a>
+<span class="sourceLineNo">2295</span>    int priority = this.storeEngine.getStoreFileManager().getStoreCompactionPriority();<a name="line.2295"></a>
+<span class="sourceLineNo">2296</span>    if (priority == PRIORITY_USER) {<a name="line.2296"></a>
+<span class="sourceLineNo">2297</span>      LOG.warn("Compaction priority is USER despite there being no user compaction");<a name="line.2297"></a>
+<span class="sourceLineNo">2298</span>    }<a name="line.2298"></a>
+<span class="sourceLineNo">2299</span>    return priority;<a name="line.2299"></a>
+<span class="sourceLineNo">2300</span>  }<a name="line.2300"></a>
+<span class="sourceLineNo">2301</span><a name="line.2301"></a>
+<span class="sourceLineNo">2302</span>  public boolean throttleCompaction(long compactionSize) {<a name="line.2302"></a>
+<span class="sourceLineNo">2303</span>    return storeEngine.getCompactionPolicy().throttleCompaction(compactionSize);<a name="line.2303"></a>
+<span class="sourceLineNo">2304</span>  }<a name="line.2304"></a>
+<span class="sourceLineNo">2305</span><a name="line.2305"></a>
+<span class="sourceLineNo">2306</span>  public HRegion getHRegion() {<a name="line.2306"></a>
+<span class="sourceLineNo">2307</span>    return this.region;<a name="line.2307"></a>
+<span class="sourceLineNo">2308</span>  }<a name="line.2308"></a>
+<span class="sourceLineNo">2309</span><a name="line.2309"></a>
+<span class="sourceLineNo">2310</span>  public RegionCoprocessorHost getCoprocessorHost() {<a name="line.2310"></a>
+<span class="sourceLineNo">2311</span>    return this.region.getCoprocessorHost();<a name="line.2311"></a>
+<span class="sourceLineNo">2312</span>  }<a name="line.2312"></a>
+<span class="sourceLineNo">2313</span><a name="line.2313"></a>
+<span class="sourceLineNo">2314</span>  @Override<a name="line.2314"></a>
+<span class="sourceLineNo">2315</span>  public RegionInfo getRegionInfo() {<a name="line.2315"></a>
+<span class="sourceLineNo">2316</span>    return this.fs.getRegionInfo();<a name="line.2316"></a>
+<span class="sourceLineNo">2317</span>  }<a name="line.2317"></a>
+<span class="sourceLineNo">2318</span><a name="line.2318"></a>
+<span class="sourceLineNo">2319</span>  @Override<a name="line.2319"></a>
+<span class="sourceLineNo">2320</span>  public boolean areWritesEnabled() {<a name="line.2320"></a>
+<span class="sourceLineNo">2321</span>    return this.region.areWritesEnabled();<a name="line.2321"></a>
+<span class="sourceLineNo">2322</span>  }<a name="line.2322"></a>
+<span class="sourceLineNo">2323</span><a name="line.2323"></a>
+<span class="sourceLineNo">2324</span>  @Override<a name="line.2324"></a>
+<span class="sourceLineNo">2325</span>  public long getSmallestReadPoint() {<a name="line.2325"></a>
+<span class="sourceLineNo">2326</span>    return this.region.getSmallestReadPoint();<a name="line.2326"></a>
+<span class="sourceLineNo">2327</span>  }<a name="line.2327"></a>
+<span class="sourceLineNo">2328</span><a name="line.2328"></a>
+<span class="sourceLineNo">2329</span>  /**<a name="line.2329"></a>
+<span class="sourceLineNo">2330</span>   * Adds or replaces the specified KeyValues.<a name="line.2330"></a>
+<span class="sourceLineNo">2331</span>   * &lt;p&gt;<a name="line.2331"></a>
+<span class="sourceLineNo">2332</span>   * For each KeyValue specified, if a cell with the same row, family, and qualifier exists in<a name="line.2332"></a>
+<span class="sourceLineNo">2333</span>   * MemStore, it will be replaced. Otherwise, it will just be inserted to MemStore.<a name="line.2333"></a>
+<span class="sourceLineNo">2334</span>   * &lt;p&gt;<a name="line.2334"></a>
+<span class="sourceLineNo">2335</span>   * This operation is atomic on each KeyValue (row/family/qualifier) but not necessarily atomic<a name="line.2335"></a>
+<span class="sourceLineNo">2336</span>   * across all of them.<a name="line.2336"></a>
+<span class="sourceLineNo">2337</span>   * @param readpoint readpoint below which we can safely remove duplicate KVs<a name="line.2337"></a>
+<span class="sourceLineNo">2338</span>   */<a name="line.2338"></a>
+<span class="sourceLineNo">2339</span>  public void upsert(Iterable&lt;Cell&gt; cells, long readpoint, MemStoreSizing memstoreSizing)<a name="line.2339"></a>
+<span class="sourceLineNo">2340</span>      throws IOException {<a name="line.2340"></a>
+<span class="sourceLineNo">2341</span>    this.lock.readLock().lock();<a name="line.2341"></a>
+<span class="sourceLineNo">2342</span>    try {<a name="line.2342"></a>
+<span class="sourceLineNo">2343</span>      this.memstore.upsert(cells, readpoint, memstoreSizing);<a name="line.2343"></a>
+<span class="sourceLineNo">2344</span>    } finally {<a name="line.2344"></a>
+<span class="sourceLineNo">2345</span>      this.lock.readLock().unlock();<a name="line.2345"></a>
+<span class="sourceLineNo">2346</span>    }<a name="line.2346"></a>
+<span class="sourceLineNo">2347</span>  }<a name="line.2347"></a>
+<span class="sourceLineNo">2348</span><a name="line.2348"></a>
+<span class="sourceLineNo">2349</span>  public StoreFlushContext createFlushContext(long cacheFlushId, FlushLifeCycleTracker tracker) {<a name="line.2349"></a>
+<span class="sourceLineNo">2350</span>    return new StoreFlusherImpl(cacheFlushId, tracker);<a name="line.2350"></a>
+<span class="sourceLineNo">2351</span>  }<a name="line.2351"></a>
 <span class="sourceLineNo">2352</span><a name="line.2352"></a>
-<span class="sourceLineNo">2353</span>    /**<a name="line.2353"></a>
-<span class="sourceLineNo">2354</span>     * This is not thread safe. The caller should have a lock on the region or the store.<a name="line.2354"></a>
-<span class="sourceLineNo">2355</span>     * If necessary, the lock can be added with the patch provided in HBASE-10087<a name="line.2355"></a>
-<span class="sourceLineNo">2356</span>     */<a name="line.2356"></a>
-<span class="sourceLineNo">2357</span>    @Override<a name="line.2357"></a>
-<span class="sourceLineNo">2358</span>    public MemStoreSize prepare() {<a name="line.2358"></a>
-<span class="sourceLineNo">2359</span>      // passing the current sequence number of the wal - to allow bookkeeping in the memstore<a name="line.2359"></a>
-<span class="sourceLineNo">2360</span>      this.snapshot = memstore.snapshot();<a name="line.2360"></a>
-<span class="sourceLineNo">2361</span>      this.cacheFlushCount = snapshot.getCellsCount();<a name="line.2361"></a>
-<span class="sourceLineNo">2362</span>      this.cacheFlushSize = snapshot.getDataSize();<a name="line.2362"></a>
-<span class="sourceLineNo">2363</span>      committedFiles = new ArrayList&lt;&gt;(1);<a name="line.2363"></a>
-<span class="sourceLineNo">2364</span>      return snapshot.getMemStoreSize();<a name="line.2364"></a>
-<span class="sourceLineNo">2365</span>    }<a name="line.2365"></a>
-<span class="sourceLineNo">2366</span><a name="line.2366"></a>
-<span class="sourceLineNo">2367</span>    @Override<a name="line.2367"></a>
-<span class="sourceLineNo">2368</span>    public void flushCache(MonitoredTask status) throws IOException {<a name="line.2368"></a>
-<span class="sourceLineNo">2369</span>      RegionServerServices rsService = region.getRegionServerServices();<a name="line.2369"></a>
-<span class="sourceLineNo">2370</span>      ThroughputController throughputController =<a name="line.2370"></a>
-<span class="sourceLineNo">2371</span>          rsService == null ? null : rsService.getFlushThroughputController();<a name="line.2371"></a>
-<span class="sourceLineNo">2372</span>      tempFiles =<a name="line.2372"></a>
-<span class="sourceLineNo">2373</span>          HStore.this.flushCache(cacheFlushSeqNum, snapshot, status, throughputController, tracker);<a name="line.2373"></a>
-<span class="sourceLineNo">2374</span>    }<a name="line.2374"></a>
-<span class="sourceLineNo">2375</span><a name="line.2375"></a>
-<span class="sourceLineNo">2376</span>    @Override<a name="line.2376"></a>
-<span class="sourceLineNo">2377</span>    public boolean commit(MonitoredTask status) throws IOException {<a name="line.2377"></a>
-<span class="sourceLineNo">2378</span>      if (CollectionUtils.isEmpty(this.tempFiles)) {<a name="line.2378"></a>
-<span class="sourceLineNo">2379</span>        return false;<a name="line.2379"></a>
-<span class="sourceLineNo">2380</span>      }<a name="line.2380"></a>
-<span class="sourceLineNo">2381</span>      List&lt;HStoreFile&gt; storeFiles = new ArrayList&lt;&gt;(this.tempFiles.size());<a name="line.2381"></a>
-<span class="sourceLineNo">2382</span>      for (Path storeFilePath : tempFiles) {<a name="line.2382"></a>
-<span class="sourceLineNo">2383</span>        try {<a name="line.2383"></a>
-<span class="sourceLineNo">2384</span>          HStoreFile sf = HStore.this.commitFile(storeFilePath, cacheFlushSeqNum, status);<a name="line.2384"></a>
-<span class="sourceLineNo">2385</span>          outputFileSize += sf.getReader().length();<a name="line.2385"></a>
-<span class="sourceLineNo">2386</span>          storeFiles.add(sf);<a name="line.2386"></a>
-<span class="sourceLineNo">2387</span>        } catch (IOException ex) {<a name="line.2387"></a>
-<span class="sourceLineNo">2388</span>          LOG.error("Failed to commit store file {}", storeFilePath, ex);<a name="line.2388"></a>
-<span class="sourceLineNo">2389</span>          // Try to delete the files we have committed before.<a name="line.2389"></a>
-<span class="sourceLineNo">2390</span>          for (HStoreFile sf : storeFiles) {<a name="line.2390"></a>
-<span class="sourceLineNo">2391</span>            Path pathToDelete = sf.getPath();<a name="line.2391"></a>
-<span class="sourceLineNo">2392</span>            try {<a name="line.2392"></a>
-<span class="sourceLineNo">2393</span>              sf.deleteStoreFile();<a name="line.2393"></a>
-<span class="sourceLineNo">2394</span>            } catch (IOException deleteEx) {<a name="line.2394"></a>
-<span class="sourceLineNo">2395</span>              LOG.error(HBaseMarkers.FATAL, "Failed to delete store file we committed, "<a name="line.2395"></a>
-<span class="sourceLineNo">2396</span>                  + "halting {}", pathToDelete, ex);<a name="line.2396"></a>
-<span class="sourceLineNo">2397</span>              Runtime.getRuntime().halt(1);<a name="line.2397"></a>
-<span class="sourceLineNo">2398</span>            }<a name="line.2398"></a>
-<span class="sourceLineNo">2399</span>          }<a name="line.2399"></a>
-<span class="sourceLineNo">2400</span>          throw new IOException("Failed to commit the flush", ex);<a name="line.2400"></a>
-<span class="sourceLineNo">2401</span>        }<a name="line.2401"></a>
-<span class="sourceLineNo">2402</span>      }<a name="line.2402"></a>
-<span class="sourceLineNo">2403</span><a name="line.2403"></a>
-<span class="sourceLineNo">2404</span>      for (HStoreFile sf : storeFiles) {<a name="line.2404"></a>
-<span class="sourceLineNo">2405</span>        if (HStore.this.getCoprocessorHost() != null) {<a name="line.2405"></a>
-<span class="sourceLineNo">2406</span>          HStore.this.getCoprocessorHost().postFlush(HStore.this, sf, tracker);<a name="line.2406"></a>
-<span class="sourceLineNo">2407</span>        }<a name="line.2407"></a>
-<span class="sourceLineNo">2408</span>        committedFiles.add(sf.getPath());<a name="line.2408"></a>
-<span class="sourceLineNo">2409</span>      }<a name="line.2409"></a>
-<span class="sourceLineNo">2410</span><a name="line.2410"></a>
-<span class="sourceLineNo">2411</span>      HStore.this.flushedCellsCount.addAndGet(cacheFlushCount);<a name="line.2411"></a>
-<span class="sourceLineNo">2412</span>      HStore.this.flushedCellsSize.addAndGet(cacheFlushSize);<a name="line.2412"></a>
-<span class="sourceLineNo">2413</span>      HStore.this.flushedOutputFileSize.addAndGet(outputFileSize);<a name="line.2413"></a>
-<span class="sourceLineNo">2414</span><a name="line.2414"></a>
-<span class="sourceLineNo">2415</span>      // Add new file to store files.  Clear snapshot too while we have the Store write lock.<a name="line.2415"></a>
-<span class="sourceLineNo">2416</span>      return HStore.this.updateStorefiles(storeFiles, snapshot.getId());<a name="line.2416"></a>
-<span class="sourceLineNo">2417</span>    }<a name="line.2417"></a>
-<span class="sourceLineNo">2418</span><a name="line.2418"></a>
-<span class="sourceLineNo">2419</span>    @Override<a name="line.2419"></a>
-<span class="sourceLineNo">2420</span>    public long getOutputFileSize() {<a name="line.2420"></a>
-<span class="sourceLineNo">2421</span>      return outputFileSize;<a name="line.2421"></a>
-<span class="sourceLineNo">2422</span>    }<a name="line.2422"></a>
-<span class="sourceLineNo">2423</span><a name="line.2423"></a>
-<span class="sourceLineNo">2424</span>    @Override<a name="line.2424"></a>
-<span class="sourceLineNo">2425</span>    public List&lt;Path&gt; getCommittedFiles() {<a name="line.2425"></a>
-<span class="sourceLineNo">2426</span>      return committedFiles;<a name="line.2426"></a>
-<span class="sourceLineNo">2427</span>    }<a name="line.2427"></a>
-<span class="sourceLineNo">2428</span><a name="line.2428"></a>
-<span class="sourceLineNo">2429</span>    /**<a name="line.2429"></a>
-<span class="sourceLineNo">2430</span>     * Similar to commit, but called in secondary region replicas for replaying the<a name="line.2430"></a>
-<span class="sourceLineNo">2431</span>     * flush cache from primary region. Adds the new files to the store, and drops the<a name="line.2431"></a>
-<span class="sourceLineNo">2432</span>     * snapshot depending on dropMemstoreSnapshot argument.<a name="line.2432"></a>
-<span class="sourceLineNo">2433</span>     * @param fileNames names of the flushed files<a name="line.2433"></a>
-<span class="sourceLineNo">2434</span>     * @param dropMemstoreSnapshot whether to drop the prepared memstore snapshot<a name="line.2434"></a>
-<span class="sourceLineNo">2435</span>     */<a name="line.2435"></a>
-<span class="sourceLineNo">2436</span>    @Override<a name="line.2436"></a>
-<span class="sourceLineNo">2437</span>    public void replayFlush(List&lt;String&gt; fileNames, boolean dropMemstoreSnapshot)<a name="line.2437"></a>
-<span class="sourceLineNo">2438</span>        throws IOException {<a name="line.2438"></a>
-<span class="sourceLineNo">2439</span>      List&lt;HStoreFile&gt; storeFiles = new ArrayList&lt;&gt;(fileNames.size());<a name="line.2439"></a>
-<span class="sourceLineNo">2440</span>      for (String file : fileNames) {<a name="line.2440"></a>
-<span class="sourceLineNo">2441</span>        // open the file as a store file (hfile link, etc)<a name="line.2441"></a>
-<span class="sourceLineNo">2442</span>        StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), file);<a name="line.2442"></a>
-<span class="sourceLineNo">2443</span>        HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);<a name="line.2443"></a>
-<span class="sourceLineNo">2444</span>        storeFiles.add(storeFile);<a name="line.2444"></a>
-<span class="sourceLineNo">2445</span>        HStore.this.storeSize.addAndGet(storeFile.getReader().length());<a name="line.2445"></a>
-<span class="sourceLineNo">2446</span>        HStore.this.totalUncompressedBytes<a name="line.2446"></a>
-<span class="sourceLineNo">2447</span>            .addAndGet(storeFile.getReader().getTotalUncompressedBytes());<a name="line.2447"></a>
-<span class="sourceLineNo">2448</span>        if (LOG.isInfoEnabled()) {<a name="line.2448"></a>
-<span class="sourceLineNo">2449</span>          LOG.info("Region: " + HStore.this.getRegionInfo().getEncodedName() +<a name="line.2449"></a>
-<span class="sourceLineNo">2450</span>            " added " + storeFile + ", entries=" + storeFile.getReader().getEntries() +<a name="line.2450"></a>
-<span class="sourceLineNo">2451</span>              ", sequenceid=" + storeFile.getReader().getSequenceID() + ", filesize="<a name="line.2451"></a>
-<span class="sourceLineNo">2452</span>              + TraditionalBinaryPrefix.long2String(storeFile.getReader().length(), "", 1));<a name="line.2452"></a>
-<span class="sourceLineNo">2453</span>        }<a name="line.2453"></a>
-<span class="sourceLineNo">2454</span>      }<a name="line.2454"></a>
-<span class="sourceLineNo">2455</span><a name="line.2455"></a>
-<span class="sourceLineNo">2456</span>      long snapshotId = -1; // -1 means do not drop<a name="line.2456"></a>
-<span class="sourceLineNo">2457</span>      if (dropMemstoreSnapshot &amp;&amp; snapshot != null) {<a name="line.2457"></a>
-<span class="sourceLineNo">2458</span>        snapshotId = snapshot.getId();<a name="line.2458"></a>
-<span class="sourceLineNo">2459</span>        snapshot.close();<a name="line.2459"></a>
-<span class="sourceLineNo">2460</span>      }<a name="line.2460"></a>
-<span class="sourceLineNo">2461</span>      HStore.this.updateStorefiles(storeFiles, snapshotId);<a name="line.2461"></a>
-<span class="sourceLineNo">2462</span>    }<a name="line.2462"></a>
-<span class="sourceLineNo">2463</span><a name="line.2463"></a>
-<span class="sourceLineNo">2464</span>    /**<a name="line.2464"></a>
-<span class="sourceLineNo">2465</span>     * Abort the snapshot preparation. Drops the snapshot if any.<a name="line.2465"></a>
-<span class="sourceLineNo">2466</span>     */<a name="line.2466"></a>
-<span class="sourceLineNo">2467</span>    @Override<a name="line.2467"></a>
-<span class="sourceLineNo">2468</span>    public void abort() throws IOException {<a name="line.2468"></a>
-<span class="sourceLineNo">2469</span>      if (snapshot != null) {<a name="line.2469"></a>
-<span class="sourceLineNo">2470</span>        //We need to close the snapshot when aborting, otherwise, the segment scanner<a name="line.2470"></a>
-<span class="sourceLineNo">2471</span>        //won't be closed. If we are using MSLAB, the chunk referenced by those scanners<a name="line.2471"></a>
-<span class="sourceLineNo">2472</span>        //can't be released, thus memory leak<a name="line.2472"></a>
-<span class="sourceLineNo">2473</span>        snapshot.close();<a name="line.2473"></a>
-<span class="sourceLineNo">2474</span>        HStore.this.updateStorefiles(Collections.emptyList(), snapshot.getId());<a name="line.2474"></a>
-<span class="sourceLineNo">2475</span>      }<a name="line.2475"></a>
-<span class="sourceLineNo">2476</span>    }<a name="line.2476"></a>
-<span class="sourceLineNo">2477</span>  }<a name="line.2477"></a>
-<span class="sourceLineNo">2478</span><a name="line.2478"></a>
-<span class="sourceLineNo">2479</span>  @Override<a name="line.2479"></a>
-<span class="sourceLineNo">2480</span>  public boolean needsCompaction() {<a name="line.2480"></a>
-<span class="sourceLineNo">2481</span>    List&lt;HStoreFile&gt; filesCompactingClone = null;<a name="line.2481"></a>
-<span class="sourceLineNo">2482</span>    synchronized (filesCompacting) {<a name="line.2482"></a>
-<span class="sourceLineNo">2483</span>      filesCompactingClone = Lists.newArrayList(filesCompacting);<a name="line.2483"></a>
-<span class="sourceLineNo">2484</span>    }<a name="line.2484"></a>
-<span class="sourceLineNo">2485</span>    return this.storeEngine.needsCompaction(filesCompactingClone);<a name="line.2485"></a>
-<span class="sourceLineNo">2486</span>  }<a name="line.2486"></a>
-<span class="sourceLineNo">2487</span><a name="line.2487"></a>
-<span class="sourceLineNo">2488</span>  /**<a name="line.2488"></a>
-<span class="sourceLineNo">2489</span>   * Used for tests.<a name="line.2489"></a>
-<span class="sourceLineNo">2490</span>   * @return cache configuration for this Store.<a name="line.2490"></a>
-<span class="sourceLineNo">2491</span>   */<a name="line.2491"></a>
-<span class="sourceLineNo">2492</span>  @VisibleForTesting<a name="line.2492"></a>
-<span class="sourceLineNo">2493</span>  public CacheConfig getCacheConfig() {<a name="line.2493"></a>
-<span class="sourceLineNo">2494</span>    return this.cacheConf;<a name="line.2494"></a>
-<span class="sourceLineNo">2495</span>  }<a name="line.2495"></a>
-<span class="sourceLineNo">2496</span><a name="line.2496"></a>
-<span class="sourceLineNo">2497</span>  public static final long FIXED_OVERHEAD =<a name="line.2497"></a>
-<span class="sourceLineNo">2498</span>      ClassSize.align(ClassSize.OBJECT + (27 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)<a name="line.2498"></a>
-<span class="sourceLineNo">2499</span>              + (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));<a name="line.2499"></a>
-<span class="sourceLineNo">2500</span><a name="line.2500"></a>
-<span class="sourceLineNo">2501</span>  public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD<a name="line.2501"></a>
-<span class="sourceLineNo">2502</span>      + ClassSize.OBJECT + ClassSize.REENTRANT_LOCK<a name="line.2502"></a>
-<span class="sourceLineNo">2503</span>      + ClassSize.CONCURRENT_SKIPLISTMAP<a name="line.2503"></a>
-<span class="sourceLineNo">2504</span>      + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY + ClassSize.OBJECT<a name="line.2504"></a>
-<span class="sourceLineNo">2505</span>      + ScanInfo.FIXED_OVERHEAD);<a name="line.2505"></a>
-<span class="sourceLineNo">2506</span><a name="line.2506"></a>
-<span class="sourceLineNo">2507</span>  @Override<a name="line.2507"></a>
-<span class="sourceLineNo">2508</span>  public long heapSize() {<a name="line.2508"></a>
-<span class="sourceLineNo">2509</span>    MemStoreSize memstoreSize = this.memstore.size();<a name="line.2509"></a>
-<span class="sourceLineNo">2510</span>    return DEEP_OVERHEAD + memstoreSize.getHeapSize();<a name="line.2510"></a>
+<span class="sourceLineNo">2353</span>  private final class StoreFlusherImpl implements StoreFlushContext {<a name="line.2353"></a>
+<span class="sourceLineNo">2354</span><a name="line.2354"></a>
+<span class="sourceLineNo">2355</span>    private final FlushLifeCycleTracker tracker;<a name="line.2355"></a>
+<span class="sourceLineNo">2356</span>    private final long cacheFlushSeqNum;<a name="line.2356"></a>
+<span class="sourceLineNo">2357</span>    private MemStoreSnapshot snapshot;<a name="line.2357"></a>
+<span class="sourceLineNo">2358</span>    private List&lt;Path&gt; tempFiles;<a name="line.2358"></a>
+<span class="sourceLineNo">2359</span>    private List&lt;Path&gt; committedFiles;<a name="line.2359"></a>
+<span class="sourceLineNo">2360</span>    private long cacheFlushCount;<a name="line.2360"></a>
+<span class="sourceLineNo">2361</span>    private long cacheFlushSize;<a name="line.2361"></a>
+<span class="sourceLineNo">2362</span>    private long outputFileSize;<a name="line.2362"></a>
+<span class="sourceLineNo">2363</span><a name="line.2363"></a>
+<span class="sourceLineNo">2364</span>    private StoreFlusherImpl(long cacheFlushSeqNum, FlushLifeCycleTracker tracker) {<a name="line.2364"></a>
+<span class="sourceLineNo">2365</span>      this.cacheFlushSeqNum = cacheFlushSeqNum;<a name="line.2365"></a>
+<span class="sourceLineNo">2366</span>      this.tracker = tracker;<a name="line.2366"></a>
+<span class="sourceLineNo">2367</span>    }<a name="line.2367"></a>
+<span class="sourceLineNo">2368</span><a name="line.2368"></a>
+<span class="sourceLineNo">2369</span>    /**<a name="line.2369"></a>
+<span class="sourceLineNo">2370</span>     * This is not thread safe. The caller should have a lock on the region or the store.<a name="line.2370"></a>
+<span class="sourceLineNo">2371</span>     * If necessary, the lock can be added with the patch provided in HBASE-10087<a name="line.2371"></a>
+<span class="sourceLineNo">2372</span>     */<a name="line.2372"></a>
+<span class="sourceLineNo">2373</span>    @Override<a name="line.2373"></a>
+<span class="sourceLineNo">2374</span>    public MemStoreSize prepare() {<a name="line.2374"></a>
+<span class="sourceLineNo">2375</span>      // passing the current sequence number of the wal - to allow bookkeeping in the memstore<a name="line.2375"></a>
+<span class="sourceLineNo">2376</span>      this.snapshot = memstore.snapshot();<a name="line.2376"></a>
+<span class="sourceLineNo">2377</span>      this.cacheFlushCount = snapshot.getCellsCount();<a name="line.2377"></a>
+<span class="sourceLineNo">2378</span>      this.cacheFlushSize = snapshot.getDataSize();<a name="line.2378"></a>
+<span class="sourceLineNo">2379</span>      committedFiles = new ArrayList&lt;&gt;(1);<a name="line.2379"></a>
+<span class="sourceLineNo">2380</span>      return snapshot.getMemStoreSize();<a name="line.2380"></a>
+<span class="sourceLineNo">2381</span>    }<a name="line.2381"></a>
+<span class="sourceLineNo">2382</span><a name="line.2382"></a>
+<span class="sourceLineNo">2383</span>    @Override<a name="line.2383"></a>
+<span class="sourceLineNo">2384</span>    public void flushCache(MonitoredTask status) throws IOException {<a name="line.2384"></a>
+<span class="sourceLineNo">2385</span>      RegionServerServices rsService = region.getRegionServerServices();<a name="line.2385"></a>
+<span class="sourceLineNo">2386</span>      ThroughputController throughputController =<a name="line.2386"></a>
+<span class="sourceLineNo">2387</span>          rsService == null ? null : rsService.getFlushThroughputController();<a name="line.2387"></a>
+<span class="sourceLineNo">2388</span>      tempFiles =<a name="line.2388"></a>
+<span class="sourceLineNo">2389</span>          HStore.this.flushCache(cacheFlushSeqNum, snapshot, status, throughputController, tracker);<a name="line.2389"></a>
+<span class="sourceLineNo">2390</span>    }<a name="line.2390"></a>
+<span class="sourceLineNo">2391</span><a name="line.2391"></a>
+<span class="sourceLineNo">2392</span>    @Override<a name="line.2392"></a>
+<span class="sourceLineNo">2393</span>    public boolean commit(MonitoredTask status) throws IOException {<a name="line.2393"></a>
+<span class="sourceLineNo">2394</span>      if (CollectionUtils.isEmpty(this.tempFiles)) {<a name="line.2394"></a>
+<span class="sourceLineNo">2395</span>        return false;<a name="line.2395"></a>
+<span class="sourceLineNo">2396</span>      }<a name="line.2396"></a>
+<span class="sourceLineNo">2397</span>      List&lt;HStoreFile&gt; storeFiles = new ArrayList&lt;&gt;(this.tempFiles.size());<a name="line.2397"></a>
+<span class="sourceLineNo">2398</span>      for (Path storeFilePath : tempFiles) {<a name="line.2398"></a>
+<span class="sourceLineNo">2399</span>        try {<a name="line.2399"></a>
+<span class="sourceLineNo">2400</span>          HStoreFile sf = HStore.this.commitFile(storeFilePath, cacheFlushSeqNum, status);<a name="line.2400"></a>
+<span class="sourceLineNo">2401</span>          outputFileSize += sf.getReader().length();<a name="line.2401"></a>
+<span class="sourceLineNo">2402</span>          storeFiles.add(sf);<a name="line.2402"></a>
+<span class="sourceLineNo">2403</span>        } catch (IOException ex) {<a name="line.2403"></a>
+<span class="sourceLineNo">2404</span>          LOG.error("Failed to commit store file {}", storeFilePath, ex);<a name="line.2404"></a>
+<span class="sourceLineNo">2405</span>          // Try to delete the files we have committed before.<a name="line.2405"></a>
+<span class="sourceLineNo">2406</span>          for (HStoreFile sf : storeFiles) {<a name="line.2406"></a>
+<span class="sourceLineNo">2407</span>            Path pathToDelete = sf.getPath();<a name="line.2407"></a>
+<span class="sourceLineNo">2408</span>            try {<a name="line.2408"></a>
+<span class="sourceLineNo">2409</span>              sf.deleteStoreFile();<a name="line.2409"></a>
+<span class="sourceLineNo">2410</span>            } catch (IOException deleteEx) {<a name="line.2410"></a>
+<span class="sourceLineNo">2411</span>              LOG.error(HBaseMarkers.FATAL, "Failed to delete store file we committed, "<a name="line.2411"></a>
+<span class="sourceLineNo">2412</span>                  + "halting {}", pathToDelete, ex);<a name="line.2412"></a>
+<span class="sourceLineNo">2413</span>              Runtime.getRuntime().halt(1);<a name="line.2413"></a>
+<span class="sourceLineNo">2414</span>            }<a name="line.2414"></a>
+<span class="sourceLineNo">2415</span>          }<a name="line.2415"></a>
+<span class="sourceLineNo">2416</span>          throw new IOException("Failed to commit the flush", ex);<a name="line.2416"></a>
+<span class="sourceLineNo">2417</span>        }<a name="line.2417"></a>
+<span class="sourceLineNo">2418</span>      }<a name="line.2418"></a>
+<span class="sourceLineNo">2419</span><a name="line.2419"></a>
+<span class="sourceLineNo">2420</span>      for (HStoreFile sf : storeFiles) {<a name="line.2420"></a>
+<span class="sourceLineNo">2421</span>        if (HStore.this.getCoprocessorHost() != null) {<a name="line.2421"></a>
+<span class="sourceLineNo">2422</span>          HStore.this.getCoprocessorHost().postFlush(HStore.this, sf, tracker);<a name="line.2422"></a>
+<span class="sourceLineNo">2423</span>        }<a name="line.2423"></a>
+<span class="sourceLineNo">2424</span>        committedFiles.add(sf.getPath());<a name="line.2424"></a>
+<span class="sourceLineNo">2425</span>      }<a name="line.2425"></a>
+<span class="sourceLineNo">2426</span><a name="line.2426"></a>
+<span class="sourceLineNo">2427</span>      HStore.this.flushedCellsCount.addAndGet(cacheFlushCount);<a name="line.2427"></a>
+<span class="sourceLineNo">2428</span>      HStore.this.flushedCellsSize.addAndGet(cacheFlushSize);<a name="line.2428"></a>
+<span class="sourceLineNo">2429</span>      HStore.this.flushedOutputFileSize.addAndGet(outputFileSize);<a name="line.2429"></a>
+<span class="sourceLineNo">2430</span><a name="line.2430"></a>
+<span class="sourceLineNo">2431</span>      // Add new file to store files.  Clear snapshot too while we have the Store write lock.<a name="line.2431"></a>
+<span class="sourceLineNo">2432</span>      return HStore.this.updateStorefiles(storeFiles, snapshot.getId());<a name="line.2432"></a>
+<span class="sourceLineNo">2433</span>    }<a name="line.2433"></a>
+<span class="sourceLineNo">2434</span><a name="line.2434"></a>
+<span class="sourceLineNo">2435</span>    @Override<a name="line.2435"></a>
+<span class="sourceLineNo">2436</span>    public long getOutputFileSize() {<a name="line.2436"></a>
+<span class="sourceLineNo">2437</span>      return outputFileSize;<a name="line.2437"></a>
+<span class="sourceLineNo">2438</span>    }<a name="line.2438"></a>
+<span class="sourceLineNo">2439</span><a name="line.2439"></a>
+<span class="sourceLineNo">2440</span>    @Override<a name="line.2440"></a>
+<span class="sourceLineNo">2441</span>    public List&lt;Path&gt; getCommittedFiles() {<a name="line.2441"></a>
+<span class="sourceLineNo">2442</span>      return committedFiles;<a name="line.2442"></a>
+<span class="sourceLineNo">2443</span>    }<a name="line.2443"></a>
+<span class="sourceLineNo">2444</span><a name="line.2444"></a>
+<span class="sourceLineNo">2445</span>    /**<a name="line.2445"></a>
+<span class="sourceLineNo">2446</span>     * Similar to commit, but called in secondary region replicas for replaying the<a name="line.2446"></a>
+<span class="sourceLineNo">2447</span>     * flush cache from primary region. Adds the new files to the store, and drops the<a name="line.2447"></a>
+<span class="sourceLineNo">2448</span>     * snapshot depending on dropMemstoreSnapshot argument.<a name="line.2448"></a>
+<span class="sourceLineNo">2449</span>     * @param fileNames names of the flushed files<a name="line.2449"></a>
+<span class="sourceLineNo">2450</span>     * @param dropMemstoreSnapshot whether to drop the prepared memstore snapshot<a name="line.2450"></a>
+<span class="sourceLineNo">2451</span>     */<a name="line.2451"></a>
+<span class="sourceLineNo">2452</span>    @Override<a name="line.2452"></a>
+<span class="sourceLineNo">2453</span>    public void replayFlush(List&lt;String&gt; fileNames, boolean dropMemstoreSnapshot)<a name="line.2453"></a>
+<span class="sourceLineNo">2454</span>        throws IOException {<a name="line.2454"></a>
+<span class="sourceLineNo">2455</span>      List&lt;HStoreFile&gt; storeFiles = new ArrayList&lt;&gt;(fileNames.size());<a name="line.2455"></a>
+<span class="sourceLineNo">2456</span>      for (String file : fileNames) {<a name="line.2456"></a>
+<span class="sourceLineNo">2457</span>        // open the file as a store file (hfile link, etc)<a name="line.2457"></a>
+<span class="sourceLineNo">2458</span>        StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), file);<a name="line.2458"></a>
+<span class="sourceLineNo">2459</span>        HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);<a name="line.2459"></a>
+<span class="sourceLineNo">2460</span>        storeFiles.add(storeFile);<a name="line.2460"></a>
+<span class="sourceLineNo">2461</span>        HStore.this.storeSize.addAndGet(storeFile.getReader().length());<a name="line.2461"></a>
+<span class="sourceLineNo">2462</span>        HStore.this.totalUncompressedBytes<a name="line.2462"></a>
+<span class="sourceLineNo">2463</span>            .addAndGet(storeFile.getReader().getTotalUncompressedBytes());<a name="line.2463"></a>
+<span class="sourceLineNo">2464</span>        if (LOG.isInfoEnabled()) {<a name="line.2464"></a>
+<span class="sourceLineNo">2465</span>          LOG.info("Region: " + HStore.this.getRegionInfo().getEncodedName() +<a name="line.2465"></a>
+<span class="sourceLineNo">2466</span>            " added " + storeFile + ", entries=" + storeFile.getReader().getEntries() +<a name="line.2466"></a>
+<span class="sourceLineNo">2467</span>              ", sequenceid=" + storeFile.getReader().getSequenceID() + ", filesize="<a name="line.2467"></a>
+<span class="sourceLineNo">2468</span>              + TraditionalBinaryPrefix.long2String(storeFile.getReader().length(), "", 1));<a name="line.2468"></a>
+<span class="sourceLineNo">2469</span>        }<a name="line.2469"></a>
+<span class="sourceLineNo">2470</span>      }<a name="line.2470"></a>
+<span class="sourceLineNo">2471</span><a name="line.2471"></a>
+<span class="sourceLineNo">2472</span>      long snapshotId = -1; // -1 means do not drop<a name="line.2472"></a>
+<span class="sourceLineNo">2473</span>      if (dropMemstoreSnapshot &amp;&amp; snapshot != null) {<a name="line.2473"></a>
+<span class="sourceLineNo">2474</span>        snapshotId = snapshot.getId();<a name="line.2474"></a>
+<span class="sourceLineNo">2475</span>        snapshot.close();<a name="line.2475"></a>
+<span class="sourceLineNo">2476</span>      }<a name="line.2476"></a>
+<span class="sourceLineNo">2477</span>      HStore.this.updateStorefiles(storeFiles, snapshotId);<a name="line.2477"></a>
+<span class="sourceLineNo">2478</span>    }<a name="line.2478"></a>
+<span class="sourceLineNo">2479</span><a name="line.2479"></a>
+<span class="sourceLineNo">2480</span>    /**<a name="line.2480"></a>
+<span class="sourceLineNo">2481</span>     * Abort the snapshot preparation. Drops the snapshot if any.<a name="line.2481"></a>
+<span class="sourceLineNo">2482</span>     */<a name="line.2482"></a>
+<span class="sourceLineNo">2483</span>    @Override<a name="line.2483"></a>
+<span class="sourceLineNo">2484</span>    public void abort() throws IOException {<a name="line.2484"></a>
+<span class="sourceLineNo">2485</span>      if (snapshot != null) {<a name="line.2485"></a>
+<span class="sourceLineNo">2486</span>        //We need to close the snapshot when aborting, otherwise, the segment scanner<a name="line.2486"></a>
+<span class="sourceLineNo">2487</span>        //won't be closed. If we are using MSLAB, the chunk referenced by those scanners<a name="line.2487"></a>
+<span class="sourceLineNo">2488</span>        //can't be released, thus memory leak<a name="line.2488"></a>
+<span class="sourceLineNo">2489</span>        snapshot.close();<a name="line.2489"></a>
+<span class="sourceLineNo">2490</span>        HStore.this.updateStorefiles(Collections.emptyList(), snapshot.getId());<a name="line.2490"></a>
+<span class="sourceLineNo">2491</span>      }<a name="line.2491"></a>
+<span class="sourceLineNo">2492</span>    }<a name="line.2492"></a>
+<span class="sourceLineNo">2493</span>  }<a name="line.2493"></a>
+<span class="sourceLineNo">2494</span><a name="line.2494"></a>
+<span class="sourceLineNo">2495</span>  @Override<a name="line.2495"></a>
+<span class="sourceLineNo">2496</span>  public boolean needsCompaction() {<a name="line.2496"></a>
+<span class="sourceLineNo">2497</span>    List&lt;HStoreFile&gt; filesCompactingClone = null;<a name="line.2497"></a>
+<span class="sourceLineNo">2498</span>    synchronized (filesCompacting) {<a name="line.2498"></a>
+<span class="sourceLineNo">2499</span>      filesCompactingClone = Lists.newArrayList(filesCompacting);<a name="line.2499"></a>
+<span class="sourceLineNo">2500</span>    }<a name="line.2500"></a>
+<span class="sourceLineNo">2501</span>    return this.storeEngine.needsCompaction(filesCompactingClone);<a name="line.2501"></a>
+<span class="sourceLineNo">2502</span>  }<a name="line.2502"></a>
+<span class="sourceLineNo">2503</span><a name="line.2503"></a>
+<span class="sourceLineNo">2504</span>  /**<a name="line.2504"></a>
+<span class="sourceLineNo">2505</span>   * Used for tests.<a name="line.2505"></a>
+<span class="sourceLineNo">2506</span>   * @return cache configuration for this Store.<a name="line.2506"></a>
+<span class="sourceLineNo">2507</span>   */<a name="line.2507"></a>
+<span class="sourceLineNo">2508</span>  @VisibleForTesting<a name="line.2508"></a>
+<span class="sourceLineNo">2509</span>  public CacheConfig getCacheConfig() {<a name="line.2509"></a>
+<span class="sourceLineNo">2510</span>    return this.cacheConf;<a name="line.2510"></a>
 <span class="sourceLineNo">2511</span>  }<a name="line.2511"></a>
 <span class="sourceLineNo">2512</span><a name="line.2512"></a>
-<span class="sourceLineNo">2513</span>  @Override<a name="line.2513"></a>
-<span class="sourceLineNo">2514</span>  public CellComparator getComparator() {<a name="line.2514"></a>
-<span class="sourceLineNo">2515</span>    return comparator;<a name="line.2515"></a>
-<span class="sourceLineNo">2516</span>  }<a name="line.2516"></a>
-<span class="sourceLineNo">2517</span><a name="line.2517"></a>
-<span class="sourceLineNo">2518</span>  public ScanInfo getScanInfo() {<a name="line.2518"></a>
-<span class="sourceLineNo">2519</span>    return scanInfo;<a name="line.2519"></a>
-<span class="sourceLineNo">2520</span>  }<a name="line.2520"></a>
-<span class="sourceLineNo">2521</span><a name="line.2521"></a>
-<span class="sourceLineNo">2522</span>  /**<a name="line.2522"></a>
-<span class="sourceLineNo">2523</span>   * Set scan info, used by test<a name="line.2523"></a>
-<span class="sourceLineNo">2524</span>   * @param scanInfo new scan info to use for test<a name="line.2524"></a>
-<span class="sourceLineNo">2525</span>   */<a name="line.2525"></a>
-<span class="sourceLineNo">2526</span>  void setScanInfo(ScanInfo scanInfo) {<a name="line.2526"></a>
-<span class="sourceLineNo">2527</span>    this.scanInfo = scanInfo;<a name="line.2527"></a>
-<span class="sourceLineNo">2528</span>  }<a name="line.2528"></a>
-<span class="sourceLineNo">2529</span><a name="line.2529"></a>
-<span class="sourceLineNo">2530</span>  @Override<a name="line.2530"></a>
-<span class="sourceLineNo">2531</span>  public boolean hasTooManyStoreFiles() {<a name="line.2531"></a>
-<span class="sourceLineNo">2532</span>    return getStorefilesCount() &gt; this.blockingFileCount;<a name="line.2532"></a>
-<span class="sourceLineNo">2533</span>  }<a name="line.2533"></a>
-<span class="sourceLineNo">2534</span><a name="line.2534"></a>
-<span class="sourceLineNo">2535</span>  @Override<a name="line.2535"></a>
-<span class="sourceLineNo">2536</span>  public long getFlushedCellsCount() {<a name="line.2536"></a>
-<span class="sourceLineNo">2537</span>    return flushedCellsCount.get();<a name="line.2537"></a>
-<span class="sourceLineNo">2538</span>  }<a name="line.2538"></a>
-<span class="sourceLineNo">2539</span><a name="line.2539"></a>
-<span class="sourceLineNo">2540</span>  @Override<a name="line.2540"></a>
-<span class="sourceLineNo">2541</span>  public long getFlushedCellsSize() {<a name="line.2541"></a>
-<span class="sourceLineNo">2542</span>    return flushedCellsSize.get();<a name="line.2542"></a>
-<span class="sourceLineNo">2543</span>  }<a name="line.2543"></a>
-<span class="sourceLineNo">2544</span><a name="line.2544"></a>
-<span class="sourceLineNo">2545</span>  @Override<a name="line.2545"></a>
-<span class="sourceLineNo">2546</span>  public long getFlushedOutputFileSize() {<a name="line.2546"></a>
-<span class="sourceLineNo">2547</span>    return flushedOutputFileSize.get();<a name="line.2547"></a>
-<span class="sourceLineNo">2548</span>  }<a name="line.2548"></a>
-<span class="sourceLineNo">2549</span><a name="line.2549"></a>
-<span class="sourceLineNo">2550</span>  @Override<a name="line.2550"></a>
-<span class="sourceLineNo">2551</span>  public long getCompactedCellsCount() {<a name="line.2551"></a>
-<span class="sourceLineNo">2552</span>    return compactedCellsCount.get();<a name="line.2552"></a>
-<span class="sourceLineNo">2553</span>  }<a name="line.2553"></a>
-<span class="sourceLineNo">2554</span><a name="line.2554"></a>
-<span class="sourceLineNo">2555</span>  @Override<a name="line.2555"></a>
-<span class="sourceLineNo">2556</span>  public long getCompactedCellsSize() {<a name="line.2556"></a>
-<span class="sourceLineNo">2557</span>    return compactedCellsSize.get();<a name="line.2557"></a>
-<span class="sourceLineNo">2558</span>  }<a name="line.2558"></a>
-<span class="sourceLineNo">2559</span><a name="line.2559"></a>
-<span class="sourceLineNo">2560</span>  @Override<a name="line.2560"></a>
-<span class="sourceLineNo">2561</span>  public long getMajorCompactedCellsCount() {<a name="line.2561"></a>
-<span class="sourceLineNo">2562</span>    return majorCompactedCellsCount.get();<a name="line.2562"></a>
-<span class="sourceLineNo">2563</span>  }<a name="line.2563"></a>
-<span class="sourceLineNo">2564</span><a name="line.2564"></a>
-<span class="sourceLineNo">2565</span>  @Override<a name="line.2565"></a>
-<span class="sourceLineNo">2566</span>  public long getMajorCompactedCellsSize() {<a name="line.2566"></a>
-<span class="sourceLineNo">2567</span>    return majorCompactedCellsSize.get();<a name="line.2567"></a>
-<span class="sourceLineNo">2568</span>  }<a name="line.2568"></a>
-<span class="sourceLineNo">2569</span><a name="line.2569"></a>
-<span class="sourceLineNo">2570</span>  /**<a name="line.2570"></a>
-<span class="sourceLineNo">2571</span>   * Returns the StoreEngine that is backing this concrete implementation of Store.<a name="line.2571"></a>
-<span class="sourceLineNo">2572</span>   * @return Returns the {@link StoreEngine} object used internally inside this HStore object.<a name="line.2572"></a>
-<span class="sourceLineNo">2573</span>   */<a name="line.2573"></a>
-<span class="sourceLineNo">2574</span>  @VisibleForTesting<a name="line.2574"></a>
-<span class="sourceLineNo">2575</span>  public StoreEngine&lt;?, ?, ?, ?&gt; getStoreEngine() {<a name="line.2575"></a>
-<span class="sourceLineNo">2576</span>    return this.storeEngine;<a name="line.2576"></a>
-<span class="sourceLineNo">2577</span>  }<a name="line.2577"></a>
-<span class="sourceLineNo">2578</span><a name="line.2578"></a>
-<span class="sourceLineNo">2579</span>  protected OffPeakHours getOffPeakHours() {<a name="line.2579"></a>
-<span class="sourceLineNo">2580</span>    return this.offPeakHours;<a name="line.2580"></a>
-<span class="sourceLineNo">2581</span>  }<a name="line.2581"></a>
-<span class="sourceLineNo">2582</span><a name="line.2582"></a>
-<span class="sourceLineNo">2583</span>  /**<a name="line.2583"></a>
-<span class="sourceLineNo">2584</span>   * {@inheritDoc}<a name="line.2584"></a>
-<span class="sourceLineNo">2585</span>   */<a name="line.2585"></a>
-<span class="sourceLineNo">2586</span>  @Override<a name="line.2586"></a>
-<span class="sourceLineNo">2587</span>  public void onConfigurationChange(Configuration conf) {<a name="line.2587"></a>
-<span class="sourceLineNo">2588</span>    this.conf = new CompoundConfiguration()<a name="line.2588"></a>
-<span class="sourceLineNo">2589</span>            .add(conf)<a name="line.2589"></a>
-<span class="sourceLineNo">2590</span>            .addBytesMap(family.getValues());<a name="line.2590"></a>
-<span class="sourceLineNo">2591</span>    this.storeEngine.compactionPolicy.setConf(conf);<a name="line.2591"></a>
-<span class="sourceLineNo">2592</span>    this.offPeakHours = OffPeakHours.getInstance(conf);<a name="line.2592"></a>
+<span class="sourceLineNo">2513</span>  public static final long FIXED_OVERHEAD =<a name="line.2513"></a>
+<span class="sourceLineNo">2514</span>      ClassSize.align(ClassSize.OBJECT + (27 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)<a name="line.2514"></a>
+<span class="sourceLineNo">2515</span>              + (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));<a name="line.2515"></a>
+<span class="sourceLineNo">2516</span><a name="line.2516"></a>
+<span class="sourceLineNo">2517</span>  public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD<a name="line.2517"></a>
+<span class="sourceLineNo">2518</span>      + ClassSize.OBJECT + ClassSize.REENTRANT_LOCK<a name="line.2518"></a>
+<span class="sourceLineNo">2519</span>      + ClassSize.CONCURRENT_SKIPLISTMAP<a name="line.2519"></a>
+<span class="sourceLineNo">2520</span>      + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY + ClassSize.OBJECT<a name="line.2520"></a>
+<span class="sourceLineNo">2521</span>      + ScanInfo.FIXED_OVERHEAD);<a name="line.2521"></a>
+<span class="sourceLineNo">2522</span><a name="line.2522"></a>
+<span class="sourceLineNo">2523</span>  @Override<a name="line.2523"></a>
+<span class="sourceLineNo">2524</span>  public long heapSize() {<a name="line.2524"></a>
+<span class="sourceLineNo">2525</span>    MemStoreSize memstoreSize = this.memstore.size();<a name="line.2525"></a>
+<span class="sourceLineNo">2526</span>    return DEEP_OVERHEAD + memstoreSize.getHeapSize();<a name="line.2526"></a>
+<span class="sourceLineNo">2527</span>  }<a name="line.2527"></a>
+<span class="sourceLineNo">2528</span><a name="line.2528"></a>
+<span class="sourceLineNo">2529</span>  @Override<a name="line.2529"></a>
+<span class="sourceLineNo">2530</span>  public CellComparator getComparator() {<a name="line.2530"></a>
+<span class="sourceLineNo">2531</span>    return comparator;<a name="line.2531"></a>
+<span class="sourceLineNo">2532</span>  }<a name="line.2532"></a>
+<span class="sourceLineNo">2533</span><a name="line.2533"></a>
+<span class="sourceLineNo">2534</span>  public ScanInfo getScanInfo() {<a name="line.2534"></a>
+<span class="sourceLineNo">2535</span>    return scanInfo;<a name="line.2535"></a>
+<span class="sourceLineNo">2536</span>  }<a name="line.2536"></a>
+<span class="sourceLineNo">2537</span><a name="line.2537"></a>
+<span class="sourceLineNo">2538</span>  /**<a name="line.2538"></a>
+<span class="sourceLineNo">2539</span>   * Set scan info, used by test<a name="line.2539"></a>
+<span class="sourceLineNo">2540</span>   * @param scanInfo new scan info to use for test<a name="line.2540"></a>
+<span class="sourceLineNo">2541</span>   */<a name="line.2541"></a>
+<span class="sourceLineNo">2542</span>  void setScanInfo(ScanInfo scanInfo) {<a name="line.2542"></a>
+<span class="sourceLineNo">2543</span>    this.scanInfo = scanInfo;<a name="line.2543"></a>
+<span class="sourceLineNo">2544</span>  }<a name="line.2544"></a>
+<span class="sourceLineNo">2545</span><a name="line.2545"></a>
+<span class="sourceLineNo">2546</span>  @Override<a name="line.2546"></a>
+<span class="sourceLineNo">2547</span>  public boolean hasTooManyStoreFiles() {<a name="line.2547"></a>
+<span class="sourceLineNo">2548</span>    return getStorefilesCount() &gt; this.blockingFileCount;<a name="line.2548"></a>
+<span class="sourceLineNo">2549</span>  }<a name="line.2549"></a>
+<span class="sourceLineNo">2550</span><a name="line.2550"></a>
+<span class="sourceLineNo">2551</span>  @Override<a name="line.2551"></a>
+<span class="sourceLineNo">2552</span>  public long getFlushedCellsCount() {<a name="line.2552"></a>
+<span class="sourceLineNo">2553</span>    return flushedCellsCount.get();<a name="line.2553"></a>
+<span class="sourceLineNo">2554</span>  }<a name="line.2554"></a>
+<span class="sourceLineNo">2555</span><a name="line.2555"></a>
+<span class="sourceLineNo">2556</span>  @Override<a name="line.2556"></a>
+<span class="sourceLineNo">2557</span>  public long getFlushedCellsSize() {<a name="line.2557"></a>
+<span class="sourceLineNo">2558</span>    return flushedCellsSize.get();<a name="line.2558"></a>
+<span class="sourceLineNo">2559</span>  }<a name="line.2559"></a>
+<span class="sourceLineNo">2560</span><a name="line.2560"></a>
+<span class="sourceLineNo">2561</span>  @Override<a name="line.2561"></a>
+<span class="sourceLineNo">2562</span>  public long getFlushedOutputFileSize() {<a name="line.2562"></a>
+<span class="sourceLineNo">2563</span>    return flushedOutputFileSize.get();<a name="line.2563"></a>
+<span class="sourceLineNo">2564</span>  }<a name="line.2564"></a>
+<span class="sourceLineNo">2565</span><a name="line.2565"></a>
+<span class="sourceLineNo">2566</span>  @Override<a name="line.2566"></a>
+<span class="sourceLineNo">2567</span>  public long getCompactedCellsCount() {<a name="line.2567"></a>
+<span class="sourceLineNo">2568</span>    return compactedCellsCount.get();<a name="line.2568"></a>
+<span class="sourceLineNo">2569</span>  }<a name="line.2569"></a>
+<span class="sourceLineNo">2570</span><a name="line.2570"></a>
+<span class="sourceLineNo">2571</span>  @Override<a name="line.2571"></a>
+<span class="sourceLineNo">2572</span>  public long getCompactedCellsSize() {<a name="line.2572"></a>
+<span class="sourceLineNo">2573</span>    return compactedCellsSize.get();<a name="line.2573"></a>
+<span class="sourceLineNo">2574</span>  }<a name="line.2574"></a>
+<span class="sourceLineNo">2575</span><a name="line.2575"></a>
+<span class="sourceLineNo">2576</span>  @Override<a name="line.2576"></a>
+<span class="sourceLineNo">2577</span>  public long getMajorCompactedCellsCount() {<a name="line.2577"></a>
+<span class="sourceLineNo">2578</span>    return majorCompactedCellsCount.get();<a name="line.2578"></a>
+<span class="sourceLineNo">2579</span>  }<a name="line.2579"></a>
+<span class="sourceLineNo">2580</span><a name="line.2580"></a>
+<span class="sourceLineNo">2581</span>  @Override<a name="line.2581"></a>
+<span class="sourceLineNo">2582</span>  public long getMajorCompactedCellsSize() {<a name="line.2582"></a>
+<span class="sourceLineNo">2583</span>    return majorCompactedCellsSize.get();<a name="line.2583"></a>
+<span class="sourceLineNo">2584</span>  }<a name="line.2584"></a>
+<span class="sourceLineNo">2585</span><a name="line.2585"></a>
+<span class="sourceLineNo">2586</span>  /**<a name="line.2586"></a>
+<span class="sourceLineNo">2587</span>   * Returns the StoreEngine that is backing this concrete implementation of Store.<a name="line.2587"></a>
+<span class="sourceLineNo">2588</span>   * @return Returns the {@link StoreEngine} object used internally inside this HStore object.<a name="line.2588"></a>
+<span class="sourceLineNo">2589</span>   */<a name="line.2589"></a>
+<span class="sourceLineNo">2590</span>  @VisibleForTesting<a name="line.2590"></a>
+<span class="sourceLineNo">2591</span>  public StoreEngine&lt;?, ?, ?, ?&gt; getStoreEngine() {<a name="line.2591"></a>
+<span class="sourceLineNo">2592</span>    return this.storeEngine;<a name="line.2592"></a>
 <span class="sourceLineNo">2593</span>  }<a name="line.2593"></a>
 <span class="sourceLineNo">2594</span><a name="line.2594"></a>
-<span class="sourceLineNo">2595</span>  /**<a name="line.2595"></a>
-<span class="sourceLineNo">2596</span>   * {@inheritDoc}<a name="line.2596"></a>
-<span class="sourceLineNo">2597</span>   */<a name="line.2597"></a>
-<span class="sourceLineNo">2598</span>  @Override<a name="line.2598"></a>
-<span class="sourceLineNo">2599</span>  public void registerChildren(ConfigurationManager manager) {<a name="line.2599"></a>
-<span class="sourceLineNo">2600</span>    // No children to register<a name="line.2600"></a>
-<span class="sourceLineNo">2601</span>  }<a name="line.2601"></a>
-<span class="sourceLineNo">2602</span><a name="line.2602"></a>
-<span class="sourceLineNo">2603</span>  /**<a name="line.2603"></a>
-<span class="sourceLineNo">2604</span>   * {@inheritDoc}<a name="line.2604"></a>
-<span class="sourceLineNo">2605</span>   */<a name="line.2605"></a>
-<span class="sourceLineNo">2606</span>  @Override<a name="line.2606"></a>
-<span class="sourceLineNo">2607</span>  public void deregisterChildren(ConfigurationManager manager) {<a name="line.2607"></a>
-<span class="sourceLineNo">2608</span>    // No children to deregister<a name="line.2608"></a>
+<span class="sourceLineNo">2595</span>  protected OffPeakHours getOffPeakHours() {<a name="line.2595"></a>
+<span class="sourceLineNo">2596</span>    return this.offPeakHours;<a name="line.2596"></a>
+<span class="sourceLineNo">2597</span>  }<a name="line.2597"></a>
+<span class="sourceLineNo">2598</span><a name="line.2598"></a>
+<span class="sourceLineNo">2599</span>  /**<a name="line.2599"></a>
+<span class="sourceLineNo">2600</span>   * {@inheritDoc}<a name="line.2600"></a>
+<span class="sourceLineNo">2601</span>   */<a name="line.2601"></a>
+<span class="sourceLineNo">2602</span>  @Override<a name="line.2602"></a>
+<span class="sourceLineNo">2603</span>  public void onConfigurationChange(Configuration conf) {<a name="line.2603"></a>
+<span class="sourceLineNo">2604</span>    this.conf = new CompoundConfiguration()<a name="line.2604"></a>
+<span class="sourceLineNo">2605</span>            .add(conf)<a name="line.2605"></a>
+<span class="sourceLineNo">2606</span>            .addBytesMap(family.getValues());<a name="line.2606"></a>
+<span class="sourceLineNo">2607</span>    this.storeEngine.compactionPolicy.setConf(conf);<a name="line.2607"></a>
+<span class="sourceLineNo">2608</span>    this.offPeakHours = OffPeakHours.getInstance(conf);<a name="line.2608"></a>
 <span class="sourceLineNo">2609</span>  }<a name="line.2609"></a>
 <span class="sourceLineNo">2610</span><a name="line.2610"></a>
-<span class="sourceLineNo">2611</span>  @Override<a name="line.2611"></a>
-<span class="sourceLineNo">2612</span>  public double getCompactionPressure() {<a name="line.2612"></a>
-<span class="sourceLineNo">2613</span>    return storeEngine.getStoreFileManager().getCompactionPressure();<a name="line.2613"></a>
-<span class="sourceLineNo">2614</span>  }<a name="line.2614"></a>
-<span class="sourceLineNo">2615</span><a name="line.2615"></a>
-<span class="sourceLineNo">2616</span>  @Override<a name="line.2616"></a>
-<span class="sourceLineNo">2617</span>  public boolean isPrimaryReplicaStore() {<a name="line.2617"></a>
-<span class="sourceLineNo">2618</span>    return getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID;<a name="line.2618"></a>
-<span class="sourceLineNo">2619</span>  }<a name="line.2619"></a>
-<span class="sourceLineNo">2620</span><a name="line.2620"></a>
-<span class="sourceLineNo">2621</span>  /**<a name="line.2621"></a>
-<span class="sourceLineNo">2622</span>   * Sets the store up for a region level snapshot operation.<a name="line.2622"></a>
-<span class="sourceLineNo">2623</span>   * @see #postSnapshotOperation()<a name="line.2623"></a>
-<span class="sourceLineNo">2624</span>   */<a name="line.2624"></a>
-<span class="sourceLineNo">2625</span>  public void preSnapshotOperation() {<a name="line.2625"></a>
-<span class="sourceLineNo">2626</span>    archiveLock.lock();<a name="line.2626"></a>
-<span class="sourceLineNo">2627</span>  }<a name="line.2627"></a>
-<span class="sourceLineNo">2628</span><a name="line.2628"></a>
-<span class="sourceLineNo">2629</span>  /**<a name="line.2629"></a>
-<span class="sourceLineNo">2630</span>   * Perform tasks needed after the completion of snapshot operation.<a name="line.2630"></a>
-<span class="sourceLineNo">2631</span>   * @see #preSnapshotOperation()<a name="line.2631"></a>
-<span class="sourceLineNo">2632</span>   */<a name="line.2632"></a>
-<span class="sourceLineNo">2633</span>  public void postSnapshotOperation() {<a name="line.2633"></a>
-<span class="sourceLineNo">2634</span>    archiveLock.unlock();<a name="line.2634"></a>
+<span class="sourceLineNo">2611</span>  /**<a name="line.2611"></a>
+<span class="sourceLineNo">2612</span>   * {@inheritDoc}<a name="line.2612"></a>
+<span class="sourceLineNo">2613</span>   */<a name="line.2613"></a>
+<span class="sourceLineNo">2614</span>  @Override<a name="line.2614"></a>
+<span class="sourceLineNo">2615</span>  public void registerChildren(ConfigurationManager manager) {<a name="line.2615"></a>
+<span class="sourceLineNo">2616</span>    // No children to register<a name="line.2616"></a>
+<span class="sourceLineNo">2617</span>  }<a name="line.2617"></a>
+<span class="sourceLineNo">2618</span><a name="line.2618"></a>
+<span class="sourceLineNo">2619</span>  /**<a name="line.2619"></a>
+<span class="sourceLineNo">2620</span>   * {@inheritDoc}<a name="line.2620"></a>
+<span class="sourceLineNo">2621</span>   */<a name="line.2621"></a>
+<span class="sourceLineNo">2622</span>  @Override<a name="line.2622"></a>
+<span class="sourceLineNo">2623</span>  public void deregisterChildren(ConfigurationManager manager) {<a name="line.2623"></a>
+<span class="sourceLineNo">2624</span>    // No children to deregister<a name="line.2624"></a>
+<span class="sourceLineNo">2625</span>  }<a name="line.2625"></a>
+<span class="sourceLineNo">2626</span><a name="line.2626"></a>
+<span class="sourceLineNo">2627</span>  @Override<a name="line.2627"></a>
+<span class="sourceLineNo">2628</span>  public double getCompactionPressure() {<a name="line.2628"></a>
+<span class="sourceLineNo">2629</span>    return storeEngine.getStoreFileManager().getCompactionPressure();<a name="line.2629"></a>
+<span class="sourceLineNo">2630</span>  }<a name="line.2630"></a>
+<span class="sourceLineNo">2631</span><a name="line.2631"></a>
+<span class="sourceLineNo">2632</span>  @Override<a name="line.2632"></a>
+<span class="sourceLineNo">2633</span>  public boolean isPrimaryReplicaStore() {<a name="line.2633"></a>
+<span class="sourceLineNo">2634</span>    return getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID;<a name="line.2634"></a>
 <span class="sourceLineNo">2635</span>  }<a name="line.2635"></a>
 <span class="sourceLineNo">2636</span><a name="line.2636"></a>
 <span class="sourceLineNo">2637</span>  /**<a name="line.2637"></a>
-<span class="sourceLineNo">2638</span>   * Closes and archives the compacted files under this store<a name="line.2638"></a>
-<span class="sourceLineNo">2639</span>   */<a name="line.2639"></a>
-<span class="sourceLineNo">2640</span>  public synchronized void closeAndArchiveCompactedFiles() throws IOException {<a name="line.2640"></a>
-<span class="sourceLineNo">2641</span>    // ensure other threads do not attempt to archive the same files on close()<a name="line.2641"></a>
+<span class="sourceLineNo">2638</span>   * Sets the store up for a region level snapshot operation.<a name="line.2638"></a>
+<span class="sourceLineNo">2639</span>   * @see #postSnapshotOperation()<a name="line.2639"></a>
+<span class="sourceLineNo">2640</span>   */<a name="line.2640"></a>
+<span class="sourceLineNo">2641</span>  public void preSnapshotOperation() {<a name="line.2641"></a>
 <span class="sourceLineNo">2642</span>    archiveLock.lock();<a name="line.2642"></a>
-<span class="sourceLineNo">2643</span>    try {<a name="line.2643"></a>
-<span class="sourceLineNo">2644</span>      lock.readLock().lock();<a name="line.2644"></a>
-<span class="sourceLineNo">2645</span>      Collection&lt;HStoreFile&gt; copyCompactedfiles = null;<a name="line.2645"></a>
-<span class="sourceLineNo">2646</span>      try {<a name="line.2646"></a>
-<span class="sourceLineNo">2647</span>        Collection&lt;HStoreFile&gt; compactedfiles =<a name="line.2647"></a>
-<span class="sourceLineNo">2648</span>            this.getStoreEngine().getStoreFileManager().getCompactedfiles();<a name="line.2648"></a>
-<span class="sourceLineNo">2649</span>        if (CollectionUtils.isNotEmpty(compactedfiles)) {<a name="line.2649"></a>
-<span class="sourceLineNo">2650</span>          // Do a copy under read lock<a name="line.2650"></a>
-<span class="sourceLineNo">2651</span>          copyCompactedfiles = new ArrayList&lt;&gt;(compactedfiles);<a name="line.2651"></a>
-<span class="sourceLineNo">2652</span>        } else {<a name="line.2652"></a>
-<span class="sourceLineNo">2653</span>          LOG.trace("No compacted files to archive");<a name="line.2653"></a>
-<span class="sourceLineNo">2654</span>        }<a name="line.2654"></a>
-<span class="sourceLineNo">2655</span>      } finally {<a name="line.2655"></a>
-<span class="sourceLineNo">2656</span>        lock.readLock().unlock();<a name="line.2656"></a>
-<span class="sourceLineNo">2657</span>      }<a name="line.2657"></a>
-<span class="sourceLineNo">2658</span>      if (CollectionUtils.isNotEmpty(copyCompactedfiles)) {<a name="line.2658"></a>
-<span class="sourceLineNo">2659</span>        removeCompactedfiles(copyCompactedfiles);<a name="line.2659"></a>
-<span class="sourceLineNo">2660</span>      }<a name="line.2660"></a>
-<span class="sourceLineNo">2661</span>    } finally {<a name="line.2661"></a>
-<span class="sourceLineNo">2662</span>      archiveLock.unlock();<a name="line.2662"></a>
-<span class="sourceLineNo">2663</span>    }<a name="line.2663"></a>
-<span class="sourceLineNo">2664</span>  }<a name="line.2664"></a>
-<span class="sourceLineNo">2665</span><a name="line.2665"></a>
-<span class="sourceLineNo">2666</span>  /**<a name="line.2666"></a>
-<span class="sourceLineNo">2667</span>   * Archives and removes the compacted files<a name="line.2667"></a>
-<span class="sourceLineNo">2668</span>   * @param compactedfiles The compacted files in this store that are not active in reads<a name="line.2668"></a>
-<span class="sourceLineNo">2669</span>   */<a name="line.2669"></a>
-<span class="sourceLineNo">2670</span>  private void removeCompactedfiles(Collection&lt;HStoreFile&gt; compactedfiles)<a name="line.2670"></a>
-<span class="sourceLineNo">2671</span>      throws IOException {<a name="line.2671"></a>
-<span class="sourceLineNo">2672</span>    final List&lt;HStoreFile&gt; filesToRemove = new ArrayList&lt;&gt;(compactedfiles.size());<a name="line.2672"></a>
-<span class="sourceLineNo">2673</span>    final List&lt;Long&gt; storeFileSizes = new ArrayList&lt;&gt;(compactedfiles.size());<a name="line.2673"></a>
-<span class="sourceLineNo">2674</span>    for (final HStoreFile file : compactedfiles) {<a name="line.2674"></a>
-<span class="sourceLineNo">2675</span>      synchronized (file) {<a name="line.2675"></a>
-<span class="sourceLineNo">2676</span>        try {<a name="line.2676"></a>
-<span class="sourceLineNo">2677</span>          StoreFileReader r = file.getReader();<a name="line.2677"></a>
-<span class="sourceLineNo">2678</span>          if (r == null) {<a name="line.2678"></a>
-<span class="sourceLineNo">2679</span>            LOG.debug("The file {} was closed but still not archived", file);<a name="line.2679"></a>
-<span class="sourceLineNo">2680</span>            // HACK: Temporarily re-open the reader so we can get the size of the file. Ideally,<a name="line.2680"></a>
-<span class="sourceLineNo">2681</span>            // we should know the size of an HStoreFile without having to ask the HStoreFileReader<a name="line.2681"></a>
-<span class="sourceLineNo">2682</span>            // for that.<a name="line.2682"></a>
-<span class="sourceLineNo">2683</span>            long length = getStoreFileSize(file);<a name="line.2683"></a>
-<span class="sourceLineNo">2684</span>            filesToRemove.add(file);<a name="line.2684"></a>
-<span class="sourceLineNo">2685</span>            storeFileSizes.add(length);<a name="line.2685"></a>
-<span class="sourceLineNo">2686</span>            continue;<a name="line.2686"></a>
-<span class="sourceLineNo">2687</span>          }<a name="line.2687"></a>
-<span class="sourceLineNo">2688</span><a name="line.2688"></a>
-<span class="sourceLineNo">2689</span>          if (file.isCompactedAway() &amp;&amp; !file.isReferencedInReads()) {<a name="line.2689"></a>
-<span class="sourceLineNo">2690</span>            // Even if deleting fails we need not bother as any new scanners won't be<a name="line.2690"></a>
-<span class="sourceLineNo">2691</span>            // able to use the compacted file as the status is already compactedAway<a name="line.2691"></a>
-<span class="sourceLineNo">2692</span>            LOG.trace("Closing and archiving the file {}", file);<a name="line.2692"></a>
-<span class="sourceLineNo">2693</span>            // Copy the file size before closing the reader<a name="line.2693"></a>
-<span class="sourceLineNo">2694</span>            final long length = r.length();<a name="line.2694"></a>
-<span class="sourceLineNo">2695</span>            r.close(true);<a name="line.2695"></a>
-<span class="sourceLineNo">2696</span>            // Just close and return<a name="line.2696"></a>
-<span class="sourceLineNo">2697</span>            filesToRemove.add(file);<a name="line.2697"></a>
-<span class="sourceLineNo">2698</span>            // Only add the length if we successfully added the file to `filesToRemove`<a name="line.2698"></a>
-<span class="sourceLineNo">2699</span>            storeFileSizes.add(length);<a name="line.2699"></a>
-<span class="sourceLineNo">2700</span>          } else {<a name="line.2700"></a>
-<span class="sourceLineNo">2701</span>            LOG.info("Can't archive compacted file " + file.getPath()<a name="line.2701"></a>
-<span class="sourceLineNo">2702</span>                + " because of either isCompactedAway=" + file.isCompactedAway()<a name="line.2702"></a>
-<span class="sourceLineNo">2703</span>                + " or file has reference, isReferencedInReads=" + file.isReferencedInReads()<a name="line.2703"></a>
-<span class="sourceLineNo">2704</span>                + ", refCount=" + r.getRefCount() + ", skipping for now.");<a name="line.2704"></a>
-<span class="sourceLineNo">2705</span>          }<a name="line.2705"></a>
-<span class="sourceLineNo">2706</span>        } catch (Exception e) {<a name="line.2706"></a>
-<span class="sourceLineNo">2707</span>          LOG.error("Exception while trying to close the compacted store file {}", file.getPath(),<a name="line.2707"></a>
-<span class="sourceLineNo">2708</span>              e);<a name="line.2708"></a>
-<span class="sourceLineNo">2709</span>        }<a name="line.2709"></a>
-<span class="sourceLineNo">2710</span>      }<a name="line.2710"></a>
-<span class="sourceLineNo">2711</span>    }<a name="line.2711"></a>
-<span class="sourceLineNo">2712</span>    if (this.isPrimaryReplicaStore()) {<a name="line.2712"></a>
-<span class="sourceLineNo">2713</span>      // Only the primary region is allowed to move the file to archive.<a name="line.2713"></a>
-<span class="sourceLineNo">2714</span>      // The secondary region does not move the files to archive. Any active reads from<a name="line.2714"></a>
-<span class="sourceLineNo">2715</span>      // the secondary region will still work because the file as such has active readers on it.<a name="line.2715"></a>
-<span class="sourceLineNo">2716</span>      if (!filesToRemove.isEmpty()) {<a name="line.2716"></a>
-<span class="sourceLineNo">2717</span>        LOG.debug("Moving the files {} to archive", filesToRemove);<a name="line.2717"></a>
-<span class="sourceLineNo">2718</span>        // Only if this is successful it has to be removed<a name="line.2718"></a>
-<span class="sourceLineNo">2719</span>        try {<a name="line.2719"></a>
-<span class="sourceLineNo">2720</span>          this.fs.removeStoreFiles(this.getColumnFamilyDescriptor().getNameAsString(),<a name="line.2720"></a>
-<span class="sourceLineNo">2721</span>            filesToRemove);<a name="line.2721"></a>
-<span class="sourceLineNo">2722</span>        } catch (FailedArchiveException fae) {<a name="line.2722"></a>
-<span class="sourceLineNo">2723</span>          // Even if archiving some files failed, we still need to clear out any of the<a name="line.2723"></a>
-<span class="sourceLineNo">2724</span>          // files which were successfully archived.  Otherwise we will receive a<a name="line.2724"></a>
-<span class="sourceLineNo">2725</span>          // FileNotFoundException when we attempt to re-archive them in the next go around.<a name="line.2725"></a>
-<span class="sourceLineNo">2726</span>          Collection&lt;Path&gt; failedFiles = fae.getFailedFiles();<a name="line.2726"></a>
-<span class="sourceLineNo">2727</span>          Iterator&lt;HStoreFile&gt; iter = filesToRemove.iterator();<a name="line.2727"></a>
-<span class="sourceLineNo">2728</span>          Iterator&lt;Long&gt; sizeIter = storeFileSizes.iterator();<a name="line.2728"></a>
-<span class="sourceLineNo">2729</span>          while (iter.hasNext()) {<a name="line.2729"></a>
-<span class="sourceLineNo">2730</span>            sizeIter.next();<a name="line.2730"></a>
-<span class="sourceLineNo">2731</span>            if (failedFiles.contains(iter.next().getPath())) {<a name="line.2731"></a>
-<span class="sourceLineNo">2732</span>              iter.remove();<a name="line.2732"></a>
-<span class="sourceLineNo">2733</span>              sizeIter.remove();<a name="line.2733"></a>
-<span class="sourceLineNo">2734</span>            }<a name="line.2734"></a>
-<span class="sourceLineNo">2735</span>          }<a name="line.2735"></a>
-<span class="sourceLineNo">2736</span>          if (!filesToRemove.isEmpty()) {<a name="line.2736"></a>
-<span class="sourceLineNo">2737</span>            clearCompactedfiles(filesToRemove);<a name="line.2737"></a>
-<span class="sourceLineNo">2738</span>          }<a name="line.2738"></a>
-<span class="sourceLineNo">2739</span>          throw fae;<a name="line.2739"></a>
-<span class="sourceLineNo">2740</span>        }<a name="line.2740"></a>
-<span class="sourceLineNo">2741</span>      }<a name="line.2741"></a>
-<span class="sourceLineNo">2742</span>    }<a name="line.2742"></a>
-<span class="sourceLineNo">2743</span>    if (!filesToRemove.isEmpty()) {<a name="line.2743"></a>
-<span class="sourceLineNo">2744</span>      // Clear the compactedfiles from the store file manager<a name="line.2744"></a>
-<span class="sourceLineNo">2745</span>      clearCompactedfiles(filesToRemove);<a name="line.2745"></a>
-<span class="sourceLineNo">2746</span>      // Try to send report of this archival to the Master for updating quota usage faster<a name="line.2746"></a>
-<span class="sourceLineNo">2747</span>      reportArchivedFilesForQuota(filesToRemove, storeFileSizes);<a name="line.2747"></a>
-<span class="sourceLineNo">2748</span>    }<a name="line.2748"></a>
-<span class="sourceLineNo">2749</span>  }<a name="line.2749"></a>
-<span class="sourceLineNo">2750</span><a name="line.2750"></a>
-<span class="sourceLineNo">2751</span>  /**<a name="line.2751"></a>
-<span class="sourceLineNo">2752</span>   * Computes the length of a store file without succumbing to any errors along the way. If an<a name="line.2752"></a>
-<span class="sourceLineNo">2753</span>   * error is encountered, the implementation returns {@code 0} instead of the actual size.<a name="line.2753"></a>
-<span class="sourceLineNo">2754</span>   *<a name="line.2754"></a>
-<span class="sourceLineNo">2755</span>   * @param file The file to compute the size of.<a name="line.2755"></a>
-<span class="sourceLineNo">2756</span>   * @return The size in bytes of the provided {@code file}.<a name="line.2756"></a>
-<span class="sourceLineNo">2757</span>   */<a name="line.2757"></a>
-<span class="sourceLineNo">2758</span>  long getStoreFileSize(HStoreFile file) {<a name="line.2758"></a>
-<span class="sourceLineNo">2759</span>    long length = 0;<a name="line.2759"></a>
-<span class="sourceLineNo">2760</span>    try {<a name="line.2760"></a>
-<span class="sourceLineNo">2761</span>      file.initReader();<a name="line.2761"></a>
-<span class="sourceLineNo">2762</span>      length = file.getReader().length();<a name="line.2762"></a>
-<span class="sourceLineNo">2763</span>    } catch (IOException e) {<a name="line.2763"></a>
-<span class="sourceLineNo">2764</span>      LOG.trace("Failed to open reader when trying to compute store file size, ignoring", e);<a name="line.2764"></a>
-<span class="sourceLineNo">2765</span>    } finally {<a name="line.2765"></a>
-<span class="sourceLineNo">2766</span>      try {<a name="line.2766"></a>
-<span class="sourceLineNo">2767</span>        file.closeStoreFile(<a name="line.2767"></a>
-<span class="sourceLineNo">2768</span>            file.getCacheConf() != null ? file.getCacheConf().shouldEvictOnClose() : true);<a name="line.2768"></a>
-<span class="sourceLineNo">2769</span>      } catch (IOException e) {<a name="line.2769"></a>
-<span class="sourceLineNo">2770</span>        LOG.trace("Failed to close reader after computing store file size, ignoring", e);<a name="line.2770"></a>
-<span class="sourceLineNo">2771</span>      }<a name="line.2771"></a>
-<span class="sourceLineNo">2772</span>    }<a name="line.2772"></a>
-<span class="sourceLineNo">2773</span>    return length;<a name="line.2773"></a>
-<span class="sourceLineNo">2774</span>  }<a name="line.2774"></a>
-<span class="sourceLineNo">2775</span><a name="line.2775"></a>
-<span class="sourceLineNo">2776</span>  public Long preFlushSeqIDEstimation() {<a name="line.2776"></a>
-<span class="sourceLineNo">2777</span>    return memstore.preFlushSeqIDEstimation();<a name="line.2777"></a>
-<span class="sourceLineNo">2778</span>  }<a name="line.2778"></a>
-<span class="sourceLineNo">2779</span><a name="line.2779"></a>
-<span class="sourceLineNo">2780</span>  @Override<a name="line.2780"></a>
-<span class="sourceLineNo">2781</span>  public boolean isSloppyMemStore() {<a name="line.2781"></a>
-<span class="sourceLineNo">2782</span>    return this.memstore.isSloppy();<a name="line.2782"></a>
-<span class="sourceLineNo">2783</span>  }<a name="line.2783"></a>
-<span class="sourceLineNo">2784</span><a name="line.2784"></a>
-<span class="sourceLineNo">2785</span>  private void clearCompactedfiles(List&lt;HStoreFile&gt; filesToRemove) throws IOException {<a name="line.2785"></a>
-<span class="sourceLineNo">2786</span>    LOG.trace("Clearing the compacted file {} from this store", filesToRemove);<a name="line.2786"></a>
-<span class="sourceLineNo">2787</span>    try {<a name="line.2787"></a>
-<span class="sourceLineNo">2788</span>      lock.writeLock().lock();<a name="line.2788"></a>
-<span class="sourceLineNo">2789</span>      this.getStoreEngine().getStoreFileManager().removeCompactedFiles(filesToRemove);<a name="line.2789"></a>
-<span class="sourceLineNo">2790</span>    } finally {<a name="line.2790"></a>
-<span class="sourceLineNo">2791</span>      lock.writeLock().unlock();<a name="line.2791"></a>
-<span class="sourceLineNo">2792</span>    }<a name="line.2792"></a>
-<span class="sourceLineNo">2793</span>  }<a name="line.2793"></a>
-<span class="sourceLineNo">2794</span><a name="line.2794"></a>
-<span class="sourceLineNo">2795</span>  void reportArchivedFilesForQuota(List&lt;? extends StoreFile&gt; archivedFiles, List&lt;Long&gt; fileSizes) {<a name="line.2795"></a>
-<span class="sourceLineNo">2796</span>    // Sanity check from the caller<a name="line.2796"></a>
-<span class="sourceLineNo">2797</span>    if (archivedFiles.size() != fileSizes.size()) {<a name="line.2797"></a>
-<span class="sourceLineNo">2798</span>      throw new RuntimeException("Coding error: should never see lists of varying size");<a name="line.2798"></a>
-<span class="sourceLineNo">2799</span>    }<a name="line.2799"></a>
-<span class="sourceLineNo">2800</span>    RegionServerServices rss = this.region.getRegionServerServices();<a name="line.2800"></a>
-<span class="sourceLineNo">2801</span>    if (rss == null) {<a name="line.2801"></a>
-<span class="sourceLineNo">2802</span>      return;<a name="line.2802"></a>
-<span class="sourceLineNo">2803</span>    }<a name="line.2803"></a>
-<span class="sourceLineNo">2804</span>    List&lt;Entry&lt;String,Long&gt;&gt; filesWithSizes = new ArrayList&lt;&gt;(archivedFiles.size());<a name="line.2804"></a>
-<span class="sourceLineNo">2805</span>    Iterator&lt;Long&gt; fileSizeIter = fileSizes.iterator();<a name="line.2805"></a>
-<span class="sourceLineNo">2806</span>    for (StoreFile storeFile : archivedFiles) {<a name="line.2806"></a>
-<span class="sourceLineNo">2807</span>      final long fileSize = fileSizeIter.next();<a name="line.2807"></a>
-<span class="sourceLineNo">2808</span>      if (storeFile.isHFile() &amp;&amp; fileSize != 0) {<a name="line.2808"></a>
-<span class="sourceLineNo">2809</span>        filesWithSizes.add(Maps.immutableEntry(storeFile.getPath().getName(), fileSize));<a name="line.2809"></a>
-<span class="sourceLineNo">2810</span>      }<a name="line.2810"></a>
-<span class="sourceLineNo">2811</span>    }<a name="line.2811"></a>
-<span class="sourceLineNo">2812</span>    if (LOG.isTraceEnabled()) {<a name="line.2812"></a>
-<span class="sourceLineNo">2813</span>      LOG.trace("Files archived: " + archivedFiles + ", reporting the following to the Master: "<a name="line.2813"></a>
-<span class="sourceLineNo">2814</span>          + filesWithSizes);<a name="line.2814"></a>
+<span class="sourceLineNo">2643</span>  }<a name="line.2643"></a>
+<span class="sourceLineNo">2644</span><a name="line.2644"></a>
+<span class="sourceLineNo">2645</span>  /**<a name="line.2645"></a>
+<span class="sourceLineNo">2646</span>   * Perform tasks needed after the completion of snapshot operation.<a name="line.2646"></a>
+<span class="sourceLineNo">2647</span>   * @see #preSnapshotOperation()<a name="line.2647"></a>
+<span class="sourceLineNo">2648</span>   */<a name="line.2648"></a>
+<span class="sourceLineNo">2649</span>  public void postSnapshotOperation() {<a name="line.2649"></a>
+<span class="sourceLineNo">2650</span>    archiveLock.unlock();<a name="line.2650"></a>
+<span class="sourceLineNo">2651</span>  }<a name="line.2651"></a>
+<span class="sourceLineNo">2652</span><a name="line.2652"></a>
+<span class="sourceLineNo">2653</span>  /**<a name="line.2653"></a>
+<span class="sourceLineNo">2654</span>   * Closes and archives the compacted files under this store<a name="line.2654"></a>
+<span class="sourceLineNo">2655</span>   */<a name="line.2655"></a>
+<span class="sourceLineNo">2656</span>  public synchronized void closeAndArchiveCompactedFiles() throws IOException {<a name="line.2656"></a>
+<span class="sourceLineNo">2657</span>    // ensure other threads do not attempt to archive the same files on close()<a name="line.2657"></a>
+<span class="sourceLineNo">2658</span>    archiveLock.lock();<a name="line.2658"></a>
+<span class="sourceLineNo">2659</span>    try {<a name="line.2659"></a>
+<span class="sourceLineNo">2660</span>      lock.readLock().lock();<a name="line.2660"></a>
+<span class="sourceLineNo">2661</span>      Collection&lt;HStoreFile&gt; copyCompactedfiles = null;<a name="line.2661"></a>
+<span class="sourceLineNo">2662</span>      try {<a name="line.2662"></a>
+<span class="sourceLineNo">2663</span>        Collection&lt;HStoreFile&gt; compactedfiles =<a name="line.2663"></a>
+<span class="sourceLineNo">2664</span>            this.getStoreEngine().getStoreFileManager().getCompactedfiles();<a name="line.2664"></a>
+<span class="sourceLineNo">2665</span>        if (CollectionUtils.isNotEmpty(compactedfiles)) {<a name="line.2665"></a>
+<span class="sourceLineNo">2666</span>          // Do a copy under read lock<a name="line.2666"></a>
+<span class="sourceLineNo">2667</span>          copyCompactedfiles = new ArrayList&lt;&gt;(compactedfiles);<a name="line.2667"></a>
+<span class="sourceLineNo">2668</span>        } else {<a name="line.2668"></a>
+<span class="sourceLineNo">2669</span>          LOG.trace("No compacted files to archive");<a name="line.2669"></a>
+<span class="sourceLineNo">2670</span>        }<a name="line.2670"></a>
+<span class="sourceLineNo">2671</span>      } finally {<a name="line.2671"></a>
+<span class="sourceLineNo">2672</span>        lock.readLock().unlock();<a name="line.2672"></a>
+<span class="sourceLineNo">2673</span>      }<a name="line.2673"></a>
+<span class="sourceLineNo">2674</span>      if (CollectionUtils.isNotEmpty(copyCompactedfiles)) {<a name="line.2674"></a>
+<span class="sourceLineNo">2675</span>        removeCompactedfiles(copyCompactedfiles);<a name="line.2675"></a>
+<span class="sourceLineNo">2676</span>      }<a name="line.2676"></a>
+<span class="sourceLineNo">2677</span>    } finally {<a name="line.2677"></a>
+<span class="sourceLineNo">2678</span>      archiveLock.unlock();<a name="line.2678"></a>
+<span class="sourceLineNo">2679</span>    }<a name="line.2679"></a>
+<span class="sourceLineNo">2680</span>  }<a name="line.2680"></a>
+<span class="sourceLineNo">2681</span><a name="line.2681"></a>
+<span class="sourceLineNo">2682</span>  /**<a name="line.2682"></a>
+<span class="sourceLineNo">2683</span>   * Archives and removes the compacted files<a name="line.2683"></a>
+<span class="sourceLineNo">2684</span>   * @param compactedfiles The compacted files in this store that are not active in reads<a name="line.2684"></a>
+<span class="sourceLineNo">2685</span>   */<a name="line.2685"></a>
+<span class="sourceLineNo">2686</span>  private void removeCompactedfiles(Collection&lt;HStoreFile&gt; compactedfiles)<a name="line.2686"></a>
+<span class="sourceLineNo">2687</span>      throws IOException {<a name="line.2687"></a>
+<span class="sourceLineNo">2688</span>    final List&lt;HStoreFile&gt; filesToRemove = new ArrayList&lt;&gt;(compactedfiles.size());<a name="line.2688"></a>
+<span class="sourceLineNo">2689</span>    final List&lt;Long&gt; storeFileSizes = new ArrayList&lt;&gt;(compactedfiles.size());<a name="line.2689"></a>
+<span class="sourceLineNo">2690</span>    for (final HStoreFile file : compactedfiles) {<a name="line.2690"></a>
+<span class="sourceLineNo">2691</span>      synchronized (file) {<a name="line.2691"></a>
+<span class="sourceLineNo">2692</span>        try {<a name="line.2692"></a>
+<span class="sourceLineNo">2693</span>          StoreFileReader r = file.getReader();<a name="line.2693"></a>
+<span class="sourceLineNo">2694</span>          if (r == null) {<a name="line.2694"></a>
+<span class="sourceLineNo">2695</span>            LOG.debug("The file {} was closed but still not archived", file);<a name="line.2695"></a>
+<span class="sourceLineNo">2696</span>            // HACK: Temporarily re-open the reader so we can get the size of the file. Ideally,<a name="line.2696"></a>
+<span class="sourceLineNo">2697</span>            // we should know the size of an HStoreFile without having to ask the HStoreFileReader<a name="line.2697"></a>
+<span class="sourceLineNo">2698</span>            // for that.<a name="line.2698"></a>
+<span class="sourceLineNo">2699</span>            long length = getStoreFileSize(file);<a name="line.2699"></a>
+<span class="sourceLineNo">2700</span>            filesToRemove.add(file);<a name="line.2700"></a>
+<span class="sourceLineNo">2701</span>            storeFileSizes.add(length);<a name="line.2701"></a>
+<span class="sourceLineNo">2702</span>            continue;<a name="line.2702"></a>
+<span class="sourceLineNo">2703</span>          }<a name="line.2703"></a>
+<span class="sourceLineNo">2704</span><a name="line.2704"></a>
+<span class="sourceLineNo">2705</span>          if (file.isCompactedAway() &amp;&amp; !file.isReferencedInReads()) {<a name="line.2705"></a>
+<span class="sourceLineNo">2706</span>            // Even if deleting fails we need not bother as any new scanners won't be<a name="line.2706"></a>
+<span class="sourceLineNo">2707</span>            // able to use the compacted file as the status is already compactedAway<a name="line.2707"></a>
+<span class="sourceLineNo">2708</span>            LOG.trace("Closing and archiving the file {}", file);<a name="line.2708"></a>
+<span class="sourceLineNo">2709</span>            // Copy the file size before closing the reader<a name="line.2709"></a>
+<span class="sourceLineNo">2710</span>            final long length = r.length();<a name="line.2710"></a>
+<span class="sourceLineNo">2711</span>            r.close(true);<a name="line.2711"></a>
+<span class="sourceLineNo">2712</span>            // Just close and return<a name="line.2712"></a>
+<span class="sourceLineNo">2713</span>            filesToRemove.add(file);<a name="line.2713"></a>
+<span class="sourceLineNo">2714</span>            // Only add the length if we successfully added the file to `filesToRemove`<a name="line.2714"></a>
+<span class="sourceLineNo">2715</span>            storeFileSizes.add(length);<a name="line.2715"></a>
+<span class="sourceLineNo">2716</span>          } else {<a name="line.2716"></a>
+<span class="sourceLineNo">2717</span>            LOG.info("Can't archive compacted file " + file.getPath()<a name="line.2717"></a>
+<span class="sourceLineNo">2718</span>                + " because of either isCompactedAway=" + file.isCompactedAway()<a name="line.2718"></a>
+<span class="sourceLineNo">2719</span>                + " or file has reference, isReferencedInReads=" + file.isReferencedInReads()<a name="line.2719"></a>
+<span class="sourceLineNo">2720</span>                + ", refCount=" + r.getRefCount() + ", skipping for now.");<a name="line.2720"></a>
+<span class="sourceLineNo">2721</span>          }<a name="line.2721"></a>
+<span class="sourceLineNo">2722</span>        } catch (Exception e) {<a name="line.2722"></a>
+<span class="sourceLineNo">2723</span>          LOG.error("Exception while trying to close the compacted store file {}", file.getPath(),<a name="line.2723"></a>
+<span class="sourceLineNo">2724</span>              e);<a name="line.2724"></a>
+<span class="sourceLineNo">2725</span>        }<a name="line.2725"></a>
+<span class="sourceLineNo">2726</span>      }<a name="line.2726"></a>
+<span class="sourceLineNo">2727</span>    }<a name="line.2727"></a>
+<span class="sourceLineNo">2728</span>    if (this.isPrimaryReplicaStore()) {<a name="line.2728"></a>
+<span class="sourceLineNo">2729</span>      // Only the primary region is allowed to move the file to archive.<a name="line.2729"></a>
+<span class="sourceLineNo">2730</span>      // The secondary region does not move the files to archive. Any active reads from<a name="line.2730"></a>
+<span class="sourceLineNo">2731</span>      // the secondary region will still work because the file as such has active readers on it.<a name="line.2731"></a>
+<span class="sourceLineNo">2732</span>      if (!filesToRemove.isEmpty()) {<a name="line.2732"></a>
+<span class="sourceLineNo">2733</span>        LOG.debug("Moving the files {} to archive", filesToRemove);<a name="line.2733"></a>
+<span class="sourceLineNo">2734</span>        // Only if this is successful it has to be removed<a name="line.2734"></a>
+<span class="sourceLineNo">2735</span>        try {<a name="line.2735"></a>
+<span class="sourceLineNo">2736</span>          this.fs.removeStoreFiles(this.getColumnFamilyDescriptor().getNameAsString(),<a name="line.2736"></a>
+<span class="sourceLineNo">2737</span>            filesToRemove);<a name="line.2737"></a>
+<span class="sourceLineNo">2738</span>        } catch (FailedArchiveException fae) {<a name="line.2738"></a>
+<span class="sourceLineNo">2739</span>          // Even if archiving some files failed, we still need to clear out any of the<a name="line.2739"></a>
+<span class="sourceLineNo">2740</span>          // files which were successfully archived.  Otherwise we will receive a<a name="line.2740"></a>
+<span class="sourceLineNo">2741</span>          // FileNotFoundException when we attempt to re-archive them in the next go around.<a name="line.2741"></a>
+<span class="sourceLineNo">2742</span>          Collection&lt;Path&gt; failedFiles = fae.getFailedFiles();<a name="line.2742"></a>
+<span class="sourceLineNo">2743</span>          Iterator&lt;HStoreFile&gt; iter = filesToRemove.iterator();<a name="line.2743"></a>
+<span class="sourceLineNo">2744</span>          Iterator&lt;Long&gt; sizeIter = storeFileSizes.iterator();<a name="line.2744"></a>
+<span class="sourceLineNo">2745</span>          while (iter.hasNext()) {<a name="line.2745"></a>
+<span class="sourceLineNo">2746</span>            sizeIter.next();<a name="line.2746"></a>
+<span class="sourceLineNo">2747</span>            if (failedFiles.contains(iter.next().getPath())) {<a name="line.2747"></a>
+<span class="sourceLineNo">2748</span>              iter.remove();<a name="line.2748"></a>
+<span class="sourceLineNo">2749</span>              sizeIter.remove();<a name="line.2749"></a>
+<span class="sourceLineNo">2750</span>            }<a name="line.2750"></a>
+<span class="sourceLineNo">2751</span>          }<a name="line.2751"></a>
+<span class="sourceLineNo">2752</span>          if (!filesToRemove.isEmpty()) {<a name="line.2752"></a>
+<span class="sourceLineNo">2753</span>            clearCompactedfiles(filesToRemove);<a name="line.2753"></a>
+<span class="sourceLineNo">2754</span>          }<a name="line.2754"></a>
+<span class="sourceLineNo">2755</span>          throw fae;<a name="line.2755"></a>
+<span class="sourceLineNo">2756</span>        }<a name="line.2756"></a>
+<span class="sourceLineNo">2757</span>      }<a name="line.2757"></a>
+<span class="sourceLineNo">2758</span>    }<a name="line.2758"></a>
+<span class="sourceLineNo">2759</span>    if (!filesToRemove.isEmpty()) {<a name="line.2759"></a>
+<span class="sourceLineNo">2760</span>      // Clear the compactedfiles from the store file manager<a name="line.2760"></a>
+<span class="sourceLineNo">2761</span>      clearCompactedfiles(filesToRemove);<a name="line.2761"></a>
+<span class="sourceLineNo">2762</span>      // Try to send report of this archival to the Master for updating quota usage faster<a name="line.2762"></a>
+<span class="sourceLineNo">2763</span>      reportArchivedFilesForQuota(filesToRemove, storeFileSizes);<a name="line.2763"></a>
+<span class="sourceLineNo">2764</span>    }<a name="line.2764"></a>
+<span class="sourceLineNo">2765</span>  }<a name="line.2765"></a>
+<span class="sourceLineNo">2766</span><a name="line.2766"></a>
+<span class="sourceLineNo">2767</span>  /**<a name="line.2767"></a>
+<span class="sourceLineNo">2768</span>   * Computes the length of a store file without succumbing to any errors along the way. If an<a name="line.2768"></a>
+<span class="sourceLineNo">2769</span>   * error is encountered, the implementation returns {@code 0} instead of the actual size.<a name="line.2769"></a>
+<span class="sourceLineNo">2770</span>   *<a name="line.2770"></a>
+<span class="sourceLineNo">2771</span>   * @param file The file to compute the size of.<a name="line.2771"></a>
+<span class="sourceLineNo">2772</span>   * @return The size in bytes of the provided {@code file}.<a name="line.2772"></a>
+<span class="sourceLineNo">2773</span>   */<a name="line.2773"></a>
+<span class="sourceLineNo">2774</span>  long getStoreFileSize(HStoreFile file) {<a name="line.2774"></a>
+<span class="sourceLineNo">2775</span>    long length = 0;<a name="line.2775"></a>
+<span class="sourceLineNo">2776</span>    try {<a name="line.2776"></a>
+<span class="sourceLineNo">2777</span>      file.initReader();<a name="line.2777"></a>
+<span class="sourceLineNo">2778</span>      length = file.getReader().length();<a name="line.2778"></a>
+<span class="sourceLineNo">2779</span>    } catch (IOException e) {<a name="line.2779"></a>
+<span class="sourceLineNo">2780</span>      LOG.trace("Failed to open reader when trying to compute store file size, ignoring", e);<a name="line.2780"></a>
+<span class="sourceLineNo">2781</span>    } finally {<a name="line.2781"></a>
+<span class="sourceLineNo">2782</span>      try {<a name="line.2782"></a>
+<span class="sourceLineNo">2783</span>        file.closeStoreFile(<a name="line.2783"></a>
+<span class="sourceLineNo">2784</span>            file.getCacheConf() != null ? file.getCacheConf().shouldEvictOnClose() : true);<a name="line.2784"></a>
+<span class="sourceLineNo">2785</span>      } catch (IOException e) {<a name="line.2785"></a>
+<span class="sourceLineNo">2786</span>        LOG.trace("Failed to close reader after computing store file size, ignoring", e);<a name="line.2786"></a>
+<span class="sourceLineNo">2787</span>      }<a name="line.2787"></a>
+<span class="sourceLineNo">2788</span>    }<a name="line.2788"></a>
+<span class="sourceLineNo">2789</span>    return length;<a name="line.2789"></a>
+<span class="sourceLineNo">2790</span>  }<a name="line.2790"></a>
+<span class="sourceLineNo">2791</span><a name="line.2791"></a>
+<span class="sourceLineNo">2792</span>  public Long preFlushSeqIDEstimation() {<a name="line.2792"></a>
+<span class="sourceLineNo">2793</span>    return memstore.preFlushSeqIDEstimation();<a name="line.2793"></a>
+<span class="sourceLineNo">2794</span>  }<a name="line.2794"></a>
+<span class="sourceLineNo">2795</span><a name="line.2795"></a>
+<span class="sourceLineNo">2796</span>  @Override<a name="line.2796"></a>
+<span class="sourceLineNo">2797</span>  public boolean isSloppyMemStore() {<a name="line.2797"></a>
+<span class="sourceLineNo">2798</span>    return this.memstore.isSloppy();<a name="line.2798"></a>
+<span class="sourceLineNo">2799</span>  }<a name="line.2799"></a>
+<span class="sourceLineNo">2800</span><a name="line.2800"></a>
+<span class="sourceLineNo">2801</span>  private void clearCompactedfiles(List&lt;HStoreFile&gt; filesToRemove) throws IOException {<a name="line.2801"></a>
+<span class="sourceLineNo">2802</span>    LOG.trace("Clearing the compacted file {} from this store", filesToRemove);<a name="line.2802"></a>
+<span class="sourceLineNo">2803</span>    try {<a name="line.2803"></a>
+<span class="sourceLineNo">2804</span>      lock.writeLock().lock();<a name="line.2804"></a>
+<span class="sourceLineNo">2805</span>      this.getStoreEngine().getStoreFileManager().removeCompactedFiles(filesToRemove);<a name="line.2805"></a>
+<span class="sourceLineNo">2806</span>    } finally {<a name="line.2806"></a>
+<span class="sourceLineNo">2807</span>      lock.writeLock().unlock();<a name="line.2807"></a>
+<span class="sourceLineNo">2808</span>    }<a name="line.2808"></a>
+<span class="sourceLineNo">2809</span>  }<a name="line.2809"></a>
+<span class="sourceLineNo">2810</span><a name="line.2810"></a>
+<span class="sourceLineNo">2811</span>  void reportArchivedFilesForQuota(List&lt;? extends StoreFile&gt; archivedFiles, List&lt;Long&gt; fileSizes) {<a name="line.2811"></a>
+<span class="sourceLineNo">2812</span>    // Sanity check from the caller<a name="line.2812"></a>
+<span class="sourceLineNo">2813</span>    if (archivedFiles.size() != fileSizes.size()) {<a name="line.2813"></a>
+<span class="sourceLineNo">2814</span>      throw new RuntimeException("Coding error: should never see lists of varying size");<a name="line.2814"></a>
 <span class="sourceLineNo">2815</span>    }<a name="line.2815"></a>
-<span class="sourceLineNo">2816</span>    boolean success = rss.reportFileArchivalForQuotas(getTableName(), filesWithSizes);<a name="line.2816"></a>
-<span class="sourceLineNo">2817</span>    if (!success) {<a name="line.2817"></a>
-<span class="sourceLineNo">2818</span>      LOG.warn("Failed to report archival of files: " + filesWithSizes);<a name="line.2818"></a>
+<span class="sourceLineNo">2816</span>    RegionServerServices rss = this.region.getRegionServerServices();<a name="line.2816"></a>
+<span class="sourceLineNo">2817</span>    if (rss == null) {<a name="line.2817"></a>
+<span class="sourceLineNo">2818</span>      return;<a name="line.2818"></a>
 <span class="sourceLineNo">2819</span>    }<a name="line.2819"></a>
-<span class="sourceLineNo">2820</span>  }<a name="line.2820"></a>
-<span class="sourceLineNo">2821</span><a name="line.2821"></a>
-<span class="sourceLineNo">2822</span>  @Override<a name="line.2822"></a>
-<span class="sourceLineNo">2823</span>  public int getCurrentParallelPutCount() {<a name="line.2823"></a>
-<span class="sourceLineNo">2824</span>    return currentParallelPutCount.get();<a name="line.2824"></a>
-<span class="sourceLineNo">2825</span>  }<a name="line.2825"></a>
-<span class="sourceLineNo">2826</span><a name="line.2826"></a>
-<span class="sourceLineNo">2827</span>  public int getStoreRefCount() {<a name="line.2827"></a>
-<span class="sourceLineNo">2828</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2828"></a>
-<span class="sourceLineNo">2829</span>      .filter(sf -&gt; sf.getReader() != null).filter(HStoreFile::isHFile)<a name="line.2829"></a>
-<span class="sourceLineNo">2830</span>      .mapToInt(HStoreFile::getRefCount).sum();<a name="line.2830"></a>
-<span class="sourceLineNo">2831</span>  }<a name="line.2831"></a>
-<span class="sourceLineNo">2832</span><a name="line.2832"></a>
-<span class="sourceLineNo">2833</span>  /**<a name="line.2833"></a>
-<span class="sourceLineNo">2834</span>   * @return get maximum ref count of storeFile among all compacted HStore Files<a name="line.2834"></a>
-<span class="sourceLineNo">2835</span>   *   for the HStore<a name="line.2835"></a>
-<span class="sourceLineNo">2836</span>   */<a name="line.2836"></a>
-<span class="sourceLineNo">2837</span>  public int getMaxCompactedStoreFileRefCount() {<a name="line.2837"></a>
-<span class="sourceLineNo">2838</span>    OptionalInt maxCompactedStoreFileRefCount = this.storeEngine.getStoreFileManager()<a name="line.2838"></a>
-<span class="sourceLineNo">2839</span>      .getCompactedfiles()<a name="line.2839"></a>
-<span class="sourceLineNo">2840</span>      .stream()<a name="line.2840"></a>
-<span class="sourceLineNo">2841</span>      .filter(sf -&gt; sf.getReader() != null)<a name="line.2841"></a>
-<span class="sourceLineNo">2842</span>      .filter(HStoreFile::isHFile)<a name="line.2842"></a>
-<span class="sourceLineNo">2843</span>      .mapToInt(HStoreFile::getRefCount)<a name="line.2843"></a>
-<span class="sourceLineNo">2844</span>      .max();<a name="line.2844"></a>
-<span class="sourceLineNo">2845</span>    return maxCompactedStoreFileRefCount.isPresent()<a name="line.2845"></a>
-<span class="sourceLineNo">2846</span>      ? maxCompactedStoreFileRefCount.getAsInt() : 0;<a name="line.2846"></a>
+<span class="sourceLineNo">2820</span>    List&lt;Entry&lt;String,Long&gt;&gt; filesWithSizes = new ArrayList&lt;&gt;(archivedFiles.size());<a name="line.2820"></a>
+<span class="sourceLineNo">2821</span>    Iterator&lt;Long&gt; fileSizeIter = fileSizes.iterator();<a name="line.2821"></a>
+<span class="sourceLineNo">2822</span>    for (StoreFile storeFile : archivedFiles) {<a name="line.2822"></a>
+<span class="sourceLineNo">2823</span>      final long fileSize = fileSizeIter.next();<a name="line.2823"></a>
+<span class="sourceLineNo">2824</span>      if (storeFile.isHFile() &amp;&amp; fileSize != 0) {<a name="line.2824"></a>
+<span class="sourceLineNo">2825</span>        filesWithSizes.add(Maps.immutableEntry(storeFile.getPath().getName(), fileSize));<a name="line.2825"></a>
+<span class="sourceLineNo">2826</span>      }<a name="line.2826"></a>
+<span class="sourceLineNo">2827</span>    }<a name="line.2827"></a>
+<span class="sourceLineNo">2828</span>    if (LOG.isTraceEnabled()) {<a name="line.2828"></a>
+<span class="sourceLineNo">2829</span>      LOG.trace("Files archived: " + archivedFiles + ", reporting the following to the Master: "<a name="line.2829"></a>
+<span class="sourceLineNo">2830</span>          + filesWithSizes);<a name="line.2830"></a>
+<span class="sourceLineNo">2831</span>    }<a name="line.2831"></a>
+<span class="sourceLineNo">2832</span>    boolean success = rss.reportFileArchivalForQuotas(getTableName(), filesWithSizes);<a name="line.2832"></a>
+<span class="sourceLineNo">2833</span>    if (!success) {<a name="line.2833"></a>
+<span class="sourceLineNo">2834</span>      LOG.warn("Failed to report archival of files: " + filesWithSizes);<a name="line.2834"></a>
+<span class="sourceLineNo">2835</span>    }<a name="line.2835"></a>
+<span class="sourceLineNo">2836</span>  }<a name="line.2836"></a>
+<span class="sourceLineNo">2837</span><a name="line.2837"></a>
+<span class="sourceLineNo">2838</span>  @Override<a name="line.2838"></a>
+<span class="sourceLineNo">2839</span>  public int getCurrentParallelPutCount() {<a name="line.2839"></a>
+<span class="sourceLineNo">2840</span>    return currentParallelPutCount.get();<a name="line.2840"></a>
+<span class="sourceLineNo">2841</span>  }<a name="line.2841"></a>
+<span class="sourceLineNo">2842</span><a name="line.2842"></a>
+<span class="sourceLineNo">2843</span>  public int getStoreRefCount() {<a name="line.2843"></a>
+<span class="sourceLineNo">2844</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2844"></a>
+<span class="sourceLineNo">2845</span>      .filter(sf -&gt; sf.getReader() != null).filter(HStoreFile::isHFile)<a name="line.2845"></a>
+<span class="sourceLineNo">2846</span>      .mapToInt(HStoreFile::getRefCount).sum();<a name="line.2846"></a>
 <span class="sourceLineNo">2847</span>  }<a name="line.2847"></a>
 <span class="sourceLineNo">2848</span><a name="line.2848"></a>
-<span class="sourceLineNo">2849</span>}<a name="line.2849"></a>
+<span class="sourceLineNo">2849</span>  /**<a name="line.2849"></a>
+<span class="sourceLineNo">2850</span>   * @return get maximum ref count of storeFile among all compacted HStore Files<a name="line.2850"></a>
+<span class="sourceLineNo">2851</span>   *   for the HStore<a name="line.2851"></a>
+<span class="sourceLineNo">2852</span>   */<a name="line.2852"></a>
+<span class="sourceLineNo">2853</span>  public int getMaxCompactedStoreFileRefCount() {<a name="line.2853"></a>
+<span class="sourceLineNo">2854</span>    OptionalInt maxCompactedStoreFileRefCount = this.storeEngine.getStoreFileManager()<a name="line.2854"></a>
+<span class="sourceLineNo">2855</span>      .getCompactedfiles()<a name="line.2855"></a>
+<span class="sourceLineNo">2856</span>      .stream()<a name="line.2856"></a>
+<span class="sourceLineNo">2857</span>      .filter(sf -&gt; sf.getReader() != null)<a name="line.2857"></a>
+<span class="sourceLineNo">2858</span>      .filter(HStoreFile::isHFile)<a name="line.2858"></a>
+<span class="sourceLineNo">2859</span>      .mapToInt(HStoreFile::getRefCount)<a name="line.2859"></a>
+<span class="sourceLineNo">2860</span>      .max();<a name="line.2860"></a>
+<span class="sourceLineNo">2861</span>    return maxCompactedStoreFileRefCount.isPresent()<a name="line.2861"></a>
+<span class="sourceLineNo">2862</span>      ? maxCompactedStoreFileRefCount.getAsInt() : 0;<a name="line.2862"></a>
+<span class="sourceLineNo">2863</span>  }<a name="line.2863"></a>
+<span class="sourceLineNo">2864</span><a name="line.2864"></a>
+<span class="sourceLineNo">2865</span>}<a name="line.2865"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
index b73c964..aae2eca 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -1118,1743 +1118,1759 @@
 <span class="sourceLineNo">1110</span>    return sf;<a name="line.1110"></a>
 <span class="sourceLineNo">1111</span>  }<a name="line.1111"></a>
 <span class="sourceLineNo">1112</span><a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>  /**<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span>   * @param compression Compression algorithm to use<a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>   * @param isCompaction whether we are creating a new file in a compaction<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>   * @param includeMVCCReadpoint - whether to include MVCC or not<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>   * @param includesTag - includesTag or not<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>   * @return Writer for a new StoreFile in the tmp dir.<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>   */<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>  // TODO : allow the Writer factory to create Writers of ShipperListener type only in case of<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>  // compaction<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span>  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,<a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>      boolean shouldDropBehind) throws IOException {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>    final CacheConfig writerCacheConf;<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>    if (isCompaction) {<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      // Don't cache data on write on compactions, unless specifically configured to do so<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      writerCacheConf = new CacheConfig(cacheConf);<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      final boolean cacheCompactedBlocksOnWrite =<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>        cacheConf.shouldCacheCompactedBlocksOnWrite();<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span>      // if data blocks are to be cached on write<a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>      // during compaction, we should forcefully<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>      // cache index and bloom blocks as well<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span>      if (cacheCompactedBlocksOnWrite) {<a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>        if (!cacheOnWriteLogged) {<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>          LOG.info("For Store {} , cacheCompactedBlocksOnWrite is true, hence enabled " +<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>              "cacheOnWrite for Data blocks, Index blocks and Bloom filter blocks",<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>            getColumnFamilyName());<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>          cacheOnWriteLogged = true;<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>        }<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>      } else {<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>        writerCacheConf.setCacheDataOnWrite(false);<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>      }<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    } else {<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>      writerCacheConf = cacheConf;<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>      final boolean shouldCacheDataOnWrite = cacheConf.shouldCacheDataOnWrite();<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>      if (shouldCacheDataOnWrite) {<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>        if (!cacheOnWriteLogged) {<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>          LOG.info("For Store {} , cacheDataOnWrite is true, hence enabled cacheOnWrite for " +<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>            "Index blocks and Bloom filter blocks", getColumnFamilyName());<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>          cacheOnWriteLogged = true;<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>        }<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      }<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span>    }<a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>    InetSocketAddress[] favoredNodes = null;<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>    if (region.getRegionServerServices() != null) {<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>      favoredNodes = region.getRegionServerServices().getFavoredNodesForRegion(<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>          region.getRegionInfo().getEncodedName());<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span>    }<a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>    HFileContext hFileContext = createFileContext(compression, includeMVCCReadpoint, includesTag,<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      cryptoContext);<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>    Path familyTempDir = new Path(fs.getTempDir(), family.getNameAsString());<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span>    StoreFileWriter.Builder builder = new StoreFileWriter.Builder(conf, writerCacheConf,<a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>        this.getFileSystem())<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>            .withOutputDir(familyTempDir)<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>            .withBloomType(family.getBloomFilterType())<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>            .withMaxKeyCount(maxKeyCount)<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>            .withFavoredNodes(favoredNodes)<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>            .withFileContext(hFileContext)<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>            .withShouldDropCacheBehind(shouldDropBehind)<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>            .withCompactedFilesSupplier(this::getCompactedFiles);<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>    return builder.build();<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>  }<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span><a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>  private HFileContext createFileContext(Compression.Algorithm compression,<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>      boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>    if (compression == null) {<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>      compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span>    }<a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>    HFileContext hFileContext = new HFileContextBuilder()<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>                                .withIncludesMvcc(includeMVCCReadpoint)<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span>                                .withIncludesTags(includesTag)<a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>                                .withCompression(compression)<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>                                .withCompressTags(family.isCompressTags())<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>                                .withChecksumType(checksumType)<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>                                .withBytesPerCheckSum(bytesPerChecksum)<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>                                .withBlockSize(blocksize)<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>                                .withHBaseCheckSum(true)<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span>                                .withDataBlockEncoding(family.getDataBlockEncoding())<a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>                                .withEncryptionContext(cryptoContext)<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>                                .withCreateTime(EnvironmentEdgeManager.currentTime())<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>                                .withColumnFamily(family.getName())<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>                                .withTableName(region.getTableDescriptor()<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>                                    .getTableName().getName())<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>                                .withCellComparator(this.comparator)<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>                                .build();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    return hFileContext;<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>  }<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span><a name="line.1201"></a>
-<span class="sourceLineNo">1202</span><a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>  private long getTotalSize(Collection&lt;HStoreFile&gt; sfs) {<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    return sfs.stream().mapToLong(sf -&gt; sf.getReader().length()).sum();<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span>  }<a name="line.1205"></a>
-<span class="sourceLineNo">1206</span><a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>  /**<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>   * Change storeFiles adding into place the Reader produced by this new flush.<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>   * @param sfs Store files<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>   * @return Whether compaction is required.<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span>   */<a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>  private boolean updateStorefiles(List&lt;HStoreFile&gt; sfs, long snapshotId) throws IOException {<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>    this.lock.writeLock().lock();<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>    try {<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span>      this.storeEngine.getStoreFileManager().insertNewFiles(sfs);<a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      if (snapshotId &gt; 0) {<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>        this.memstore.clearSnapshot(snapshotId);<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span>      }<a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>    } finally {<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // We need the lock, as long as we are updating the storeFiles<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // or changing the memstore. Let us release it before calling<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      // notifyChangeReadersObservers. See HBASE-4485 for a possible<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span>      // deadlock scenario that could have happened if continue to hold<a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      // the lock.<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>      this.lock.writeLock().unlock();<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>    }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span>    // notify to be called here - only in case of flushes<a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>    notifyChangedReadersObservers(sfs);<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>    if (LOG.isTraceEnabled()) {<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      long totalSize = getTotalSize(sfs);<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      String traceMessage = "FLUSH time,count,size,store size,store files ["<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>          + EnvironmentEdgeManager.currentTime() + "," + sfs.size() + "," + totalSize<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>          + "," + storeSize + "," + storeEngine.getStoreFileManager().getStorefileCount() + "]";<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>      LOG.trace(traceMessage);<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>    }<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>    return needsCompaction();<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span>  }<a name="line.1237"></a>
-<span class="sourceLineNo">1238</span><a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>  /**<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>   * Notify all observers that set of Readers has changed.<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>   */<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  private void notifyChangedReadersObservers(List&lt;HStoreFile&gt; sfs) throws IOException {<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span>    for (ChangedReadersObserver o : this.changedReaderObservers) {<a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>      List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>      this.lock.readLock().lock();<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>      try {<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>        memStoreScanners = this.memstore.getScanners(o.getReadPoint());<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>      } finally {<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>        this.lock.readLock().unlock();<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>      }<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>      o.updateReaders(sfs, memStoreScanners);<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>    }<a name="line.1252"></a>
+<span class="sourceLineNo">1113</span>  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,<a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    boolean shouldDropBehind) throws IOException {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>    return createWriterInTmp(maxKeyCount, compression, isCompaction, includeMVCCReadpoint,<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      includesTag, shouldDropBehind, -1);<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>  }<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span><a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>  /**<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span>   * @param compression Compression algorithm to use<a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>   * @param isCompaction whether we are creating a new file in a compaction<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>   * @param includeMVCCReadpoint - whether to include MVCC or not<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>   * @param includesTag - includesTag or not<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>   * @return Writer for a new StoreFile in the tmp dir.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>   */<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>  // TODO : allow the Writer factory to create Writers of ShipperListener type only in case of<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>  // compaction<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span>      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,<a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>      boolean shouldDropBehind, long totalCompactedFilesSize) throws IOException {<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>    // creating new cache config for each new writer<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span>    final CacheConfig writerCacheConf = new CacheConfig(cacheConf);<a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>    if (isCompaction) {<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>      // Don't cache data on write on compactions, unless specifically configured to do so<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>      // Cache only when total file size remains lower than configured threshold<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>      final boolean cacheCompactedBlocksOnWrite =<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>        cacheConf.shouldCacheCompactedBlocksOnWrite();<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>      // if data blocks are to be cached on write<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>      // during compaction, we should forcefully<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>      // cache index and bloom blocks as well<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>      if (cacheCompactedBlocksOnWrite &amp;&amp; totalCompactedFilesSize &lt;= cacheConf<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>        .getCacheCompactedBlocksOnWriteThreshold()) {<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>        if (!cacheOnWriteLogged) {<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>          LOG.info("For Store {} , cacheCompactedBlocksOnWrite is true, hence enabled " +<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>              "cacheOnWrite for Data blocks, Index blocks and Bloom filter blocks",<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>            getColumnFamilyName());<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>          cacheOnWriteLogged = true;<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>        }<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>      } else {<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>        writerCacheConf.setCacheDataOnWrite(false);<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>        if (totalCompactedFilesSize &gt; cacheConf.getCacheCompactedBlocksOnWriteThreshold()) {<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>          // checking condition once again for logging<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span>          LOG.debug(<a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>            "For Store {}, setting cacheCompactedBlocksOnWrite as false as total size of compacted "<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>              + "files - {}, is greater than cacheCompactedBlocksOnWriteThreshold - {}",<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>            getColumnFamilyName(), totalCompactedFilesSize,<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>            cacheConf.getCacheCompactedBlocksOnWriteThreshold());<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span>        }<a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      }<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>    } else {<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      final boolean shouldCacheDataOnWrite = cacheConf.shouldCacheDataOnWrite();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span>      if (shouldCacheDataOnWrite) {<a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>        if (!cacheOnWriteLogged) {<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>          LOG.info("For Store {} , cacheDataOnWrite is true, hence enabled cacheOnWrite for " +<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>            "Index blocks and Bloom filter blocks", getColumnFamilyName());<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>          cacheOnWriteLogged = true;<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>        }<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>      }<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>    }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>    InetSocketAddress[] favoredNodes = null;<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>    if (region.getRegionServerServices() != null) {<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>      favoredNodes = region.getRegionServerServices().getFavoredNodesForRegion(<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>          region.getRegionInfo().getEncodedName());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>    }<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>    HFileContext hFileContext = createFileContext(compression, includeMVCCReadpoint, includesTag,<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>      cryptoContext);<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span>    Path familyTempDir = new Path(fs.getTempDir(), family.getNameAsString());<a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>    StoreFileWriter.Builder builder = new StoreFileWriter.Builder(conf, writerCacheConf,<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        this.getFileSystem())<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span>            .withOutputDir(familyTempDir)<a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>            .withBloomType(family.getBloomFilterType())<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>            .withMaxKeyCount(maxKeyCount)<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>            .withFavoredNodes(favoredNodes)<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>            .withFileContext(hFileContext)<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>            .withShouldDropCacheBehind(shouldDropBehind)<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>            .withCompactedFilesSupplier(this::getCompactedFiles);<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span>    return builder.build();<a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  }<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span><a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>  private HFileContext createFileContext(Compression.Algorithm compression,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>      boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>    if (compression == null) {<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>      compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    }<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    HFileContext hFileContext = new HFileContextBuilder()<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>                                .withIncludesMvcc(includeMVCCReadpoint)<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>                                .withIncludesTags(includesTag)<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>                                .withCompression(compression)<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>                                .withCompressTags(family.isCompressTags())<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>                                .withChecksumType(checksumType)<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span>                                .withBytesPerCheckSum(bytesPerChecksum)<a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>                                .withBlockSize(blocksize)<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>                                .withHBaseCheckSum(true)<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>                                .withDataBlockEncoding(family.getDataBlockEncoding())<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>                                .withEncryptionContext(cryptoContext)<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>                                .withCreateTime(EnvironmentEdgeManager.currentTime())<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span>                                .withColumnFamily(family.getName())<a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>                                .withTableName(region.getTableDescriptor()<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>                                    .getTableName().getName())<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>                                .withCellComparator(this.comparator)<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span>                                .build();<a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>    return hFileContext;<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>  }<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span><a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>  private long getTotalSize(Collection&lt;HStoreFile&gt; sfs) {<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>    return sfs.stream().mapToLong(sf -&gt; sf.getReader().length()).sum();<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>  }<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>  /**<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>   * Change storeFiles adding into place the Reader produced by this new flush.<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>   * @param sfs Store files<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span>   * @return Whether compaction is required.<a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>   */<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>  private boolean updateStorefiles(List&lt;HStoreFile&gt; sfs, long snapshotId) throws IOException {<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>    this.lock.writeLock().lock();<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>    try {<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>      this.storeEngine.getStoreFileManager().insertNewFiles(sfs);<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>      if (snapshotId &gt; 0) {<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>        this.memstore.clearSnapshot(snapshotId);<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>      }<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>    } finally {<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span>      // We need the lock, as long as we are updating the storeFiles<a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      // or changing the memstore. Let us release it before calling<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>      // notifyChangeReadersObservers. See HBASE-4485 for a possible<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      // deadlock scenario that could have happened if continue to hold<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>      // the lock.<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>      this.lock.writeLock().unlock();<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span>    }<a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>    // notify to be called here - only in case of flushes<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    notifyChangedReadersObservers(sfs);<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (LOG.isTraceEnabled()) {<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>      long totalSize = getTotalSize(sfs);<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>      String traceMessage = "FLUSH time,count,size,store size,store files ["<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>          + EnvironmentEdgeManager.currentTime() + "," + sfs.size() + "," + totalSize<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>          + "," + storeSize + "," + storeEngine.getStoreFileManager().getStorefileCount() + "]";<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>      LOG.trace(traceMessage);<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>    }<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>    return needsCompaction();<a name="line.1252"></a>
 <span class="sourceLineNo">1253</span>  }<a name="line.1253"></a>
 <span class="sourceLineNo">1254</span><a name="line.1254"></a>
 <span class="sourceLineNo">1255</span>  /**<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>   * @param cacheBlocks cache the blocks or not<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span>   * @param usePread true to use pread, false if not<a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * @param matcher the scan query matcher<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   * @param startRow the start row<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>   * @param stopRow the stop row<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>   * @param readPt the read point of the current scan<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>   * @return all scanners for this store<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>   */<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean isGet, boolean usePread,<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, byte[] stopRow, long readPt)<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      throws IOException {<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    return getScanners(cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow, false,<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>      readPt);<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span>  }<a name="line.1271"></a>
-<span class="sourceLineNo">1272</span><a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>  /**<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>   * @param cacheBlocks cache the blocks or not<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>   * @param usePread true to use pread, false if not<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>   * @param matcher the scan query matcher<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>   * @param startRow the start row<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>   * @param includeStartRow true to include start row, false if not<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>   * @param stopRow the stop row<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>   * @param readPt the read point of the current scan<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>   * @return all scanners for this store<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>   */<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean usePread,<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, boolean includeStartRow,<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      byte[] stopRow, boolean includeStopRow, long readPt) throws IOException {<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    Collection&lt;HStoreFile&gt; storeFilesToScan;<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>    List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span>    this.lock.readLock().lock();<a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>    try {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>      storeFilesToScan = this.storeEngine.getStoreFileManager().getFilesForScan(startRow,<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>        includeStartRow, stopRow, includeStopRow);<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span>      memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>    } finally {<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>      this.lock.readLock().unlock();<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>    }<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span><a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>    try {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>      // First the store file scanners<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span><a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      // TODO this used to get the store files in descending order,<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>      // but now we get them in ascending order, which I think is<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>      // actually more correct, since memstore get put at the end.<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>        .getScannersForStoreFiles(storeFilesToScan, cacheBlocks, usePread, isCompaction, false,<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>          matcher, readPt);<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>      scanners.addAll(sfScanners);<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // Then the memstore scanners<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      scanners.addAll(memStoreScanners);<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>      return scanners;<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span>    } catch (Throwable t) {<a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>      clearAndClose(memStoreScanners);<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    }<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span><a name="line.1319"></a>
-<span class="sourceLineNo">1320</span>  private static void clearAndClose(List&lt;KeyValueScanner&gt; scanners) {<a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>    if (scanners == null) {<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>      return;<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>    }<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>    for (KeyValueScanner s : scanners) {<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>      s.close();<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    }<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    scanners.clear();<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>  }<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span><a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>  /**<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>   * (that happens further down the line).<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span>   * @param files the list of files on which the scanners has to be created<a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>   * @param cacheBlocks cache the blocks or not<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * @param usePread true to use pread, false if not<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>   * @param matcher the scan query matcher<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>   * @param startRow the start row<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>   * @param stopRow the stop row<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>   * @param readPt the read point of the current scan<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>   */<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>      boolean isGet, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span>      byte[] startRow, byte[] stopRow, long readPt, boolean includeMemstoreScanner)<a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>      throws IOException {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>    return getScanners(files, cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow,<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      false, readPt, includeMemstoreScanner);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>  }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>  /**<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span>   * (that happens further down the line).<a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>   * @param files the list of files on which the scanners has to be created<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>   * @param cacheBlocks ache the blocks or not<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>   * @param usePread true to use pread, false if not<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>   * @param matcher the scan query matcher<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>   * @param startRow the start row<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>   * @param includeStartRow true to include start row, false if not<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>   * @param stopRow the stop row<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>   * @param readPt the read point of the current scan<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>   */<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>      boolean usePread, boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow,<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span>      boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>    List&lt;KeyValueScanner&gt; memStoreScanners = null;<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>    if (includeMemstoreScanner) {<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      this.lock.readLock().lock();<a name="line.1374"></a>
-<span class="sourceLineNo">1375</span>      try {<a name="line.1375"></a>
-<span class="sourceLineNo">1376</span>        memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      } finally {<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>        this.lock.readLock().unlock();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      }<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>    }<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>    try {<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>        .getScannersForStoreFiles(files, cacheBlocks, usePread, isCompaction, false, matcher,<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span>          readPt);<a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      scanners.addAll(sfScanners);<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>      // Then the memstore scanners<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>      if (memStoreScanners != null) {<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        scanners.addAll(memStoreScanners);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>      }<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>      return scanners;<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>    } catch (Throwable t) {<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>      clearAndClose(memStoreScanners);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>    }<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>  }<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span><a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>  /**<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>   * @param o Observer who wants to know about changes in set of Readers<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>   */<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>  public void addChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>    this.changedReaderObservers.add(o);<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>  }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span><a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>  /**<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>   * @param o Observer no longer interested in changes in set of Readers.<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>   */<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>  public void deleteChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span>    // We don't check if observer present; it may not be (legitimately)<a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    this.changedReaderObservers.remove(o);<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>  }<a name="line.1411"><