hbase-commits mailing list archives

From: git-site-r...@apache.org
Subject: [hbase-site] branch asf-site updated: Published site at 33f45d441385c37d5cb00a40a29d0aaa0caac7db.
Date: Mon, 06 Jan 2020 14:44:10 GMT
This is an automated email from the ASF dual-hosted git repository.

git-site-role pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hbase-site.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 706e86a  Published site at 33f45d441385c37d5cb00a40a29d0aaa0caac7db.
706e86a is described below

commit 706e86acbbaca7a18a072f0a62423c0b8ccfffc5
Author: jenkins <builds@apache.org>
AuthorDate: Mon Jan 6 14:43:48 2020 +0000

    Published site at 33f45d441385c37d5cb00a40a29d0aaa0caac7db.
---
 acid-semantics.html                                |    2 +-
 apache_hbase_reference_guide.pdf                   |    4 +-
 book.html                                          |    2 +-
 bulk-loads.html                                    |    2 +-
 checkstyle-aggregate.html                          |   88 +-
 checkstyle.rss                                     |   16 +-
 coc.html                                           |    2 +-
 dependencies.html                                  |    2 +-
 dependency-convergence.html                        |    2 +-
 dependency-info.html                               |    2 +-
 dependency-management.html                         |    2 +-
 devapidocs/constant-values.html                    |    4 +-
 devapidocs/index-all.html                          |    2 +
 .../apache/hadoop/hbase/backup/package-tree.html   |    4 +-
 .../apache/hadoop/hbase/client/package-tree.html   |   20 +-
 .../apache/hadoop/hbase/executor/package-tree.html |    2 +-
 .../apache/hadoop/hbase/filter/package-tree.html   |    8 +-
 .../hadoop/hbase/hbtop/field/package-tree.html     |    2 +-
 .../hadoop/hbase/hbtop/terminal/package-tree.html  |    2 +-
 .../org/apache/hadoop/hbase/http/package-tree.html |    2 +-
 .../apache/hadoop/hbase/io/hfile/package-tree.html |    4 +-
 .../org/apache/hadoop/hbase/ipc/package-tree.html  |    2 +-
 .../hadoop/hbase/mapreduce/package-tree.html       |    2 +-
 .../hbase/master/assignment/AssignmentManager.html |    6 +-
 .../assignment/class-use/RegionStateNode.html      |    2 +-
 .../hbase/master/assignment/package-tree.html      |    2 +-
 .../apache/hadoop/hbase/master/package-tree.html   |    6 +-
 .../master/procedure/ServerCrashProcedure.html     |    6 +-
 .../hbase/master/procedure/package-tree.html       |    2 +-
 .../org/apache/hadoop/hbase/package-tree.html      |   14 +-
 .../hadoop/hbase/procedure2/package-tree.html      |    6 +-
 .../apache/hadoop/hbase/quotas/package-tree.html   |    2 +-
 .../regionserver/HStore.StoreFlusherImpl.html      |   34 +-
 .../apache/hadoop/hbase/regionserver/HStore.html   |  463 +-
 .../hadoop/hbase/regionserver/package-tree.html    |   20 +-
 .../regionserver/querymatcher/package-tree.html    |    2 +-
 .../hbase/regionserver/wal/package-tree.html       |    4 +-
 .../hadoop/hbase/replication/package-tree.html     |    2 +-
 .../replication/regionserver/package-tree.html     |    2 +-
 .../hadoop/hbase/rest/model/package-tree.html      |    2 +-
 .../hadoop/hbase/security/access/package-tree.html |    6 +-
 .../apache/hadoop/hbase/security/package-tree.html |    2 +-
 .../org/apache/hadoop/hbase/util/package-tree.html |    8 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |    2 +-
 .../src-html/org/apache/hadoop/hbase/Version.html  |    4 +-
 ...ignmentManager.DeadServerMetricRegionChore.html |    2 +-
 .../AssignmentManager.RegionInTransitionChore.html |    2 +-
 .../AssignmentManager.RegionInTransitionStat.html  |    2 +-
 ...AssignmentManager.RegionMetaLoadingVisitor.html |    2 +-
 .../hbase/master/assignment/AssignmentManager.html |    2 +-
 .../master/procedure/ServerCrashProcedure.html     |   84 +-
 .../regionserver/HStore.StoreFlusherImpl.html      | 5202 ++++++++++----------
 .../apache/hadoop/hbase/regionserver/HStore.html   | 5202 ++++++++++----------
 downloads.html                                     |    2 +-
 export_control.html                                |    2 +-
 index.html                                         |    2 +-
 issue-tracking.html                                |    2 +-
 mail-lists.html                                    |    2 +-
 metrics.html                                       |    2 +-
 old_news.html                                      |    2 +-
 plugin-management.html                             |    2 +-
 plugins.html                                       |    2 +-
 poweredbyhbase.html                                |    2 +-
 project-info.html                                  |    2 +-
 project-reports.html                               |    2 +-
 project-summary.html                               |    2 +-
 pseudo-distributed.html                            |    2 +-
 replication.html                                   |    2 +-
 resources.html                                     |    2 +-
 source-repository.html                             |    2 +-
 sponsors.html                                      |    2 +-
 supportingprojects.html                            |    2 +-
 team-list.html                                     |    2 +-
 testdevapidocs/allclasses-frame.html               |    3 +
 testdevapidocs/allclasses-noframe.html             |    3 +
 testdevapidocs/index-all.html                      |   36 +
 .../apache/hadoop/hbase/backup/package-tree.html   |    2 +-
 .../hadoop/hbase/chaos/actions/package-tree.html   |    2 +-
 .../hadoop/hbase/class-use/HBaseClassTestRule.html |   24 +-
 .../hbase/class-use/HBaseTestingUtility.html       |   22 +-
 ...erRestartFailover.AssignmentManagerForTest.html |    2 +-
 ...TestOpenRegionProcedureHang.HMasterForTest.html |    4 +-
 ...BetweenSCPAndDTP.AssignmentManagerForTest.html} |   76 +-
 ...> TestRaceBetweenSCPAndDTP.HMasterForTest.html} |   63 +-
 ...PAndTRSP.html => TestRaceBetweenSCPAndDTP.html} |  110 +-
 .../assignment/TestRaceBetweenSCPAndTRSP.html      |    4 +-
 ...eBetweenSCPAndDTP.AssignmentManagerForTest.html |  125 +
 .../TestRaceBetweenSCPAndDTP.HMasterForTest.html   |  125 +
 .../class-use/TestRaceBetweenSCPAndDTP.html        |  125 +
 .../hbase/master/assignment/package-frame.html     |    3 +
 .../hbase/master/assignment/package-summary.html   |   86 +-
 .../hbase/master/assignment/package-tree.html      |    3 +
 .../org/apache/hadoop/hbase/package-tree.html      |   12 +-
 .../hadoop/hbase/procedure2/package-tree.html      |    2 +-
 .../hadoop/hbase/regionserver/package-tree.html    |    4 +-
 .../org/apache/hadoop/hbase/test/package-tree.html |    2 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |    2 +-
 testdevapidocs/overview-tree.html                  |    3 +
 ...eBetweenSCPAndDTP.AssignmentManagerForTest.html |  211 +
 .../TestRaceBetweenSCPAndDTP.HMasterForTest.html   |  211 +
 .../assignment/TestRaceBetweenSCPAndDTP.html       |  211 +
 101 files changed, 6942 insertions(+), 5847 deletions(-)

diff --git a/acid-semantics.html b/acid-semantics.html
index a235d5b..a85ab19 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -443,7 +443,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-04</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-06</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index ccdfe3f..a4418d5 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20200104144012+00'00')
-/CreationDate (D:20200104144012+00'00')
+/ModDate (D:20200106144047+00'00')
+/CreationDate (D:20200106144047+00'00')
 >>
 endobj
 2 0 obj
diff --git a/book.html b/book.html
index 6dcb26b..5c4243a 100644
--- a/book.html
+++ b/book.html
@@ -44205,7 +44205,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2020-01-04 14:30:04 UTC
+Last updated 2020-01-06 14:30:10 UTC
 </div>
 </div>
 </body>
diff --git a/bulk-loads.html b/bulk-loads.html
index 69704ef..685f580 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -148,7 +148,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-04</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-06</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index f316f79..6527d31 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -142,7 +142,7 @@
 <th><img src="images/icon_warning_sml.gif" alt="" />&#160;Warnings</th>
 <th><img src="images/icon_error_sml.gif" alt="" />&#160;Errors</th></tr>
 <tr class="b">
-<td>4133</td>
+<td>4134</td>
 <td>0</td>
 <td>0</td>
 <td>11128</td></tr></table></div>
@@ -58081,253 +58081,253 @@
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>238</td></tr>
+<td>240</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>JavadocTagContinuationIndentation</td>
 <td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>241</td></tr>
+<td>243</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>242</td></tr>
+<td>244</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>399</td></tr>
+<td>402</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>annotation</td>
 <td>MissingDeprecated</td>
 <td>Must include both @java.lang.Deprecated annotation and @deprecated Javadoc tag with description.</td>
-<td>522</td></tr>
+<td>525</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>annotation</td>
 <td>MissingDeprecated</td>
 <td>Must include both @java.lang.Deprecated annotation and @deprecated Javadoc tag with description.</td>
-<td>534</td></tr>
+<td>537</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>558</td></tr>
+<td>561</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 113).</td>
-<td>574</td></tr>
+<td>577</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>596</td></tr>
+<td>599</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>598</td></tr>
+<td>601</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>650</td></tr>
+<td>653</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>665</td></tr>
+<td>668</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>671</td></tr>
+<td>674</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>672</td></tr>
+<td>675</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>673</td></tr>
+<td>676</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'if' child has incorrect indentation level 6, expected level should be 8.</td>
-<td>868</td></tr>
+<td>871</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method call' child has incorrect indentation level 9, expected level should be 10.</td>
-<td>869</td></tr>
+<td>872</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method call' child has incorrect indentation level 9, expected level should be 10.</td>
-<td>870</td></tr>
+<td>873</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>873</td></tr>
+<td>876</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>881</td></tr>
+<td>884</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>993</td></tr>
+<td>996</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>999</td></tr>
+<td>1002</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1027</td></tr>
+<td>1030</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1028</td></tr>
+<td>1031</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1029</td></tr>
+<td>1032</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1081</td></tr>
+<td>1084</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1082</td></tr>
+<td>1085</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1084</td></tr>
+<td>1087</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1107</td></tr>
+<td>1110</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1197</td></tr>
+<td>1207</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1198</td></tr>
+<td>1208</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1230</td></tr>
+<td>1240</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1447</td></tr>
+<td>1457</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>1514</td></tr>
+<td>1524</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 114).</td>
-<td>1553</td></tr>
+<td>1563</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1657</td></tr>
+<td>1667</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1910</td></tr>
+<td>1920</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>2113</td></tr>
+<td>2123</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>2312</td></tr>
+<td>2322</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>2426</td></tr>
+<td>2436</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>2458</td></tr>
+<td>2468</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 102).</td>
-<td>2713</td></tr></table></div>
+<td>2723</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.regionserver.HStoreFile.java">org/apache/hadoop/hbase/regionserver/HStoreFile.java</h3>
 <table border="0" class="table table-striped">
@@ -88330,7 +88330,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-04</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-06</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle.rss b/checkstyle.rss
index b023b05..b59daec 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,7 +25,7 @@ under the License.
     <language>en-us</language>
     <copyright>&#169;2007 - 2020 The Apache Software Foundation</copyright>
     <item>
-      <title>File: 4133,
+      <title>File: 4134,
              Errors: 11128,
              Warnings: 0,
              Infos: 0
@@ -7494,6 +7494,20 @@ under the License.
               </tr>
                           <tr>
                 <td>
+                  <a href="https://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.assignment.TestRaceBetweenSCPAndDTP.java">org/apache/hadoop/hbase/master/assignment/TestRaceBetweenSCPAndDTP.java</a>
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+              </tr>
+                          <tr>
+                <td>
                   <a href="https://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.security.access.AuthManager.java">org/apache/hadoop/hbase/security/access/AuthManager.java</a>
                 </td>
                 <td>
diff --git a/coc.html b/coc.html
index 6bc7446..ad6f492 100644
--- a/coc.html
+++ b/coc.html
@@ -217,7 +217,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-04</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-06</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependencies.html b/dependencies.html
index d887c9a..3a65d6f 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -289,7 +289,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-04</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-06</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 9ce9214..ad56fee 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -534,7 +534,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-04</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-06</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-info.html b/dependency-info.html
index d6ea7f2..f26f0bd 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -170,7 +170,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-04</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-06</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-management.html b/dependency-management.html
index dd90a6e..63cd2c7 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -898,7 +898,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-04</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-01-06</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 910b681..8402767 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -4102,14 +4102,14 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Sat Jan  4 14:36:01 UTC 2020"</code></td>
+<td class="colLast"><code>"Mon Jan  6 14:36:43 UTC 2020"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#revision">revision</a></code></td>
-<td class="colLast"><code>"ab9766599dfae624330631ca6352d8a00bc2e607"</code></td>
+<td class="colLast"><code>"33f45d441385c37d5cb00a40a29d0aaa0caac7db"</code></td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.srcChecksum">
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index cc95557..1c38f3c 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -8803,6 +8803,8 @@
 <dd>
 <div class="block">Whether to cache-on-write compound Bloom filter chunks</div>
 </dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HStore.html#cacheOnWriteLogged">cacheOnWriteLogged</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mapreduce/CopyTable.html#cacheRow">cacheRow</a></span> - Variable in class org.apache.hadoop.hbase.mapreduce.<a href="org/apache/hadoop/hbase/mapreduce/CopyTable.html" title="class in org.apache.hadoop.hbase.mapreduce">CopyTable</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/AsyncTableResultScanner.html#cacheSize">cacheSize</a></span> - Variable in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/AsyncTableResultScanner.html" title="class in org.apache.hadoop.hbase.client">AsyncTableResultScanner</a></dt>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 4e23761..f8bc810 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,9 +167,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 2b9ed88..ddedef9 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -426,21 +426,21 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
index 880a54a..0192935 100644
--- a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
@@ -104,8 +104,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 8c3e163..609a4a5 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -190,13 +190,13 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html b/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
index d9bf09d..cd793c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
@@ -92,8 +92,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/FieldValueType.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">FieldValueType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/Field.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">Field</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/FieldValueType.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">FieldValueType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
index b8f3866..007e90f 100644
--- a/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
@@ -107,8 +107,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/Color.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">Color</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/KeyPress.Type.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">KeyPress.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/Color.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">Color</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
index 16404f4..cdd6240 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
@@ -140,8 +140,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/HttpConfig.Policy.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">HttpConfig.Policy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index 604b327..ee9fda4 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -307,9 +307,9 @@
 <ul>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockCacheFactory.ExternalBlockCaches</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/ReaderContext.ReaderType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">ReaderContext.ReaderType</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 36f4cc5..dbaf190 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -358,8 +358,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index d7ec268..3b4d03f 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -296,9 +296,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
index dd588a9..8ba4b7e 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
@@ -766,7 +766,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </td>
 </tr>
 <tr id="i79" class="rowColor">
-<td class="colFirst"><code>(package private) void</code></td>
+<td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/master/assignment/AssignmentManager.html#regionClosedAbnormally-org.apache.hadoop.hbase.master.assignment.RegionStateNode-">regionClosedAbnormally</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/RegionStateNode.html" title="class in org.apache.hadoop.hbase.master.assignment">RegionStateNode</a>&nbsp;regionNode)</code>&nbsp;</td>
 </tr>
 <tr id="i80" class="altColor">
@@ -2538,8 +2538,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>regionClosedAbnormally</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html#line.1767">regionClosedAbnormally</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/RegionStateNode.html" title="class in org.apache.hadoop.hbase.master.assignment">RegionStateNode</a>&nbsp;regionNode)
-                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html#line.1767">regionClosedAbnormally</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/RegionStateNode.html" title="class in org.apache.hadoop.hbase.master.assignment">RegionStateNode</a>&nbsp;regionNode)
+                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStateNode.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStateNode.html
index 2197b51..ca20203 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStateNode.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/class-use/RegionStateNode.html
@@ -356,7 +356,7 @@
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>(package private) void</code></td>
+<td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">AssignmentManager.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/master/assignment/AssignmentManager.html#regionClosedAbnormally-org.apache.hadoop.hbase.master.assignment.RegionStateNode-">regionClosedAbnormally</a></span>(<a href="../../../../../../../org/apache/hadoop/hbase/master/assignment/RegionStateNode.html" title="class in org.apache.hadoop.hbase.master.assignment">RegionStateNode</a> [...]
 </tr>
 <tr class="altColor">
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
index f8add9f..5c89e3f 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
@@ -151,8 +151,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/ServerState.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">ServerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.TransitionType.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">TransitRegionStateProcedure.TransitionType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/ServerState.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">ServerState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 40534d5..b0525e1 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -362,12 +362,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/ServerManager.ServerLiveState.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">ServerManager.ServerLiveState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/ServerManager.ServerLiveState.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">ServerManager.ServerLiveState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html
index b1cd713..411703c 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html
@@ -1046,7 +1046,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/master/procedure/S
 <ul class="blockList">
 <li class="blockList">
 <h4>getProcedureMetrics</h4>
-<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/procedure2/ProcedureMetrics.html" title="interface in org.apache.hadoop.hbase.procedure2">ProcedureMetrics</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html#line.501">getProcedureMetrics</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEn [...]
+<pre>protected&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/procedure2/ProcedureMetrics.html" title="interface in org.apache.hadoop.hbase.procedure2">ProcedureMetrics</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html#line.513">getProcedureMetrics</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEn [...]
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/procedure2/Procedure.html#getProcedureMetrics-TEnvironment-">Procedure</a></code></span></div>
 <div class="block">Override this method to provide procedure specific counters for submitted count, failed
  count and time histogram.</div>
@@ -1066,7 +1066,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/master/procedure/S
 <ul class="blockList">
 <li class="blockList">
 <h4>holdLock</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html#line.506">holdLock</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env)</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html#line.518">holdLock</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/procedure2/Procedure.html#holdLock-TEnvironment-">Procedure</a></code></span></div>
 <div class="block">Used to keep the procedure lock even when the procedure is yielding or suspended.</div>
 <dl>
@@ -1083,7 +1083,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/master/procedure/S
 <ul class="blockListLast">
 <li class="blockList">
 <h4>updateProgress</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html#line.510">updateProgress</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html#line.522">updateProgress</a>(<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.html" title="class in org.apache.hadoop.hbase.master.procedure">MasterProcedureEnv</a>&nbsp;env,
                                   long&nbsp;parentId)</pre>
 </li>
 </ul>
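
The ServerCrashProcedure hunks above only shift source-line anchors, but the copied Javadoc spells out the semantics being documented: getProcedureMetrics lets a procedure expose its own submitted/failed counters and time histogram, and a procedure whose holdLock returns true keeps its exclusive lock even while it is yielding or suspended. As a rough, self-contained analogue of that hold-the-lock-across-yields behaviour (plain java.util.concurrent locks, not the HBase Procedure API; HoldLockDemo, Step and runSteps are invented for illustration only):

    import java.util.concurrent.locks.ReentrantLock;

    // Toy analogue, not part of this commit and not the HBase Procedure API:
    // contrasts a "procedure" that keeps its lock across yield points (holdLock=true)
    // with one that releases and re-acquires the lock around every yield.
    public final class HoldLockDemo {

      interface Step {
        boolean run(); // returns true when the procedure is finished
      }

      private HoldLockDemo() {
      }

      static void runSteps(ReentrantLock lock, Step step, boolean holdLock) {
        lock.lock();
        try {
          while (!step.run()) {   // each loop iteration stands in for one yield point
            if (!holdLock) {
              lock.unlock();      // let other work interleave at the yield
              lock.lock();        // re-acquire before resuming
            }
          }
        } finally {
          lock.unlock();
        }
      }

      public static void main(String[] args) {
        ReentrantLock lock = new ReentrantLock();
        int[] remaining = { 3 };
        // holdLock=true: the lock is held for all three steps, as described above.
        runSteps(lock, () -> --remaining[0] <= 0, true);
        System.out.println("done; lock was held across all yield points");
      }
    }
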
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 75c27d3..4d619ce 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -221,9 +221,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index fcce9cf..9f369c8 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -430,19 +430,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index e63a243..38b2ce9 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 63b58e8..826ebf0 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -243,9 +243,9 @@
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
index 80d12eb..3f80a38 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2328">HStore.StoreFlusherImpl</a>
+<pre>private final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2338">HStore.StoreFlusherImpl</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a></pre>
 </li>
@@ -279,7 +279,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>tracker</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver">FlushLifeCycleTracker</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2330">tracker</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver">FlushLifeCycleTracker</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2340">tracker</a></pre>
 </li>
 </ul>
 <a name="cacheFlushSeqNum">
@@ -288,7 +288,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheFlushSeqNum</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2331">cacheFlushSeqNum</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2341">cacheFlushSeqNum</a></pre>
 </li>
 </ul>
 <a name="snapshot">
@@ -297,7 +297,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>snapshot</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2332">snapshot</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2342">snapshot</a></pre>
 </li>
 </ul>
 <a name="tempFiles">
@@ -306,7 +306,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>tempFiles</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2333">tempFiles</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2343">tempFiles</a></pre>
 </li>
 </ul>
 <a name="committedFiles">
@@ -315,7 +315,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>committedFiles</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2334">committedFiles</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2344">committedFiles</a></pre>
 </li>
 </ul>
 <a name="cacheFlushCount">
@@ -324,7 +324,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheFlushCount</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2335">cacheFlushCount</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2345">cacheFlushCount</a></pre>
 </li>
 </ul>
 <a name="cacheFlushSize">
@@ -333,7 +333,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>cacheFlushSize</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2336">cacheFlushSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2346">cacheFlushSize</a></pre>
 </li>
 </ul>
 <a name="outputFileSize">
@@ -342,7 +342,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>outputFileSize</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2337">outputFileSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2347">outputFileSize</a></pre>
 </li>
 </ul>
 </li>
@@ -359,7 +359,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>StoreFlusherImpl</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2339">StoreFlusherImpl</a>(long&nbsp;cacheFlushSeqNum,
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2349">StoreFlusherImpl</a>(long&nbsp;cacheFlushSeqNum,
                          <a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver">FlushLifeCycleTracker</a>&nbsp;tracker)</pre>
 </li>
 </ul>
@@ -377,7 +377,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>prepare</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2349">prepare</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2359">prepare</a>()</pre>
 <div class="block">This is not thread safe. The caller should have a lock on the region or the store.
  If necessary, the lock can be added with the patch provided in HBASE-10087</div>
 <dl>
@@ -394,7 +394,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>flushCache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2359">flushCache</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2369">flushCache</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#flushCache-org.apache.hadoop.hbase.monitoring.MonitoredTask-">StoreFlushContext</a></code></span></div>
 <div class="block">Flush the cache (create the new store file)
@@ -415,7 +415,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>commit</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2368">commit</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2378">commit</a>(<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#commit-org.apache.hadoop.hbase.monitoring.MonitoredTask-">StoreFlushContext</a></code></span></div>
 <div class="block">Commit the flush - add the store file to the store and clear the
@@ -440,7 +440,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>getOutputFileSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2411">getOutputFileSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2421">getOutputFileSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#getOutputFileSize--">getOutputFileSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a></code></dd>
@@ -455,7 +455,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>getCommittedFiles</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2416">getCommittedFiles</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2426">getCommittedFiles</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html#getCommittedFiles--">StoreFlushContext</a></code></span></div>
 <div class="block">Returns the newly committed files from the flush. Called only if commit returns true</div>
 <dl>
@@ -472,7 +472,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockList">
 <li class="blockList">
 <h4>replayFlush</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2429">replayFlush</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;fileNames,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2439">replayFlush</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;fileNames,
                         boolean&nbsp;dropMemstoreSnapshot)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Similar to commit, but called in secondary region replicas for replaying the
@@ -495,7 +495,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlu
 <ul class="blockListLast">
 <li class="blockList">
 <h4>abort</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2461">abort</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html#line.2471">abort</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Abort the snapshot preparation. Drops the snapshot if any.</div>
 <dl>
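
The StoreFlusherImpl methods touched above (prepare, flushCache, commit, getCommittedFiles, replayFlush, abort) implement the StoreFlushContext flush lifecycle whose Javadoc is quoted in the hunks. A minimal sketch of how a caller might drive that lifecycle, using only the signatures shown in this diff; FlushDriver and its flush() helper are hypothetical, the class is placed in the org.apache.hadoop.hbase.regionserver package on the assumption that StoreFlushContext may not be public, and, per the prepare() Javadoc, the caller is assumed to already hold the region/store lock:

    package org.apache.hadoop.hbase.regionserver;

    import java.io.IOException;
    import java.util.Collections;
    import java.util.List;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.monitoring.MonitoredTask;

    // Hypothetical helper, not part of this commit: drives a StoreFlushContext
    // through prepare -> flushCache -> commit, aborting the snapshot on failure.
    final class FlushDriver {

      private FlushDriver() {
      }

      static List<Path> flush(StoreFlushContext ctx, MonitoredTask status) throws IOException {
        // prepare() snapshots the memstore; its Javadoc says it is not thread safe,
        // so this sketch assumes the caller already holds the region/store lock.
        MemStoreSize snapshotSize = ctx.prepare();
        status.setStatus("Flushing snapshot: " + snapshotSize);
        try {
          ctx.flushCache(status);              // write the temporary store file(s)
          if (ctx.commit(status)) {            // add them to the store, clear the snapshot
            return ctx.getCommittedFiles();    // only valid when commit() returned true
          }
          return Collections.emptyList();
        } catch (IOException e) {
          ctx.abort();                         // drop the memstore snapshot, if any
          throw e;
        }
      }
    }
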
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index cf3de71..d992ab8 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -200,180 +200,184 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#cacheConf">cacheConf</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
+<td class="colFirst"><code>private boolean</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#cacheOnWriteLogged">cacheOnWriteLogged</a></span></code>&nbsp;</td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#changedReaderObservers">changedReaderObservers</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util">ChecksumType</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#checksumType">checksumType</a></span></code>
 <div class="block">Checksum configuration</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#closeCheckInterval">closeCheckInterval</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#COMPACTCHECKER_INTERVAL_MULTIPLIER_KEY">COMPACTCHECKER_INTERVAL_MULTIPLIER_KEY</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#compactedCellsCount">compactedCellsCount</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#compactedCellsSize">compactedCellsSize</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#compactionCheckMultiplier">compactionCheckMultiplier</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#comparator">comparator</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected org.apache.hadoop.conf.Configuration</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#conf">conf</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/io/crypto/Encryption.Context.html" title="class in org.apache.hadoop.hbase.io.crypto">Encryption.Context</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#cryptoContext">cryptoContext</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#currentParallelPutCount">currentParallelPutCount</a></span></code>
 <div class="block">Use this counter to track concurrent puts.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#dataBlockEncoder">dataBlockEncoder</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#DEEP_OVERHEAD">DEEP_OVERHEAD</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#DEFAULT_BLOCK_STORAGE_POLICY">DEFAULT_BLOCK_STORAGE_POLICY</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#DEFAULT_BLOCKING_STOREFILE_COUNT">DEFAULT_BLOCKING_STOREFILE_COUNT</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER">DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#DEFAULT_FLUSH_RETRIES_NUMBER">DEFAULT_FLUSH_RETRIES_NUMBER</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#family">family</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#filesCompacting">filesCompacting</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#FIXED_OVERHEAD">FIXED_OVERHEAD</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#flushedCellsCount">flushedCellsCount</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#flushedCellsSize">flushedCellsSize</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#flushedOutputFileSize">flushedOutputFileSize</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#flushRetriesNumber">flushRetriesNumber</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#forceMajor">forceMajor</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#fs">fs</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#lastCompactSize">lastCompactSize</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#lock">lock</a></span></code>
 <div class="block">RWLock for store operations.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#majorCompactedCellsCount">majorCompactedCellsCount</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#majorCompactedCellsSize">majorCompactedCellsSize</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStore.html" title="interface in org.apache.hadoop.hbase.regionserver">MemStore</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#memstore">memstore</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#MEMSTORE_CLASS_NAME">MEMSTORE_CLASS_NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicBoolean</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#offPeakCompactionTracker">offPeakCompactionTracker</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#offPeakHours">offPeakHours</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#parallelPutCountPrintThreshold">parallelPutCountPrintThreshold</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#pauseTime">pauseTime</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#region">region</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#scanInfo">scanInfo</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#storeEngine">storeEngine</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#storeSize">storeSize</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#totalUncompressedBytes">totalUncompressedBytes</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#verifyBulkLoads">verifyBulkLoads</a></span></code>&nbsp;</td>
 </tr>
@@ -1482,13 +1486,22 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.160">totalUncompressedBytes</a></pre>
 </li>
 </ul>
+<a name="cacheOnWriteLogged">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>cacheOnWriteLogged</h4>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.162">cacheOnWriteLogged</a></pre>
+</li>
+</ul>
 <a name="lock">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>lock</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.171">lock</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.173">lock</a></pre>
 <div class="block">RWLock for store operations.
  Locked in shared mode when the list of component stores is looked at:
    - all reads/writes to table data
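As a hedged illustration of the locking protocol this description outlines (a minimal sketch with made-up names, not HStore's actual code): readers of the component store-file list take the lock in shared mode, while operations that swap that list take it in exclusive mode.

    import java.util.concurrent.locks.ReentrantReadWriteLock;

    class StoreLockSketch {
      final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

      // Shared mode: looking at the current list of component store files.
      void readPath() {
        lock.readLock().lock();
        try {
          // e.g. build scanners over the current store files
        } finally {
          lock.readLock().unlock();
        }
      }

      // Exclusive mode: changing the list, e.g. committing a flush or compaction.
      void swapStoreFiles() {
        lock.writeLock().lock();
        try {
          // e.g. replace the compacted files with the newly written file
        } finally {
          lock.writeLock().unlock();
        }
      }
    }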
@@ -1504,7 +1517,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>archiveLock</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.183">archiveLock</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.185">archiveLock</a></pre>
 <div class="block">Lock specific to archiving compacted store files.  This avoids races around
  the combination of retrieving the list of compacted files and moving them to
  the archive directory.  Since this is usually a background process (other than
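A minimal sketch of the archiving pattern this description refers to (illustrative names and hypothetical collectCompactedFiles/moveToArchive helpers, not the actual HStore code): a dedicated ReentrantLock keeps "gather the compacted files" and "move them to the archive" as one critical section so concurrent archivers cannot interleave.

    import java.util.List;
    import java.util.concurrent.locks.ReentrantLock;

    class ArchiveSketch {
      final ReentrantLock archiveLock = new ReentrantLock();

      void archiveCompactedFiles() {
        archiveLock.lock();           // only one archive pass at a time
        try {
          List<String> compacted = collectCompactedFiles();
          moveToArchive(compacted);   // both steps happen under the same lock
        } finally {
          archiveLock.unlock();
        }
      }

      List<String> collectCompactedFiles() { return List.of(); }  // hypothetical placeholder
      void moveToArchive(List<String> files) { }                  // hypothetical placeholder
    }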
@@ -1522,7 +1535,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>verifyBulkLoads</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.185">verifyBulkLoads</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.187">verifyBulkLoads</a></pre>
 </li>
 </ul>
 <a name="currentParallelPutCount">
@@ -1531,7 +1544,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>currentParallelPutCount</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.192">currentParallelPutCount</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.194">currentParallelPutCount</a></pre>
 <div class="block">Use this counter to track concurrent puts. If TRACE-level logging is enabled and we are over the
 threshold set by hbase.region.store.parallel.put.print.threshold (default: 50), we will
 log a message that identifies the Store experiencing this high level of concurrency.</div>
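A rough sketch of the counter-plus-threshold logging described above (assumptions only, not the actual put path): each put bumps the counter, logs once the configured threshold is exceeded, and decrements when it finishes.

    import java.util.concurrent.atomic.AtomicInteger;

    class ParallelPutSketch {
      // hbase.region.store.parallel.put.print.threshold (default 50)
      final int parallelPutCountPrintThreshold = 50;
      final AtomicInteger currentParallelPutCount = new AtomicInteger();

      void add(Runnable applyToMemStore) {
        int count = currentParallelPutCount.incrementAndGet();
        try {
          if (count > parallelPutCountPrintThreshold) {
            // TRACE-level in the real code; identifies the busy store
            System.out.println("Store is busy: parallelPutCount=" + count);
          }
          applyToMemStore.run();
        } finally {
          currentParallelPutCount.decrementAndGet();
        }
      }
    }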
@@ -1543,7 +1556,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>parallelPutCountPrintThreshold</h4>
-<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.193">parallelPutCountPrintThreshold</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.195">parallelPutCountPrintThreshold</a></pre>
 </li>
 </ul>
 <a name="scanInfo">
@@ -1552,7 +1565,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>scanInfo</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.195">scanInfo</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.197">scanInfo</a></pre>
 </li>
 </ul>
 <a name="filesCompacting">
@@ -1561,7 +1574,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>filesCompacting</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.199">filesCompacting</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.201">filesCompacting</a></pre>
 </li>
 </ul>
 <a name="changedReaderObservers">
@@ -1570,7 +1583,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>changedReaderObservers</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.202">changedReaderObservers</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.204">changedReaderObservers</a></pre>
 </li>
 </ul>
 <a name="blocksize">
@@ -1579,7 +1592,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>blocksize</h4>
-<pre>protected final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.205">blocksize</a></pre>
+<pre>protected final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.207">blocksize</a></pre>
 </li>
 </ul>
 <a name="dataBlockEncoder">
@@ -1588,7 +1601,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>dataBlockEncoder</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.206">dataBlockEncoder</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.208">dataBlockEncoder</a></pre>
 </li>
 </ul>
 <a name="checksumType">
@@ -1597,7 +1610,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>checksumType</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util">ChecksumType</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.209">checksumType</a></pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util">ChecksumType</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.211">checksumType</a></pre>
 <div class="block">Checksum configuration</div>
 </li>
 </ul>
@@ -1607,7 +1620,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>bytesPerChecksum</h4>
-<pre>protected&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.210">bytesPerChecksum</a></pre>
+<pre>protected&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.212">bytesPerChecksum</a></pre>
 </li>
 </ul>
 <a name="comparator">
@@ -1616,7 +1629,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>comparator</h4>
-<pre>protected final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.213">comparator</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.215">comparator</a></pre>
 </li>
 </ul>
 <a name="storeEngine">
@@ -1625,7 +1638,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>storeEngine</h4>
-<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.215">storeEngine</a></pre>
+<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.217">storeEngine</a></pre>
 </li>
 </ul>
 <a name="offPeakCompactionTracker">
@@ -1634,7 +1647,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>offPeakCompactionTracker</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicBoolean</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.217">offPeakCompactionTracker</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicBoolean</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.219">offPeakCompactionTracker</a></pre>
 </li>
 </ul>
 <a name="offPeakHours">
@@ -1643,7 +1656,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>offPeakHours</h4>
-<pre>private volatile&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.218">offPeakHours</a></pre>
+<pre>private volatile&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.220">offPeakHours</a></pre>
 </li>
 </ul>
 <a name="DEFAULT_FLUSH_RETRIES_NUMBER">
@@ -1652,7 +1665,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_FLUSH_RETRIES_NUMBER</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.220">DEFAULT_FLUSH_RETRIES_NUMBER</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.222">DEFAULT_FLUSH_RETRIES_NUMBER</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.HStore.DEFAULT_FLUSH_RETRIES_NUMBER">Constant Field Values</a></dd>
@@ -1665,7 +1678,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>flushRetriesNumber</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.221">flushRetriesNumber</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.223">flushRetriesNumber</a></pre>
 </li>
 </ul>
 <a name="pauseTime">
@@ -1674,7 +1687,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>pauseTime</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.222">pauseTime</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.224">pauseTime</a></pre>
 </li>
 </ul>
 <a name="blockingFileCount">
@@ -1683,7 +1696,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>blockingFileCount</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.224">blockingFileCount</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.226">blockingFileCount</a></pre>
 </li>
 </ul>
 <a name="compactionCheckMultiplier">
@@ -1692,7 +1705,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>compactionCheckMultiplier</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.225">compactionCheckMultiplier</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.227">compactionCheckMultiplier</a></pre>
 </li>
 </ul>
 <a name="cryptoContext">
@@ -1701,7 +1714,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>cryptoContext</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/crypto/Encryption.Context.html" title="class in org.apache.hadoop.hbase.io.crypto">Encryption.Context</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.226">cryptoContext</a></pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/crypto/Encryption.Context.html" title="class in org.apache.hadoop.hbase.io.crypto">Encryption.Context</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.228">cryptoContext</a></pre>
 </li>
 </ul>
 <a name="flushedCellsCount">
@@ -1710,7 +1723,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>flushedCellsCount</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.228">flushedCellsCount</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.230">flushedCellsCount</a></pre>
 </li>
 </ul>
 <a name="compactedCellsCount">
@@ -1719,7 +1732,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>compactedCellsCount</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.229">compactedCellsCount</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.231">compactedCellsCount</a></pre>
 </li>
 </ul>
 <a name="majorCompactedCellsCount">
@@ -1728,7 +1741,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>majorCompactedCellsCount</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.230">majorCompactedCellsCount</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.232">majorCompactedCellsCount</a></pre>
 </li>
 </ul>
 <a name="flushedCellsSize">
@@ -1737,7 +1750,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>flushedCellsSize</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.231">flushedCellsSize</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.233">flushedCellsSize</a></pre>
 </li>
 </ul>
 <a name="flushedOutputFileSize">
@@ -1746,7 +1759,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>flushedOutputFileSize</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.232">flushedOutputFileSize</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.234">flushedOutputFileSize</a></pre>
 </li>
 </ul>
 <a name="compactedCellsSize">
@@ -1755,7 +1768,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>compactedCellsSize</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.233">compactedCellsSize</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.235">compactedCellsSize</a></pre>
 </li>
 </ul>
 <a name="majorCompactedCellsSize">
@@ -1764,7 +1777,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>majorCompactedCellsSize</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.234">majorCompactedCellsSize</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.236">majorCompactedCellsSize</a></pre>
 </li>
 </ul>
 <a name="FIXED_OVERHEAD">
@@ -1773,7 +1786,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>FIXED_OVERHEAD</h4>
-<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2490">FIXED_OVERHEAD</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2500">FIXED_OVERHEAD</a></pre>
 </li>
 </ul>
 <a name="DEEP_OVERHEAD">
@@ -1782,7 +1795,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DEEP_OVERHEAD</h4>
-<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2494">DEEP_OVERHEAD</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2504">DEEP_OVERHEAD</a></pre>
 </li>
 </ul>
 </li>
@@ -1799,7 +1812,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HStore</h4>
-<pre>protected&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.244">HStore</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region,
+<pre>protected&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.246">HStore</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region,
                  <a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family,
                  org.apache.hadoop.conf.Configuration&nbsp;confParam,
                  boolean&nbsp;warmup)
@@ -1830,7 +1843,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMemstore</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStore.html" title="interface in org.apache.hadoop.hbase.regionserver">MemStore</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.346">getMemstore</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStore.html" title="interface in org.apache.hadoop.hbase.regionserver">MemStore</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.349">getMemstore</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>MemStore Instance to use in this store.</dd>
@@ -1843,7 +1856,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createCacheConf</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.380">createCacheConf</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.383">createCacheConf</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family)</pre>
 <div class="block">Creates the cache config.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1857,7 +1870,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createStoreEngine</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.393">createStoreEngine</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a>&nbsp;store,
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.396">createStoreEngine</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html" title="class in org.apache.hadoop.hbase.regionserver">HStore</a>&nbsp;store,
                                                  org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                  <a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a>&nbsp;kvComparator)
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1881,7 +1894,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>determineTTLFromFamily</h4>
-<pre>public static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.402">determineTTLFromFamily</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family)</pre>
+<pre>public static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.405">determineTTLFromFamily</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>family</code> - the column family descriptor to read the TTL from</dd>
@@ -1896,7 +1909,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getColumnFamilyName</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.418">getColumnFamilyName</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.421">getColumnFamilyName</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getColumnFamilyName--">getColumnFamilyName</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -1909,7 +1922,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.423">getTableName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.426">getTableName</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getTableName--">getTableName</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -1922,7 +1935,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileSystem</h4>
-<pre>public&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.428">getFileSystem</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.431">getFileSystem</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFileSystem--">getFileSystem</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -1935,7 +1948,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionFileSystem</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.432">getRegionFileSystem</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.435">getRegionFileSystem</a>()</pre>
 </li>
 </ul>
 <a name="getStoreFileTtl--">
@@ -1944,7 +1957,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreFileTtl</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.438">getStoreFileTtl</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.441">getStoreFileTtl</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreConfigInformation.html#getStoreFileTtl--">getStoreFileTtl</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreConfigInformation.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreConfigInformation</a></code></dd>
@@ -1959,7 +1972,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMemStoreFlushSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.444">getMemStoreFlushSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.447">getMemStoreFlushSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreConfigInformation.html#getMemStoreFlushSize--">getMemStoreFlushSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreConfigInformation.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreConfigInformation</a></code></dd>
@@ -1974,7 +1987,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushableSize</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.450">getFlushableSize</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.453">getFlushableSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushableSize--">getFlushableSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -1991,7 +2004,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSnapshotSize</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.455">getSnapshotSize</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.458">getSnapshotSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getSnapshotSize--">getSnapshotSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2006,7 +2019,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactionCheckMultiplier</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.460">getCompactionCheckMultiplier</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.463">getCompactionCheckMultiplier</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreConfigInformation.html#getCompactionCheckMultiplier--">getCompactionCheckMultiplier</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreConfigInformation.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreConfigInformation</a></code></dd>
@@ -2023,7 +2036,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockingFileCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.465">getBlockingFileCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.468">getBlockingFileCount</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreConfigInformation.html#getBlockingFileCount--">StoreConfigInformation</a></code></span></div>
 <div class="block">The number of files required before flushes for this store will be blocked.</div>
 <dl>
@@ -2038,7 +2051,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getBytesPerChecksum</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.475">getBytesPerChecksum</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.478">getBytesPerChecksum</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Returns the configured bytesPerChecksum value.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -2054,7 +2067,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getChecksumType</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util">ChecksumType</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.485">getChecksumType</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util">ChecksumType</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.488">getChecksumType</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Returns the configured checksum algorithm.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -2070,7 +2083,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCloseCheckInterval</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.497">getCloseCheckInterval</a>()</pre>
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.500">getCloseCheckInterval</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>how many bytes to write between status checks</dd>
@@ -2083,7 +2096,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getColumnFamilyDescriptor</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.502">getColumnFamilyDescriptor</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.505">getColumnFamilyDescriptor</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getColumnFamilyDescriptor--">getColumnFamilyDescriptor</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2096,7 +2109,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaxSequenceId</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.507">getMaxSequenceId</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.510">getMaxSequenceId</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMaxSequenceId--">getMaxSequenceId</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2111,7 +2124,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaxMemStoreTS</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.512">getMaxMemStoreTS</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.515">getMaxMemStoreTS</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMaxMemStoreTS--">getMaxMemStoreTS</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2127,7 +2140,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.ht
 <li class="blockList">
 <h4>getStoreHomedir</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.523">getStoreHomedir</a>(org.apache.hadoop.fs.Path&nbsp;tabledir,
+public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.526">getStoreHomedir</a>(org.apache.hadoop.fs.Path&nbsp;tabledir,
                                                                     <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;hri,
                                                                     byte[]&nbsp;family)</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
@@ -2148,7 +2161,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <li class="blockList">
 <h4>getStoreHomedir</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.535">getStoreHomedir</a>(org.apache.hadoop.fs.Path&nbsp;tabledir,
+public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.538">getStoreHomedir</a>(org.apache.hadoop.fs.Path&nbsp;tabledir,
                                                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedName,
                                                                     byte[]&nbsp;family)</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
@@ -2168,7 +2181,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataBlockEncoder</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.543">getDataBlockEncoder</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.546">getDataBlockEncoder</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the data block encoder</dd>
@@ -2181,7 +2194,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>setDataBlockEncoderInTest</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.551">setDataBlockEncoderInTest</a>(<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;blockEncoder)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.554">setDataBlockEncoderInTest</a>(<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileDataBlockEncoder</a>&nbsp;blockEncoder)</pre>
 <div class="block">Should be used only in tests.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -2195,7 +2208,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>loadStoreFiles</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.560">loadStoreFiles</a>(boolean&nbsp;warmup)
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.563">loadStoreFiles</a>(boolean&nbsp;warmup)
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Creates an unsorted list of StoreFile instances loaded in parallel
  from the given directory.</div>
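The parallel load can be pictured with a generic sketch built on an ExecutorCompletionService (an illustration of the pattern under assumptions, with a hypothetical openOne() helper, not the method's actual body):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.CompletionService;
    import java.util.concurrent.ExecutorCompletionService;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    class ParallelOpenSketch {
      List<String> openAll(List<String> paths) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        CompletionService<String> completion = new ExecutorCompletionService<>(pool);
        for (String path : paths) {
          completion.submit(() -> openOne(path));   // open each store file in parallel
        }
        List<String> opened = new ArrayList<>();
        for (int i = 0; i < paths.size(); i++) {
          opened.add(completion.take().get());      // gathered in completion order, hence unsorted
        }
        pool.shutdown();
        return opened;
      }

      String openOne(String path) { return path; }  // hypothetical placeholder for the real open
    }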
@@ -2211,7 +2224,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>openStoreFiles</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.565">openStoreFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Colle [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.568">openStoreFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Colle [...]
                                         boolean&nbsp;warmup)
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -2226,7 +2239,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>refreshStoreFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.642">refreshStoreFiles</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.645">refreshStoreFiles</a>()
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#refreshStoreFiles--">Store</a></code></span></div>
 <div class="block">Checks the underlying store files, and opens the files that have not been opened, and removes
@@ -2246,7 +2259,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>refreshStoreFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.652">refreshStoreFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;newFiles)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.655">refreshStoreFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;newFiles)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Replaces the store files that the store has with the given files. Mainly used by secondary
  region replicas to keep up to date with the primary region files.</div>
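A hedged usage fragment for this replica-refresh path (the variable `store` and the file names are assumptions; the call matches the signature shown above):

    // Assume `store` is the HStore of a secondary region replica.
    java.util.Collection<String> newFiles =
        java.util.Arrays.asList("f1a2b3c4d5", "e6f7a8b9c0");   // hypothetical store file names
    try {
      store.refreshStoreFiles(newFiles);   // open newly listed files, drop readers for removed ones
    } catch (java.io.IOException e) {
      // propagate or retry as appropriate for the caller
    }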
@@ -2262,7 +2275,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>refreshStoreFilesInternal</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.667">refreshStoreFilesInternal</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileInfo</a>&gt;&nbsp;newFiles)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.670">refreshStoreFilesInternal</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileInfo</a>&gt;&nbsp;newFiles)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Checks the underlying store files, and opens the files that have not
  been opened, and removes the store file readers for store files no longer
@@ -2280,7 +2293,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createStoreFileAndReader</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.720">createStoreFileAndReader</a>(org.apache.hadoop.fs.Path&nbsp;p)
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.723">createStoreFileAndReader</a>(org.apache.hadoop.fs.Path&nbsp;p)
                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2294,7 +2307,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createStoreFileAndReader</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.726">createStoreFileAndReader</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileInfo</a>&nbsp;info)
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.729">createStoreFileAndReader</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileInfo</a>&nbsp;info)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2308,7 +2321,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>startReplayingFromWAL</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.737">startReplayingFromWAL</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.740">startReplayingFromWAL</a>()</pre>
 <div class="block">This message intends to inform the MemStore that next coming updates
  are going to be part of the replaying edits from WAL</div>
 </li>
@@ -2319,7 +2332,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>stopReplayingFromWAL</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.745">stopReplayingFromWAL</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.748">stopReplayingFromWAL</a>()</pre>
 <div class="block">This message intends to inform the MemStore that the replaying edits from WAL
  are done</div>
 </li>
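A sketch of how the two calls above bracket a batch of replayed WAL edits; the HStore, the recovered cells, and the MemStoreSizing accumulator are assumed to be supplied by the surrounding replay code and are illustrative names only:

import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.MemStoreSizing;

public final class WalReplaySketch {
  // Tell the MemStore the next updates come from WAL replay, apply them,
  // then signal that the replay is finished.
  static void replayEdits(HStore store, List<Cell> recoveredCells, MemStoreSizing sizing) {
    store.startReplayingFromWAL();
    try {
      store.add(recoveredCells, sizing); // the Iterable<Cell> overload of add, documented further down
    } finally {
      store.stopReplayingFromWAL();
    }
  }
}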
@@ -2330,7 +2343,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>add</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.752">add</a>(<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.755">add</a>(<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&nbsp;cell,
                 <a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSizing.html" title="interface in org.apache.hadoop.hbase.regionserver">MemStoreSizing</a>&nbsp;memstoreSizing)</pre>
 <div class="block">Adds a value to the memstore</div>
 </li>
@@ -2341,7 +2354,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>add</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.769">add</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;cells,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.772">add</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;cells,
                 <a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSizing.html" title="interface in org.apache.hadoop.hbase.regionserver">MemStoreSizing</a>&nbsp;memstoreSizing)</pre>
 <div class="block">Adds the specified value to the memstore</div>
 </li>
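As an illustrative sketch of the two add overloads above, a single Cell built through the public CellBuilder API could be added to the store's memstore as follows; the store and the MemStoreSizing accumulator are assumed to come from the surrounding region code, and the row, family, qualifier, and value bytes are placeholders:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.MemStoreSizing;
import org.apache.hadoop.hbase.util.Bytes;

public final class MemStoreAddSketch {
  static void addOne(HStore store, MemStoreSizing sizing) {
    Cell cell = CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
        .setRow(Bytes.toBytes("row-1"))        // placeholder row key
        .setFamily(Bytes.toBytes("cf"))        // placeholder column family
        .setQualifier(Bytes.toBytes("q"))
        .setTimestamp(System.currentTimeMillis())
        .setType(Cell.Type.Put)
        .setValue(Bytes.toBytes("v"))
        .build();
    // The MemStoreSizing accumulator tracks the memstore size change caused by this add.
    store.add(cell, sizing);
  }
}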
@@ -2352,7 +2365,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>timeOfOldestEdit</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.784">timeOfOldestEdit</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.787">timeOfOldestEdit</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#timeOfOldestEdit--">Store</a></code></span></div>
 <div class="block">When was the last edit done in the memstore</div>
 <dl>
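For illustration, timeOfOldestEdit() can drive an age-based flush decision; the age threshold and the helper name below are assumptions of this sketch, not part of the documented API:

import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

public final class MemStoreAgeSketch {
  // Returns true if the oldest un-flushed memstore edit is older than maxAgeMs.
  static boolean memstoreOlderThan(HStore store, long maxAgeMs) {
    long oldestEditTime = store.timeOfOldestEdit();
    return EnvironmentEdgeManager.currentTime() - oldestEditTime > maxAgeMs;
  }
}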
@@ -2367,7 +2380,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefiles</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.792">getStorefiles</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.795">getStorefiles</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefiles--">getStorefiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -2382,7 +2395,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedFiles</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.797">getCompactedFiles</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.800">getCompactedFiles</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedFiles--">getCompactedFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
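A small sketch that inspects the two collections returned by getStorefiles() and getCompactedFiles(), e.g. for logging or metrics; reading getCompactedFiles() as the files compacted away but not yet cleaned up is the usual interpretation, stated here as an assumption:

import java.util.Collection;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;

public final class StoreFileInventorySketch {
  // Summarizes the live store files and the compacted-but-not-yet-removed files.
  static String summarize(HStore store) {
    Collection<HStoreFile> live = store.getStorefiles();
    Collection<HStoreFile> compacted = store.getCompactedFiles();
    return "live=" + live.size() + ", compactedAwaitingCleanup=" + compacted.size();
  }
}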
@@ -2395,7 +2408,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>assertBulkLoadHFileOk</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.805">assertBulkLoadHFileOk</a>(org.apache.hadoop.fs.Path&nbsp;srcPath)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.808">assertBulkLoadHFileOk</a>(org.apache.hadoop.fs.Path&nbsp;srcPath)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This throws a WrongRegionException if the HFile does not fit in this region, or an
  InvalidHFileException if the HFile is not valid.</div>
@@ -2411,7 +2424,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>preBulkLoadHFile</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.884">preBulkLoadHFile</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;srcPathStr,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.887">preBulkLoadHFile</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;srcPathStr,
                                                                                   long&nbsp;seqNum)
                                                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This method should only be called from Region. It is assumed that the ranges of values in the
@@ -2431,7 +2444,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkLoadHFile</h4>
-<pre>public&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.889">bulkLoadHFile</a>(byte[]&nbsp;family,
+<pre>public&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.892">bulkLoadHFile</a>(byte[]&nbsp;family,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;srcPathStr,
                                                org.apache.hadoop.fs.Path&nbsp;dstPath)
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
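The three bulk-load entries above fit together roughly as sketched below; the ordering of the Pair returned by preBulkLoadHFile (staged source first, destination second), the "cf" family name, and the sequence id are assumptions of this sketch rather than documented facts:

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public final class BulkLoadSketch {
  static Path bulkLoad(HStore store, String srcHFile, long seqNum) throws IOException {
    // Validate that the HFile fits this region/store before doing any work.
    store.assertBulkLoadHFileOk(new Path(srcHFile));
    // Stage the file; the returned pair is assumed to carry (source, destination) paths.
    Pair<Path, Path> staged = store.preBulkLoadHFile(srcHFile, seqNum);
    // Move the staged file into place and return the committed store file path.
    return store.bulkLoadHFile(Bytes.toBytes("cf"), srcHFile, staged.getSecond());
  }
}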
@@ -2447,7 +2460,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkLoadHFile</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.911">bulkLoadHFile</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileInfo</a>&nbsp;fileInfo)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.914">bulkLoadHFile</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileInfo.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileInfo</a>&nbsp;fileInfo)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2461,7 +2474,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkLoadHFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.916">bulkLoadHFile</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;sf)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.919">bulkLoadHFile</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;sf)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2475,7 +2488,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableCollection&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.948">close</a>()
+<pre>public&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableCollection&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.951">close</a>()
                                                                                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Close all the readers We don't need to worry about subsequent requests because the Region holds
  a write lock that will prevent any more reads or writes.</div>
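A sketch of closing a store and reporting how many readers were released; the caller is assumed to hold the Region's write lock as the description above requires, and the returned collection is the shaded Guava ImmutableCollection named in the signature:

import java.io.IOException;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableCollection;

public final class CloseStoreSketch {
  // Close all readers of the store and return how many store files were closed.
  static int closeStore(HStore store) throws IOException {
    ImmutableCollection<HStoreFile> closedFiles = store.close();
    return closedFiles.size();
  }
}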
@@ -2493,7 +2506,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>snapshot</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1015">snapshot</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1018">snapshot</a>()</pre>
 <div class="block">Snapshot this stores memstore. Call before running
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/HStore.html#flushCache-long-org.apache.hadoop.hbase.regionserver.MemStoreSnapshot-org.apache.hadoop.hbase.monitoring.MonitoredTask-org.apache.hadoop.hbase.regionserver.throttle.ThroughputController-org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker-"><code>flushCache(long, MemStoreSnapshot, MonitoredTask, ThroughputController,
  FlushLifeCycleTracker)</code></a>
@@ -2506,7 +2519,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>flushCache</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1033">flushCache</a>(long&nbsp;logCacheFlushId,
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1036">flushCache</a>(long&nbsp;logCacheFlushId,
                                                      <a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSnapshot</a>&nbsp;snapshot,
                                                      <a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status,
                                                      <a href="../../../../../org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.html" title="interface in org.apache.hadoop.hbase.regionserver.throttle">ThroughputController</a>&nbsp;throughputController,
@@ -2532,7 +2545,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>commitFile</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1086">commitFile</a>(org.apache.hadoop.fs.Path&nbsp;path,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1089">commitFile</a>(org.apache.hadoop.fs.Path&nbsp;path,
                               long&nbsp;logCacheFlushId,
                               <a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a>&nbsp;status)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2554,7 +2567,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createWriterInTmp</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1116">createWriterInTmp</a>(long&nbsp;maxKeyCount,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1119">createWriterInTmp</a>(long&nbsp;maxKeyCount,
                                          <a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
                                          boolean&nbsp;isCompaction,
                                          boolean&nbsp;includeMVCCReadpoint,
@@ -2581,7 +2594,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileContext</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1165">createFileContext</a>(<a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1175">createFileContext</a>(<a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;compression,
                                        boolean&nbsp;includeMVCCReadpoint,
                                        boolean&nbsp;includesTag,
                                        <a href="../../../../../org/apache/hadoop/hbase/io/crypto/Encryption.Context.html" title="class in org.apache.hadoop.hbase.io.crypto">Encryption.Context</a>&nbsp;cryptoContext)</pre>
@@ -2593,7 +2606,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1190">getTotalSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs)</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1200">getTotalSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs)</pre>
 </li>
 </ul>
 <a name="updateStorefiles-java.util.List-long-">
@@ -2602,7 +2615,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>updateStorefiles</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1201">updateStorefiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1211">updateStorefiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
                                  long&nbsp;snapshotId)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Change storeFiles adding into place the Reader produced by this new flush.</div>
@@ -2623,7 +2636,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>notifyChangedReadersObservers</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1232">notifyChangedReadersObservers</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1242">notifyChangedReadersObservers</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Notify all observers that set of Readers has changed.</div>
 <dl>
@@ -2638,7 +2651,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1256">getScanners</a>(boolean&nbsp;cacheBlocks,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1266">getScanners</a>(boolean&nbsp;cacheBlocks,
                                          boolean&nbsp;isGet,
                                          boolean&nbsp;usePread,
                                          boolean&nbsp;isCompaction,
@@ -2670,7 +2683,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1276">getScanners</a>(boolean&nbsp;cacheBlocks,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1286">getScanners</a>(boolean&nbsp;cacheBlocks,
                                          boolean&nbsp;usePread,
                                          boolean&nbsp;isCompaction,
                                          <a href="../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.html" title="class in org.apache.hadoop.hbase.regionserver.querymatcher">ScanQueryMatcher</a>&nbsp;matcher,
@@ -2705,7 +2718,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>clearAndClose</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1310">clearAndClose</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;scanners)</pre>
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1320">clearAndClose</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;scanners)</pre>
 </li>
 </ul>
 <a name="getScanners-java.util.List-boolean-boolean-boolean-boolean-org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher-byte:A-byte:A-long-boolean-">
@@ -2714,7 +2727,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1334">getScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1344">getScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java [...]
                                          boolean&nbsp;cacheBlocks,
                                          boolean&nbsp;isGet,
                                          boolean&nbsp;usePread,
@@ -2751,7 +2764,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1358">getScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1368">getScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java [...]
                                          boolean&nbsp;cacheBlocks,
                                          boolean&nbsp;usePread,
                                          boolean&nbsp;isCompaction,
@@ -2791,7 +2804,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>addChangedReaderObserver</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1391">addChangedReaderObserver</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1401">addChangedReaderObserver</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>o</code> - Observer who wants to know about changes in set of Readers</dd>
@@ -2804,7 +2817,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteChangedReaderObserver</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1398">deleteChangedReaderObserver</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1408">deleteChangedReaderObserver</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.html" title="interface in org.apache.hadoop.hbase.regionserver">ChangedReadersObserver</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>o</code> - Observer no longer interested in changes in set of Readers.</dd>
@@ -2817,7 +2830,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>compact</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1450">compact</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/ [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1460">compact</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/ [...]
                                 <a href="../../../../../org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.html" title="interface in org.apache.hadoop.hbase.regionserver.throttle">ThroughputController</a>&nbsp;throughputController,
                                 <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2874,7 +2887,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>doCompaction</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1481">doCompaction</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/comp [...]
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1491">doCompaction</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/comp [...]
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToCompact,
                                         <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user,
                                         long&nbsp;compactionStartTime,
@@ -2892,7 +2905,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>moveCompactedFilesIntoPlace</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1514">moveCompactedFilesIntoPlace</a>(<a href="../../../../../org/apache/hadoop/hbase/regi [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1524">moveCompactedFilesIntoPlace</a>(<a href="../../../../../org/apache/hadoop/hbase/regi [...]
                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;newFiles,
                                                      <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2908,7 +2921,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>moveFileIntoPlace</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1530">moveFileIntoPlace</a>(org.apache.hadoop.fs.Path&nbsp;newFile)
+<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1540">moveFileIntoPlace</a>(org.apache.hadoop.fs.Path&nbsp;newFile)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2922,7 +2935,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>writeCompactionWalRecord</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1542">writeCompactionWalRecord</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacted,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1552">writeCompactionWalRecord</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesCompacted,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;newFiles)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the compaction WAL record.</div>
@@ -2941,7 +2954,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>replaceStoreFiles</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1562">replaceStoreFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1572">replaceStoreFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles,
                        <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;result)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -2956,7 +2969,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>updateSpaceQuotaAfterFileReplacement</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1593">updateSpaceQuotaAfterFileReplacement</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;sizeStore,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1603">updateSpaceQuotaAfterFileReplacement</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;sizeStore,
                                           <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;oldFiles,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;newFiles)</pre>
@@ -2977,7 +2990,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>logCompactionEndMessage</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1620">logCompactionEndMessage</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1630">logCompactionEndMessage</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;sfs,
                                      long&nbsp;now,
                                      long&nbsp;compactionStartTime)</pre>
@@ -2996,7 +3009,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>replayCompactionMarker</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1659">replayCompactionMarker</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor&nbsp;compaction,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1669">replayCompactionMarker</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor&nbsp;compaction,
                                    boolean&nbsp;pickCompactionFiles,
                                    boolean&nbsp;removeFiles)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -3017,7 +3030,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>compactRecentForTestingAssumingDefaultPolicy</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1724">compactRecentForTestingAssumingDefaultPolicy</a>(int&nbsp;N)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1734">compactRecentForTestingAssumingDefaultPolicy</a>(int&nbsp;N)
                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This method tries to compact N recent files for testing.
  Note that because compacting "recent" files only makes sense for some policies,
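Since this is explicitly a test hook, a test might invoke it as sketched below; the choice of three files is arbitrary and illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.regionserver.HStore;

public final class CompactRecentSketch {
  // Force a compaction of the N most recent files; only meaningful for policies
  // where "recent" files are well defined, per the description above.
  static void compactRecent(HStore store) throws IOException {
    store.compactRecentForTestingAssumingDefaultPolicy(3);
  }
}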
@@ -3037,7 +3050,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>hasReferences</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1776">hasReferences</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1786">hasReferences</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#hasReferences--">hasReferences</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3052,7 +3065,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactionProgress</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionProgress</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1794">getCompactionProgress</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionProgress</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1804">getCompactionProgress</a>()</pre>
 <div class="block">getter for CompactionProgress object</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -3066,7 +3079,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldPerformMajorCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1799">shouldPerformMajorCompaction</a>()
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1809">shouldPerformMajorCompaction</a>()
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#shouldPerformMajorCompaction--">Store</a></code></span></div>
 <div class="block">Tests whether we should run a major compaction. For example, if the configured major compaction
@@ -3087,7 +3100,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>requestCompaction</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1811">requestCompaction</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1821">requestCompaction</a>()
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3101,7 +3114,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>requestCompaction</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1815">requestCompaction</a>(int&nbsp;priority,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1825">requestCompaction</a>(int&nbsp;priority,
                                                      <a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver.compactions">CompactionLifeCycleTracker</a>&nbsp;tracker,
                                                      <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
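The compaction-related entries above (shouldPerformMajorCompaction, the two requestCompaction overloads) and the cancelRequestedCompaction entry further down combine roughly as follows; gating on shouldPerformMajorCompaction and cancelling via a caller-supplied "proceed" flag are illustrative assumptions, not behavior prescribed by the API:

import java.io.IOException;
import java.util.Optional;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;

public final class CompactionRequestSketch {
  // Ask the store for a compaction context, or cancel the request if the caller
  // decides not to run it after all.
  static Optional<CompactionContext> maybeRequest(HStore store, boolean proceed) throws IOException {
    // Illustrative gate: only bother when the policy says a major compaction is due.
    if (!store.shouldPerformMajorCompaction()) {
      return Optional.empty();
    }
    Optional<CompactionContext> compaction = store.requestCompaction();
    if (compaction.isPresent() && !proceed) {
      // Assumed cleanup path: a requested compaction that will not run is cancelled.
      store.cancelRequestedCompaction(compaction.get());
      return Optional.empty();
    }
    return compaction;
  }
}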
@@ -3117,7 +3130,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>addToCompactingFiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1897">addToCompactingFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToAdd)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1907">addToCompactingFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToAdd)</pre>
 <div class="block">Adds the files to compacting files. filesCompacting must be locked.</div>
 </li>
 </ul>
@@ -3127,7 +3140,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>removeUnneededFiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1909">removeUnneededFiles</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1919">removeUnneededFiles</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3141,7 +3154,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>cancelRequestedCompaction</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1945">cancelRequestedCompaction</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&nbsp;compaction)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1955">cancelRequestedCompaction</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionContext.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionContext</a>&nbsp;compaction)</pre>
 </li>
 </ul>
 <a name="finishCompactionRequest-org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl-">
@@ -3150,7 +3163,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>finishCompactionRequest</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1949">finishCompactionRequest</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1959">finishCompactionRequest</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/CompactionRequestImpl.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactionRequestImpl</a>&nbsp;cr)</pre>
 </li>
 </ul>
 <a name="validateStoreFile-org.apache.hadoop.fs.Path-">
@@ -3159,7 +3172,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>validateStoreFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1965">validateStoreFile</a>(org.apache.hadoop.fs.Path&nbsp;path)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1975">validateStoreFile</a>(org.apache.hadoop.fs.Path&nbsp;path)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Validates a store file by opening and closing it. In HFileV2 this should not be an expensive
  operation.</div>
@@ -3177,7 +3190,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>completeCompaction</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1984">completeCompaction</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.1994">completeCompaction</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedFiles)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Update counts.</div>
 <dl>
@@ -3194,7 +3207,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>versionsToReturn</h4>
-<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2004">versionsToReturn</a>(int&nbsp;wantedVersions)</pre>
+<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2014">versionsToReturn</a>(int&nbsp;wantedVersions)</pre>
 </li>
 </ul>
 <a name="canSplit--">
@@ -3203,7 +3216,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>canSplit</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2014">canSplit</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2024">canSplit</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#canSplit--">Store</a></code></span></div>
 <div class="block">Returns whether this store is splittable, i.e., no reference file in this store.</div>
 <dl>
@@ -3218,7 +3231,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSplitPoint</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2031">getSplitPoint</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2041">getSplitPoint</a>()</pre>
 <div class="block">Determines if Store should be split.</div>
 </li>
 </ul>
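
A short sketch, under the same caveats, showing how the canSplit()/getSplitPoint() pair documented above might be consumed; the method name and the null return convention are assumptions.

    import java.util.Optional;
    import org.apache.hadoop.hbase.regionserver.HStore;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class SplitPointSketch {
      // Returns a printable split point, or null when the store is not splittable.
      static String describeSplitPoint(HStore store) {
        if (!store.canSplit()) {
          return null; // reference files present, store cannot be split
        }
        Optional<byte[]> splitPoint = store.getSplitPoint();
        return splitPoint.map(Bytes::toStringBinary).orElse(null);
      }
    }
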
@@ -3228,7 +3241,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastCompactSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2051">getLastCompactSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2061">getLastCompactSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getLastCompactSize--">getLastCompactSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3243,7 +3256,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2056">getSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2066">getSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getSize--">getSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3258,7 +3271,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>triggerMajorCompaction</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2060">triggerMajorCompaction</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2070">triggerMajorCompaction</a>()</pre>
 </li>
 </ul>
 <a name="getScanner-org.apache.hadoop.hbase.client.Scan-java.util.NavigableSet-long-">
@@ -3267,7 +3280,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2076">getScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2086">getScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;byte[]&gt;&nbsp;targetCols,
                                   long&nbsp;readPt)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
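
A minimal sketch of driving the getScanner(Scan, NavigableSet, long) overload above; the qualifier bytes, the read point, and the peek-then-close pattern are placeholder assumptions, not taken from this commit.

    import java.io.IOException;
    import java.util.NavigableSet;
    import java.util.TreeSet;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.regionserver.HStore;
    import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class StoreScannerSketch {
      // Opens a scanner over one qualifier and peeks at the first cell, if any.
      static Cell peekFirstCell(HStore store, long readPoint) throws IOException {
        NavigableSet<byte[]> targetCols = new TreeSet<>(Bytes.BYTES_COMPARATOR);
        targetCols.add(Bytes.toBytes("q1")); // placeholder qualifier
        KeyValueScanner scanner = store.getScanner(new Scan(), targetCols, readPoint);
        try {
          return scanner.peek(); // null when the store has no matching cells
        } finally {
          scanner.close();
        }
      }
    }
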
@@ -3290,7 +3303,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createScanner</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2093">createScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2103">createScanner</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a>&nbsp;scan,
                                         <a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo,
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true" title="class or interface in java.util">NavigableSet</a>&lt;byte[]&gt;&nbsp;targetCols,
                                         long&nbsp;readPt)
@@ -3307,7 +3320,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>recreateScanners</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2115">recreateScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/KeyValueScanner.html" title="interface in org.apache.hadoop.hbase.regionserver">KeyValueScanner</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2125">recreateScanners</a>(<a href="https://docs.oracle.com/javase/8/docs/api [...]
                                               boolean&nbsp;cacheBlocks,
                                               boolean&nbsp;usePread,
                                               boolean&nbsp;isCompaction,
@@ -3346,7 +3359,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2149">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2159">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -3359,7 +3372,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2154">getStorefilesCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2164">getStorefilesCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesCount--">getStorefilesCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3374,7 +3387,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedFilesCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2159">getCompactedFilesCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2169">getCompactedFilesCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedFilesCount--">getCompactedFilesCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3389,7 +3402,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreFileAgeStream</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/LongStream.html?is-external=true" title="class or interface in java.util.stream">LongStream</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2163">getStoreFileAgeStream</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/LongStream.html?is-external=true" title="class or interface in java.util.stream">LongStream</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2173">getStoreFileAgeStream</a>()</pre>
 </li>
 </ul>
 <a name="getMaxStoreFileAge--">
@@ -3398,7 +3411,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMaxStoreFileAge</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2176">getMaxStoreFileAge</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2186">getMaxStoreFileAge</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMaxStoreFileAge--">getMaxStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3413,7 +3426,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMinStoreFileAge</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2181">getMinStoreFileAge</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalLong.html?is-external=true" title="class or interface in java.util">OptionalLong</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2191">getMinStoreFileAge</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMinStoreFileAge--">getMinStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3428,7 +3441,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getAvgStoreFileAge</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalDouble.html?is-external=true" title="class or interface in java.util">OptionalDouble</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2186">getAvgStoreFileAge</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/OptionalDouble.html?is-external=true" title="class or interface in java.util">OptionalDouble</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2196">getAvgStoreFileAge</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getAvgStoreFileAge--">getAvgStoreFileAge</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
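
The three age getters in this area return OptionalLong/OptionalDouble; a small sketch of unwrapping them for a metrics report (the helper name and the -1 sentinels are assumptions).

    import java.util.OptionalDouble;
    import java.util.OptionalLong;
    import org.apache.hadoop.hbase.regionserver.HStore;

    public final class StoreFileAgeSketch {
      static String reportFileAges(HStore store) {
        OptionalLong max = store.getMaxStoreFileAge();
        OptionalLong min = store.getMinStoreFileAge();
        OptionalDouble avg = store.getAvgStoreFileAge();
        // The optionals are empty when the store has no HFiles yet.
        return String.format("maxAgeMs=%d minAgeMs=%d avgAgeMs=%.1f hfiles=%d refs=%d",
            max.orElse(-1L), min.orElse(-1L), avg.orElse(-1.0),
            store.getNumHFiles(), store.getNumReferenceFiles());
      }
    }
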
@@ -3443,7 +3456,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumReferenceFiles</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2191">getNumReferenceFiles</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2201">getNumReferenceFiles</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getNumReferenceFiles--">getNumReferenceFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3458,7 +3471,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumHFiles</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2197">getNumHFiles</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2207">getNumHFiles</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getNumHFiles--">getNumHFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3473,7 +3486,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreSizeUncompressed</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2203">getStoreSizeUncompressed</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2213">getStoreSizeUncompressed</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStoreSizeUncompressed--">getStoreSizeUncompressed</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3488,7 +3501,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2208">getStorefilesSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2218">getStorefilesSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesSize--">getStorefilesSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3503,7 +3516,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getHFilesSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2214">getHFilesSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2224">getHFilesSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getHFilesSize--">getHFilesSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3518,7 +3531,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalUncompressedBytes</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2220">getTotalUncompressedBytes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;files)</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2230">getTotalUncompressedBytes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;files)</pre>
 </li>
 </ul>
 <a name="getStorefilesSize-java.util.Collection-java.util.function.Predicate-">
@@ -3527,7 +3540,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2226">getStorefilesSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;files,
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2236">getStorefilesSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;files,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true" title="class or interface in java.util.function">Predicate</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;predicate)</pre>
 </li>
 </ul>
@@ -3537,7 +3550,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefileFieldSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2231">getStorefileFieldSize</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file,
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2241">getStorefileFieldSize</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file,
                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/ToLongFunction.html?is-external=true" title="class or interface in java.util.function">ToLongFunction</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileReader</a>&gt;&nbsp;f)</pre>
 </li>
 </ul>
@@ -3547,7 +3560,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesFieldSize</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2242">getStorefilesFieldSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/ToLongFunction.html?is-external=true" title="class or interface in java.util.function">ToLongFunction</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileReader</a>&gt;&nbsp [...]
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2252">getStorefilesFieldSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/ToLongFunction.html?is-external=true" title="class or interface in java.util.function">ToLongFunction</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileReader.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileReader</a>&gt;&nbsp [...]
 </li>
 </ul>
 <a name="getStorefilesRootLevelIndexSize--">
@@ -3556,7 +3569,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStorefilesRootLevelIndexSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2248">getStorefilesRootLevelIndexSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2258">getStorefilesRootLevelIndexSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getStorefilesRootLevelIndexSize--">getStorefilesRootLevelIndexSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3571,7 +3584,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalStaticIndexSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2253">getTotalStaticIndexSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2263">getTotalStaticIndexSize</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getTotalStaticIndexSize--">Store</a></code></span></div>
 <div class="block">Returns the total size of all index blocks in the data block indexes, including the root level,
  intermediate levels, and the leaf level for multi-level indexes, or just the root level for
@@ -3590,7 +3603,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalStaticBloomSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2258">getTotalStaticBloomSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2268">getTotalStaticBloomSize</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getTotalStaticBloomSize--">Store</a></code></span></div>
 <div class="block">Returns the total byte size of all Bloom filter bit arrays. For compound Bloom filters even the
  Bloom blocks currently not loaded into the block cache are counted.</div>
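
Similarly, a hedged sketch gathering the on-disk and static index/Bloom footprint getters documented in this block; the output field names are assumptions.

    import org.apache.hadoop.hbase.regionserver.HStore;

    public final class StoreFootprintSketch {
      static String reportFootprint(HStore store) {
        // All values are in bytes; the static index/Bloom sizes count the
        // structures whether or not they are currently in the block cache.
        return "storefilesSize=" + store.getStorefilesSize()
            + " uncompressed=" + store.getStoreSizeUncompressed()
            + " rootIndex=" + store.getStorefilesRootLevelIndexSize()
            + " staticIndex=" + store.getTotalStaticIndexSize()
            + " staticBloom=" + store.getTotalStaticBloomSize();
      }
    }
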
@@ -3608,7 +3621,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMemStoreSize</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2263">getMemStoreSize</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSize.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2273">getMemStoreSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMemStoreSize--">getMemStoreSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3623,7 +3636,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactPriority</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2268">getCompactPriority</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2278">getCompactPriority</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactPriority--">getCompactPriority</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3636,7 +3649,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>throttleCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2276">throttleCompaction</a>(long&nbsp;compactionSize)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2286">throttleCompaction</a>(long&nbsp;compactionSize)</pre>
 </li>
 </ul>
 <a name="getHRegion--">
@@ -3645,7 +3658,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getHRegion</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2280">getHRegion</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2290">getHRegion</a>()</pre>
 </li>
 </ul>
 <a name="getCoprocessorHost--">
@@ -3654,7 +3667,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCoprocessorHost</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2284">getCoprocessorHost</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionCoprocessorHost</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2294">getCoprocessorHost</a>()</pre>
 </li>
 </ul>
 <a name="getRegionInfo--">
@@ -3663,7 +3676,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionInfo</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2289">getRegionInfo</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2299">getRegionInfo</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getRegionInfo--">getRegionInfo</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3678,7 +3691,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>areWritesEnabled</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2294">areWritesEnabled</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2304">areWritesEnabled</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#areWritesEnabled--">areWritesEnabled</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3691,7 +3704,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getSmallestReadPoint</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2299">getSmallestReadPoint</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2309">getSmallestReadPoint</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getSmallestReadPoint--">getSmallestReadPoint</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3708,7 +3721,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>upsert</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2314">upsert</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;cells,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2324">upsert</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;cells,
                    long&nbsp;readpoint,
                    <a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreSizing.html" title="interface in org.apache.hadoop.hbase.regionserver">MemStoreSizing</a>&nbsp;memstoreSizing)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
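
A minimal sketch of the upsert(Iterable, long, MemStoreSizing) call above; the caller is assumed to already hold prepared cells, a read point, and a MemStoreSizing instance, since constructing those is outside this snippet.

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.regionserver.HStore;
    import org.apache.hadoop.hbase.regionserver.MemStoreSizing;

    public final class UpsertSketch {
      // Upserts the given cells into the memstore and returns the store's
      // memstore size afterwards (as a string, for logging).
      static String upsertAndReport(HStore store, List<Cell> cells, long readPoint,
          MemStoreSizing sizing) throws IOException {
        store.upsert(cells, readPoint, sizing);
        return String.valueOf(store.getMemStoreSize());
      }
    }
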
@@ -3733,7 +3746,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>createFlushContext</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2324">createFlushContext</a>(long&nbsp;cacheFlushId,
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFlushContext.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFlushContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2334">createFlushContext</a>(long&nbsp;cacheFlushId,
                                             <a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushLifeCycleTracker.html" title="interface in org.apache.hadoop.hbase.regionserver">FlushLifeCycleTracker</a>&nbsp;tracker)</pre>
 </li>
 </ul>
@@ -3743,7 +3756,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>needsCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2473">needsCompaction</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2483">needsCompaction</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#needsCompaction--">Store</a></code></span></div>
 <div class="block">See if there's too much store files in this store</div>
 <dl>
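
A small diagnostic sketch tying together needsCompaction(), hasTooManyStoreFiles(), and getCompactPriority() as documented around here; purely illustrative, with the helper name as an assumption.

    import org.apache.hadoop.hbase.regionserver.HStore;

    public final class CompactionPressureSketch {
      static String describeCompactionState(HStore store) {
        // needsCompaction() reflects the store engine's view of "too many files";
        // getCompactPriority() typically drops as the file count grows toward
        // the blocking limit.
        return "storefiles=" + store.getStorefilesCount()
            + " needsCompaction=" + store.needsCompaction()
            + " tooManyStoreFiles=" + store.hasTooManyStoreFiles()
            + " compactPriority=" + store.getCompactPriority();
      }
    }
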
@@ -3761,7 +3774,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2486">getCacheConfig</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2496">getCacheConfig</a>()</pre>
 <div class="block">Used for tests.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -3775,7 +3788,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2501">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2511">heapSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize--">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -3791,7 +3804,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getComparator</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2507">getComparator</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2517">getComparator</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getComparator--">getComparator</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3804,7 +3817,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanInfo</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2511">getScanInfo</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2521">getScanInfo</a>()</pre>
 </li>
 </ul>
 <a name="setScanInfo-org.apache.hadoop.hbase.regionserver.ScanInfo-">
@@ -3813,7 +3826,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>setScanInfo</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2519">setScanInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2529">setScanInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanInfo.html" title="class in org.apache.hadoop.hbase.regionserver">ScanInfo</a>&nbsp;scanInfo)</pre>
 <div class="block">Set scan info, used by test</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -3827,7 +3840,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>hasTooManyStoreFiles</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2524">hasTooManyStoreFiles</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2534">hasTooManyStoreFiles</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#hasTooManyStoreFiles--">hasTooManyStoreFiles</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3842,7 +3855,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2529">getFlushedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2539">getFlushedCellsCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushedCellsCount--">getFlushedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3857,7 +3870,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2534">getFlushedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2544">getFlushedCellsSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushedCellsSize--">getFlushedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3872,7 +3885,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getFlushedOutputFileSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2539">getFlushedOutputFileSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2549">getFlushedOutputFileSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getFlushedOutputFileSize--">getFlushedOutputFileSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3887,7 +3900,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2544">getCompactedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2554">getCompactedCellsCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedCellsCount--">getCompactedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3902,7 +3915,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2549">getCompactedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2559">getCompactedCellsSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactedCellsSize--">getCompactedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3917,7 +3930,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMajorCompactedCellsCount</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2554">getMajorCompactedCellsCount</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2564">getMajorCompactedCellsCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMajorCompactedCellsCount--">getMajorCompactedCellsCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3932,7 +3945,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getMajorCompactedCellsSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2559">getMajorCompactedCellsSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2569">getMajorCompactedCellsSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getMajorCompactedCellsSize--">getMajorCompactedCellsSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -3947,7 +3960,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreEngine</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2568">getStoreEngine</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreEngine.html" title="class in org.apache.hadoop.hbase.regionserver">StoreEngine</a>&lt;?,?,?,?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2578">getStoreEngine</a>()</pre>
 <div class="block">Returns the StoreEngine that is backing this concrete implementation of Store.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -3961,7 +3974,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getOffPeakHours</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2572">getOffPeakHours</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">OffPeakHours</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2582">getOffPeakHours</a>()</pre>
 </li>
 </ul>
 <a name="onConfigurationChange-org.apache.hadoop.conf.Configuration-">
@@ -3970,7 +3983,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>onConfigurationChange</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2580">onConfigurationChange</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2590">onConfigurationChange</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">This method would be called by the <a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf"><code>ConfigurationManager</code></a>
  object when the <code>Configuration</code> object is reloaded from disk.</div>
 <dl>
@@ -3985,7 +3998,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>registerChildren</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2592">registerChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2602">registerChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
 <div class="block">Needs to be called to register the children to the manager.</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -4001,7 +4014,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>deregisterChildren</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2600">deregisterChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2610">deregisterChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;manager)</pre>
 <div class="block">Needs to be called to deregister the children from the manager.</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -4017,7 +4030,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompactionPressure</h4>
-<pre>public&nbsp;double&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2605">getCompactionPressure</a>()</pre>
+<pre>public&nbsp;double&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2615">getCompactionPressure</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCompactionPressure--">Store</a></code></span></div>
 <div class="block">This value can represent the degree of emergency of compaction for this store. It should be
  greater than or equal to 0.0, any value greater than 1.0 means we have too many store files.
@@ -4044,7 +4057,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>isPrimaryReplicaStore</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2610">isPrimaryReplicaStore</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2620">isPrimaryReplicaStore</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#isPrimaryReplicaStore--">isPrimaryReplicaStore</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -4057,7 +4070,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>preSnapshotOperation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2618">preSnapshotOperation</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2628">preSnapshotOperation</a>()</pre>
 <div class="block">Sets the store up for a region level snapshot operation.</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -4071,7 +4084,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>postSnapshotOperation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2626">postSnapshotOperation</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2636">postSnapshotOperation</a>()</pre>
 <div class="block">Perform tasks needed after the completion of snapshot operation.</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -4085,7 +4098,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>closeAndArchiveCompactedFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2633">closeAndArchiveCompactedFiles</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2643">closeAndArchiveCompactedFiles</a>()
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Closes and archives the compacted files under this store</div>
 <dl>
@@ -4100,7 +4113,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>removeCompactedfiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2663">removeCompactedfiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedfiles)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2673">removeCompactedfiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;compactedfiles)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Archives and removes the compacted files</div>
 <dl>
@@ -4117,7 +4130,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreFileSize</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2750">getStoreFileSize</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file)</pre>
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2760">getStoreFileSize</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&nbsp;file)</pre>
 <div class="block">Computes the length of a store file without succumbing to any errors along the way. If an
  error is encountered, the implementation returns <code>0</code> instead of the actual size.</div>
 <dl>
@@ -4134,7 +4147,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>preFlushSeqIDEstimation</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2768">preFlushSeqIDEstimation</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2778">preFlushSeqIDEstimation</a>()</pre>
 </li>
 </ul>
 <a name="isSloppyMemStore--">
@@ -4143,7 +4156,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>isSloppyMemStore</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2773">isSloppyMemStore</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2783">isSloppyMemStore</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#isSloppyMemStore--">isSloppyMemStore</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -4158,7 +4171,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>clearCompactedfiles</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2777">clearCompactedfiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToRemove)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2787">clearCompactedfiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HStoreFile.html" title="class in org.apache.hadoop.hbase.regionserver">HStoreFile</a>&gt;&nbsp;filesToRemove)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -4172,7 +4185,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>reportArchivedFilesForQuota</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2787">reportArchivedFilesForQuota</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFile</a>&gt;&nbsp;archivedFiles,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2797">reportArchivedFilesForQuota</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFile.html" title="interface in org.apache.hadoop.hbase.regionserver">StoreFile</a>&gt;&nbsp;archivedFiles,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;fileSizes)</pre>
 </li>
 </ul>
@@ -4182,7 +4195,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getCurrentParallelPutCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2815">getCurrentParallelPutCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2825">getCurrentParallelPutCount</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html#getCurrentParallelPutCount--">getCurrentParallelPutCount</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/Store.html" title="interface in org.apache.hadoop.hbase.regionserver">Store</a></code></dd>
@@ -4195,7 +4208,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreRefCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2819">getStoreRefCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2829">getStoreRefCount</a>()</pre>
 </li>
 </ul>
 <a name="getMaxCompactedStoreFileRefCount--">
@@ -4204,7 +4217,7 @@ public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-ht
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMaxCompactedStoreFileRefCount</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2829">getMaxCompactedStoreFileRefCount</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HStore.html#line.2839">getMaxCompactedStoreFileRefCount</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>get maximum ref count of storeFile among all compacted HStore Files
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 8e3037f..1fe071d 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -735,20 +735,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index bd7348f..e1c6d6d 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,8 +130,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index a68742c..0de0029 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -248,9 +248,9 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
index 581ff15..732dfb2 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
@@ -166,8 +166,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/SyncReplicationState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">SyncReplicationState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/ReplicationPeer.PeerState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">ReplicationPeer.PeerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/SyncReplicationState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">SyncReplicationState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
index 883f175..087555d 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
@@ -192,8 +192,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceFactoryImpl.SourceHolder.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">MetricsReplicationSourceFactoryImpl.SourceHolder</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
index 795500d..b47c8cc 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
@@ -110,8 +110,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index f42b3c3..0c91adf 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -162,12 +162,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index d2656d9..dfc1137 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -192,9 +192,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 50e8c8b..7d3c2dc 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -559,14 +559,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HbckErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HbckErrorReporter.ERROR_CODE</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HbckErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HbckErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index f2c94bd..d78cc8b 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -199,8 +199,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">RegionGroupingProvider.Strategies</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">RegionGroupingProvider.Strategies</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index cb7aa86..26b603c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -18,9 +18,9 @@
 <span class="sourceLineNo">010</span>  justification="Intentional; to be modified in test")<a name="line.10"></a>
 <span class="sourceLineNo">011</span>public class Version {<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String version = new String("3.0.0-SNAPSHOT");<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String revision = "ab9766599dfae624330631ca6352d8a00bc2e607";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String revision = "33f45d441385c37d5cb00a40a29d0aaa0caac7db";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String user = "jenkins";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String date = "Sat Jan  4 14:36:01 UTC 2020";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String date = "Mon Jan  6 14:36:43 UTC 2020";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>  public static final String url = "git://jenkins-websites-he-de.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.16"></a>
 <span class="sourceLineNo">017</span>  public static final String srcChecksum = "(stdin)=";<a name="line.17"></a>
 <span class="sourceLineNo">018</span>}<a name="line.18"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.DeadServerMetricRegionChore.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.DeadServerMetricRegionChore.html
index 3743ca0..6258247 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.DeadServerMetricRegionChore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.DeadServerMetricRegionChore.html
@@ -1772,7 +1772,7 @@
 <span class="sourceLineNo">1764</span><a name="line.1764"></a>
 <span class="sourceLineNo">1765</span>  // should be called under the RegionStateNode lock<a name="line.1765"></a>
 <span class="sourceLineNo">1766</span>  // for SCP<a name="line.1766"></a>
-<span class="sourceLineNo">1767</span>  void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
+<span class="sourceLineNo">1767</span>  public void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
 <span class="sourceLineNo">1768</span>    RegionState.State state = regionNode.getState();<a name="line.1768"></a>
 <span class="sourceLineNo">1769</span>    ServerName regionLocation = regionNode.getRegionLocation();<a name="line.1769"></a>
 <span class="sourceLineNo">1770</span>    regionNode.transitionState(State.ABNORMALLY_CLOSED);<a name="line.1770"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html
index 3743ca0..6258247 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionChore.html
@@ -1772,7 +1772,7 @@
 <span class="sourceLineNo">1764</span><a name="line.1764"></a>
 <span class="sourceLineNo">1765</span>  // should be called under the RegionStateNode lock<a name="line.1765"></a>
 <span class="sourceLineNo">1766</span>  // for SCP<a name="line.1766"></a>
-<span class="sourceLineNo">1767</span>  void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
+<span class="sourceLineNo">1767</span>  public void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
 <span class="sourceLineNo">1768</span>    RegionState.State state = regionNode.getState();<a name="line.1768"></a>
 <span class="sourceLineNo">1769</span>    ServerName regionLocation = regionNode.getRegionLocation();<a name="line.1769"></a>
 <span class="sourceLineNo">1770</span>    regionNode.transitionState(State.ABNORMALLY_CLOSED);<a name="line.1770"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
index 3743ca0..6258247 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionInTransitionStat.html
@@ -1772,7 +1772,7 @@
 <span class="sourceLineNo">1764</span><a name="line.1764"></a>
 <span class="sourceLineNo">1765</span>  // should be called under the RegionStateNode lock<a name="line.1765"></a>
 <span class="sourceLineNo">1766</span>  // for SCP<a name="line.1766"></a>
-<span class="sourceLineNo">1767</span>  void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
+<span class="sourceLineNo">1767</span>  public void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
 <span class="sourceLineNo">1768</span>    RegionState.State state = regionNode.getState();<a name="line.1768"></a>
 <span class="sourceLineNo">1769</span>    ServerName regionLocation = regionNode.getRegionLocation();<a name="line.1769"></a>
 <span class="sourceLineNo">1770</span>    regionNode.transitionState(State.ABNORMALLY_CLOSED);<a name="line.1770"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionMetaLoadingVisitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionMetaLoadingVisitor.html
index 3743ca0..6258247 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionMetaLoadingVisitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.RegionMetaLoadingVisitor.html
@@ -1772,7 +1772,7 @@
 <span class="sourceLineNo">1764</span><a name="line.1764"></a>
 <span class="sourceLineNo">1765</span>  // should be called under the RegionStateNode lock<a name="line.1765"></a>
 <span class="sourceLineNo">1766</span>  // for SCP<a name="line.1766"></a>
-<span class="sourceLineNo">1767</span>  void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
+<span class="sourceLineNo">1767</span>  public void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
 <span class="sourceLineNo">1768</span>    RegionState.State state = regionNode.getState();<a name="line.1768"></a>
 <span class="sourceLineNo">1769</span>    ServerName regionLocation = regionNode.getRegionLocation();<a name="line.1769"></a>
 <span class="sourceLineNo">1770</span>    regionNode.transitionState(State.ABNORMALLY_CLOSED);<a name="line.1770"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
index 3743ca0..6258247 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/assignment/AssignmentManager.html
@@ -1772,7 +1772,7 @@
 <span class="sourceLineNo">1764</span><a name="line.1764"></a>
 <span class="sourceLineNo">1765</span>  // should be called under the RegionStateNode lock<a name="line.1765"></a>
 <span class="sourceLineNo">1766</span>  // for SCP<a name="line.1766"></a>
-<span class="sourceLineNo">1767</span>  void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
+<span class="sourceLineNo">1767</span>  public void regionClosedAbnormally(RegionStateNode regionNode) throws IOException {<a name="line.1767"></a>
 <span class="sourceLineNo">1768</span>    RegionState.State state = regionNode.getState();<a name="line.1768"></a>
 <span class="sourceLineNo">1769</span>    ServerName regionLocation = regionNode.getRegionLocation();<a name="line.1769"></a>
 <span class="sourceLineNo">1770</span>    regionNode.transitionState(State.ABNORMALLY_CLOSED);<a name="line.1770"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html
index e99af1e..58a89a4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.html
@@ -490,42 +490,54 @@
 <span class="sourceLineNo">482</span>          regionNode.getProcedure().serverCrashed(env, regionNode, getServerName());<a name="line.482"></a>
 <span class="sourceLineNo">483</span>          continue;<a name="line.483"></a>
 <span class="sourceLineNo">484</span>        }<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        if (env.getMasterServices().getTableStateManager().isTableState(regionNode.getTable(),<a name="line.485"></a>
-<span class="sourceLineNo">486</span>          TableState.State.DISABLING, TableState.State.DISABLED)) {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>          continue;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>        }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>        // force to assign to a new candidate server, see HBASE-23035 for more details.<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        TransitRegionStateProcedure proc =<a name="line.490"></a>
-<span class="sourceLineNo">491</span>          TransitRegionStateProcedure.assign(env, region, true, null);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        regionNode.setProcedure(proc);<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        addChildProcedure(proc);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      } finally {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>        regionNode.unlock();<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      }<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>  }<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>  @Override<a name="line.500"></a>
-<span class="sourceLineNo">501</span>  protected ProcedureMetrics getProcedureMetrics(MasterProcedureEnv env) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    return env.getMasterServices().getMasterMetrics().getServerCrashProcMetrics();<a name="line.502"></a>
-<span class="sourceLineNo">503</span>  }<a name="line.503"></a>
-<span class="sourceLineNo">504</span><a name="line.504"></a>
-<span class="sourceLineNo">505</span>  @Override<a name="line.505"></a>
-<span class="sourceLineNo">506</span>  protected boolean holdLock(MasterProcedureEnv env) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    return true;<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  public static void updateProgress(MasterProcedureEnv env, long parentId) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    if (parentId == NO_PROC_ID) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      return;<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    }<a name="line.513"></a>
-<span class="sourceLineNo">514</span>    Procedure parentProcedure =<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        env.getMasterServices().getMasterProcedureExecutor().getProcedure(parentId);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>    if (parentProcedure != null &amp;&amp; parentProcedure instanceof ServerCrashProcedure) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      ((ServerCrashProcedure) parentProcedure).updateProgress(false);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    }<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>}<a name="line.520"></a>
+<span class="sourceLineNo">485</span>        if (env.getMasterServices().getTableStateManager()<a name="line.485"></a>
+<span class="sourceLineNo">486</span>          .isTableState(regionNode.getTable(), TableState.State.DISABLING)) {<a name="line.486"></a>
+<span class="sourceLineNo">487</span>          // We need to change the state here otherwise the TRSP scheduled by DTP will try to<a name="line.487"></a>
+<span class="sourceLineNo">488</span>          // close the region from a dead server and will never succeed. Please see HBASE-23636<a name="line.488"></a>
+<span class="sourceLineNo">489</span>          // for more details.<a name="line.489"></a>
+<span class="sourceLineNo">490</span>          env.getAssignmentManager().regionClosedAbnormally(regionNode);<a name="line.490"></a>
+<span class="sourceLineNo">491</span>          LOG.info("{} found table disabling for region {}, set it state to ABNORMALLY_CLOSED.",<a name="line.491"></a>
+<span class="sourceLineNo">492</span>            this, regionNode);<a name="line.492"></a>
+<span class="sourceLineNo">493</span>          continue;<a name="line.493"></a>
+<span class="sourceLineNo">494</span>        }<a name="line.494"></a>
+<span class="sourceLineNo">495</span>        if (env.getMasterServices().getTableStateManager()<a name="line.495"></a>
+<span class="sourceLineNo">496</span>          .isTableState(regionNode.getTable(), TableState.State.DISABLED)) {<a name="line.496"></a>
+<span class="sourceLineNo">497</span>          // This should not happen, table disabled but has regions on server.<a name="line.497"></a>
+<span class="sourceLineNo">498</span>          LOG.warn("Found table disabled for region {}, procDetails: {}", regionNode, this);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>          continue;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>        }<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        // force to assign to a new candidate server, see HBASE-23035 for more details.<a name="line.501"></a>
+<span class="sourceLineNo">502</span>        TransitRegionStateProcedure proc =<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          TransitRegionStateProcedure.assign(env, region, true, null);<a name="line.503"></a>
+<span class="sourceLineNo">504</span>        regionNode.setProcedure(proc);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>        addChildProcedure(proc);<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      } finally {<a name="line.506"></a>
+<span class="sourceLineNo">507</span>        regionNode.unlock();<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      }<a name="line.508"></a>
+<span class="sourceLineNo">509</span>    }<a name="line.509"></a>
+<span class="sourceLineNo">510</span>  }<a name="line.510"></a>
+<span class="sourceLineNo">511</span><a name="line.511"></a>
+<span class="sourceLineNo">512</span>  @Override<a name="line.512"></a>
+<span class="sourceLineNo">513</span>  protected ProcedureMetrics getProcedureMetrics(MasterProcedureEnv env) {<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    return env.getMasterServices().getMasterMetrics().getServerCrashProcMetrics();<a name="line.514"></a>
+<span class="sourceLineNo">515</span>  }<a name="line.515"></a>
+<span class="sourceLineNo">516</span><a name="line.516"></a>
+<span class="sourceLineNo">517</span>  @Override<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  protected boolean holdLock(MasterProcedureEnv env) {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>    return true;<a name="line.519"></a>
+<span class="sourceLineNo">520</span>  }<a name="line.520"></a>
+<span class="sourceLineNo">521</span><a name="line.521"></a>
+<span class="sourceLineNo">522</span>  public static void updateProgress(MasterProcedureEnv env, long parentId) {<a name="line.522"></a>
+<span class="sourceLineNo">523</span>    if (parentId == NO_PROC_ID) {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>      return;<a name="line.524"></a>
+<span class="sourceLineNo">525</span>    }<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    Procedure parentProcedure =<a name="line.526"></a>
+<span class="sourceLineNo">527</span>        env.getMasterServices().getMasterProcedureExecutor().getProcedure(parentId);<a name="line.527"></a>
+<span class="sourceLineNo">528</span>    if (parentProcedure != null &amp;&amp; parentProcedure instanceof ServerCrashProcedure) {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>      ((ServerCrashProcedure) parentProcedure).updateProgress(false);<a name="line.529"></a>
+<span class="sourceLineNo">530</span>    }<a name="line.530"></a>
+<span class="sourceLineNo">531</span>  }<a name="line.531"></a>
+<span class="sourceLineNo">532</span>}<a name="line.532"></a>
 
 
 
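For readers scanning the HTML-rendered hunk above: before this push, regions of tables in either DISABLING or DISABLED state were simply skipped; the new code splits the two cases. A plain-Java rendering of the changed branch inside the assignRegions loop (a sketch assuming the surrounding ServerCrashProcedure context — env, regionNode, region, addChildProcedure — shown in the diff, not the committed file verbatim):

    if (env.getMasterServices().getTableStateManager()
        .isTableState(regionNode.getTable(), TableState.State.DISABLING)) {
      // HBASE-23636: mark the region ABNORMALLY_CLOSED so the TRSP scheduled by the
      // DisableTableProcedure stops trying to close it on the dead server.
      env.getAssignmentManager().regionClosedAbnormally(regionNode);
      LOG.info("{} found table disabling for region {}, set it state to ABNORMALLY_CLOSED.",
        this, regionNode);
      continue;
    }
    if (env.getMasterServices().getTableStateManager()
        .isTableState(regionNode.getTable(), TableState.State.DISABLED)) {
      // Unexpected: a disabled table should not still have regions open on the crashed server.
      LOG.warn("Found table disabled for region {}, procDetails: {}", regionNode, this);
      continue;
    }
    // HBASE-23035: force assignment to a new candidate server.
    TransitRegionStateProcedure proc = TransitRegionStateProcedure.assign(env, region, true, null);
    regionNode.setProcedure(proc);
    addChildProcedure(proc);

The HStore diff that follows is largely a re-numbering of the rendered source lines caused by the insertion of one new field (cacheOnWriteLogged); the surrounding declarations and constructor body are unchanged.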
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
index e089252..554aac3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -167,2686 +167,2696 @@
 <span class="sourceLineNo">159</span>  private AtomicLong storeSize = new AtomicLong();<a name="line.159"></a>
 <span class="sourceLineNo">160</span>  private AtomicLong totalUncompressedBytes = new AtomicLong();<a name="line.160"></a>
 <span class="sourceLineNo">161</span><a name="line.161"></a>
-<span class="sourceLineNo">162</span>  /**<a name="line.162"></a>
-<span class="sourceLineNo">163</span>   * RWLock for store operations.<a name="line.163"></a>
-<span class="sourceLineNo">164</span>   * Locked in shared mode when the list of component stores is looked at:<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   *   - all reads/writes to table data<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   *   - checking for split<a name="line.166"></a>
-<span class="sourceLineNo">167</span>   * Locked in exclusive mode when the list of component stores is modified:<a name="line.167"></a>
-<span class="sourceLineNo">168</span>   *   - closing<a name="line.168"></a>
-<span class="sourceLineNo">169</span>   *   - completing a compaction<a name="line.169"></a>
-<span class="sourceLineNo">170</span>   */<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  /**<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * Lock specific to archiving compacted store files.  This avoids races around<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * the combination of retrieving the list of compacted files and moving them to<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * the archive directory.  Since this is usually a background process (other than<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * on close), we don't want to handle this with the store write lock, which would<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * block readers and degrade performance.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   *<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   * Locked by:<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   *   - CompactedHFilesDispatchHandler via closeAndArchiveCompactedFiles()<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   *   - close()<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  final ReentrantLock archiveLock = new ReentrantLock();<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private final boolean verifyBulkLoads;<a name="line.185"></a>
+<span class="sourceLineNo">162</span>  private boolean cacheOnWriteLogged;<a name="line.162"></a>
+<span class="sourceLineNo">163</span><a name="line.163"></a>
+<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
+<span class="sourceLineNo">165</span>   * RWLock for store operations.<a name="line.165"></a>
+<span class="sourceLineNo">166</span>   * Locked in shared mode when the list of component stores is looked at:<a name="line.166"></a>
+<span class="sourceLineNo">167</span>   *   - all reads/writes to table data<a name="line.167"></a>
+<span class="sourceLineNo">168</span>   *   - checking for split<a name="line.168"></a>
+<span class="sourceLineNo">169</span>   * Locked in exclusive mode when the list of component stores is modified:<a name="line.169"></a>
+<span class="sourceLineNo">170</span>   *   - closing<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   *   - completing a compaction<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.173"></a>
+<span class="sourceLineNo">174</span>  /**<a name="line.174"></a>
+<span class="sourceLineNo">175</span>   * Lock specific to archiving compacted store files.  This avoids races around<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * the combination of retrieving the list of compacted files and moving them to<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * the archive directory.  Since this is usually a background process (other than<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * on close), we don't want to handle this with the store write lock, which would<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * block readers and degrade performance.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   *<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   * Locked by:<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   *   - CompactedHFilesDispatchHandler via closeAndArchiveCompactedFiles()<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   *   - close()<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   */<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  final ReentrantLock archiveLock = new ReentrantLock();<a name="line.185"></a>
 <span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Use this counter to track concurrent puts. If TRACE-log is enabled, if we are over the<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * threshold set by hbase.region.store.parallel.put.print.threshold (Default is 50) we will<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * log a message that identifies the Store experience this high-level of concurrency.<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   */<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  private final AtomicInteger currentParallelPutCount = new AtomicInteger(0);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  private final int parallelPutCountPrintThreshold;<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  private ScanInfo scanInfo;<a name="line.195"></a>
+<span class="sourceLineNo">187</span>  private final boolean verifyBulkLoads;<a name="line.187"></a>
+<span class="sourceLineNo">188</span><a name="line.188"></a>
+<span class="sourceLineNo">189</span>  /**<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * Use this counter to track concurrent puts. If TRACE-log is enabled, if we are over the<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * threshold set by hbase.region.store.parallel.put.print.threshold (Default is 50) we will<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * log a message that identifies the Store experience this high-level of concurrency.<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  private final AtomicInteger currentParallelPutCount = new AtomicInteger(0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private final int parallelPutCountPrintThreshold;<a name="line.195"></a>
 <span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  // All access must be synchronized.<a name="line.197"></a>
-<span class="sourceLineNo">198</span>  // TODO: ideally, this should be part of storeFileManager, as we keep passing this to it.<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  private final List&lt;HStoreFile&gt; filesCompacting = Lists.newArrayList();<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  // All access must be synchronized.<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  private final Set&lt;ChangedReadersObserver&gt; changedReaderObservers =<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    Collections.newSetFromMap(new ConcurrentHashMap&lt;ChangedReadersObserver, Boolean&gt;());<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>  protected final int blocksize;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  private HFileDataBlockEncoder dataBlockEncoder;<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  /** Checksum configuration */<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  protected ChecksumType checksumType;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  protected int bytesPerChecksum;<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>  // Comparing KeyValues<a name="line.212"></a>
-<span class="sourceLineNo">213</span>  protected final CellComparator comparator;<a name="line.213"></a>
-<span class="sourceLineNo">214</span><a name="line.214"></a>
-<span class="sourceLineNo">215</span>  final StoreEngine&lt;?, ?, ?, ?&gt; storeEngine;<a name="line.215"></a>
+<span class="sourceLineNo">197</span>  private ScanInfo scanInfo;<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  // All access must be synchronized.<a name="line.199"></a>
+<span class="sourceLineNo">200</span>  // TODO: ideally, this should be part of storeFileManager, as we keep passing this to it.<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  private final List&lt;HStoreFile&gt; filesCompacting = Lists.newArrayList();<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  // All access must be synchronized.<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  private final Set&lt;ChangedReadersObserver&gt; changedReaderObservers =<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    Collections.newSetFromMap(new ConcurrentHashMap&lt;ChangedReadersObserver, Boolean&gt;());<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  protected final int blocksize;<a name="line.207"></a>
+<span class="sourceLineNo">208</span>  private HFileDataBlockEncoder dataBlockEncoder;<a name="line.208"></a>
+<span class="sourceLineNo">209</span><a name="line.209"></a>
+<span class="sourceLineNo">210</span>  /** Checksum configuration */<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  protected ChecksumType checksumType;<a name="line.211"></a>
+<span class="sourceLineNo">212</span>  protected int bytesPerChecksum;<a name="line.212"></a>
+<span class="sourceLineNo">213</span><a name="line.213"></a>
+<span class="sourceLineNo">214</span>  // Comparing KeyValues<a name="line.214"></a>
+<span class="sourceLineNo">215</span>  protected final CellComparator comparator;<a name="line.215"></a>
 <span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  private static final AtomicBoolean offPeakCompactionTracker = new AtomicBoolean();<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  private volatile OffPeakHours offPeakHours;<a name="line.218"></a>
-<span class="sourceLineNo">219</span><a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static final int DEFAULT_FLUSH_RETRIES_NUMBER = 10;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private int flushRetriesNumber;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private int pauseTime;<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private long blockingFileCount;<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  private int compactionCheckMultiplier;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>  protected Encryption.Context cryptoContext = Encryption.Context.NONE;<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>  private AtomicLong flushedCellsCount = new AtomicLong();<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  private AtomicLong compactedCellsCount = new AtomicLong();<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private AtomicLong majorCompactedCellsCount = new AtomicLong();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private AtomicLong flushedCellsSize = new AtomicLong();<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private AtomicLong flushedOutputFileSize = new AtomicLong();<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  private AtomicLong compactedCellsSize = new AtomicLong();<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  private AtomicLong majorCompactedCellsSize = new AtomicLong();<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>   * Constructor<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * @param region<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   * @param family HColumnDescriptor for this column<a name="line.239"></a>
-<span class="sourceLineNo">240</span>   * @param confParam configuration object<a name="line.240"></a>
-<span class="sourceLineNo">241</span>   * failed.  Can be null.<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * @throws IOException<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   */<a name="line.243"></a>
-<span class="sourceLineNo">244</span>  protected HStore(final HRegion region, final ColumnFamilyDescriptor family,<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      final Configuration confParam, boolean warmup) throws IOException {<a name="line.245"></a>
-<span class="sourceLineNo">246</span><a name="line.246"></a>
-<span class="sourceLineNo">247</span>    this.fs = region.getRegionFileSystem();<a name="line.247"></a>
+<span class="sourceLineNo">217</span>  final StoreEngine&lt;?, ?, ?, ?&gt; storeEngine;<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static final AtomicBoolean offPeakCompactionTracker = new AtomicBoolean();<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private volatile OffPeakHours offPeakHours;<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final int DEFAULT_FLUSH_RETRIES_NUMBER = 10;<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  private int flushRetriesNumber;<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  private int pauseTime;<a name="line.224"></a>
+<span class="sourceLineNo">225</span><a name="line.225"></a>
+<span class="sourceLineNo">226</span>  private long blockingFileCount;<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  private int compactionCheckMultiplier;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  protected Encryption.Context cryptoContext = Encryption.Context.NONE;<a name="line.228"></a>
+<span class="sourceLineNo">229</span><a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private AtomicLong flushedCellsCount = new AtomicLong();<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private AtomicLong compactedCellsCount = new AtomicLong();<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private AtomicLong majorCompactedCellsCount = new AtomicLong();<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  private AtomicLong flushedCellsSize = new AtomicLong();<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  private AtomicLong flushedOutputFileSize = new AtomicLong();<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  private AtomicLong compactedCellsSize = new AtomicLong();<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private AtomicLong majorCompactedCellsSize = new AtomicLong();<a name="line.236"></a>
+<span class="sourceLineNo">237</span><a name="line.237"></a>
+<span class="sourceLineNo">238</span>  /**<a name="line.238"></a>
+<span class="sourceLineNo">239</span>   * Constructor<a name="line.239"></a>
+<span class="sourceLineNo">240</span>   * @param region<a name="line.240"></a>
+<span class="sourceLineNo">241</span>   * @param family HColumnDescriptor for this column<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * @param confParam configuration object<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   * failed.  Can be null.<a name="line.243"></a>
+<span class="sourceLineNo">244</span>   * @throws IOException<a name="line.244"></a>
+<span class="sourceLineNo">245</span>   */<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  protected HStore(final HRegion region, final ColumnFamilyDescriptor family,<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      final Configuration confParam, boolean warmup) throws IOException {<a name="line.247"></a>
 <span class="sourceLineNo">248</span><a name="line.248"></a>
-<span class="sourceLineNo">249</span>    // Assemble the store's home directory and Ensure it exists.<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    fs.createStoreDir(family.getNameAsString());<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    this.region = region;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    this.family = family;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    // 'conf' renamed to 'confParam' b/c we use this.conf in the constructor<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    // CompoundConfiguration will look for keys in reverse order of addition, so we'd<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    // add global config first, then table and cf overrides, then cf metadata.<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    this.conf = new CompoundConfiguration()<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      .add(confParam)<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      .addBytesMap(region.getTableDescriptor().getValues())<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      .addStringMap(family.getConfiguration())<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      .addBytesMap(family.getValues());<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    this.blocksize = family.getBlocksize();<a name="line.261"></a>
-<span class="sourceLineNo">262</span><a name="line.262"></a>
-<span class="sourceLineNo">263</span>    // set block storage policy for store directory<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    String policyName = family.getStoragePolicy();<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    if (null == policyName) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      policyName = this.conf.get(BLOCK_STORAGE_POLICY_KEY, DEFAULT_BLOCK_STORAGE_POLICY);<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    }<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    this.fs.setStoragePolicy(family.getNameAsString(), policyName.trim());<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    this.dataBlockEncoder = new HFileDataBlockEncoderImpl(family.getDataBlockEncoding());<a name="line.270"></a>
+<span class="sourceLineNo">249</span>    this.fs = region.getRegionFileSystem();<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>    // Assemble the store's home directory and Ensure it exists.<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    fs.createStoreDir(family.getNameAsString());<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    this.region = region;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    this.family = family;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    // 'conf' renamed to 'confParam' b/c we use this.conf in the constructor<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    // CompoundConfiguration will look for keys in reverse order of addition, so we'd<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    // add global config first, then table and cf overrides, then cf metadata.<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    this.conf = new CompoundConfiguration()<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      .add(confParam)<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      .addBytesMap(region.getTableDescriptor().getValues())<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      .addStringMap(family.getConfiguration())<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      .addBytesMap(family.getValues());<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    this.blocksize = family.getBlocksize();<a name="line.263"></a>
+<span class="sourceLineNo">264</span><a name="line.264"></a>
+<span class="sourceLineNo">265</span>    // set block storage policy for store directory<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    String policyName = family.getStoragePolicy();<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    if (null == policyName) {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      policyName = this.conf.get(BLOCK_STORAGE_POLICY_KEY, DEFAULT_BLOCK_STORAGE_POLICY);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    this.fs.setStoragePolicy(family.getNameAsString(), policyName.trim());<a name="line.270"></a>
 <span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>    this.comparator = region.getCellComparator();<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    // used by ScanQueryMatcher<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    long timeToPurgeDeletes =<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    LOG.trace("Time to purge deletes set to {}ms in store {}", timeToPurgeDeletes, this);<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    // Get TTL<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    long ttl = determineTTLFromFamily(family);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    // Why not just pass a HColumnDescriptor in here altogether?  Even if have<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    // to clone it?<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    scanInfo = new ScanInfo(conf, family, ttl, timeToPurgeDeletes, this.comparator);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    this.memstore = getMemstore();<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.offPeakHours = OffPeakHours.getInstance(conf);<a name="line.284"></a>
+<span class="sourceLineNo">272</span>    this.dataBlockEncoder = new HFileDataBlockEncoderImpl(family.getDataBlockEncoding());<a name="line.272"></a>
+<span class="sourceLineNo">273</span><a name="line.273"></a>
+<span class="sourceLineNo">274</span>    this.comparator = region.getCellComparator();<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    // used by ScanQueryMatcher<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    long timeToPurgeDeletes =<a name="line.276"></a>
+<span class="sourceLineNo">277</span>        Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0);<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    LOG.trace("Time to purge deletes set to {}ms in store {}", timeToPurgeDeletes, this);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    // Get TTL<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    long ttl = determineTTLFromFamily(family);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    // Why not just pass a HColumnDescriptor in here altogether?  Even if have<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    // to clone it?<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    scanInfo = new ScanInfo(conf, family, ttl, timeToPurgeDeletes, this.comparator);<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.memstore = getMemstore();<a name="line.284"></a>
 <span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>    // Setting up cache configuration for this family<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    createCacheConf(family);<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>    this.verifyBulkLoads = conf.getBoolean("hbase.hstore.bulkload.verify", false);<a name="line.289"></a>
+<span class="sourceLineNo">286</span>    this.offPeakHours = OffPeakHours.getInstance(conf);<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>    // Setting up cache configuration for this family<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    createCacheConf(family);<a name="line.289"></a>
 <span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>    this.blockingFileCount =<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        conf.getInt(BLOCKING_STOREFILES_KEY, DEFAULT_BLOCKING_STOREFILE_COUNT);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    this.compactionCheckMultiplier = conf.getInt(<a name="line.293"></a>
-<span class="sourceLineNo">294</span>        COMPACTCHECKER_INTERVAL_MULTIPLIER_KEY, DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    if (this.compactionCheckMultiplier &lt;= 0) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>      LOG.error("Compaction check period multiplier must be positive, setting default: {}",<a name="line.296"></a>
-<span class="sourceLineNo">297</span>          DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER);<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      this.compactionCheckMultiplier = DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    }<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (HStore.closeCheckInterval == 0) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      HStore.closeCheckInterval = conf.getInt(<a name="line.302"></a>
-<span class="sourceLineNo">303</span>          "hbase.hstore.close.check.interval", 10*1000*1000 /* 10 MB */);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    }<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span>    this.storeEngine = createStoreEngine(this, this.conf, this.comparator);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    List&lt;HStoreFile&gt; hStoreFiles = loadStoreFiles(warmup);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    // Move the storeSize calculation out of loadStoreFiles() method, because the secondary read<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    // replica's refreshStoreFiles() will also use loadStoreFiles() to refresh its store files and<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    // update the storeSize in the completeCompaction(..) finally (just like compaction) , so<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    // no need calculate the storeSize twice.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    this.storeSize.addAndGet(getStorefilesSize(hStoreFiles, sf -&gt; true));<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    this.totalUncompressedBytes.addAndGet(getTotalUncompressedBytes(hStoreFiles));<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    this.storeEngine.getStoreFileManager().loadFiles(hStoreFiles);<a name="line.314"></a>
-<span class="sourceLineNo">315</span><a name="line.315"></a>
-<span class="sourceLineNo">316</span>    // Initialize checksum type from name. The names are CRC32, CRC32C, etc.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    this.checksumType = getChecksumType(conf);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    // Initialize bytes per checksum<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    this.bytesPerChecksum = getBytesPerChecksum(conf);<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    flushRetriesNumber = conf.getInt(<a name="line.320"></a>
-<span class="sourceLineNo">321</span>        "hbase.hstore.flush.retries.number", DEFAULT_FLUSH_RETRIES_NUMBER);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    pauseTime = conf.getInt(HConstants.HBASE_SERVER_PAUSE, HConstants.DEFAULT_HBASE_SERVER_PAUSE);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    if (flushRetriesNumber &lt;= 0) {<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      throw new IllegalArgumentException(<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          "hbase.hstore.flush.retries.number must be &gt; 0, not "<a name="line.325"></a>
-<span class="sourceLineNo">326</span>              + flushRetriesNumber);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    cryptoContext = EncryptionUtil.createEncryptionContext(conf, family);<a name="line.328"></a>
-<span class="sourceLineNo">329</span><a name="line.329"></a>
-<span class="sourceLineNo">330</span>    int confPrintThreshold =<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        this.conf.getInt("hbase.region.store.parallel.put.print.threshold", 50);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    if (confPrintThreshold &lt; 10) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      confPrintThreshold = 10;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    this.parallelPutCountPrintThreshold = confPrintThreshold;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    LOG.info("Store={},  memstore type={}, storagePolicy={}, verifyBulkLoads={}, "<a name="line.336"></a>
-<span class="sourceLineNo">337</span>            + "parallelPutCountPrintThreshold={}, encoding={}, compression={}",<a name="line.337"></a>
-<span class="sourceLineNo">338</span>        getColumnFamilyName(), memstore.getClass().getSimpleName(), policyName, verifyBulkLoads,<a name="line.338"></a>
-<span class="sourceLineNo">339</span>        parallelPutCountPrintThreshold, family.getDataBlockEncoding(),<a name="line.339"></a>
-<span class="sourceLineNo">340</span>        family.getCompressionType());<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>  /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   * @return MemStore Instance to use in this store.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>   */<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private MemStore getMemstore() {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    MemStore ms = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    // Check if in-memory-compaction configured. Note MemoryCompactionPolicy is an enum!<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    MemoryCompactionPolicy inMemoryCompaction = null;<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    if (this.getTableName().isSystemTable()) {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>      inMemoryCompaction = MemoryCompactionPolicy.valueOf(<a name="line.351"></a>
-<span class="sourceLineNo">352</span>          conf.get("hbase.systemtables.compacting.memstore.type", "NONE"));<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } else {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      inMemoryCompaction = family.getInMemoryCompaction();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    }<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    if (inMemoryCompaction == null) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      inMemoryCompaction =<a name="line.357"></a>
-<span class="sourceLineNo">358</span>          MemoryCompactionPolicy.valueOf(conf.get(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY,<a name="line.358"></a>
-<span class="sourceLineNo">359</span>              CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT).toUpperCase());<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    }<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    switch (inMemoryCompaction) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      case NONE:<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        ms = ReflectionUtils.newInstance(DefaultMemStore.class,<a name="line.363"></a>
-<span class="sourceLineNo">364</span>            new Object[] { conf, this.comparator,<a name="line.364"></a>
-<span class="sourceLineNo">365</span>                this.getHRegion().getRegionServicesForStores()});<a name="line.365"></a>
-<span class="sourceLineNo">366</span>        break;<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      default:<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        Class&lt;? extends CompactingMemStore&gt; clz = conf.getClass(MEMSTORE_CLASS_NAME,<a name="line.368"></a>
-<span class="sourceLineNo">369</span>            CompactingMemStore.class, CompactingMemStore.class);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        ms = ReflectionUtils.newInstance(clz, new Object[]{conf, this.comparator, this,<a name="line.370"></a>
-<span class="sourceLineNo">371</span>            this.getHRegion().getRegionServicesForStores(), inMemoryCompaction});<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    }<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    return ms;<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  }<a name="line.374"></a>
-<span class="sourceLineNo">375</span><a name="line.375"></a>
-<span class="sourceLineNo">376</span>  /**<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * Creates the cache config.<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * @param family The current column family.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   */<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  protected void createCacheConf(final ColumnFamilyDescriptor family) {<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    this.cacheConf = new CacheConfig(conf, family, region.getBlockCache(),<a name="line.381"></a>
-<span class="sourceLineNo">382</span>        region.getRegionServicesForStores().getByteBuffAllocator());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
-<span class="sourceLineNo">384</span><a name="line.384"></a>
-<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * Creates the store engine configured for the given Store.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @param store The store. An unfortunate dependency needed due to it<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   *              being passed to coprocessors via the compactor.<a name="line.388"></a>
-<span class="sourceLineNo">389</span>   * @param conf Store configuration.<a name="line.389"></a>
-<span class="sourceLineNo">390</span>   * @param kvComparator KVComparator for storeFileManager.<a name="line.390"></a>
-<span class="sourceLineNo">391</span>   * @return StoreEngine to use.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>   */<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  protected StoreEngine&lt;?, ?, ?, ?&gt; createStoreEngine(HStore store, Configuration conf,<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      CellComparator kvComparator) throws IOException {<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    return StoreEngine.create(store, conf, comparator);<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @param family<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @return TTL in seconds of the specified family<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   */<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  public static long determineTTLFromFamily(final ColumnFamilyDescriptor family) {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    // HCD.getTimeToLive returns ttl in seconds.  Convert to milliseconds.<a name="line.403"></a>
-<span class="sourceLineNo">404</span>    long ttl = family.getTimeToLive();<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    if (ttl == HConstants.FOREVER) {<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      // Default is unlimited ttl.<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      ttl = Long.MAX_VALUE;<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    } else if (ttl == -1) {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      ttl = Long.MAX_VALUE;<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    } else {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      // Second -&gt; ms adjust for user data<a name="line.411"></a>
-<span class="sourceLineNo">412</span>      ttl *= 1000;<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    }<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    return ttl;<a name="line.414"></a>
-<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
-<span class="sourceLineNo">416</span><a name="line.416"></a>
-<span class="sourceLineNo">417</span>  @Override<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  public String getColumnFamilyName() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    return this.family.getNameAsString();<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>  @Override<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  public TableName getTableName() {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    return this.getRegionInfo().getTable();<a name="line.424"></a>
-<span class="sourceLineNo">425</span>  }<a name="line.425"></a>
-<span class="sourceLineNo">426</span><a name="line.426"></a>
-<span class="sourceLineNo">427</span>  @Override<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  public FileSystem getFileSystem() {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    return this.fs.getFileSystem();<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  }<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  public HRegionFileSystem getRegionFileSystem() {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    return this.fs;<a name="line.433"></a>
-<span class="sourceLineNo">434</span>  }<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>  /* Implementation of StoreConfigInformation */<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>  public long getStoreFileTtl() {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    // TTL only applies if there's no MIN_VERSIONs setting on the column.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    return (this.scanInfo.getMinVersions() == 0) ? this.scanInfo.getTtl() : Long.MAX_VALUE;<a name="line.440"></a>
-<span class="sourceLineNo">441</span>  }<a name="line.441"></a>
-<span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>  @Override<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  public long getMemStoreFlushSize() {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    // TODO: Why is this in here?  The flushsize of the region rather than the store?  St.Ack<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    return this.region.memstoreFlushSize;<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  }<a name="line.447"></a>
-<span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>  @Override<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  public MemStoreSize getFlushableSize() {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>    return this.memstore.getFlushableSize();<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  }<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  @Override<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  public MemStoreSize getSnapshotSize() {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    return this.memstore.getSnapshotSize();<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  @Override<a name="line.459"></a>
-<span class="sourceLineNo">460</span>  public long getCompactionCheckMultiplier() {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    return this.compactionCheckMultiplier;<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  }<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>  @Override<a name="line.464"></a>
-<span class="sourceLineNo">465</span>  public long getBlockingFileCount() {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    return blockingFileCount;<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  }<a name="line.467"></a>
-<span class="sourceLineNo">468</span>  /* End implementation of StoreConfigInformation */<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>  /**<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   * Returns the configured bytesPerChecksum value.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>   * @param conf The configuration<a name="line.472"></a>
-<span class="sourceLineNo">473</span>   * @return The bytesPerChecksum that is set in the configuration<a name="line.473"></a>
-<span class="sourceLineNo">474</span>   */<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  public static int getBytesPerChecksum(Configuration conf) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    return conf.getInt(HConstants.BYTES_PER_CHECKSUM,<a name="line.476"></a>
-<span class="sourceLineNo">477</span>                       HFile.DEFAULT_BYTES_PER_CHECKSUM);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>  }<a name="line.478"></a>
-<span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>  /**<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * Returns the configured checksum algorithm.<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param conf The configuration<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @return The checksum algorithm that is set in the configuration<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static ChecksumType getChecksumType(Configuration conf) {<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    String checksumName = conf.get(HConstants.CHECKSUM_TYPE_NAME);<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    if (checksumName == null) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      return ChecksumType.getDefaultChecksumType();<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    } else {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>      return ChecksumType.nameToType(checksumName);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    }<a name="line.491"></a>
-<span class="sourceLineNo">492</span>  }<a name="line.492"></a>
-<span class="sourceLineNo">493</span><a name="line.493"></a>
-<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * @return how many bytes to write between status checks<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   */<a name="line.496"></a>
-<span class="sourceLineNo">497</span>  public static int getCloseCheckInterval() {<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    return closeCheckInterval;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>  }<a name="line.499"></a>
-<span class="sourceLineNo">500</span><a name="line.500"></a>
-<span class="sourceLineNo">501</span>  @Override<a name="line.501"></a>
-<span class="sourceLineNo">502</span>  public ColumnFamilyDescriptor getColumnFamilyDescriptor() {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    return this.family;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>  }<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  @Override<a name="line.506"></a>
-<span class="sourceLineNo">507</span>  public OptionalLong getMaxSequenceId() {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    return StoreUtils.getMaxSequenceIdInList(this.getStorefiles());<a name="line.508"></a>
-<span class="sourceLineNo">509</span>  }<a name="line.509"></a>
-<span class="sourceLineNo">510</span><a name="line.510"></a>
-<span class="sourceLineNo">511</span>  @Override<a name="line.511"></a>
-<span class="sourceLineNo">512</span>  public OptionalLong getMaxMemStoreTS() {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    return StoreUtils.getMaxMemStoreTSInList(this.getStorefiles());<a name="line.513"></a>
-<span class="sourceLineNo">514</span>  }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>  /**<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param tabledir {@link Path} to where the table is being stored<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param hri {@link RegionInfo} for the region.<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param family {@link ColumnFamilyDescriptor} describing the column family<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @return Path to family/Store home directory.<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   */<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  @Deprecated<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  public static Path getStoreHomedir(final Path tabledir,<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      final RegionInfo hri, final byte[] family) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    return getStoreHomedir(tabledir, hri.getEncodedName(), family);<a name="line.525"></a>
-<span class="sourceLineNo">526</span>  }<a name="line.526"></a>
-<span class="sourceLineNo">527</span><a name="line.527"></a>
-<span class="sourceLineNo">528</span>  /**<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * @param tabledir {@link Path} to where the table is being stored<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   * @param encodedName Encoded region name.<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   * @param family {@link ColumnFamilyDescriptor} describing the column family<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   * @return Path to family/Store home directory.<a name="line.532"></a>
-<span class="sourceLineNo">533</span>   */<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  @Deprecated<a name="line.534"></a>
-<span class="sourceLineNo">535</span>  public static Path getStoreHomedir(final Path tabledir,<a name="line.535"></a>
-<span class="sourceLineNo">536</span>      final String encodedName, final byte[] family) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    return new Path(tabledir, new Path(encodedName, Bytes.toString(family)));<a name="line.537"></a>
-<span class="sourceLineNo">538</span>  }<a name="line.538"></a>
-<span class="sourceLineNo">539</span><a name="line.539"></a>
-<span class="sourceLineNo">540</span>  /**<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * @return the data block encoder<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   */<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  public HFileDataBlockEncoder getDataBlockEncoder() {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    return dataBlockEncoder;<a name="line.544"></a>
-<span class="sourceLineNo">545</span>  }<a name="line.545"></a>
-<span class="sourceLineNo">546</span><a name="line.546"></a>
-<span class="sourceLineNo">547</span>  /**<a name="line.547"></a>
-<span class="sourceLineNo">548</span>   * Should be used only in tests.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * @param blockEncoder the block delta encoder to use<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   */<a name="line.550"></a>
-<span class="sourceLineNo">551</span>  void setDataBlockEncoderInTest(HFileDataBlockEncoder blockEncoder) {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    this.dataBlockEncoder = blockEncoder;<a name="line.552"></a>
-<span class="sourceLineNo">553</span>  }<a name="line.553"></a>
-<span class="sourceLineNo">554</span><a name="line.554"></a>
-<span class="sourceLineNo">555</span>  /**<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * Creates an unsorted list of StoreFile loaded in parallel<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * from the given directory.<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   * @throws IOException<a name="line.558"></a>
-<span class="sourceLineNo">559</span>   */<a name="line.559"></a>
-<span class="sourceLineNo">560</span>  private List&lt;HStoreFile&gt; loadStoreFiles(boolean warmup) throws IOException {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    Collection&lt;StoreFileInfo&gt; files = fs.getStoreFiles(getColumnFamilyName());<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    return openStoreFiles(files, warmup);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
-<span class="sourceLineNo">564</span><a name="line.564"></a>
-<span class="sourceLineNo">565</span>  private List&lt;HStoreFile&gt; openStoreFiles(Collection&lt;StoreFileInfo&gt; files, boolean warmup)<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      throws IOException {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    if (CollectionUtils.isEmpty(files)) {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      return Collections.emptyList();<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    }<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // initialize the thread pool for opening store files in parallel..<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    ThreadPoolExecutor storeFileOpenerThreadPool =<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      this.region.getStoreFileOpenAndCloseThreadPool("StoreFileOpenerThread-"<a name="line.572"></a>
-<span class="sourceLineNo">573</span>        + this.region.getRegionInfo().getEncodedName() + "-" + this.getColumnFamilyName());<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    CompletionService&lt;HStoreFile&gt; completionService = new ExecutorCompletionService&lt;&gt;(storeFileOpenerThreadPool);<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>    int totalValidStoreFile = 0;<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    for (StoreFileInfo storeFileInfo : files) {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      // open each store file in parallel<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      completionService.submit(() -&gt; this.createStoreFileAndReader(storeFileInfo));<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      totalValidStoreFile++;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    }<a name="line.581"></a>
-<span class="sourceLineNo">582</span><a name="line.582"></a>
-<span class="sourceLineNo">583</span>    Set&lt;String&gt; compactedStoreFiles = new HashSet&lt;&gt;();<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    ArrayList&lt;HStoreFile&gt; results = new ArrayList&lt;&gt;(files.size());<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    IOException ioe = null;<a name="line.585"></a>
-<span class="sourceLineNo">586</span>    try {<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      for (int i = 0; i &lt; totalValidStoreFile; i++) {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        try {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>          HStoreFile storeFile = completionService.take().get();<a name="line.589"></a>
-<span class="sourceLineNo">590</span>          if (storeFile != null) {<a name="line.590"></a>
-<span class="sourceLineNo">591</span>            LOG.debug("loaded {}", storeFile);<a name="line.591"></a>
-<span class="sourceLineNo">592</span>            results.add(storeFile);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>            compactedStoreFiles.addAll(storeFile.getCompactedStoreFiles());<a name="line.593"></a>
-<span class="sourceLineNo">594</span>          }<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        } catch (InterruptedException e) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>          if (ioe == null) ioe = new InterruptedIOException(e.getMessage());<a name="line.596"></a>
-<span class="sourceLineNo">597</span>        } catch (ExecutionException e) {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          if (ioe == null) ioe = new IOException(e.getCause());<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        }<a name="line.599"></a>
-<span class="sourceLineNo">600</span>      }<a name="line.600"></a>
-<span class="sourceLineNo">601</span>    } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      storeFileOpenerThreadPool.shutdownNow();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    if (ioe != null) {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      // close StoreFile readers<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      boolean evictOnClose =<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          cacheConf != null? cacheConf.shouldEvictOnClose(): true;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      for (HStoreFile file : results) {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        try {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          if (file != null) {<a name="line.610"></a>
-<span class="sourceLineNo">611</span>            file.closeStoreFile(evictOnClose);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>          }<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        } catch (IOException e) {<a name="line.613"></a>
-<span class="sourceLineNo">614</span>          LOG.warn("Could not close store file", e);<a name="line.614"></a>
-<span class="sourceLineNo">615</span>        }<a name="line.615"></a>
-<span class="sourceLineNo">616</span>      }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      throw ioe;<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    }<a name="line.618"></a>
-<span class="sourceLineNo">619</span><a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Should not archive the compacted store files when region warmup. See HBASE-22163.<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    if (!warmup) {<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      // Remove the compacted files from result<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      List&lt;HStoreFile&gt; filesToRemove = new ArrayList&lt;&gt;(compactedStoreFiles.size());<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      for (HStoreFile storeFile : results) {<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        if (compactedStoreFiles.contains(storeFile.getPath().getName())) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>          LOG.warn("Clearing the compacted storefile {} from this store", storeFile);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>          storeFile.getReader().close(true);<a name="line.627"></a>
-<span class="sourceLineNo">628</span>          filesToRemove.add(storeFile);<a name="line.628"></a>
-<span class="sourceLineNo">629</span>        }<a name="line.629"></a>
-<span class="sourceLineNo">630</span>      }<a name="line.630"></a>
-<span class="sourceLineNo">631</span>      results.removeAll(filesToRemove);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>      if (!filesToRemove.isEmpty() &amp;&amp; this.isPrimaryReplicaStore()) {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>        LOG.debug("Moving the files {} to archive", filesToRemove);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>        this.fs.removeStoreFiles(this.getColumnFamilyDescriptor().getNameAsString(), filesToRemove);<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      }<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    }<a name="line.636"></a>
-<span class="sourceLineNo">637</span><a name="line.637"></a>
-<span class="sourceLineNo">638</span>    return results;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>  }<a name="line.639"></a>
+<span class="sourceLineNo">291</span>    this.verifyBulkLoads = conf.getBoolean("hbase.hstore.bulkload.verify", false);<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>    this.blockingFileCount =<a name="line.293"></a>
+<span class="sourceLineNo">294</span>        conf.getInt(BLOCKING_STOREFILES_KEY, DEFAULT_BLOCKING_STOREFILE_COUNT);<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    this.compactionCheckMultiplier = conf.getInt(<a name="line.295"></a>
+<span class="sourceLineNo">296</span>        COMPACTCHECKER_INTERVAL_MULTIPLIER_KEY, DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    if (this.compactionCheckMultiplier &lt;= 0) {<a name="line.297"></a>
+<span class="sourceLineNo">298</span>      LOG.error("Compaction check period multiplier must be positive, setting default: {}",<a name="line.298"></a>
+<span class="sourceLineNo">299</span>          DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      this.compactionCheckMultiplier = DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
+<span class="sourceLineNo">302</span><a name="line.302"></a>
+<span class="sourceLineNo">303</span>    if (HStore.closeCheckInterval == 0) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>      HStore.closeCheckInterval = conf.getInt(<a name="line.304"></a>
+<span class="sourceLineNo">305</span>          "hbase.hstore.close.check.interval", 10*1000*1000 /* 10 MB */);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>    this.storeEngine = createStoreEngine(this, this.conf, this.comparator);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    List&lt;HStoreFile&gt; hStoreFiles = loadStoreFiles(warmup);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    // Move the storeSize calculation out of loadStoreFiles() method, because the secondary read<a name="line.310"></a>
+<span class="sourceLineNo">311</span>    // replica's refreshStoreFiles() will also use loadStoreFiles() to refresh its store files and<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // update the storeSize in the completeCompaction(..) finally (just like compaction) , so<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    // no need calculate the storeSize twice.<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    this.storeSize.addAndGet(getStorefilesSize(hStoreFiles, sf -&gt; true));<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    this.totalUncompressedBytes.addAndGet(getTotalUncompressedBytes(hStoreFiles));<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    this.storeEngine.getStoreFileManager().loadFiles(hStoreFiles);<a name="line.316"></a>
+<span class="sourceLineNo">317</span><a name="line.317"></a>
+<span class="sourceLineNo">318</span>    // Initialize checksum type from name. The names are CRC32, CRC32C, etc.<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    this.checksumType = getChecksumType(conf);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    // Initialize bytes per checksum<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    this.bytesPerChecksum = getBytesPerChecksum(conf);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    flushRetriesNumber = conf.getInt(<a name="line.322"></a>
+<span class="sourceLineNo">323</span>        "hbase.hstore.flush.retries.number", DEFAULT_FLUSH_RETRIES_NUMBER);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    pauseTime = conf.getInt(HConstants.HBASE_SERVER_PAUSE, HConstants.DEFAULT_HBASE_SERVER_PAUSE);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    if (flushRetriesNumber &lt;= 0) {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      throw new IllegalArgumentException(<a name="line.326"></a>
+<span class="sourceLineNo">327</span>          "hbase.hstore.flush.retries.number must be &gt; 0, not "<a name="line.327"></a>
+<span class="sourceLineNo">328</span>              + flushRetriesNumber);<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    }<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    cryptoContext = EncryptionUtil.createEncryptionContext(conf, family);<a name="line.330"></a>
+<span class="sourceLineNo">331</span><a name="line.331"></a>
+<span class="sourceLineNo">332</span>    int confPrintThreshold =<a name="line.332"></a>
+<span class="sourceLineNo">333</span>        this.conf.getInt("hbase.region.store.parallel.put.print.threshold", 50);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    if (confPrintThreshold &lt; 10) {<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      confPrintThreshold = 10;<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    }<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    this.parallelPutCountPrintThreshold = confPrintThreshold;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    LOG.info("Store={},  memstore type={}, storagePolicy={}, verifyBulkLoads={}, "<a name="line.338"></a>
+<span class="sourceLineNo">339</span>            + "parallelPutCountPrintThreshold={}, encoding={}, compression={}",<a name="line.339"></a>
+<span class="sourceLineNo">340</span>        getColumnFamilyName(), memstore.getClass().getSimpleName(), policyName, verifyBulkLoads,<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        parallelPutCountPrintThreshold, family.getDataBlockEncoding(),<a name="line.341"></a>
+<span class="sourceLineNo">342</span>        family.getCompressionType());<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    cacheOnWriteLogged = false;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
+<span class="sourceLineNo">345</span><a name="line.345"></a>
+<span class="sourceLineNo">346</span>  /**<a name="line.346"></a>
+<span class="sourceLineNo">347</span>   * @return MemStore Instance to use in this store.<a name="line.347"></a>
+<span class="sourceLineNo">348</span>   */<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  private MemStore getMemstore() {<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    MemStore ms = null;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    // Check if in-memory-compaction configured. Note MemoryCompactionPolicy is an enum!<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    MemoryCompactionPolicy inMemoryCompaction = null;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    if (this.getTableName().isSystemTable()) {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>      inMemoryCompaction = MemoryCompactionPolicy.valueOf(<a name="line.354"></a>
+<span class="sourceLineNo">355</span>          conf.get("hbase.systemtables.compacting.memstore.type", "NONE"));<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    } else {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>      inMemoryCompaction = family.getInMemoryCompaction();<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    }<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    if (inMemoryCompaction == null) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      inMemoryCompaction =<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          MemoryCompactionPolicy.valueOf(conf.get(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY,<a name="line.361"></a>
+<span class="sourceLineNo">362</span>              CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT).toUpperCase());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    }<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    switch (inMemoryCompaction) {<a name="line.364"></a>
+<span class="sourceLineNo">365</span>      case NONE:<a name="line.365"></a>
+<span class="sourceLineNo">366</span>        ms = ReflectionUtils.newInstance(DefaultMemStore.class,<a name="line.366"></a>
+<span class="sourceLineNo">367</span>            new Object[] { conf, this.comparator,<a name="line.367"></a>
+<span class="sourceLineNo">368</span>                this.getHRegion().getRegionServicesForStores()});<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        break;<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      default:<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        Class&lt;? extends CompactingMemStore&gt; clz = conf.getClass(MEMSTORE_CLASS_NAME,<a name="line.371"></a>
+<span class="sourceLineNo">372</span>            CompactingMemStore.class, CompactingMemStore.class);<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        ms = ReflectionUtils.newInstance(clz, new Object[]{conf, this.comparator, this,<a name="line.373"></a>
+<span class="sourceLineNo">374</span>            this.getHRegion().getRegionServicesForStores(), inMemoryCompaction});<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    return ms;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>  }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>  /**<a name="line.379"></a>
+<span class="sourceLineNo">380</span>   * Creates the cache config.<a name="line.380"></a>
+<span class="sourceLineNo">381</span>   * @param family The current column family.<a name="line.381"></a>
+<span class="sourceLineNo">382</span>   */<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  protected void createCacheConf(final ColumnFamilyDescriptor family) {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    this.cacheConf = new CacheConfig(conf, family, region.getBlockCache(),<a name="line.384"></a>
+<span class="sourceLineNo">385</span>        region.getRegionServicesForStores().getByteBuffAllocator());<a name="line.385"></a>
+<span class="sourceLineNo">386</span>  }<a name="line.386"></a>
+<span class="sourceLineNo">387</span><a name="line.387"></a>
+<span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>   * Creates the store engine configured for the given Store.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   * @param store The store. An unfortunate dependency needed due to it<a name="line.390"></a>
+<span class="sourceLineNo">391</span>   *              being passed to coprocessors via the compactor.<a name="line.391"></a>
+<span class="sourceLineNo">392</span>   * @param conf Store configuration.<a name="line.392"></a>
+<span class="sourceLineNo">393</span>   * @param kvComparator KVComparator for storeFileManager.<a name="line.393"></a>
+<span class="sourceLineNo">394</span>   * @return StoreEngine to use.<a name="line.394"></a>
+<span class="sourceLineNo">395</span>   */<a name="line.395"></a>
+<span class="sourceLineNo">396</span>  protected StoreEngine&lt;?, ?, ?, ?&gt; createStoreEngine(HStore store, Configuration conf,<a name="line.396"></a>
+<span class="sourceLineNo">397</span>      CellComparator kvComparator) throws IOException {<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    return StoreEngine.create(store, conf, comparator);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>  }<a name="line.399"></a>
+<span class="sourceLineNo">400</span><a name="line.400"></a>
+<span class="sourceLineNo">401</span>  /**<a name="line.401"></a>
+<span class="sourceLineNo">402</span>   * @param family<a name="line.402"></a>
+<span class="sourceLineNo">403</span>   * @return TTL in seconds of the specified family<a name="line.403"></a>
+<span class="sourceLineNo">404</span>   */<a name="line.404"></a>
+<span class="sourceLineNo">405</span>  public static long determineTTLFromFamily(final ColumnFamilyDescriptor family) {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    // HCD.getTimeToLive returns ttl in seconds.  Convert to milliseconds.<a name="line.406"></a>
+<span class="sourceLineNo">407</span>    long ttl = family.getTimeToLive();<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    if (ttl == HConstants.FOREVER) {<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      // Default is unlimited ttl.<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      ttl = Long.MAX_VALUE;<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    } else if (ttl == -1) {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>      ttl = Long.MAX_VALUE;<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    } else {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>      // Second -&gt; ms adjust for user data<a name="line.414"></a>
+<span class="sourceLineNo">415</span>      ttl *= 1000;<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    }<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    return ttl;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
+<span class="sourceLineNo">419</span><a name="line.419"></a>
+<span class="sourceLineNo">420</span>  @Override<a name="line.420"></a>
+<span class="sourceLineNo">421</span>  public String getColumnFamilyName() {<a name="line.421"></a>
+<span class="sourceLineNo">422</span>    return this.family.getNameAsString();<a name="line.422"></a>
+<span class="sourceLineNo">423</span>  }<a name="line.423"></a>
+<span class="sourceLineNo">424</span><a name="line.424"></a>
+<span class="sourceLineNo">425</span>  @Override<a name="line.425"></a>
+<span class="sourceLineNo">426</span>  public TableName getTableName() {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    return this.getRegionInfo().getTable();<a name="line.427"></a>
+<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
+<span class="sourceLineNo">429</span><a name="line.429"></a>
+<span class="sourceLineNo">430</span>  @Override<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  public FileSystem getFileSystem() {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    return this.fs.getFileSystem();<a name="line.432"></a>
+<span class="sourceLineNo">433</span>  }<a name="line.433"></a>
+<span class="sourceLineNo">434</span><a name="line.434"></a>
+<span class="sourceLineNo">435</span>  public HRegionFileSystem getRegionFileSystem() {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    return this.fs;<a name="line.436"></a>
+<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
+<span class="sourceLineNo">438</span><a name="line.438"></a>
+<span class="sourceLineNo">439</span>  /* Implementation of StoreConfigInformation */<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  @Override<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  public long getStoreFileTtl() {<a name="line.441"></a>
+<span class="sourceLineNo">442</span>    // TTL only applies if there's no MIN_VERSIONs setting on the column.<a name="line.442"></a>
+<span class="sourceLineNo">443</span>    return (this.scanInfo.getMinVersions() == 0) ? this.scanInfo.getTtl() : Long.MAX_VALUE;<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  }<a name="line.444"></a>
+<span class="sourceLineNo">445</span><a name="line.445"></a>
+<span class="sourceLineNo">446</span>  @Override<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  public long getMemStoreFlushSize() {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    // TODO: Why is this in here?  The flushsize of the region rather than the store?  St.Ack<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    return this.region.memstoreFlushSize;<a name="line.449"></a>
+<span class="sourceLineNo">450</span>  }<a name="line.450"></a>
+<span class="sourceLineNo">451</span><a name="line.451"></a>
+<span class="sourceLineNo">452</span>  @Override<a name="line.452"></a>
+<span class="sourceLineNo">453</span>  public MemStoreSize getFlushableSize() {<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    return this.memstore.getFlushableSize();<a name="line.454"></a>
+<span class="sourceLineNo">455</span>  }<a name="line.455"></a>
+<span class="sourceLineNo">456</span><a name="line.456"></a>
+<span class="sourceLineNo">457</span>  @Override<a name="line.457"></a>
+<span class="sourceLineNo">458</span>  public MemStoreSize getSnapshotSize() {<a name="line.458"></a>
+<span class="sourceLineNo">459</span>    return this.memstore.getSnapshotSize();<a name="line.459"></a>
+<span class="sourceLineNo">460</span>  }<a name="line.460"></a>
+<span class="sourceLineNo">461</span><a name="line.461"></a>
+<span class="sourceLineNo">462</span>  @Override<a name="line.462"></a>
+<span class="sourceLineNo">463</span>  public long getCompactionCheckMultiplier() {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    return this.compactionCheckMultiplier;<a name="line.464"></a>
+<span class="sourceLineNo">465</span>  }<a name="line.465"></a>
+<span class="sourceLineNo">466</span><a name="line.466"></a>
+<span class="sourceLineNo">467</span>  @Override<a name="line.467"></a>
+<span class="sourceLineNo">468</span>  public long getBlockingFileCount() {<a name="line.468"></a>
+<span class="sourceLineNo">469</span>    return blockingFileCount;<a name="line.469"></a>
+<span class="sourceLineNo">470</span>  }<a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /* End implementation of StoreConfigInformation */<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  /**<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   * Returns the configured bytesPerChecksum value.<a name="line.474"></a>
+<span class="sourceLineNo">475</span>   * @param conf The configuration<a name="line.475"></a>
+<span class="sourceLineNo">476</span>   * @return The bytesPerChecksum that is set in the configuration<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   */<a name="line.477"></a>
+<span class="sourceLineNo">478</span>  public static int getBytesPerChecksum(Configuration conf) {<a name="line.478"></a>
+<span class="sourceLineNo">479</span>    return conf.getInt(HConstants.BYTES_PER_CHECKSUM,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>                       HFile.DEFAULT_BYTES_PER_CHECKSUM);<a name="line.480"></a>
+<span class="sourceLineNo">481</span>  }<a name="line.481"></a>
+<span class="sourceLineNo">482</span><a name="line.482"></a>
+<span class="sourceLineNo">483</span>  /**<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   * Returns the configured checksum algorithm.<a name="line.484"></a>
+<span class="sourceLineNo">485</span>   * @param conf The configuration<a name="line.485"></a>
+<span class="sourceLineNo">486</span>   * @return The checksum algorithm that is set in the configuration<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   */<a name="line.487"></a>
+<span class="sourceLineNo">488</span>  public static ChecksumType getChecksumType(Configuration conf) {<a name="line.488"></a>
+<span class="sourceLineNo">489</span>    String checksumName = conf.get(HConstants.CHECKSUM_TYPE_NAME);<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    if (checksumName == null) {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      return ChecksumType.getDefaultChecksumType();<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    } else {<a name="line.492"></a>
+<span class="sourceLineNo">493</span>      return ChecksumType.nameToType(checksumName);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    }<a name="line.494"></a>
+<span class="sourceLineNo">495</span>  }<a name="line.495"></a>
+<span class="sourceLineNo">496</span><a name="line.496"></a>
+<span class="sourceLineNo">497</span>  /**<a name="line.497"></a>
+<span class="sourceLineNo">498</span>   * @return how many bytes to write between status checks<a name="line.498"></a>
+<span class="sourceLineNo">499</span>   */<a name="line.499"></a>
+<span class="sourceLineNo">500</span>  public static int getCloseCheckInterval() {<a name="line.500"></a>
+<span class="sourceLineNo">501</span>    return closeCheckInterval;<a name="line.501"></a>
+<span class="sourceLineNo">502</span>  }<a name="line.502"></a>
+<span class="sourceLineNo">503</span><a name="line.503"></a>
+<span class="sourceLineNo">504</span>  @Override<a name="line.504"></a>
+<span class="sourceLineNo">505</span>  public ColumnFamilyDescriptor getColumnFamilyDescriptor() {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>    return this.family;<a name="line.506"></a>
+<span class="sourceLineNo">507</span>  }<a name="line.507"></a>
+<span class="sourceLineNo">508</span><a name="line.508"></a>
+<span class="sourceLineNo">509</span>  @Override<a name="line.509"></a>
+<span class="sourceLineNo">510</span>  public OptionalLong getMaxSequenceId() {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>    return StoreUtils.getMaxSequenceIdInList(this.getStorefiles());<a name="line.511"></a>
+<span class="sourceLineNo">512</span>  }<a name="line.512"></a>
+<span class="sourceLineNo">513</span><a name="line.513"></a>
+<span class="sourceLineNo">514</span>  @Override<a name="line.514"></a>
+<span class="sourceLineNo">515</span>  public OptionalLong getMaxMemStoreTS() {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    return StoreUtils.getMaxMemStoreTSInList(this.getStorefiles());<a name="line.516"></a>
+<span class="sourceLineNo">517</span>  }<a name="line.517"></a>
+<span class="sourceLineNo">518</span><a name="line.518"></a>
+<span class="sourceLineNo">519</span>  /**<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   * @param tabledir {@link Path} to where the table is being stored<a name="line.520"></a>
+<span class="sourceLineNo">521</span>   * @param hri {@link RegionInfo} for the region.<a name="line.521"></a>
+<span class="sourceLineNo">522</span>   * @param family {@link ColumnFamilyDescriptor} describing the column family<a name="line.522"></a>
+<span class="sourceLineNo">523</span>   * @return Path to family/Store home directory.<a name="line.523"></a>
+<span class="sourceLineNo">524</span>   */<a name="line.524"></a>
+<span class="sourceLineNo">525</span>  @Deprecated<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  public static Path getStoreHomedir(final Path tabledir,<a name="line.526"></a>
+<span class="sourceLineNo">527</span>      final RegionInfo hri, final byte[] family) {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>    return getStoreHomedir(tabledir, hri.getEncodedName(), family);<a name="line.528"></a>
+<span class="sourceLineNo">529</span>  }<a name="line.529"></a>
+<span class="sourceLineNo">530</span><a name="line.530"></a>
+<span class="sourceLineNo">531</span>  /**<a name="line.531"></a>
+<span class="sourceLineNo">532</span>   * @param tabledir {@link Path} to where the table is being stored<a name="line.532"></a>
+<span class="sourceLineNo">533</span>   * @param encodedName Encoded region name.<a name="line.533"></a>
+<span class="sourceLineNo">534</span>   * @param family {@link ColumnFamilyDescriptor} describing the column family<a name="line.534"></a>
+<span class="sourceLineNo">535</span>   * @return Path to family/Store home directory.<a name="line.535"></a>
+<span class="sourceLineNo">536</span>   */<a name="line.536"></a>
+<span class="sourceLineNo">537</span>  @Deprecated<a name="line.537"></a>
+<span class="sourceLineNo">538</span>  public static Path getStoreHomedir(final Path tabledir,<a name="line.538"></a>
+<span class="sourceLineNo">539</span>      final String encodedName, final byte[] family) {<a name="line.539"></a>
+<span class="sourceLineNo">540</span>    return new Path(tabledir, new Path(encodedName, Bytes.toString(family)));<a name="line.540"></a>
+<span class="sourceLineNo">541</span>  }<a name="line.541"></a>
+<span class="sourceLineNo">542</span><a name="line.542"></a>
+<span class="sourceLineNo">543</span>  /**<a name="line.543"></a>
+<span class="sourceLineNo">544</span>   * @return the data block encoder<a name="line.544"></a>
+<span class="sourceLineNo">545</span>   */<a name="line.545"></a>
+<span class="sourceLineNo">546</span>  public HFileDataBlockEncoder getDataBlockEncoder() {<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    return dataBlockEncoder;<a name="line.547"></a>
+<span class="sourceLineNo">548</span>  }<a name="line.548"></a>
+<span class="sourceLineNo">549</span><a name="line.549"></a>
+<span class="sourceLineNo">550</span>  /**<a name="line.550"></a>
+<span class="sourceLineNo">551</span>   * Should be used only in tests.<a name="line.551"></a>
+<span class="sourceLineNo">552</span>   * @param blockEncoder the block delta encoder to use<a name="line.552"></a>
+<span class="sourceLineNo">553</span>   */<a name="line.553"></a>
+<span class="sourceLineNo">554</span>  void setDataBlockEncoderInTest(HFileDataBlockEncoder blockEncoder) {<a name="line.554"></a>
+<span class="sourceLineNo">555</span>    this.dataBlockEncoder = blockEncoder;<a name="line.555"></a>
+<span class="sourceLineNo">556</span>  }<a name="line.556"></a>
+<span class="sourceLineNo">557</span><a name="line.557"></a>
+<span class="sourceLineNo">558</span>  /**<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * Creates an unsorted list of StoreFile loaded in parallel<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * from the given directory.<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  private List&lt;HStoreFile&gt; loadStoreFiles(boolean warmup) throws IOException {<a name="line.563"></a>
+<span class="sourceLineNo">564</span>    Collection&lt;StoreFileInfo&gt; files = fs.getStoreFiles(getColumnFamilyName());<a name="line.564"></a>
+<span class="sourceLineNo">565</span>    return openStoreFiles(files, warmup);<a name="line.565"></a>
+<span class="sourceLineNo">566</span>  }<a name="line.566"></a>
+<span class="sourceLineNo">567</span><a name="line.567"></a>
+<span class="sourceLineNo">568</span>  private List&lt;HStoreFile&gt; openStoreFiles(Collection&lt;StoreFileInfo&gt; files, boolean warmup)<a name="line.568"></a>
+<span class="sourceLineNo">569</span>      throws IOException {<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    if (CollectionUtils.isEmpty(files)) {<a name="line.570"></a>
+<span class="sourceLineNo">571</span>      return Collections.emptyList();<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    }<a name="line.572"></a>
+<span class="sourceLineNo">573</span>    // initialize the thread pool for opening store files in parallel..<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    ThreadPoolExecutor storeFileOpenerThreadPool =<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      this.region.getStoreFileOpenAndCloseThreadPool("StoreFileOpenerThread-"<a name="line.575"></a>
+<span class="sourceLineNo">576</span>        + this.region.getRegionInfo().getEncodedName() + "-" + this.getColumnFamilyName());<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    CompletionService&lt;HStoreFile&gt; completionService = new ExecutorCompletionService&lt;&gt;(storeFileOpenerThreadPool);<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>    int totalValidStoreFile = 0;<a name="line.579"></a>
+<span class="sourceLineNo">580</span>    for (StoreFileInfo storeFileInfo : files) {<a name="line.580"></a>
+<span class="sourceLineNo">581</span>      // open each store file in parallel<a name="line.581"></a>
+<span class="sourceLineNo">582</span>      completionService.submit(() -&gt; this.createStoreFileAndReader(storeFileInfo));<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      totalValidStoreFile++;<a name="line.583"></a>
+<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
+<span class="sourceLineNo">585</span><a name="line.585"></a>
+<span class="sourceLineNo">586</span>    Set&lt;String&gt; compactedStoreFiles = new HashSet&lt;&gt;();<a name="line.586"></a>
+<span class="sourceLineNo">587</span>    ArrayList&lt;HStoreFile&gt; results = new ArrayList&lt;&gt;(files.size());<a name="line.587"></a>
+<span class="sourceLineNo">588</span>    IOException ioe = null;<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    try {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>      for (int i = 0; i &lt; totalValidStoreFile; i++) {<a name="line.590"></a>
+<span class="sourceLineNo">591</span>        try {<a name="line.591"></a>
+<span class="sourceLineNo">592</span>          HStoreFile storeFile = completionService.take().get();<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          if (storeFile != null) {<a name="line.593"></a>
+<span class="sourceLineNo">594</span>            LOG.debug("loaded {}", storeFile);<a name="line.594"></a>
+<span class="sourceLineNo">595</span>            results.add(storeFile);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>            compactedStoreFiles.addAll(storeFile.getCompactedStoreFiles());<a name="line.596"></a>
+<span class="sourceLineNo">597</span>          }<a name="line.597"></a>
+<span class="sourceLineNo">598</span>        } catch (InterruptedException e) {<a name="line.598"></a>
+<span class="sourceLineNo">599</span>          if (ioe == null) ioe = new InterruptedIOException(e.getMessage());<a name="line.599"></a>
+<span class="sourceLineNo">600</span>        } catch (ExecutionException e) {<a name="line.600"></a>
+<span class="sourceLineNo">601</span>          if (ioe == null) ioe = new IOException(e.getCause());<a name="line.601"></a>
+<span class="sourceLineNo">602</span>        }<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      }<a name="line.603"></a>
+<span class="sourceLineNo">604</span>    } finally {<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      storeFileOpenerThreadPool.shutdownNow();<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    if (ioe != null) {<a name="line.607"></a>
+<span class="sourceLineNo">608</span>      // close StoreFile readers<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      boolean evictOnClose =<a name="line.609"></a>
+<span class="sourceLineNo">610</span>          cacheConf != null? cacheConf.shouldEvictOnClose(): true;<a name="line.610"></a>
+<span class="sourceLineNo">611</span>      for (HStoreFile file : results) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>        try {<a name="line.612"></a>
+<span class="sourceLineNo">613</span>          if (file != null) {<a name="line.613"></a>
+<span class="sourceLineNo">614</span>            file.closeStoreFile(evictOnClose);<a name="line.614"></a>
+<span class="sourceLineNo">615</span>          }<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        } catch (IOException e) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          LOG.warn("Could not close store file", e);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>        }<a name="line.618"></a>
+<span class="sourceLineNo">619</span>      }<a name="line.619"></a>
+<span class="sourceLineNo">620</span>      throw ioe;<a name="line.620"></a>
+<span class="sourceLineNo">621</span>    }<a name="line.621"></a>
+<span class="sourceLineNo">622</span><a name="line.622"></a>
+<span class="sourceLineNo">623</span>    // Should not archive the compacted store files when region warmup. See HBASE-22163.<a name="line.623"></a>
+<span class="sourceLineNo">624</span>    if (!warmup) {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>      // Remove the compacted files from result<a name="line.625"></a>
+<span class="sourceLineNo">626</span>      List&lt;HStoreFile&gt; filesToRemove = new ArrayList&lt;&gt;(compactedStoreFiles.size());<a name="line.626"></a>
+<span class="sourceLineNo">627</span>      for (HStoreFile storeFile : results) {<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        if (compactedStoreFiles.contains(storeFile.getPath().getName())) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          LOG.warn("Clearing the compacted storefile {} from this store", storeFile);<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          storeFile.getReader().close(true);<a name="line.630"></a>
+<span class="sourceLineNo">631</span>          filesToRemove.add(storeFile);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>        }<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      }<a name="line.633"></a>
+<span class="sourceLineNo">634</span>      results.removeAll(filesToRemove);<a name="line.634"></a>
+<span class="sourceLineNo">635</span>      if (!filesToRemove.isEmpty() &amp;&amp; this.isPrimaryReplicaStore()) {<a name="line.635"></a>
+<span class="sourceLineNo">636</span>        LOG.debug("Moving the files {} to archive", filesToRemove);<a name="line.636"></a>
+<span class="sourceLineNo">637</span>        this.fs.removeStoreFiles(this.getColumnFamilyDescriptor().getNameAsString(), filesToRemove);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      }<a name="line.638"></a>
+<span class="sourceLineNo">639</span>    }<a name="line.639"></a>
 <span class="sourceLineNo">640</span><a name="line.640"></a>
-<span class="sourceLineNo">641</span>  @Override<a name="line.641"></a>
-<span class="sourceLineNo">642</span>  public void refreshStoreFiles() throws IOException {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>    Collection&lt;StoreFileInfo&gt; newFiles = fs.getStoreFiles(getColumnFamilyName());<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    refreshStoreFilesInternal(newFiles);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>  }<a name="line.645"></a>
-<span class="sourceLineNo">646</span><a name="line.646"></a>
-<span class="sourceLineNo">647</span>  /**<a name="line.647"></a>
-<span class="sourceLineNo">648</span>   * Replaces the store files that the store has with the given files. Mainly used by secondary<a name="line.648"></a>
-<span class="sourceLineNo">649</span>   * region replicas to keep up to date with the primary region files.<a name="line.649"></a>
-<span class="sourceLineNo">650</span>   * @throws IOException<a name="line.650"></a>
-<span class="sourceLineNo">651</span>   */<a name="line.651"></a>
-<span class="sourceLineNo">652</span>  public void refreshStoreFiles(Collection&lt;String&gt; newFiles) throws IOException {<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    List&lt;StoreFileInfo&gt; storeFiles = new ArrayList&lt;&gt;(newFiles.size());<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    for (String file : newFiles) {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      storeFiles.add(fs.getStoreFileInfo(getColumnFamilyName(), file));<a name="line.655"></a>
-<span class="sourceLineNo">656</span>    }<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    refreshStoreFilesInternal(storeFiles);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  }<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>  /**<a name="line.660"></a>
-<span class="sourceLineNo">661</span>   * Checks the underlying store files, and opens the files that  have not<a name="line.661"></a>
-<span class="sourceLineNo">662</span>   * been opened, and removes the store file readers for store files no longer<a name="line.662"></a>
-<span class="sourceLineNo">663</span>   * available. Mainly used by secondary region replicas to keep up to date with<a name="line.663"></a>
-<span class="sourceLineNo">664</span>   * the primary region files.<a name="line.664"></a>
-<span class="sourceLineNo">665</span>   * @throws IOException<a name="line.665"></a>
-<span class="sourceLineNo">666</span>   */<a name="line.666"></a>
-<span class="sourceLineNo">667</span>  private void refreshStoreFilesInternal(Collection&lt;StoreFileInfo&gt; newFiles) throws IOException {<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    StoreFileManager sfm = storeEngine.getStoreFileManager();<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    Collection&lt;HStoreFile&gt; currentFiles = sfm.getStorefiles();<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    Collection&lt;HStoreFile&gt; compactedFiles = sfm.getCompactedfiles();<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    if (currentFiles == null) currentFiles = Collections.emptySet();<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    if (newFiles == null) newFiles = Collections.emptySet();<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    if (compactedFiles == null) compactedFiles = Collections.emptySet();<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    HashMap&lt;StoreFileInfo, HStoreFile&gt; currentFilesSet = new HashMap&lt;&gt;(currentFiles.size());<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    for (HStoreFile sf : currentFiles) {<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      currentFilesSet.put(sf.getFileInfo(), sf);<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    }<a name="line.678"></a>
-<span class="sourceLineNo">679</span>    HashMap&lt;StoreFileInfo, HStoreFile&gt; compactedFilesSet = new HashMap&lt;&gt;(compactedFiles.size());<a name="line.679"></a>
-<span class="sourceLineNo">680</span>    for (HStoreFile sf : compactedFiles) {<a name="line.680"></a>
-<span class="sourceLineNo">681</span>      compactedFilesSet.put(sf.getFileInfo(), sf);<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>    Set&lt;StoreFileInfo&gt; newFilesSet = new HashSet&lt;StoreFileInfo&gt;(newFiles);<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    // Exclude the files that have already been compacted<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    newFilesSet = Sets.difference(newFilesSet, compactedFilesSet.keySet());<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    Set&lt;StoreFileInfo&gt; toBeAddedFiles = Sets.difference(newFilesSet, currentFilesSet.keySet());<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    Set&lt;StoreFileInfo&gt; toBeRemovedFiles = Sets.difference(currentFilesSet.keySet(), newFilesSet);<a name="line.688"></a>
-<span class="sourceLineNo">689</span><a name="line.689"></a>
-<span class="sourceLineNo">690</span>    if (toBeAddedFiles.isEmpty() &amp;&amp; toBeRemovedFiles.isEmpty()) {<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      return;<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>    LOG.info("Refreshing store files for region " + this.getRegionInfo().getRegionNameAsString()<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      + " files to add: " + toBeAddedFiles + " files to remove: " + toBeRemovedFiles);<a name="line.695"></a>
+<span class="sourceLineNo">641</span>    return results;<a name="line.641"></a>
+<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
+<span class="sourceLineNo">643</span><a name="line.643"></a>
+<span class="sourceLineNo">644</span>  @Override<a name="line.644"></a>
+<span class="sourceLineNo">645</span>  public void refreshStoreFiles() throws IOException {<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    Collection&lt;StoreFileInfo&gt; newFiles = fs.getStoreFiles(getColumnFamilyName());<a name="line.646"></a>
+<span class="sourceLineNo">647</span>    refreshStoreFilesInternal(newFiles);<a name="line.647"></a>
+<span class="sourceLineNo">648</span>  }<a name="line.648"></a>
+<span class="sourceLineNo">649</span><a name="line.649"></a>
+<span class="sourceLineNo">650</span>  /**<a name="line.650"></a>
+<span class="sourceLineNo">651</span>   * Replaces the store files that the store has with the given files. Mainly used by secondary<a name="line.651"></a>
+<span class="sourceLineNo">652</span>   * region replicas to keep up to date with the primary region files.<a name="line.652"></a>
+<span class="sourceLineNo">653</span>   * @throws IOException<a name="line.653"></a>
+<span class="sourceLineNo">654</span>   */<a name="line.654"></a>
+<span class="sourceLineNo">655</span>  public void refreshStoreFiles(Collection&lt;String&gt; newFiles) throws IOException {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    List&lt;StoreFileInfo&gt; storeFiles = new ArrayList&lt;&gt;(newFiles.size());<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    for (String file : newFiles) {<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      storeFiles.add(fs.getStoreFileInfo(getColumnFamilyName(), file));<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>    refreshStoreFilesInternal(storeFiles);<a name="line.660"></a>
+<span class="sourceLineNo">661</span>  }<a name="line.661"></a>
+<span class="sourceLineNo">662</span><a name="line.662"></a>
+<span class="sourceLineNo">663</span>  /**<a name="line.663"></a>
+<span class="sourceLineNo">664</span>   * Checks the underlying store files, and opens the files that  have not<a name="line.664"></a>
+<span class="sourceLineNo">665</span>   * been opened, and removes the store file readers for store files no longer<a name="line.665"></a>
+<span class="sourceLineNo">666</span>   * available. Mainly used by secondary region replicas to keep up to date with<a name="line.666"></a>
+<span class="sourceLineNo">667</span>   * the primary region files.<a name="line.667"></a>
+<span class="sourceLineNo">668</span>   * @throws IOException<a name="line.668"></a>
+<span class="sourceLineNo">669</span>   */<a name="line.669"></a>
+<span class="sourceLineNo">670</span>  private void refreshStoreFilesInternal(Collection&lt;StoreFileInfo&gt; newFiles) throws IOException {<a name="line.670"></a>
+<span class="sourceLineNo">671</span>    StoreFileManager sfm = storeEngine.getStoreFileManager();<a name="line.671"></a>
+<span class="sourceLineNo">672</span>    Collection&lt;HStoreFile&gt; currentFiles = sfm.getStorefiles();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>    Collection&lt;HStoreFile&gt; compactedFiles = sfm.getCompactedfiles();<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    if (currentFiles == null) currentFiles = Collections.emptySet();<a name="line.674"></a>
+<span class="sourceLineNo">675</span>    if (newFiles == null) newFiles = Collections.emptySet();<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    if (compactedFiles == null) compactedFiles = Collections.emptySet();<a name="line.676"></a>
+<span class="sourceLineNo">677</span><a name="line.677"></a>
+<span class="sourceLineNo">678</span>    HashMap&lt;StoreFileInfo, HStoreFile&gt; currentFilesSet = new HashMap&lt;&gt;(currentFiles.size());<a name="line.678"></a>
+<span class="sourceLineNo">679</span>    for (HStoreFile sf : currentFiles) {<a name="line.679"></a>
+<span class="sourceLineNo">680</span>      currentFilesSet.put(sf.getFileInfo(), sf);<a name="line.680"></a>
+<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
+<span class="sourceLineNo">682</span>    HashMap&lt;StoreFileInfo, HStoreFile&gt; compactedFilesSet = new HashMap&lt;&gt;(compactedFiles.size());<a name="line.682"></a>
+<span class="sourceLineNo">683</span>    for (HStoreFile sf : compactedFiles) {<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      compactedFilesSet.put(sf.getFileInfo(), sf);<a name="line.684"></a>
+<span class="sourceLineNo">685</span>    }<a name="line.685"></a>
+<span class="sourceLineNo">686</span><a name="line.686"></a>
+<span class="sourceLineNo">687</span>    Set&lt;StoreFileInfo&gt; newFilesSet = new HashSet&lt;StoreFileInfo&gt;(newFiles);<a name="line.687"></a>
+<span class="sourceLineNo">688</span>    // Exclude the files that have already been compacted<a name="line.688"></a>
+<span class="sourceLineNo">689</span>    newFilesSet = Sets.difference(newFilesSet, compactedFilesSet.keySet());<a name="line.689"></a>
+<span class="sourceLineNo">690</span>    Set&lt;StoreFileInfo&gt; toBeAddedFiles = Sets.difference(newFilesSet, currentFilesSet.keySet());<a name="line.690"></a>
+<span class="sourceLineNo">691</span>    Set&lt;StoreFileInfo&gt; toBeRemovedFiles = Sets.difference(currentFilesSet.keySet(), newFilesSet);<a name="line.691"></a>
+<span class="sourceLineNo">692</span><a name="line.692"></a>
+<span class="sourceLineNo">693</span>    if (toBeAddedFiles.isEmpty() &amp;&amp; toBeRemovedFiles.isEmpty()) {<a name="line.693"></a>
+<span class="sourceLineNo">694</span>      return;<a name="line.694"></a>
+<span class="sourceLineNo">695</span>    }<a name="line.695"></a>
 <span class="sourceLineNo">696</span><a name="line.696"></a>
-<span class="sourceLineNo">697</span>    Set&lt;HStoreFile&gt; toBeRemovedStoreFiles = new HashSet&lt;&gt;(toBeRemovedFiles.size());<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    for (StoreFileInfo sfi : toBeRemovedFiles) {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      toBeRemovedStoreFiles.add(currentFilesSet.get(sfi));<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    }<a name="line.700"></a>
-<span class="sourceLineNo">701</span><a name="line.701"></a>
-<span class="sourceLineNo">702</span>    // try to open the files<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    List&lt;HStoreFile&gt; openedFiles = openStoreFiles(toBeAddedFiles, false);<a name="line.703"></a>
+<span class="sourceLineNo">697</span>    LOG.info("Refreshing store files for region " + this.getRegionInfo().getRegionNameAsString()<a name="line.697"></a>
+<span class="sourceLineNo">698</span>      + " files to add: " + toBeAddedFiles + " files to remove: " + toBeRemovedFiles);<a name="line.698"></a>
+<span class="sourceLineNo">699</span><a name="line.699"></a>
+<span class="sourceLineNo">700</span>    Set&lt;HStoreFile&gt; toBeRemovedStoreFiles = new HashSet&lt;&gt;(toBeRemovedFiles.size());<a name="line.700"></a>
+<span class="sourceLineNo">701</span>    for (StoreFileInfo sfi : toBeRemovedFiles) {<a name="line.701"></a>
+<span class="sourceLineNo">702</span>      toBeRemovedStoreFiles.add(currentFilesSet.get(sfi));<a name="line.702"></a>
+<span class="sourceLineNo">703</span>    }<a name="line.703"></a>
 <span class="sourceLineNo">704</span><a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // propogate the file changes to the underlying store file manager<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    replaceStoreFiles(toBeRemovedStoreFiles, openedFiles); //won't throw an exception<a name="line.706"></a>
+<span class="sourceLineNo">705</span>    // try to open the files<a name="line.705"></a>
+<span class="sourceLineNo">706</span>    List&lt;HStoreFile&gt; openedFiles = openStoreFiles(toBeAddedFiles, false);<a name="line.706"></a>
 <span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    // Advance the memstore read point to be at least the new store files seqIds so that<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    // readers might pick it up. This assumes that the store is not getting any writes (otherwise<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    // in-flight transactions might be made visible)<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    if (!toBeAddedFiles.isEmpty()) {<a name="line.711"></a>
-<span class="sourceLineNo">712</span>      // we must have the max sequence id here as we do have several store files<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      region.getMVCC().advanceTo(this.getMaxSequenceId().getAsLong());<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    }<a name="line.714"></a>
-<span class="sourceLineNo">715</span><a name="line.715"></a>
-<span class="sourceLineNo">716</span>    completeCompaction(toBeRemovedStoreFiles);<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
+<span class="sourceLineNo">708</span>    // propogate the file changes to the underlying store file manager<a name="line.708"></a>
+<span class="sourceLineNo">709</span>    replaceStoreFiles(toBeRemovedStoreFiles, openedFiles); //won't throw an exception<a name="line.709"></a>
+<span class="sourceLineNo">710</span><a name="line.710"></a>
+<span class="sourceLineNo">711</span>    // Advance the memstore read point to be at least the new store files seqIds so that<a name="line.711"></a>
+<span class="sourceLineNo">712</span>    // readers might pick it up. This assumes that the store is not getting any writes (otherwise<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    // in-flight transactions might be made visible)<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    if (!toBeAddedFiles.isEmpty()) {<a name="line.714"></a>
+<span class="sourceLineNo">715</span>      // we must have the max sequence id here as we do have several store files<a name="line.715"></a>
+<span class="sourceLineNo">716</span>      region.getMVCC().advanceTo(this.getMaxSequenceId().getAsLong());<a name="line.716"></a>
+<span class="sourceLineNo">717</span>    }<a name="line.717"></a>
 <span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  @VisibleForTesting<a name="line.719"></a>
-<span class="sourceLineNo">720</span>  protected HStoreFile createStoreFileAndReader(final Path p) throws IOException {<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    StoreFileInfo info = new StoreFileInfo(conf, this.getFileSystem(),<a name="line.721"></a>
-<span class="sourceLineNo">722</span>        p, isPrimaryReplicaStore());<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    return createStoreFileAndReader(info);<a name="line.723"></a>
-<span class="sourceLineNo">724</span>  }<a name="line.724"></a>
-<span class="sourceLineNo">725</span><a name="line.725"></a>
-<span class="sourceLineNo">726</span>  private HStoreFile createStoreFileAndReader(StoreFileInfo info) throws IOException {<a name="line.726"></a>
-<span class="sourceLineNo">727</span>    info.setRegionCoprocessorHost(this.region.getCoprocessorHost());<a name="line.727"></a>
-<span class="sourceLineNo">728</span>    HStoreFile storeFile = new HStoreFile(info, this.family.getBloomFilterType(), this.cacheConf);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    storeFile.initReader();<a name="line.729"></a>
-<span class="sourceLineNo">730</span>    return storeFile;<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span>  /**<a name="line.733"></a>
-<span class="sourceLineNo">734</span>   * This message intends to inform the MemStore that next coming updates<a name="line.734"></a>
-<span class="sourceLineNo">735</span>   * are going to be part of the replaying edits from WAL<a name="line.735"></a>
-<span class="sourceLineNo">736</span>   */<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  public void startReplayingFromWAL(){<a name="line.737"></a>
-<span class="sourceLineNo">738</span>    this.memstore.startReplayingFromWAL();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>  }<a name="line.739"></a>
-<span class="sourceLineNo">740</span><a name="line.740"></a>
-<span class="sourceLineNo">741</span>  /**<a name="line.741"></a>
-<span class="sourceLineNo">742</span>   * This message intends to inform the MemStore that the replaying edits from WAL<a name="line.742"></a>
-<span class="sourceLineNo">743</span>   * are done<a name="line.743"></a>
-<span class="sourceLineNo">744</span>   */<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  public void stopReplayingFromWAL(){<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    this.memstore.stopReplayingFromWAL();<a name="line.746"></a>
-<span class="sourceLineNo">747</span>  }<a name="line.747"></a>
-<span class="sourceLineNo">748</span><a name="line.748"></a>
-<span class="sourceLineNo">749</span>  /**<a name="line.749"></a>
-<span class="sourceLineNo">750</span>   * Adds a value to the memstore<a name="line.750"></a>
-<span class="sourceLineNo">751</span>   */<a name="line.751"></a>
-<span class="sourceLineNo">752</span>  public void add(final Cell cell, MemStoreSizing memstoreSizing) {<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    lock.readLock().lock();<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    try {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      if (this.currentParallelPutCount.getAndIncrement() &gt; this.parallelPutCountPrintThreshold) {<a name="line.755"></a>
-<span class="sourceLineNo">756</span>        LOG.trace(this.getTableName() + "tableName={}, encodedName={}, columnFamilyName={} is " +<a name="line.756"></a>
-<span class="sourceLineNo">757</span>          "too busy!", this.getRegionInfo().getEncodedName(), this .getColumnFamilyName());<a name="line.757"></a>
-<span class="sourceLineNo">758</span>      }<a name="line.758"></a>
-<span class="sourceLineNo">759</span>      this.memstore.add(cell, memstoreSizing);<a name="line.759"></a>
-<span class="sourceLineNo">760</span>    } finally {<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      lock.readLock().unlock();<a name="line.761"></a>
-<span class="sourceLineNo">762</span>      currentParallelPutCount.decrementAndGet();<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    }<a name="line.763"></a>
-<span class="sourceLineNo">764</span>  }<a name="line.764"></a>
-<span class="sourceLineNo">765</span><a name="line.765"></a>
-<span class="sourceLineNo">766</span>  /**<a name="line.766"></a>
-<span class="sourceLineNo">767</span>   * Adds the specified value to the memstore<a name="line.767"></a>
-<span class="sourceLineNo">768</span>   */<a name="line.768"></a>
-<span class="sourceLineNo">769</span>  public void add(final Iterable&lt;Cell&gt; cells, MemStoreSizing memstoreSizing) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>    lock.readLock().lock();<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    try {<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      if (this.currentParallelPutCount.getAndIncrement() &gt; this.parallelPutCountPrintThreshold) {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        LOG.trace(this.getTableName() + "tableName={}, encodedName={}, columnFamilyName={} is " +<a name="line.773"></a>
-<span class="sourceLineNo">774</span>            "too busy!", this.getRegionInfo().getEncodedName(), this .getColumnFamilyName());<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      }<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      memstore.add(cells, memstoreSizing);<a name="line.776"></a>
-<span class="sourceLineNo">777</span>    } finally {<a name="line.777"></a>
-<span class="sourceLineNo">778</span>      lock.readLock().unlock();<a name="line.778"></a>
-<span class="sourceLineNo">779</span>      currentParallelPutCount.decrementAndGet();<a name="line.779"></a>
-<span class="sourceLineNo">780</span>    }<a name="line.780"></a>
-<span class="sourceLineNo">781</span>  }<a name="line.781"></a>
-<span class="sourceLineNo">782</span><a name="line.782"></a>
-<span class="sourceLineNo">783</span>  @Override<a name="line.783"></a>
-<span class="sourceLineNo">784</span>  public long timeOfOldestEdit() {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>    return memstore.timeOfOldestEdit();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>  }<a name="line.786"></a>
-<span class="sourceLineNo">787</span><a name="line.787"></a>
-<span class="sourceLineNo">788</span>  /**<a name="line.788"></a>
-<span class="sourceLineNo">789</span>   * @return All store files.<a name="line.789"></a>
-<span class="sourceLineNo">790</span>   */<a name="line.790"></a>
-<span class="sourceLineNo">791</span>  @Override<a name="line.791"></a>
-<span class="sourceLineNo">792</span>  public Collection&lt;HStoreFile&gt; getStorefiles() {<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    return this.storeEngine.getStoreFileManager().getStorefiles();<a name="line.793"></a>
-<span class="sourceLineNo">794</span>  }<a name="line.794"></a>
-<span class="sourceLineNo">795</span><a name="line.795"></a>
-<span class="sourceLineNo">796</span>  @Override<a name="line.796"></a>
-<span class="sourceLineNo">797</span>  public Collection&lt;HStoreFile&gt; getCompactedFiles() {<a name="line.797"></a>
-<span class="sourceLineNo">798</span>    return this.storeEngine.getStoreFileManager().getCompactedfiles();<a name="line.798"></a>
-<span class="sourceLineNo">799</span>  }<a name="line.799"></a>
-<span class="sourceLineNo">800</span><a name="line.800"></a>
-<span class="sourceLineNo">801</span>  /**<a name="line.801"></a>
-<span class="sourceLineNo">802</span>   * This throws a WrongRegionException if the HFile does not fit in this region, or an<a name="line.802"></a>
-<span class="sourceLineNo">803</span>   * InvalidHFileException if the HFile is not valid.<a name="line.803"></a>
-<span class="sourceLineNo">804</span>   */<a name="line.804"></a>
-<span class="sourceLineNo">805</span>  public void assertBulkLoadHFileOk(Path srcPath) throws IOException {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    HFile.Reader reader  = null;<a name="line.806"></a>
-<span class="sourceLineNo">807</span>    try {<a name="line.807"></a>
-<span class="sourceLineNo">808</span>      LOG.info("Validating hfile at " + srcPath + " for inclusion in "<a name="line.808"></a>
-<span class="sourceLineNo">809</span>          + "store " + this + " region " + this.getRegionInfo().getRegionNameAsString());<a name="line.809"></a>
-<span class="sourceLineNo">810</span>      FileSystem srcFs = srcPath.getFileSystem(conf);<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      srcFs.access(srcPath, FsAction.READ_WRITE);<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      reader = HFile.createReader(srcFs, srcPath, cacheConf, isPrimaryReplicaStore(), conf);<a name="line.812"></a>
-<span class="sourceLineNo">813</span><a name="line.813"></a>
-<span class="sourceLineNo">814</span>      Optional&lt;byte[]&gt; firstKey = reader.getFirstRowKey();<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      Preconditions.checkState(firstKey.isPresent(), "First key can not be null");<a name="line.815"></a>
-<span class="sourceLineNo">816</span>      Optional&lt;Cell&gt; lk = reader.getLastKey();<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      Preconditions.checkState(lk.isPresent(), "Last key can not be null");<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      byte[] lastKey =  CellUtil.cloneRow(lk.get());<a name="line.818"></a>
-<span class="sourceLineNo">819</span><a name="line.819"></a>
-<span class="sourceLineNo">820</span>      if (LOG.isDebugEnabled()) {<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        LOG.debug("HFile bounds: first=" + Bytes.toStringBinary(firstKey.get()) +<a name="line.821"></a>
-<span class="sourceLineNo">822</span>            " last=" + Bytes.toStringBinary(lastKey));<a name="line.822"></a>
-<span class="sourceLineNo">823</span>        LOG.debug("Region bounds: first=" +<a name="line.823"></a>
-<span class="sourceLineNo">824</span>            Bytes.toStringBinary(getRegionInfo().getStartKey()) +<a name="line.824"></a>
-<span class="sourceLineNo">825</span>            " last=" + Bytes.toStringBinary(getRegionInfo().getEndKey()));<a name="line.825"></a>
-<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>      if (!this.getRegionInfo().containsRange(firstKey.get(), lastKey)) {<a name="line.828"></a>
-<span class="sourceLineNo">829</span>        throw new WrongRegionException(<a name="line.829"></a>
-<span class="sourceLineNo">830</span>            "Bulk load file " + srcPath.toString() + " does not fit inside region "<a name="line.830"></a>
-<span class="sourceLineNo">831</span>            + this.getRegionInfo().getRegionNameAsString());<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      }<a name="line.832"></a>
-<span class="sourceLineNo">833</span><a name="line.833"></a>
-<span class="sourceLineNo">834</span>      if(reader.length() &gt; conf.getLong(HConstants.HREGION_MAX_FILESIZE,<a name="line.834"></a>
-<span class="sourceLineNo">835</span>          HConstants.DEFAULT_MAX_FILE_SIZE)) {<a name="line.835"></a>
-<span class="sourceLineNo">836</span>        LOG.warn("Trying to bulk load hfile " + srcPath + " with size: " +<a name="line.836"></a>
-<span class="sourceLineNo">837</span>            reader.length() + " bytes can be problematic as it may lead to oversplitting.");<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      }<a name="line.838"></a>
-<span class="sourceLineNo">839</span><a name="line.839"></a>
-<span class="sourceLineNo">840</span>      if (verifyBulkLoads) {<a name="line.840"></a>
-<span class="sourceLineNo">841</span>        long verificationStartTime = EnvironmentEdgeManager.currentTime();<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        LOG.info("Full verification started for bulk load hfile: {}", srcPath);<a name="line.842"></a>
-<span class="sourceLineNo">843</span>        Cell prevCell = null;<a name="line.843"></a>
-<span class="sourceLineNo">844</span>        HFileScanner scanner = reader.getScanner(false, false, false);<a name="line.844"></a>
-<span class="sourceLineNo">845</span>        scanner.seekTo();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>        do {<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          Cell cell = scanner.getCell();<a name="line.847"></a>
-<span class="sourceLineNo">848</span>          if (prevCell != null) {<a name="line.848"></a>
-<span class="sourceLineNo">849</span>            if (comparator.compareRows(prevCell, cell) &gt; 0) {<a name="line.849"></a>
-<span class="sourceLineNo">850</span>              throw new InvalidHFileException("Previous row is greater than"<a name="line.850"></a>
-<span class="sourceLineNo">851</span>                  + " current row: path=" + srcPath + " previous="<a name="line.851"></a>
-<span class="sourceLineNo">852</span>                  + CellUtil.getCellKeyAsString(prevCell) + " current="<a name="line.852"></a>
-<span class="sourceLineNo">853</span>                  + CellUtil.getCellKeyAsString(cell));<a name="line.853"></a>
-<span class="sourceLineNo">854</span>            }<a name="line.854"></a>
-<span class="sourceLineNo">855</span>            if (CellComparator.getInstance().compareFamilies(prevCell, cell) != 0) {<a name="line.855"></a>
-<span class="sourceLineNo">856</span>              throw new InvalidHFileException("Previous key had different"<a name="line.856"></a>
-<span class="sourceLineNo">857</span>                  + " family compared to current key: path=" + srcPath<a name="line.857"></a>
-<span class="sourceLineNo">858</span>                  + " previous="<a name="line.858"></a>
-<span class="sourceLineNo">859</span>                  + Bytes.toStringBinary(prevCell.getFamilyArray(), prevCell.getFamilyOffset(),<a name="line.859"></a>
-<span class="sourceLineNo">860</span>                      prevCell.getFamilyLength())<a name="line.860"></a>
-<span class="sourceLineNo">861</span>                  + " current="<a name="line.861"></a>
-<span class="sourceLineNo">862</span>                  + Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(),<a name="line.862"></a>
-<span class="sourceLineNo">863</span>                      cell.getFamilyLength()));<a name="line.863"></a>
-<span class="sourceLineNo">864</span>            }<a name="line.864"></a>
-<span class="sourceLineNo">865</span>          }<a name="line.865"></a>
-<span class="sourceLineNo">866</span>          prevCell = cell;<a name="line.866"></a>
-<span class="sourceLineNo">867</span>        } while (scanner.next());<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      LOG.info("Full verification complete for bulk load hfile: " + srcPath.toString()<a name="line.868"></a>
-<span class="sourceLineNo">869</span>         + " took " + (EnvironmentEdgeManager.currentTime() - verificationStartTime)<a name="line.869"></a>
-<span class="sourceLineNo">870</span>         + " ms");<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    } finally {<a name="line.872"></a>
-<span class="sourceLineNo">873</span>      if (reader != null) reader.close();<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    }<a name="line.874"></a>
-<span class="sourceLineNo">875</span>  }<a name="line.875"></a>
-<span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>  /**<a name="line.877"></a>
-<span class="sourceLineNo">878</span>   * This method should only be called from Region. It is assumed that the ranges of values in the<a name="line.878"></a>
-<span class="sourceLineNo">879</span>   * HFile fit within the stores assigned region. (assertBulkLoadHFileOk checks this)<a name="line.879"></a>
-<span class="sourceLineNo">880</span>   *<a name="line.880"></a>
-<span class="sourceLineNo">881</span>   * @param srcPathStr<a name="line.881"></a>
-<span class="sourceLineNo">882</span>   * @param seqNum sequence Id associated with the HFile<a name="line.882"></a>
-<span class="sourceLineNo">883</span>   */<a name="line.883"></a>
-<span class="sourceLineNo">884</span>  public Pair&lt;Path, Path&gt; preBulkLoadHFile(String srcPathStr, long seqNum) throws IOException {<a name="line.884"></a>
-<span class="sourceLineNo">885</span>    Path srcPath = new Path(srcPathStr);<a name="line.885"></a>
-<span class="sourceLineNo">886</span>    return fs.bulkLoadStoreFile(getColumnFamilyName(), srcPath, seqNum);<a name="line.886"></a>
-<span class="sourceLineNo">887</span>  }<a name="line.887"></a>
-<span class="sourceLineNo">888</span><a name="line.888"></a>
-<span class="sourceLineNo">889</span>  public Path bulkLoadHFile(byte[] family, String srcPathStr, Path dstPath) throws IOException {<a name="line.889"></a>
-<span class="sourceLineNo">890</span>    Path srcPath = new Path(srcPathStr);<a name="line.890"></a>
-<span class="sourceLineNo">891</span>    try {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      fs.commitStoreFile(srcPath, dstPath);<a name="line.892"></a>
-<span class="sourceLineNo">893</span>    } finally {<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      if (this.getCoprocessorHost() != null) {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>        this.getCoprocessorHost().postCommitStoreFile(family, srcPath, dstPath);<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>    }<a name="line.897"></a>
-<span class="sourceLineNo">898</span><a name="line.898"></a>
-<span class="sourceLineNo">899</span>    LOG.info("Loaded HFile " + srcPath + " into store '" + getColumnFamilyName() + "' as "<a name="line.899"></a>
-<span class="sourceLineNo">900</span>        + dstPath + " - updating store file list.");<a name="line.900"></a>
+<span class="sourceLineNo">719</span>    completeCompaction(toBeRemovedStoreFiles);<a name="line.719"></a>
+<span class="sourceLineNo">720</span>  }<a name="line.720"></a>
+<span class="sourceLineNo">721</span><a name="line.721"></a>
+<span class="sourceLineNo">722</span>  @VisibleForTesting<a name="line.722"></a>
+<span class="sourceLineNo">723</span>  protected HStoreFile createStoreFileAndReader(final Path p) throws IOException {<a name="line.723"></a>
+<span class="sourceLineNo">724</span>    StoreFileInfo info = new StoreFileInfo(conf, this.getFileSystem(),<a name="line.724"></a>
+<span class="sourceLineNo">725</span>        p, isPrimaryReplicaStore());<a name="line.725"></a>
+<span class="sourceLineNo">726</span>    return createStoreFileAndReader(info);<a name="line.726"></a>
+<span class="sourceLineNo">727</span>  }<a name="line.727"></a>
+<span class="sourceLineNo">728</span><a name="line.728"></a>
+<span class="sourceLineNo">729</span>  private HStoreFile createStoreFileAndReader(StoreFileInfo info) throws IOException {<a name="line.729"></a>
+<span class="sourceLineNo">730</span>    info.setRegionCoprocessorHost(this.region.getCoprocessorHost());<a name="line.730"></a>
+<span class="sourceLineNo">731</span>    HStoreFile storeFile = new HStoreFile(info, this.family.getBloomFilterType(), this.cacheConf);<a name="line.731"></a>
+<span class="sourceLineNo">732</span>    storeFile.initReader();<a name="line.732"></a>
+<span class="sourceLineNo">733</span>    return storeFile;<a name="line.733"></a>
+<span class="sourceLineNo">734</span>  }<a name="line.734"></a>
+<span class="sourceLineNo">735</span><a name="line.735"></a>
+<span class="sourceLineNo">736</span>  /**<a name="line.736"></a>
+<span class="sourceLineNo">737</span>   * This message intends to inform the MemStore that next coming updates<a name="line.737"></a>
+<span class="sourceLineNo">738</span>   * are going to be part of the replaying edits from WAL<a name="line.738"></a>
+<span class="sourceLineNo">739</span>   */<a name="line.739"></a>
+<span class="sourceLineNo">740</span>  public void startReplayingFromWAL(){<a name="line.740"></a>
+<span class="sourceLineNo">741</span>    this.memstore.startReplayingFromWAL();<a name="line.741"></a>
+<span class="sourceLineNo">742</span>  }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>  /**<a name="line.744"></a>
+<span class="sourceLineNo">745</span>   * This message intends to inform the MemStore that the replaying edits from WAL<a name="line.745"></a>
+<span class="sourceLineNo">746</span>   * are done<a name="line.746"></a>
+<span class="sourceLineNo">747</span>   */<a name="line.747"></a>
+<span class="sourceLineNo">748</span>  public void stopReplayingFromWAL(){<a name="line.748"></a>
+<span class="sourceLineNo">749</span>    this.memstore.stopReplayingFromWAL();<a name="line.749"></a>
+<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
+<span class="sourceLineNo">751</span><a name="line.751"></a>
+<span class="sourceLineNo">752</span>  /**<a name="line.752"></a>
+<span class="sourceLineNo">753</span>   * Adds a value to the memstore<a name="line.753"></a>
+<span class="sourceLineNo">754</span>   */<a name="line.754"></a>
+<span class="sourceLineNo">755</span>  public void add(final Cell cell, MemStoreSizing memstoreSizing) {<a name="line.755"></a>
+<span class="sourceLineNo">756</span>    lock.readLock().lock();<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    try {<a name="line.757"></a>
+<span class="sourceLineNo">758</span>      if (this.currentParallelPutCount.getAndIncrement() &gt; this.parallelPutCountPrintThreshold) {<a name="line.758"></a>
+<span class="sourceLineNo">759</span>        LOG.trace(this.getTableName() + "tableName={}, encodedName={}, columnFamilyName={} is " +<a name="line.759"></a>
+<span class="sourceLineNo">760</span>          "too busy!", this.getRegionInfo().getEncodedName(), this .getColumnFamilyName());<a name="line.760"></a>
+<span class="sourceLineNo">761</span>      }<a name="line.761"></a>
+<span class="sourceLineNo">762</span>      this.memstore.add(cell, memstoreSizing);<a name="line.762"></a>
+<span class="sourceLineNo">763</span>    } finally {<a name="line.763"></a>
+<span class="sourceLineNo">764</span>      lock.readLock().unlock();<a name="line.764"></a>
+<span class="sourceLineNo">765</span>      currentParallelPutCount.decrementAndGet();<a name="line.765"></a>
+<span class="sourceLineNo">766</span>    }<a name="line.766"></a>
+<span class="sourceLineNo">767</span>  }<a name="line.767"></a>
+<span class="sourceLineNo">768</span><a name="line.768"></a>
+<span class="sourceLineNo">769</span>  /**<a name="line.769"></a>
+<span class="sourceLineNo">770</span>   * Adds the specified value to the memstore<a name="line.770"></a>
+<span class="sourceLineNo">771</span>   */<a name="line.771"></a>
+<span class="sourceLineNo">772</span>  public void add(final Iterable&lt;Cell&gt; cells, MemStoreSizing memstoreSizing) {<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    lock.readLock().lock();<a name="line.773"></a>
+<span class="sourceLineNo">774</span>    try {<a name="line.774"></a>
+<span class="sourceLineNo">775</span>      if (this.currentParallelPutCount.getAndIncrement() &gt; this.parallelPutCountPrintThreshold) {<a name="line.775"></a>
+<span class="sourceLineNo">776</span>        LOG.trace(this.getTableName() + "tableName={}, encodedName={}, columnFamilyName={} is " +<a name="line.776"></a>
+<span class="sourceLineNo">777</span>            "too busy!", this.getRegionInfo().getEncodedName(), this .getColumnFamilyName());<a name="line.777"></a>
+<span class="sourceLineNo">778</span>      }<a name="line.778"></a>
+<span class="sourceLineNo">779</span>      memstore.add(cells, memstoreSizing);<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    } finally {<a name="line.780"></a>
+<span class="sourceLineNo">781</span>      lock.readLock().unlock();<a name="line.781"></a>
+<span class="sourceLineNo">782</span>      currentParallelPutCount.decrementAndGet();<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    }<a name="line.783"></a>
+<span class="sourceLineNo">784</span>  }<a name="line.784"></a>
+<span class="sourceLineNo">785</span><a name="line.785"></a>
+<span class="sourceLineNo">786</span>  @Override<a name="line.786"></a>
+<span class="sourceLineNo">787</span>  public long timeOfOldestEdit() {<a name="line.787"></a>
+<span class="sourceLineNo">788</span>    return memstore.timeOfOldestEdit();<a name="line.788"></a>
+<span class="sourceLineNo">789</span>  }<a name="line.789"></a>
+<span class="sourceLineNo">790</span><a name="line.790"></a>
+<span class="sourceLineNo">791</span>  /**<a name="line.791"></a>
+<span class="sourceLineNo">792</span>   * @return All store files.<a name="line.792"></a>
+<span class="sourceLineNo">793</span>   */<a name="line.793"></a>
+<span class="sourceLineNo">794</span>  @Override<a name="line.794"></a>
+<span class="sourceLineNo">795</span>  public Collection&lt;HStoreFile&gt; getStorefiles() {<a name="line.795"></a>
+<span class="sourceLineNo">796</span>    return this.storeEngine.getStoreFileManager().getStorefiles();<a name="line.796"></a>
+<span class="sourceLineNo">797</span>  }<a name="line.797"></a>
+<span class="sourceLineNo">798</span><a name="line.798"></a>
+<span class="sourceLineNo">799</span>  @Override<a name="line.799"></a>
+<span class="sourceLineNo">800</span>  public Collection&lt;HStoreFile&gt; getCompactedFiles() {<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    return this.storeEngine.getStoreFileManager().getCompactedfiles();<a name="line.801"></a>
+<span class="sourceLineNo">802</span>  }<a name="line.802"></a>
+<span class="sourceLineNo">803</span><a name="line.803"></a>
+<span class="sourceLineNo">804</span>  /**<a name="line.804"></a>
+<span class="sourceLineNo">805</span>   * This throws a WrongRegionException if the HFile does not fit in this region, or an<a name="line.805"></a>
+<span class="sourceLineNo">806</span>   * InvalidHFileException if the HFile is not valid.<a name="line.806"></a>
+<span class="sourceLineNo">807</span>   */<a name="line.807"></a>
+<span class="sourceLineNo">808</span>  public void assertBulkLoadHFileOk(Path srcPath) throws IOException {<a name="line.808"></a>
+<span class="sourceLineNo">809</span>    HFile.Reader reader  = null;<a name="line.809"></a>
+<span class="sourceLineNo">810</span>    try {<a name="line.810"></a>
+<span class="sourceLineNo">811</span>      LOG.info("Validating hfile at " + srcPath + " for inclusion in "<a name="line.811"></a>
+<span class="sourceLineNo">812</span>          + "store " + this + " region " + this.getRegionInfo().getRegionNameAsString());<a name="line.812"></a>
+<span class="sourceLineNo">813</span>      FileSystem srcFs = srcPath.getFileSystem(conf);<a name="line.813"></a>
+<span class="sourceLineNo">814</span>      srcFs.access(srcPath, FsAction.READ_WRITE);<a name="line.814"></a>
+<span class="sourceLineNo">815</span>      reader = HFile.createReader(srcFs, srcPath, cacheConf, isPrimaryReplicaStore(), conf);<a name="line.815"></a>
+<span class="sourceLineNo">816</span><a name="line.816"></a>
+<span class="sourceLineNo">817</span>      Optional&lt;byte[]&gt; firstKey = reader.getFirstRowKey();<a name="line.817"></a>
+<span class="sourceLineNo">818</span>      Preconditions.checkState(firstKey.isPresent(), "First key can not be null");<a name="line.818"></a>
+<span class="sourceLineNo">819</span>      Optional&lt;Cell&gt; lk = reader.getLastKey();<a name="line.819"></a>
+<span class="sourceLineNo">820</span>      Preconditions.checkState(lk.isPresent(), "Last key can not be null");<a name="line.820"></a>
+<span class="sourceLineNo">821</span>      byte[] lastKey =  CellUtil.cloneRow(lk.get());<a name="line.821"></a>
+<span class="sourceLineNo">822</span><a name="line.822"></a>
+<span class="sourceLineNo">823</span>      if (LOG.isDebugEnabled()) {<a name="line.823"></a>
+<span class="sourceLineNo">824</span>        LOG.debug("HFile bounds: first=" + Bytes.toStringBinary(firstKey.get()) +<a name="line.824"></a>
+<span class="sourceLineNo">825</span>            " last=" + Bytes.toStringBinary(lastKey));<a name="line.825"></a>
+<span class="sourceLineNo">826</span>        LOG.debug("Region bounds: first=" +<a name="line.826"></a>
+<span class="sourceLineNo">827</span>            Bytes.toStringBinary(getRegionInfo().getStartKey()) +<a name="line.827"></a>
+<span class="sourceLineNo">828</span>            " last=" + Bytes.toStringBinary(getRegionInfo().getEndKey()));<a name="line.828"></a>
+<span class="sourceLineNo">829</span>      }<a name="line.829"></a>
+<span class="sourceLineNo">830</span><a name="line.830"></a>
+<span class="sourceLineNo">831</span>      if (!this.getRegionInfo().containsRange(firstKey.get(), lastKey)) {<a name="line.831"></a>
+<span class="sourceLineNo">832</span>        throw new WrongRegionException(<a name="line.832"></a>
+<span class="sourceLineNo">833</span>            "Bulk load file " + srcPath.toString() + " does not fit inside region "<a name="line.833"></a>
+<span class="sourceLineNo">834</span>            + this.getRegionInfo().getRegionNameAsString());<a name="line.834"></a>
+<span class="sourceLineNo">835</span>      }<a name="line.835"></a>
+<span class="sourceLineNo">836</span><a name="line.836"></a>
+<span class="sourceLineNo">837</span>      if(reader.length() &gt; conf.getLong(HConstants.HREGION_MAX_FILESIZE,<a name="line.837"></a>
+<span class="sourceLineNo">838</span>          HConstants.DEFAULT_MAX_FILE_SIZE)) {<a name="line.838"></a>
+<span class="sourceLineNo">839</span>        LOG.warn("Trying to bulk load hfile " + srcPath + " with size: " +<a name="line.839"></a>
+<span class="sourceLineNo">840</span>            reader.length() + " bytes can be problematic as it may lead to oversplitting.");<a name="line.840"></a>
+<span class="sourceLineNo">841</span>      }<a name="line.841"></a>
+<span class="sourceLineNo">842</span><a name="line.842"></a>
+<span class="sourceLineNo">843</span>      if (verifyBulkLoads) {<a name="line.843"></a>
+<span class="sourceLineNo">844</span>        long verificationStartTime = EnvironmentEdgeManager.currentTime();<a name="line.844"></a>
+<span class="sourceLineNo">845</span>        LOG.info("Full verification started for bulk load hfile: {}", srcPath);<a name="line.845"></a>
+<span class="sourceLineNo">846</span>        Cell prevCell = null;<a name="line.846"></a>
+<span class="sourceLineNo">847</span>        HFileScanner scanner = reader.getScanner(false, false, false);<a name="line.847"></a>
+<span class="sourceLineNo">848</span>        scanner.seekTo();<a name="line.848"></a>
+<span class="sourceLineNo">849</span>        do {<a name="line.849"></a>
+<span class="sourceLineNo">850</span>          Cell cell = scanner.getCell();<a name="line.850"></a>
+<span class="sourceLineNo">851</span>          if (prevCell != null) {<a name="line.851"></a>
+<span class="sourceLineNo">852</span>            if (comparator.compareRows(prevCell, cell) &gt; 0) {<a name="line.852"></a>
+<span class="sourceLineNo">853</span>              throw new InvalidHFileException("Previous row is greater than"<a name="line.853"></a>
+<span class="sourceLineNo">854</span>                  + " current row: path=" + srcPath + " previous="<a name="line.854"></a>
+<span class="sourceLineNo">855</span>                  + CellUtil.getCellKeyAsString(prevCell) + " current="<a name="line.855"></a>
+<span class="sourceLineNo">856</span>                  + CellUtil.getCellKeyAsString(cell));<a name="line.856"></a>
+<span class="sourceLineNo">857</span>            }<a name="line.857"></a>
+<span class="sourceLineNo">858</span>            if (CellComparator.getInstance().compareFamilies(prevCell, cell) != 0) {<a name="line.858"></a>
+<span class="sourceLineNo">859</span>              throw new InvalidHFileException("Previous key had different"<a name="line.859"></a>
+<span class="sourceLineNo">860</span>                  + " family compared to current key: path=" + srcPath<a name="line.860"></a>
+<span class="sourceLineNo">861</span>                  + " previous="<a name="line.861"></a>
+<span class="sourceLineNo">862</span>                  + Bytes.toStringBinary(prevCell.getFamilyArray(), prevCell.getFamilyOffset(),<a name="line.862"></a>
+<span class="sourceLineNo">863</span>                      prevCell.getFamilyLength())<a name="line.863"></a>
+<span class="sourceLineNo">864</span>                  + " current="<a name="line.864"></a>
+<span class="sourceLineNo">865</span>                  + Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(),<a name="line.865"></a>
+<span class="sourceLineNo">866</span>                      cell.getFamilyLength()));<a name="line.866"></a>
+<span class="sourceLineNo">867</span>            }<a name="line.867"></a>
+<span class="sourceLineNo">868</span>          }<a name="line.868"></a>
+<span class="sourceLineNo">869</span>          prevCell = cell;<a name="line.869"></a>
+<span class="sourceLineNo">870</span>        } while (scanner.next());<a name="line.870"></a>
+<span class="sourceLineNo">871</span>      LOG.info("Full verification complete for bulk load hfile: " + srcPath.toString()<a name="line.871"></a>
+<span class="sourceLineNo">872</span>         + " took " + (EnvironmentEdgeManager.currentTime() - verificationStartTime)<a name="line.872"></a>
+<span class="sourceLineNo">873</span>         + " ms");<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      }<a name="line.874"></a>
+<span class="sourceLineNo">875</span>    } finally {<a name="line.875"></a>
+<span class="sourceLineNo">876</span>      if (reader != null) reader.close();<a name="line.876"></a>
+<span class="sourceLineNo">877</span>    }<a name="line.877"></a>
+<span class="sourceLineNo">878</span>  }<a name="line.878"></a>
+<span class="sourceLineNo">879</span><a name="line.879"></a>
+<span class="sourceLineNo">880</span>  /**<a name="line.880"></a>
+<span class="sourceLineNo">881</span>   * This method should only be called from Region. It is assumed that the ranges of values in the<a name="line.881"></a>
+<span class="sourceLineNo">882</span>   * HFile fit within the stores assigned region. (assertBulkLoadHFileOk checks this)<a name="line.882"></a>
+<span class="sourceLineNo">883</span>   *<a name="line.883"></a>
+<span class="sourceLineNo">884</span>   * @param srcPathStr<a name="line.884"></a>
+<span class="sourceLineNo">885</span>   * @param seqNum sequence Id associated with the HFile<a name="line.885"></a>
+<span class="sourceLineNo">886</span>   */<a name="line.886"></a>
+<span class="sourceLineNo">887</span>  public Pair&lt;Path, Path&gt; preBulkLoadHFile(String srcPathStr, long seqNum) throws IOException {<a name="line.887"></a>
+<span class="sourceLineNo">888</span>    Path srcPath = new Path(srcPathStr);<a name="line.888"></a>
+<span class="sourceLineNo">889</span>    return fs.bulkLoadStoreFile(getColumnFamilyName(), srcPath, seqNum);<a name="line.889"></a>
+<span class="sourceLineNo">890</span>  }<a name="line.890"></a>
+<span class="sourceLineNo">891</span><a name="line.891"></a>
+<span class="sourceLineNo">892</span>  public Path bulkLoadHFile(byte[] family, String srcPathStr, Path dstPath) throws IOException {<a name="line.892"></a>
+<span class="sourceLineNo">893</span>    Path srcPath = new Path(srcPathStr);<a name="line.893"></a>
+<span class="sourceLineNo">894</span>    try {<a name="line.894"></a>
+<span class="sourceLineNo">895</span>      fs.commitStoreFile(srcPath, dstPath);<a name="line.895"></a>
+<span class="sourceLineNo">896</span>    } finally {<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      if (this.getCoprocessorHost() != null) {<a name="line.897"></a>
+<span class="sourceLineNo">898</span>        this.getCoprocessorHost().postCommitStoreFile(family, srcPath, dstPath);<a name="line.898"></a>
+<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
+<span class="sourceLineNo">900</span>    }<a name="line.900"></a>
 <span class="sourceLineNo">901</span><a name="line.901"></a>
-<span class="sourceLineNo">902</span>    HStoreFile sf = createStoreFileAndReader(dstPath);<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    bulkLoadHFile(sf);<a name="line.903"></a>
+<span class="sourceLineNo">902</span>    LOG.info("Loaded HFile " + srcPath + " into store '" + getColumnFamilyName() + "' as "<a name="line.902"></a>
+<span class="sourceLineNo">903</span>        + dstPath + " - updating store file list.");<a name="line.903"></a>
 <span class="sourceLineNo">904</span><a name="line.904"></a>
-<span class="sourceLineNo">905</span>    LOG.info("Successfully loaded store file {} into store {} (new location: {})",<a name="line.905"></a>
-<span class="sourceLineNo">906</span>        srcPath, this, dstPath);<a name="line.906"></a>
+<span class="sourceLineNo">905</span>    HStoreFile sf = createStoreFileAndReader(dstPath);<a name="line.905"></a>
+<span class="sourceLineNo">906</span>    bulkLoadHFile(sf);<a name="line.906"></a>
 <span class="sourceLineNo">907</span><a name="line.907"></a>
-<span class="sourceLineNo">908</span>    return dstPath;<a name="line.908"></a>
-<span class="sourceLineNo">909</span>  }<a name="line.909"></a>
+<span class="sourceLineNo">908</span>    LOG.info("Successfully loaded store file {} into store {} (new location: {})",<a name="line.908"></a>
+<span class="sourceLineNo">909</span>        srcPath, this, dstPath);<a name="line.909"></a>
 <span class="sourceLineNo">910</span><a name="line.910"></a>
-<span class="sourceLineNo">911</span>  public void bulkLoadHFile(StoreFileInfo fileInfo) throws IOException {<a name="line.911"></a>
-<span class="sourceLineNo">912</span>    HStoreFile sf = createStoreFileAndReader(fileInfo);<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    bulkLoadHFile(sf);<a name="line.913"></a>
-<span class="sourceLineNo">914</span>  }<a name="line.914"></a>
-<span class="sourceLineNo">915</span><a name="line.915"></a>
-<span class="sourceLineNo">916</span>  private void bulkLoadHFile(HStoreFile sf) throws IOException {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>    StoreFileReader r = sf.getReader();<a name="line.917"></a>
-<span class="sourceLineNo">918</span>    this.storeSize.addAndGet(r.length());<a name="line.918"></a>
-<span class="sourceLineNo">919</span>    this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());<a name="line.919"></a>
-<span class="sourceLineNo">920</span><a name="line.920"></a>
-<span class="sourceLineNo">921</span>    // Append the new storefile into the list<a name="line.921"></a>
-<span class="sourceLineNo">922</span>    this.lock.writeLock().lock();<a name="line.922"></a>
-<span class="sourceLineNo">923</span>    try {<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      this.storeEngine.getStoreFileManager().insertNewFiles(Lists.newArrayList(sf));<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    } finally {<a name="line.925"></a>
-<span class="sourceLineNo">926</span>      // We need the lock, as long as we are updating the storeFiles<a name="line.926"></a>
-<span class="sourceLineNo">927</span>      // or changing the memstore. Let us release it before calling<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      // notifyChangeReadersObservers. See HBASE-4485 for a possible<a name="line.928"></a>
-<span class="sourceLineNo">929</span>      // deadlock scenario that could have happened if continue to hold<a name="line.929"></a>
-<span class="sourceLineNo">930</span>      // the lock.<a name="line.930"></a>
-<span class="sourceLineNo">931</span>      this.lock.writeLock().unlock();<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    }<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    LOG.info("Loaded HFile " + sf.getFileInfo() + " into store '" + getColumnFamilyName());<a name="line.933"></a>
-<span class="sourceLineNo">934</span>    if (LOG.isTraceEnabled()) {<a name="line.934"></a>
-<span class="sourceLineNo">935</span>      String traceMessage = "BULK LOAD time,size,store size,store files ["<a name="line.935"></a>
-<span class="sourceLineNo">936</span>          + EnvironmentEdgeManager.currentTime() + "," + r.length() + "," + storeSize<a name="line.936"></a>
-<span class="sourceLineNo">937</span>          + "," + storeEngine.getStoreFileManager().getStorefileCount() + "]";<a name="line.937"></a>
-<span class="sourceLineNo">938</span>      LOG.trace(traceMessage);<a name="line.938"></a>
-<span class="sourceLineNo">939</span>    }<a name="line.939"></a>
-<span class="sourceLineNo">940</span>  }<a name="line.940"></a>
-<span class="sourceLineNo">941</span><a name="line.941"></a>
-<span class="sourceLineNo">942</span>  /**<a name="line.942"></a>
-<span class="sourceLineNo">943</span>   * Close all the readers We don't need to worry about subsequent requests because the Region holds<a name="line.943"></a>
-<span class="sourceLineNo">944</span>   * a write lock that will prevent any more reads or writes.<a name="line.944"></a>
-<span class="sourceLineNo">945</span>   * @return the {@link StoreFile StoreFiles} that were previously being used.<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * @throws IOException on failure<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   */<a name="line.947"></a>
-<span class="sourceLineNo">948</span>  public ImmutableCollection&lt;HStoreFile&gt; close() throws IOException {<a name="line.948"></a>
-<span class="sourceLineNo">949</span>    this.archiveLock.lock();<a name="line.949"></a>
-<span class="sourceLineNo">950</span>    this.lock.writeLock().lock();<a name="line.950"></a>
-<span class="sourceLineNo">951</span>    try {<a name="line.951"></a>
-<span class="sourceLineNo">952</span>      // Clear so metrics doesn't find them.<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      ImmutableCollection&lt;HStoreFile&gt; result = storeEngine.getStoreFileManager().clearFiles();<a name="line.953"></a>
-<span class="sourceLineNo">954</span>      Collection&lt;HStoreFile&gt; compactedfiles =<a name="line.954"></a>
-<span class="sourceLineNo">955</span>          storeEngine.getStoreFileManager().clearCompactedFiles();<a name="line.955"></a>
-<span class="sourceLineNo">956</span>      // clear the compacted files<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      if (CollectionUtils.isNotEmpty(compactedfiles)) {<a name="line.957"></a>
-<span class="sourceLineNo">958</span>        removeCompactedfiles(compactedfiles);<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      }<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      if (!result.isEmpty()) {<a name="line.960"></a>
-<span class="sourceLineNo">961</span>        // initialize the thread pool for closing store files in parallel.<a name="line.961"></a>
-<span class="sourceLineNo">962</span>        ThreadPoolExecutor storeFileCloserThreadPool = this.region<a name="line.962"></a>
-<span class="sourceLineNo">963</span>            .getStoreFileOpenAndCloseThreadPool("StoreFileCloserThread-"<a name="line.963"></a>
-<span class="sourceLineNo">964</span>              + this.region.getRegionInfo().getEncodedName() + "-" + this.getColumnFamilyName());<a name="line.964"></a>
-<span class="sourceLineNo">965</span><a name="line.965"></a>
-<span class="sourceLineNo">966</span>        // close each store file in parallel<a name="line.966"></a>
-<span class="sourceLineNo">967</span>        CompletionService&lt;Void&gt; completionService =<a name="line.967"></a>
-<span class="sourceLineNo">968</span>          new ExecutorCompletionService&lt;&gt;(storeFileCloserThreadPool);<a name="line.968"></a>
-<span class="sourceLineNo">969</span>        for (HStoreFile f : result) {<a name="line.969"></a>
-<span class="sourceLineNo">970</span>          completionService.submit(new Callable&lt;Void&gt;() {<a name="line.970"></a>
-<span class="sourceLineNo">971</span>            @Override<a name="line.971"></a>
-<span class="sourceLineNo">972</span>            public Void call() throws IOException {<a name="line.972"></a>
-<span class="sourceLineNo">973</span>              boolean evictOnClose =<a name="line.973"></a>
-<span class="sourceLineNo">974</span>                  cacheConf != null? cacheConf.shouldEvictOnClose(): true;<a name="line.974"></a>
-<span class="sourceLineNo">975</span>              f.closeStoreFile(evictOnClose);<a name="line.975"></a>
-<span class="sourceLineNo">976</span>              return null;<a name="line.976"></a>
-<span class="sourceLineNo">977</span>            }<a name="line.977"></a>
-<span class="sourceLineNo">978</span>          });<a name="line.978"></a>
-<span class="sourceLineNo">979</span>        }<a name="line.979"></a>
-<span class="sourceLineNo">980</span><a name="line.980"></a>
-<span class="sourceLineNo">981</span>        IOException ioe = null;<a name="line.981"></a>
-<span class="sourceLineNo">982</span>        try {<a name="line.982"></a>
-<span class="sourceLineNo">983</span>          for (int i = 0; i &lt; result.size(); i++) {<a name="line.983"></a>
-<span class="sourceLineNo">984</span>            try {<a name="line.984"></a>
-<span class="sourceLineNo">985</span>              Future&lt;Void&gt; future = completionService.take();<a name="line.985"></a>
-<span class="sourceLineNo">986</span>              future.get();<a name="line.986"></a>
-<span class="sourceLineNo">987</span>            } catch (InterruptedException e) {<a name="line.987"></a>
-<span class="sourceLineNo">988</span>              if (ioe == null) {<a name="line.988"></a>
-<span class="sourceLineNo">989</span>                ioe = new InterruptedIOException();<a name="line.989"></a>
-<span class="sourceLineNo">990</span>                ioe.initCause(e);<a name="line.990"></a>
-<span class="sourceLineNo">991</span>              }<a name="line.991"></a>
-<span class="sourceLineNo">992</span>            } catch (ExecutionException e) {<a name="line.992"></a>
-<span class="sourceLineNo">993</span>              if (ioe == null) ioe = new IOException(e.getCause());<a name="line.993"></a>
-<span class="sourceLineNo">994</span>            }<a name="line.994"></a>
-<span class="sourceLineNo">995</span>          }<a name="line.995"></a>
-<span class="sourceLineNo">996</span>        } finally {<a name="line.996"></a>
-<span class="sourceLineNo">997</span>          storeFileCloserThreadPool.shutdownNow();<a name="line.997"></a>
-<span class="sourceLineNo">998</span>        }<a name="line.998"></a>
-<span class="sourceLineNo">999</span>        if (ioe != null) throw ioe;<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>      }<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>      LOG.trace("Closed {}", this);<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>      return result;<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>    } finally {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>      this.lock.writeLock().unlock();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>      this.archiveLock.unlock();<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>    }<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>  }<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span><a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>  /**<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span>   * Snapshot this stores memstore. Call before running<a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>   * {@link #flushCache(long, MemStoreSnapshot, MonitoredTask, ThroughputController,<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>   * FlushLifeCycleTracker)}<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>   *  so it has some work to do.<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>   */<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>  void snapshot() {<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>    this.lock.writeLock().lock();<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>    try {<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>      this.memstore.snapshot();<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>    } finally {<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>      this.lock.writeLock().unlock();<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>    }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>  }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span><a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>  /**<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>   * Write out current snapshot. Presumes {@link #snapshot()} has been called previously.<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>   * @param logCacheFlushId flush sequence number<a name="line.1026"></a>
-<span class="sourceLineNo">1027</span>   * @param snapshot<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>   * @param status<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>   * @param throughputController<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>   * @return The path name of the tmp file to which the store was flushed<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>   * @throws IOException if exception occurs during process<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>   */<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>  protected List&lt;Path&gt; flushCache(final long logCacheFlushId, MemStoreSnapshot snapshot,<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>      MonitoredTask status, ThroughputController throughputController,<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>      FlushLifeCycleTracker tracker) throws IOException {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>    // If an exception happens flushing, we let it out without clearing<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>    // the memstore snapshot.  The old snapshot will be returned when we say<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>    // 'snapshot', the next time flush comes around.<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>    // Retry after catching exception when flushing, otherwise server will abort<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>    // itself<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>    StoreFlusher flusher = storeEngine.getStoreFlusher();<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>    IOException lastException = null;<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>    for (int i = 0; i &lt; flushRetriesNumber; i++) {<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>      try {<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        List&lt;Path&gt; pathNames =<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>            flusher.flushSnapshot(snapshot, logCacheFlushId, status, throughputController, tracker);<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        Path lastPathName = null;<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        try {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          for (Path pathName : pathNames) {<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>            lastPathName = pathName;<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>            validateStoreFile(pathName);<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          }<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span>          return pathNames;<a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>        } catch (Exception e) {<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          LOG.warn("Failed validating store file {}, retrying num={}", lastPathName, i, e);<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>          if (e instanceof IOException) {<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>            lastException = (IOException) e;<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          } else {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            lastException = new IOException(e);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      } catch (IOException e) {<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>        LOG.warn("Failed flushing store file, retrying num={}", i, e);<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>        lastException = e;<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      }<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      if (lastException != null &amp;&amp; i &lt; (flushRetriesNumber - 1)) {<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>        try {<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>          Thread.sleep(pauseTime);<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>        } catch (InterruptedException e) {<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>          IOException iie = new InterruptedIOException();<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>          iie.initCause(e);<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span>          throw iie;<a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>        }<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>      }<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>    }<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>    throw lastException;<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>  }<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span><a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>  /**<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>   * @param path The pathname of the tmp file into which the store was flushed<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span>   * @param logCacheFlushId<a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>   * @param status<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>   * @return store file created.<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>   * @throws IOException<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>   */<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span>  private HStoreFile commitFile(Path path, long logCacheFlushId, MonitoredTask status)<a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>      throws IOException {<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>    // Write-out finished successfully, move into the right spot<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>    Path dstPath = fs.commitStoreFile(getColumnFamilyName(), path);<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span><a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>    status.setStatus("Flushing " + this + ": reopening flushed file");<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>    HStoreFile sf = createStoreFileAndReader(dstPath);<a name="line.1092"></a>
+<span class="sourceLineNo">911</span>    return dstPath;<a name="line.911"></a>
+<span class="sourceLineNo">912</span>  }<a name="line.912"></a>
+<span class="sourceLineNo">913</span><a name="line.913"></a>
+<span class="sourceLineNo">914</span>  public void bulkLoadHFile(StoreFileInfo fileInfo) throws IOException {<a name="line.914"></a>
+<span class="sourceLineNo">915</span>    HStoreFile sf = createStoreFileAndReader(fileInfo);<a name="line.915"></a>
+<span class="sourceLineNo">916</span>    bulkLoadHFile(sf);<a name="line.916"></a>
+<span class="sourceLineNo">917</span>  }<a name="line.917"></a>
+<span class="sourceLineNo">918</span><a name="line.918"></a>
+<span class="sourceLineNo">919</span>  private void bulkLoadHFile(HStoreFile sf) throws IOException {<a name="line.919"></a>
+<span class="sourceLineNo">920</span>    StoreFileReader r = sf.getReader();<a name="line.920"></a>
+<span class="sourceLineNo">921</span>    this.storeSize.addAndGet(r.length());<a name="line.921"></a>
+<span class="sourceLineNo">922</span>    this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());<a name="line.922"></a>
+<span class="sourceLineNo">923</span><a name="line.923"></a>
+<span class="sourceLineNo">924</span>    // Append the new storefile into the list<a name="line.924"></a>
+<span class="sourceLineNo">925</span>    this.lock.writeLock().lock();<a name="line.925"></a>
+<span class="sourceLineNo">926</span>    try {<a name="line.926"></a>
+<span class="sourceLineNo">927</span>      this.storeEngine.getStoreFileManager().insertNewFiles(Lists.newArrayList(sf));<a name="line.927"></a>
+<span class="sourceLineNo">928</span>    } finally {<a name="line.928"></a>
+<span class="sourceLineNo">929</span>      // We need the lock, as long as we are updating the storeFiles<a name="line.929"></a>
+<span class="sourceLineNo">930</span>      // or changing the memstore. Let us release it before calling<a name="line.930"></a>
+<span class="sourceLineNo">931</span>      // notifyChangeReadersObservers. See HBASE-4485 for a possible<a name="line.931"></a>
+<span class="sourceLineNo">932</span>      // deadlock scenario that could have happened if continue to hold<a name="line.932"></a>
+<span class="sourceLineNo">933</span>      // the lock.<a name="line.933"></a>
+<span class="sourceLineNo">934</span>      this.lock.writeLock().unlock();<a name="line.934"></a>
+<span class="sourceLineNo">935</span>    }<a name="line.935"></a>
+<span class="sourceLineNo">936</span>    LOG.info("Loaded HFile " + sf.getFileInfo() + " into store '" + getColumnFamilyName());<a name="line.936"></a>
+<span class="sourceLineNo">937</span>    if (LOG.isTraceEnabled()) {<a name="line.937"></a>
+<span class="sourceLineNo">938</span>      String traceMessage = "BULK LOAD time,size,store size,store files ["<a name="line.938"></a>
+<span class="sourceLineNo">939</span>          + EnvironmentEdgeManager.currentTime() + "," + r.length() + "," + storeSize<a name="line.939"></a>
+<span class="sourceLineNo">940</span>          + "," + storeEngine.getStoreFileManager().getStorefileCount() + "]";<a name="line.940"></a>
+<span class="sourceLineNo">941</span>      LOG.trace(traceMessage);<a name="line.941"></a>
+<span class="sourceLineNo">942</span>    }<a name="line.942"></a>
+<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
+<span class="sourceLineNo">944</span><a name="line.944"></a>
+<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
+<span class="sourceLineNo">946</span>   * Close all the readers We don't need to worry about subsequent requests because the Region holds<a name="line.946"></a>
+<span class="sourceLineNo">947</span>   * a write lock that will prevent any more reads or writes.<a name="line.947"></a>
+<span class="sourceLineNo">948</span>   * @return the {@link StoreFile StoreFiles} that were previously being used.<a name="line.948"></a>
+<span class="sourceLineNo">949</span>   * @throws IOException on failure<a name="line.949"></a>
+<span class="sourceLineNo">950</span>   */<a name="line.950"></a>
+<span class="sourceLineNo">951</span>  public ImmutableCollection&lt;HStoreFile&gt; close() throws IOException {<a name="line.951"></a>
+<span class="sourceLineNo">952</span>    this.archiveLock.lock();<a name="line.952"></a>
+<span class="sourceLineNo">953</span>    this.lock.writeLock().lock();<a name="line.953"></a>
+<span class="sourceLineNo">954</span>    try {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      // Clear so metrics doesn't find them.<a name="line.955"></a>
+<span class="sourceLineNo">956</span>      ImmutableCollection&lt;HStoreFile&gt; result = storeEngine.getStoreFileManager().clearFiles();<a name="line.956"></a>
+<span class="sourceLineNo">957</span>      Collection&lt;HStoreFile&gt; compactedfiles =<a name="line.957"></a>
+<span class="sourceLineNo">958</span>          storeEngine.getStoreFileManager().clearCompactedFiles();<a name="line.958"></a>
+<span class="sourceLineNo">959</span>      // clear the compacted files<a name="line.959"></a>
+<span class="sourceLineNo">960</span>      if (CollectionUtils.isNotEmpty(compactedfiles)) {<a name="line.960"></a>
+<span class="sourceLineNo">961</span>        removeCompactedfiles(compactedfiles);<a name="line.961"></a>
+<span class="sourceLineNo">962</span>      }<a name="line.962"></a>
+<span class="sourceLineNo">963</span>      if (!result.isEmpty()) {<a name="line.963"></a>
+<span class="sourceLineNo">964</span>        // initialize the thread pool for closing store files in parallel.<a name="line.964"></a>
+<span class="sourceLineNo">965</span>        ThreadPoolExecutor storeFileCloserThreadPool = this.region<a name="line.965"></a>
+<span class="sourceLineNo">966</span>            .getStoreFileOpenAndCloseThreadPool("StoreFileCloserThread-"<a name="line.966"></a>
+<span class="sourceLineNo">967</span>              + this.region.getRegionInfo().getEncodedName() + "-" + this.getColumnFamilyName());<a name="line.967"></a>
+<span class="sourceLineNo">968</span><a name="line.968"></a>
+<span class="sourceLineNo">969</span>        // close each store file in parallel<a name="line.969"></a>
+<span class="sourceLineNo">970</span>        CompletionService&lt;Void&gt; completionService =<a name="line.970"></a>
+<span class="sourceLineNo">971</span>          new ExecutorCompletionService&lt;&gt;(storeFileCloserThreadPool);<a name="line.971"></a>
+<span class="sourceLineNo">972</span>        for (HStoreFile f : result) {<a name="line.972"></a>
+<span class="sourceLineNo">973</span>          completionService.submit(new Callable&lt;Void&gt;() {<a name="line.973"></a>
+<span class="sourceLineNo">974</span>            @Override<a name="line.974"></a>
+<span class="sourceLineNo">975</span>            public Void call() throws IOException {<a name="line.975"></a>
+<span class="sourceLineNo">976</span>              boolean evictOnClose =<a name="line.976"></a>
+<span class="sourceLineNo">977</span>                  cacheConf != null? cacheConf.shouldEvictOnClose(): true;<a name="line.977"></a>
+<span class="sourceLineNo">978</span>              f.closeStoreFile(evictOnClose);<a name="line.978"></a>
+<span class="sourceLineNo">979</span>              return null;<a name="line.979"></a>
+<span class="sourceLineNo">980</span>            }<a name="line.980"></a>
+<span class="sourceLineNo">981</span>          });<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        }<a name="line.982"></a>
+<span class="sourceLineNo">983</span><a name="line.983"></a>
+<span class="sourceLineNo">984</span>        IOException ioe = null;<a name="line.984"></a>
+<span class="sourceLineNo">985</span>        try {<a name="line.985"></a>
+<span class="sourceLineNo">986</span>          for (int i = 0; i &lt; result.size(); i++) {<a name="line.986"></a>
+<span class="sourceLineNo">987</span>            try {<a name="line.987"></a>
+<span class="sourceLineNo">988</span>              Future&lt;Void&gt; future = completionService.take();<a name="line.988"></a>
+<span class="sourceLineNo">989</span>              future.get();<a name="line.989"></a>
+<span class="sourceLineNo">990</span>            } catch (InterruptedException e) {<a name="line.990"></a>
+<span class="sourceLineNo">991</span>              if (ioe == null) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>                ioe = new InterruptedIOException();<a name="line.992"></a>
+<span class="sourceLineNo">993</span>                ioe.initCause(e);<a name="line.993"></a>
+<span class="sourceLineNo">994</span>              }<a name="line.994"></a>
+<span class="sourceLineNo">995</span>            } catch (ExecutionException e) {<a name="line.995"></a>
+<span class="sourceLineNo">996</span>              if (ioe == null) ioe = new IOException(e.getCause());<a name="line.996"></a>
+<span class="sourceLineNo">997</span>            }<a name="line.997"></a>
+<span class="sourceLineNo">998</span>          }<a name="line.998"></a>
+<span class="sourceLineNo">999</span>        } finally {<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>          storeFileCloserThreadPool.shutdownNow();<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>        }<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>        if (ioe != null) throw ioe;<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>      }<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span>      LOG.trace("Closed {}", this);<a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>      return result;<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>    } finally {<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>      this.lock.writeLock().unlock();<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>      this.archiveLock.unlock();<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>    }<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>  }<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span><a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>  /**<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span>   * Snapshot this stores memstore. Call before running<a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>   * {@link #flushCache(long, MemStoreSnapshot, MonitoredTask, ThroughputController,<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>   * FlushLifeCycleTracker)}<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>   *  so it has some work to do.<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>   */<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>  void snapshot() {<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>    this.lock.writeLock().lock();<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>    try {<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>      this.memstore.snapshot();<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>    } finally {<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>      this.lock.writeLock().unlock();<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>    }<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>  }<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span><a name="line.1026"></a>
+<span class="sourceLineNo">1027</span>  /**<a name="line.1027"></a>
+<span class="sourceLineNo">1028</span>   * Write out current snapshot. Presumes {@link #snapshot()} has been called previously.<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>   * @param logCacheFlushId flush sequence number<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>   * @param snapshot<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>   * @param status<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>   * @param throughputController<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>   * @return The path name of the tmp file to which the store was flushed<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>   * @throws IOException if exception occurs during process<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>   */<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>  protected List&lt;Path&gt; flushCache(final long logCacheFlushId, MemStoreSnapshot snapshot,<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>      MonitoredTask status, ThroughputController throughputController,<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>      FlushLifeCycleTracker tracker) throws IOException {<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>    // If an exception happens flushing, we let it out without clearing<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>    // the memstore snapshot.  The old snapshot will be returned when we say<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>    // 'snapshot', the next time flush comes around.<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>    // Retry after catching exception when flushing, otherwise server will abort<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>    // itself<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>    StoreFlusher flusher = storeEngine.getStoreFlusher();<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span>    IOException lastException = null;<a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>    for (int i = 0; i &lt; flushRetriesNumber; i++) {<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>      try {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>        List&lt;Path&gt; pathNames =<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>            flusher.flushSnapshot(snapshot, logCacheFlushId, status, throughputController, tracker);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        Path lastPathName = null;<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>        try {<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span>          for (Path pathName : pathNames) {<a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>            lastPathName = pathName;<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>            validateStoreFile(pathName);<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>          }<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          return pathNames;<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>        } catch (Exception e) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>          LOG.warn("Failed validating store file {}, retrying num={}", lastPathName, i, e);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          if (e instanceof IOException) {<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>            lastException = (IOException) e;<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>          } else {<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>            lastException = new IOException(e);<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>          }<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>        }<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      } catch (IOException e) {<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>        LOG.warn("Failed flushing store file, retrying num={}", i, e);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>        lastException = e;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>      }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>      if (lastException != null &amp;&amp; i &lt; (flushRetriesNumber - 1)) {<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        try {<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span>          Thread.sleep(pauseTime);<a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>        } catch (InterruptedException e) {<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>          IOException iie = new InterruptedIOException();<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>          iie.initCause(e);<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>          throw iie;<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        }<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>      }<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    }<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    throw lastException;<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span>  }<a name="line.1080"></a>
+<span class="sourceLineNo">1081</span><a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>  /**<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>   * @param path The pathname of the tmp file into which the store was flushed<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>   * @param logCacheFlushId<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span>   * @param status<a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>   * @return store file created.<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * @throws IOException<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   */<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>  private HStoreFile commitFile(Path path, long logCacheFlushId, MonitoredTask status)<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>      throws IOException {<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>    // Write-out finished successfully, move into the right spot<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>    Path dstPath = fs.commitStoreFile(getColumnFamilyName(), path);<a name="line.1092"></a>
 <span class="sourceLineNo">1093</span><a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>    StoreFileReader r = sf.getReader();<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>    this.storeSize.addAndGet(r.length());<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span><a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    if (LOG.isInfoEnabled()) {<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span>      LOG.info("Added " + sf + ", entries=" + r.getEntries() +<a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>        ", sequenceid=" + logCacheFlushId +<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>        ", filesize=" + TraditionalBinaryPrefix.long2String(r.length(), "", 1));<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    }<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    return sf;<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span>  }<a name="line.1104"></a>
-<span class="sourceLineNo">1105</span><a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>  /**<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>   * @param maxKeyCount<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>   * @param compression Compression algorithm to use<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span>   * @param isCompaction whether we are creating a new file in a compaction<a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>   * @param includeMVCCReadpoint - whether to include MVCC or not<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>   * @param includesTag - includesTag or not<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>   * @return Writer for a new StoreFile in the tmp dir.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>   */<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span>  // TODO : allow the Writer factory to create Writers of ShipperListener type only in case of<a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>  // compaction<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      boolean shouldDropBehind) throws IOException {<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>    final CacheConfig writerCacheConf;<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>    if (isCompaction) {<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>      // Don't cache data on write on compactions, unless specifically configured to do so<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span>      writerCacheConf = new CacheConfig(cacheConf);<a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>      final boolean shouldCacheCompactedBlocksOnWrite = cacheConf<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>        .shouldCacheCompactedBlocksOnWrite();<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // if data blocks are to be cached on write<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      // during compaction, we should forcefully<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      // cache index and bloom blocks as well<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      if (shouldCacheCompactedBlocksOnWrite) {<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>        LOG.info("cacheCompactedBlocksOnWrite is true, hence enabled cacheOnWrite for " +<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span>          "Data blocks, Index blocks and Bloom filter blocks");<a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>      } else {<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>        writerCacheConf.setCacheDataOnWrite(false);<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span>      }<a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>    } else {<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>      writerCacheConf = cacheConf;<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>      final boolean shouldCacheDataOnWrite = cacheConf.shouldCacheDataOnWrite();<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>      if (shouldCacheDataOnWrite) {<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>        LOG.info("cacheDataOnWrite is true, hence enabled cacheOnWrite for " +<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>          "Index blocks and Bloom filter blocks");<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>      }<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>    }<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    InetSocketAddress[] favoredNodes = null;<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    if (region.getRegionServerServices() != null) {<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>      favoredNodes = region.getRegionServerServices().getFavoredNodesForRegion(<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>          region.getRegionInfo().getEncodedName());<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    }<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    HFileContext hFileContext = createFileContext(compression, includeMVCCReadpoint, includesTag,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      cryptoContext);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    Path familyTempDir = new Path(fs.getTempDir(), family.getNameAsString());<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    StoreFileWriter.Builder builder = new StoreFileWriter.Builder(conf, writerCacheConf,<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>        this.getFileSystem())<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>            .withOutputDir(familyTempDir)<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>            .withComparator(comparator)<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span>            .withBloomType(family.getBloomFilterType())<a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>            .withMaxKeyCount(maxKeyCount)<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>            .withFavoredNodes(favoredNodes)<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>            .withFileContext(hFileContext)<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>            .withShouldDropCacheBehind(shouldDropBehind)<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span>            .withCompactedFilesSupplier(this::getCompactedFiles);<a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>    return builder.build();<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>  }<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span><a name="line.1164"></a>
-<span class="sourceLineNo">1165</span>  private HFileContext createFileContext(Compression.Algorithm compression,<a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>    if (compression == null) {<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>    }<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>    HFileContext hFileContext = new HFileContextBuilder()<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>                                .withIncludesMvcc(includeMVCCReadpoint)<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>                                .withIncludesTags(includesTag)<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>                                .withCompression(compression)<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>                                .withCompressTags(family.isCompressTags())<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>                                .withChecksumType(checksumType)<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>                                .withBytesPerCheckSum(bytesPerChecksum)<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>                                .withBlockSize(blocksize)<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>                                .withHBaseCheckSum(true)<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>                                .withDataBlockEncoding(family.getDataBlockEncoding())<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>                                .withEncryptionContext(cryptoContext)<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span>                                .withCreateTime(EnvironmentEdgeManager.currentTime())<a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>                                .withColumnFamily(family.getName())<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>                                .withTableName(region.getTableDescriptor()<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span>                                    .getTableName().getName())<a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>                                .build();<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>    return hFileContext;<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>  }<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span><a name="line.1188"></a>
-<span class="sourceLineNo">1189</span><a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  private long getTotalSize(Collection&lt;HStoreFile&gt; sfs) {<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span>    return sfs.stream().mapToLong(sf -&gt; sf.getReader().length()).sum();<a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  }<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span><a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>  /**<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * Change storeFiles adding into place the Reader produced by this new flush.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   * @param sfs Store files<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>   * @param snapshotId<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>   * @throws IOException<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>   * @return Whether compaction is required.<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>   */<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>  private boolean updateStorefiles(List&lt;HStoreFile&gt; sfs, long snapshotId) throws IOException {<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    this.lock.writeLock().lock();<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>    try {<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>      this.storeEngine.getStoreFileManager().insertNewFiles(sfs);<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span>      if (snapshotId &gt; 0) {<a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>        this.memstore.clearSnapshot(snapshotId);<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>      }<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>    } finally {<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      // We need the lock, as long as we are updating the storeFiles<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      // or changing the memstore. Let us release it before calling<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span>      // notifyChangeReadersObservers. See HBASE-4485 for a possible<a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // deadlock scenario that could have happened if continue to hold<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      // the lock.<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      this.lock.writeLock().unlock();<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span>    }<a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>    // notify to be called here - only in case of flushes<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>    notifyChangedReadersObservers(sfs);<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span>    if (LOG.isTraceEnabled()) {<a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      long totalSize = getTotalSize(sfs);<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      String traceMessage = "FLUSH time,count,size,store size,store files ["<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>          + EnvironmentEdgeManager.currentTime() + "," + sfs.size() + "," + totalSize<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>          + "," + storeSize + "," + storeEngine.getStoreFileManager().getStorefileCount() + "]";<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span>      LOG.trace(traceMessage);<a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>    }<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>    return needsCompaction();<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>  }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>  /**<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>   * Notify all observers that set of Readers has changed.<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>   * @throws IOException<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>   */<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>  private void notifyChangedReadersObservers(List&lt;HStoreFile&gt; sfs) throws IOException {<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>    for (ChangedReadersObserver o : this.changedReaderObservers) {<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>      List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>      this.lock.readLock().lock();<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      try {<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span>        memStoreScanners = this.memstore.getScanners(o.getReadPoint());<a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      } finally {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        this.lock.readLock().unlock();<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>      o.updateReaders(sfs, memStoreScanners);<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>    }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span>  }<a name="line.1243"></a>
-<span class="sourceLineNo">1244</span><a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>  /**<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>   * @param cacheBlocks cache the blocks or not<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>   * @param usePread true to use pread, false if not<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>   * @param matcher the scan query matcher<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>   * @param startRow the start row<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>   * @param stopRow the stop row<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>   * @param readPt the read point of the current scan<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>   * @return all scanners for this store<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>   */<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean isGet, boolean usePread,<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, byte[] stopRow, long readPt)<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span>      throws IOException {<a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>    return getScanners(cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow, false,<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>      readPt);<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>  }<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span><a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>  /**<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>   * @param cacheBlocks cache the blocks or not<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>   * @param usePread true to use pread, false if not<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>   * @param matcher the scan query matcher<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>   * @param startRow the start row<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>   * @param includeStartRow true to include start row, false if not<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span>   * @param stopRow the stop row<a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * @param readPt the read point of the current scan<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   * @return all scanners for this store<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>   */<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean usePread,<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, boolean includeStartRow,<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>      byte[] stopRow, boolean includeStopRow, long readPt) throws IOException {<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    Collection&lt;HStoreFile&gt; storeFilesToScan;<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>    List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>    this.lock.readLock().lock();<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>    try {<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      storeFilesToScan = this.storeEngine.getStoreFileManager().getFilesForScan(startRow,<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        includeStartRow, stopRow, includeStopRow);<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>      memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>    } finally {<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>      this.lock.readLock().unlock();<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span><a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>    try {<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span>      // First the store file scanners<a name="line.1291"></a>
-<span class="sourceLineNo">1292</span><a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>      // TODO this used to get the store files in descending order,<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>      // but now we get them in ascending order, which I think is<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span>      // actually more correct, since memstore get put at the end.<a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>        .getScannersForStoreFiles(storeFilesToScan, cacheBlocks, usePread, isCompaction, false,<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>          matcher, readPt);<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>      scanners.addAll(sfScanners);<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>      // Then the memstore scanners<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>      scanners.addAll(memStoreScanners);<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      return scanners;<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>    } catch (Throwable t) {<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>      clearAndClose(memStoreScanners);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>  }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>  private static void clearAndClose(List&lt;KeyValueScanner&gt; scanners) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>    if (scanners == null) {<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span>    for (KeyValueScanner s : scanners) {<a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>      s.close();<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    }<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    scanners.clear();<a name="line.1317"></a>
+<span class="sourceLineNo">1094</span>    status.setStatus("Flushing " + this + ": reopening flushed file");<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    HStoreFile sf = createStoreFileAndReader(dstPath);<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span><a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    StoreFileReader r = sf.getReader();<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span>    this.storeSize.addAndGet(r.length());<a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span><a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    if (LOG.isInfoEnabled()) {<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>      LOG.info("Added " + sf + ", entries=" + r.getEntries() +<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span>        ", sequenceid=" + logCacheFlushId +<a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>        ", filesize=" + TraditionalBinaryPrefix.long2String(r.length(), "", 1));<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>    }<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>    return sf;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>  }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>  /**<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>   * @param maxKeyCount<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>   * @param compression Compression algorithm to use<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>   * @param isCompaction whether we are creating a new file in a compaction<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span>   * @param includeMVCCReadpoint - whether to include MVCC or not<a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>   * @param includesTag - includesTag or not<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>   * @return Writer for a new StoreFile in the tmp dir.<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>   */<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>  // TODO : allow the Writer factory to create Writers of ShipperListener type only in case of<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>  // compaction<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span>      boolean shouldDropBehind) throws IOException {<a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    final CacheConfig writerCacheConf;<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (isCompaction) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // Don't cache data on write on compactions, unless specifically configured to do so<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      writerCacheConf = new CacheConfig(cacheConf);<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      final boolean cacheCompactedBlocksOnWrite =<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>        cacheConf.shouldCacheCompactedBlocksOnWrite();<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      // if data blocks are to be cached on write<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>      // during compaction, we should forcefully<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span>      // cache index and bloom blocks as well<a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>      if (cacheCompactedBlocksOnWrite) {<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span>        if (!cacheOnWriteLogged) {<a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>          LOG.info("For Store {} , cacheCompactedBlocksOnWrite is true, hence enabled " +<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>              "cacheOnWrite for Data blocks, Index blocks and Bloom filter blocks",<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>            getColumnFamilyName());<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>          cacheOnWriteLogged = true;<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>        }<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>      } else {<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>        writerCacheConf.setCacheDataOnWrite(false);<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>      }<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>    } else {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>      writerCacheConf = cacheConf;<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>      final boolean shouldCacheDataOnWrite = cacheConf.shouldCacheDataOnWrite();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>      if (shouldCacheDataOnWrite) {<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>        writerCacheConf.enableCacheOnWrite();<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>        if (!cacheOnWriteLogged) {<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>          LOG.info("For Store {} , cacheDataOnWrite is true, hence enabled cacheOnWrite for " +<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>            "Index blocks and Bloom filter blocks", getColumnFamilyName());<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>          cacheOnWriteLogged = true;<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>        }<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>      }<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>    }<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>    InetSocketAddress[] favoredNodes = null;<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span>    if (region.getRegionServerServices() != null) {<a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      favoredNodes = region.getRegionServerServices().getFavoredNodesForRegion(<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>          region.getRegionInfo().getEncodedName());<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>    }<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>    HFileContext hFileContext = createFileContext(compression, includeMVCCReadpoint, includesTag,<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span>      cryptoContext);<a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>    Path familyTempDir = new Path(fs.getTempDir(), family.getNameAsString());<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>    StoreFileWriter.Builder builder = new StoreFileWriter.Builder(conf, writerCacheConf,<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>        this.getFileSystem())<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span>            .withOutputDir(familyTempDir)<a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>            .withComparator(comparator)<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>            .withBloomType(family.getBloomFilterType())<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>            .withMaxKeyCount(maxKeyCount)<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>            .withFavoredNodes(favoredNodes)<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>            .withFileContext(hFileContext)<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>            .withShouldDropCacheBehind(shouldDropBehind)<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>            .withCompactedFilesSupplier(this::getCompactedFiles);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>    return builder.build();<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>  }<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span><a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>  private HFileContext createFileContext(Compression.Algorithm compression,<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>      boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>    if (compression == null) {<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>      compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>    }<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span>    HFileContext hFileContext = new HFileContextBuilder()<a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>                                .withIncludesMvcc(includeMVCCReadpoint)<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>                                .withIncludesTags(includesTag)<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span>                                .withCompression(compression)<a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>                                .withCompressTags(family.isCompressTags())<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>                                .withChecksumType(checksumType)<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>                                .withBytesPerCheckSum(bytesPerChecksum)<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>                                .withBlockSize(blocksize)<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>                                .withHBaseCheckSum(true)<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>                                .withDataBlockEncoding(family.getDataBlockEncoding())<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span>                                .withEncryptionContext(cryptoContext)<a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>                                .withCreateTime(EnvironmentEdgeManager.currentTime())<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>                                .withColumnFamily(family.getName())<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>                                .withTableName(region.getTableDescriptor()<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>                                    .getTableName().getName())<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>                                .build();<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>    return hFileContext;<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>  }<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span><a name="line.1198"></a>
+<span class="sourceLineNo">1199</span><a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>  private long getTotalSize(Collection&lt;HStoreFile&gt; sfs) {<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    return sfs.stream().mapToLong(sf -&gt; sf.getReader().length()).sum();<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>  }<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span><a name="line.1203"></a>
+<span class="sourceLineNo">1204</span>  /**<a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>   * Change storeFiles adding into place the Reader produced by this new flush.<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>   * @param sfs Store files<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>   * @param snapshotId<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>   * @throws IOException<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>   * @return Whether compaction is required.<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span>   */<a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>  private boolean updateStorefiles(List&lt;HStoreFile&gt; sfs, long snapshotId) throws IOException {<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>    this.lock.writeLock().lock();<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>    try {<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span>      this.storeEngine.getStoreFileManager().insertNewFiles(sfs);<a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      if (snapshotId &gt; 0) {<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>        this.memstore.clearSnapshot(snapshotId);<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span>      }<a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>    } finally {<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // We need the lock, as long as we are updating the storeFiles<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // or changing the memstore. Let us release it before calling<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      // notifyChangeReadersObservers. See HBASE-4485 for a possible<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span>      // deadlock scenario that could have happened if continue to hold<a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      // the lock.<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>      this.lock.writeLock().unlock();<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>    }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span>    // notify to be called here - only in case of flushes<a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>    notifyChangedReadersObservers(sfs);<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>    if (LOG.isTraceEnabled()) {<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      long totalSize = getTotalSize(sfs);<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      String traceMessage = "FLUSH time,count,size,store size,store files ["<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>          + EnvironmentEdgeManager.currentTime() + "," + sfs.size() + "," + totalSize<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>          + "," + storeSize + "," + storeEngine.getStoreFileManager().getStorefileCount() + "]";<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>      LOG.trace(traceMessage);<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>    }<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>    return needsCompaction();<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span>  }<a name="line.1236"></a>
+<span class="sourceLineNo">1237</span><a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>  /**<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>   * Notify all observers that set of Readers has changed.<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>   * @throws IOException<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>   */<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span>  private void notifyChangedReadersObservers(List&lt;HStoreFile&gt; sfs) throws IOException {<a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>    for (ChangedReadersObserver o : this.changedReaderObservers) {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>      List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>      this.lock.readLock().lock();<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>      try {<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>        memStoreScanners = this.memstore.getScanners(o.getReadPoint());<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>      } finally {<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>        this.lock.readLock().unlock();<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>      }<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      o.updateReaders(sfs, memStoreScanners);<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>    }<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>  }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span><a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>  /**<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span>   * @param cacheBlocks cache the blocks or not<a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>   * @param usePread true to use pread, false if not<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   * @param matcher the scan query matcher<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>   * @param startRow the start row<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>   * @param stopRow the stop row<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>   * @param readPt the read point of the current scan<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>   * @return all scanners for this store<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>   */<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean isGet, boolean usePread,<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, byte[] stopRow, long readPt)<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>      throws IOException {<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>    return getScanners(cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow, false,<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span>      readPt);<a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  }<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span><a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>  /**<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>   * Get all scanners with no filtering based on TTL (that happens further down the line).<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>   * @param cacheBlocks cache the blocks or not<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>   * @param usePread true to use pread, false if not<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>   * @param matcher the scan query matcher<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>   * @param startRow the start row<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>   * @param includeStartRow true to include start row, false if not<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>   * @param stopRow the stop row<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>   * @param readPt the read point of the current scan<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>   * @return all scanners for this store<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>   */<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>  public List&lt;KeyValueScanner&gt; getScanners(boolean cacheBlocks, boolean usePread,<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow, boolean includeStartRow,<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>      byte[] stopRow, boolean includeStopRow, long readPt) throws IOException {<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>    Collection&lt;HStoreFile&gt; storeFilesToScan;<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span>    List&lt;KeyValueScanner&gt; memStoreScanners;<a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>    this.lock.readLock().lock();<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    try {<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>      storeFilesToScan = this.storeEngine.getStoreFileManager().getFilesForScan(startRow,<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span>        includeStartRow, stopRow, includeStopRow);<a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>      memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>    } finally {<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>      this.lock.readLock().unlock();<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>    }<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span><a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    try {<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>      // First the store file scanners<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span><a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>      // TODO this used to get the store files in descending order,<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>      // but now we get them in ascending order, which I think is<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      // actually more correct, since memstore get put at the end.<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>        .getScannersForStoreFiles(storeFilesToScan, cacheBlocks, usePread, isCompaction, false,<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span>          matcher, readPt);<a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      scanners.addAll(sfScanners);<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      // Then the memstore scanners<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>      scanners.addAll(memStoreScanners);<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span>      return scanners;<a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    } catch (Throwable t) {<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>      clearAndClose(memStoreScanners);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    }<a name="line.1317"></a>
 <span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
 <span class="sourceLineNo">1319</span><a name="line.1319"></a>
-<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * (that happens further down the line).<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * @param files the list of files on which the scanners has to be created<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   * @param cacheBlocks cache the blocks or not<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>   * @param usePread true to use pread, false if not<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>   * @param matcher the scan query matcher<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>   * @param startRow the start row<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>   * @param stopRow the stop row<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>   * @param readPt the read point of the current scan<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span>   */<a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>      boolean isGet, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>      byte[] startRow, byte[] stopRow, long readPt, boolean includeMemstoreScanner)<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>      throws IOException {<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>    return getScanners(files, cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow,<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>      false, readPt, includeMemstoreScanner);<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>  }<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span><a name="line.1341"></a>
-<span class="sourceLineNo">1342</span>  /**<a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>   * (that happens further down the line).<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>   * @param files the list of files on which the scanners has to be created<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span>   * @param cacheBlocks ache the blocks or not<a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>   * @param usePread true to use pread, false if not<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>   * @param matcher the scan query matcher<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>   * @param startRow the start row<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span>   * @param includeStartRow true to include start row, false if not<a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>   * @param stopRow the stop row<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span>   * @param readPt the read point of the current scan<a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>   */<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>      boolean usePread, boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow,<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>    List&lt;KeyValueScanner&gt; memStoreScanners = null;<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>    if (includeMemstoreScanner) {<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>      this.lock.readLock().lock();<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span>      try {<a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>        memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>      } finally {<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>        this.lock.readLock().unlock();<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>      }<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span>    }<a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>    try {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        .getScannersForStoreFiles(files, cacheBlocks, usePread, isCompaction, false, matcher,<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>          readPt);<a name="line.1374"></a>
-<span class="sourceLineNo">1375</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1375"></a>
-<span class="sourceLineNo">1376</span>      scanners.addAll(sfScanners);<a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // Then the memstore scanners<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      if (memStoreScanners != null) {<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>        scanners.addAll(memStoreScanners);<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>      }<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>      return scanners;<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>    } catch (Throwable t) {<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      clearAndClose(memStoreScanners);<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>    }<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>  }<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span><a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>  /**<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>   * @param o Observer who wants to know about changes in set of Readers<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>   */<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>  public void addChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>    this.changedReaderObservers.add(o);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>  }<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span><a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>  /**<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>   * @param o Observer no longer interested in changes in set of Readers.<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>   */<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>  public void deleteChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>    // We don't check if observer present; it may not be (legitimately)<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>    this.changedReaderObservers.remove(o);<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>  }<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span><a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>  // Compaction<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span><a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>  /**<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>   * Compact the StoreFiles.  This method may take some time, so the calling<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span>   * thread must be able to block for long periods.<a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>   *<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>   * &lt;p&gt;During this time, the Store can work as usual, getting values from<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span>   * StoreFiles and writing new StoreFiles from the memstore.<a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>   *<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>   * Existing StoreFiles are not destroyed until the new compacted StoreFile is<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span>   * completely written-out to disk.<a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>   *<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * &lt;p&gt;The compactLock prevents multiple simultaneous compactions.<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * The structureLock prevents us from interfering with other write operations.<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   *<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * &lt;p&gt;We don't want to hold the structureLock for the whole time, as a compact()<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * can be lengthy and we want to allow cache-flushes during this period.<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   *<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>   * &lt;p&gt; Compaction event should be idempotent, since there is no IO Fencing for<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>   * the region directory in hdfs. A region server might still try to complete the<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>   * compaction after it lost the region. That is why the following events are carefully<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>   * ordered for a compaction:<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>   *  1. Compaction writes new files under region/.tmp directory (compaction output)<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>   *  2. Compaction atomically moves the temporary file under region directory<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>   *  3. Compaction appends a WAL edit containing the compaction input and output files.<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>   *  Forces sync on WAL.<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>   *  4. Compaction deletes the input files from the region directory.<a name="line.1431"></a>
+<span class="sourceLineNo">1320</span>  private static void clearAndClose(List&lt;KeyValueScanner&gt; scanners) {<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>    if (scanners == null) {<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>      return;<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>    }<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>    for (KeyValueScanner s : scanners) {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>      s.close();<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    }<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    scanners.clear();<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>  }<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span><a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>  /**<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span>   * (that happens further down the line).<a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>   * @param files the list of files on which the scanners has to be created<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * @param cacheBlocks cache the blocks or not<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   * @param usePread true to use pread, false if not<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>   * @param matcher the scan query matcher<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>   * @param startRow the start row<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>   * @param stopRow the stop row<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>   * @param readPt the read point of the current scan<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>   */<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span>      boolean isGet, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>      byte[] startRow, byte[] stopRow, long readPt, boolean includeMemstoreScanner)<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      throws IOException {<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>    return getScanners(files, cacheBlocks, usePread, isCompaction, matcher, startRow, true, stopRow,<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>      false, readPt, includeMemstoreScanner);<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span>  }<a name="line.1350"></a>
+<span class="sourceLineNo">1351</span><a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>  /**<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span>   * Create scanners on the given files and if needed on the memstore with no filtering based on TTL<a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>   * (that happens further down the line).<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>   * @param files the list of files on which the scanners has to be created<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>   * @param cacheBlocks ache the blocks or not<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>   * @param usePread true to use pread, false if not<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>   * @param isCompaction true if the scanner is created for compaction<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>   * @param matcher the scan query matcher<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>   * @param startRow the start row<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>   * @param includeStartRow true to include start row, false if not<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>   * @param stopRow the stop row<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>   * @param includeStopRow true to include stop row, false if not<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span>   * @param readPt the read point of the current scan<a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>   * @param includeMemstoreScanner true if memstore has to be included<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>   * @return scanners on the given files and on the memstore if specified<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>   */<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>  public List&lt;KeyValueScanner&gt; getScanners(List&lt;HStoreFile&gt; files, boolean cacheBlocks,<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span>      boolean usePread, boolean isCompaction, ScanQueryMatcher matcher, byte[] startRow,<a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>    List&lt;KeyValueScanner&gt; memStoreScanners = null;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>    if (includeMemstoreScanner) {<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span>      this.lock.readLock().lock();<a name="line.1374"></a>
+<span class="sourceLineNo">1375</span>      try {<a name="line.1375"></a>
+<span class="sourceLineNo">1376</span>        memStoreScanners = this.memstore.getScanners(readPt);<a name="line.1376"></a>
+<span class="sourceLineNo">1377</span>      } finally {<a name="line.1377"></a>
+<span class="sourceLineNo">1378</span>        this.lock.readLock().unlock();<a name="line.1378"></a>
+<span class="sourceLineNo">1379</span>      }<a name="line.1379"></a>
+<span class="sourceLineNo">1380</span>    }<a name="line.1380"></a>
+<span class="sourceLineNo">1381</span>    try {<a name="line.1381"></a>
+<span class="sourceLineNo">1382</span>      List&lt;StoreFileScanner&gt; sfScanners = StoreFileScanner<a name="line.1382"></a>
+<span class="sourceLineNo">1383</span>        .getScannersForStoreFiles(files, cacheBlocks, usePread, isCompaction, false, matcher,<a name="line.1383"></a>
+<span class="sourceLineNo">1384</span>          readPt);<a name="line.1384"></a>
+<span class="sourceLineNo">1385</span>      List&lt;KeyValueScanner&gt; scanners = new ArrayList&lt;&gt;(sfScanners.size() + 1);<a name="line.1385"></a>
+<span class="sourceLineNo">1386</span>      scanners.addAll(sfScanners);<a name="line.1386"></a>
+<span class="sourceLineNo">1387</span>      // Then the memstore scanners<a name="line.1387"></a>
+<span class="sourceLineNo">1388</span>      if (memStoreScanners != null) {<a name="line.1388"></a>
+<span class="sourceLineNo">1389</span>        scanners.addAll(memStoreScanners);<a name="line.1389"></a>
+<span class="sourceLineNo">1390</span>      }<a name="line.1390"></a>
+<span class="sourceLineNo">1391</span>      return scanners;<a name="line.1391"></a>
+<span class="sourceLineNo">1392</span>    } catch (Throwable t) {<a name="line.1392"></a>
+<span class="sourceLineNo">1393</span>      clearAndClose(memStoreScanners);<a name="line.1393"></a>
+<span class="sourceLineNo">1394</span>      throw t instanceof IOException ? (IOException) t : new IOException(t);<a name="line.1394"></a>
+<span class="sourceLineNo">1395</span>    }<a name="line.1395"></a>
+<span class="sourceLineNo">1396</span>  }<a name="line.1396"></a>
+<span class="sourceLineNo">1397</span><a name="line.1397"></a>
+<span class="sourceLineNo">1398</span>  /**<a name="line.1398"></a>
+<span class="sourceLineNo">1399</span>   * @param o Observer who wants to know about changes in set of Readers<a name="line.1399"></a>
+<span class="sourceLineNo">1400</span>   */<a name="line.1400"></a>
+<span class="sourceLineNo">1401</span>  public void addChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1401"></a>
+<span class="sourceLineNo">1402</span>    this.changedReaderObservers.add(o);<a name="line.1402"></a>
+<span class="sourceLineNo">1403</span>  }<a name="line.1403"></a>
+<span class="sourceLineNo">1404</span><a name="line.1404"></a>
+<span class="sourceLineNo">1405</span>  /**<a name="line.1405"></a>
+<span class="sourceLineNo">1406</span>   * @param o Observer no longer interested in changes in set of Readers.<a name="line.1406"></a>
+<span class="sourceLineNo">1407</span>   */<a name="line.1407"></a>
+<span class="sourceLineNo">1408</span>  public void deleteChangedReaderObserver(ChangedReadersObserver o) {<a name="line.1408"></a>
+<span class="sourceLineNo">1409</span>    // We don't check if observer present; it may not be (legitimately)<a name="line.1409"></a>
+<span class="sourceLineNo">1410</span>    this.changedReaderObservers.remove(o);<a name="line.1410"></a>
+<span class="sourceLineNo">1411</span>  }<a name="line.1411"></a>
+<span class="sourceLineNo">1412</span><a name="line.1412"></a>
+<span class="sourceLineNo">1413</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.1413"></a>
+<span class="sourceLineNo">1414</span>  // Compaction<a name="line.1414"></a>
+<span class="sourceLineNo">1415</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.1415"></a>
+<span class="sourceLineNo">1416</span><a name="line.1416"></a>
+<span class="sourceLineNo">1417</span>  /**<a name="line.1417"></a>
+<span class="sourceLineNo">1418</span>   * Compact the StoreFiles.  This method may take some time, so the calling<a name="line.1418"></a>
+<span class="sourceLineNo">1419</span>   * thread must be able to block for long periods.<a name="line.1419"></a>
+<span class="sourceLineNo">1420</span>   *<a name="line.1420"></a>
+<span class="sourceLineNo">1421</span>   * &lt;p&gt;During this time, the Store can work as usual, getting values from<a name="line.1421"></a>
+<span class="sourceLineNo">1422</span>   * StoreFiles and writing new StoreFiles from the memstore.<a name="line.1422"></a>
+<span class="sourceLineNo">1423</span>   *<a name="line.1423"></a>
+<span class="sourceLineNo">1424</span>   * Existing StoreFiles are not destroyed until the new compacted StoreFile is<a name="line.1424"></a>
+<span class="sourceLineNo">1425</span>   * completely written-out to disk.<a name="line.1425"></a>
+<span class="sourceLineNo">1426</span>   *<a name="line.1426"></a>
+<span class="sourceLineNo">1427</span>   * &lt;p&gt;The compactLock prevents multiple simultaneous compactions.<a name="line.1427"></a>
+<span class="sourceLineNo">1428</span>   * The structureLock prevents us from interfering with other write operations.<a name="line.1428"></a>
+<span class="sourceLineNo">1429</span>   *<a name="line.1429"></a>
+<span class="sourceLineNo">1430</span>   * &lt;p&gt;We don't want to hold the structureLock for the whole time, as a compact()<a name="line.1430"></a>
+<span class="sourceLineNo">1431</span>   * can be lengthy and we want to allow cache-flushes during this period.<a name="line.1431"></a>
 <span class="sourceLineNo">1432</span>   *<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span>   * Failure conditions are handled like this:<a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>   *  - If RS fails before 2, compaction wont complete. Even if RS lives on and finishes<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   *  the compaction later, it will only write the new data file to the region directory.<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   *  Since we already have this data, this will be idempotent but we will have a redundant<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   *  copy of the data.<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   *  - If RS fails between 2 and 3, the region will have a redundant copy of the data. The<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   *  RS that failed won't be able to finish snyc() for WAL because of lease recovery in WAL.<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   *  - If RS fails after 3, the region region server who opens the region will pick up the<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>   *  the compaction marker from the WAL and replay it by removing the compaction input files.<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>   *  Failed RS can also attempt to delete those files, but the operation will be idempotent<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>   *<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>   * See HBASE-2231 for details.<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>   *<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>   * @param compaction compaction details obtained from requestCompaction()<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>   * @throws IOException<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>   * @return Storefile we compacted into or null if we failed or opted out early.<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>   */<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  public List&lt;HStoreFile&gt; compact(CompactionContext compaction,<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span>    ThroughputController throughputController, User user) throws IOException {<a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>    assert compaction != null;<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>    CompactionRequestImpl cr = compaction.getRequest();<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>    try {<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>      // Do all sanity checking in here if we have a valid CompactionRequestImpl<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>      // because we need to clean up after it on the way out in a finally<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>      // block below<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      long compactionStartTime = EnvironmentEdgeManager.currentTime();<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      assert compaction.hasSelection();<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>      Collection&lt;HStoreFile&gt; filesToCompact = cr.getFiles();<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>      assert !filesToCompact.isEmpty();<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      synchronized (filesCompacting) {<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>        // sanity check: we're compacting files that this store knows about<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>        // TODO: change this to LOG.error() after more debugging<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>        Preconditions.checkArgument(filesCompacting.containsAll(filesToCompact));<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span>      }<a name="line.1466"></a>
-<span class="sourceLineNo">1467</span><a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>      // Ready to go. Have list of files to compact.<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>      LOG.info("Starting compaction of " + filesToCompact +<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>        " into tmpdir=" + fs.getTempDir() + ", totalSize=" +<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>          TraditionalBinaryPrefix.long2String(cr.getSize(), "", 1));<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span><a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>      return doCompaction(cr, filesToCompact, user, compactionStartTime,<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>          compaction.compact(throughputController, user));<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>    } finally {<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>      finishCompactionRequest(cr);<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    }<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span>  }<a name="line.1478"></a>
-<span class="sourceLineNo">1479</span><a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>  @VisibleForTesting<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>  protected List&lt;HStoreFile&gt; doCompaction(CompactionRequestImpl cr,<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Collection&lt;HStoreFile&gt; filesToCompact, User user, long compactionStartTime,<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>      List&lt;Path&gt; newFiles) throws IOException {<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>    // Do the steps necessary to complete the compaction.<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>    List&lt;HStoreFile&gt; sfs = moveCompactedFilesIntoPlace(cr, newFiles, user);<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>    writeCompactionWalRecord(filesToCompact, sfs);<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>    replaceStoreFiles(filesToCompact, sfs);<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>    if (cr.isMajor()) {<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>      majorCompactedCellsCount.addAndGet(getCompactionProgress().getTotalCompactingKVs());<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>      majorCompactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>    } else {<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>      compactedCellsCount.addAndGet(getCompactionProgress().getTotalCompactingKVs());<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>      compactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>    }<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>    long outputBytes = getTotalSize(sfs);<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span><a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>    // At this point the store will use new files for all new scanners.<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>    completeCompaction(filesToCompact); // update store size.<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span><a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>    long now = EnvironmentEdgeManager.currentTime();<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>    if (region.getRegionServerServices() != null<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>        &amp;&amp; region.getRegionServerServices().getMetrics() != null) {<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>      region.getRegionServerServices().getMetrics().updateCompaction(<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>          region.getTableDescriptor().getTableName().getNameAsString(),<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          cr.isMajor(), now - compactionStartTime, cr.getFiles().size(),<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>          newFiles.size(), cr.getSize(), outputBytes);<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span><a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>    }<a name="line.1508"></a>
+<span class="sourceLineNo">1433</span>   * &lt;p&gt; Compaction event should be idempotent, since there is no IO Fencing for<a name="line.1433"></a>
+<span class="sourceLineNo">1434</span>   * the region directory in hdfs. A region server might still try to complete the<a name="line.1434"></a>
+<span class="sourceLineNo">1435</span>   * compaction after it lost the region. That is why the following events are carefully<a name="line.1435"></a>
+<span class="sourceLineNo">1436</span>   * ordered for a compaction:<a name="line.1436"></a>
+<span class="sourceLineNo">1437</span>   *  1. Compaction writes new files under region/.tmp directory (compaction output)<a name="line.1437"></a>
+<span class="sourceLineNo">1438</span>   *  2. Compaction atomically moves the temporary file under region directory<a name="line.1438"></a>
+<span class="sourceLineNo">1439</span>   *  3. Compaction appends a WAL edit containing the compaction input and output files.<a name="line.1439"></a>
+<span class="sourceLineNo">1440</span>   *  Forces sync on WAL.<a name="line.1440"></a>
+<span class="sourceLineNo">1441</span>   *  4. Compaction deletes the input files from the region directory.<a name="line.1441"></a>
+<span class="sourceLineNo">1442</span>   *<a name="line.1442"></a>
+<span class="sourceLineNo">1443</span>   * Failure conditions are handled like this:<a name="line.1443"></a>
+<span class="sourceLineNo">1444</span>   *  - If RS fails before 2, compaction wont complete. Even if RS lives on and finishes<a name="line.1444"></a>
+<span class="sourceLineNo">1445</span>   *  the compaction later, it will only write the new data file to the region directory.<a name="line.1445"></a>
+<span class="sourceLineNo">1446</span>   *  Since we already have this data, this will be idempotent but we will have a redundant<a name="line.1446"></a>
+<span class="sourceLineNo">1447</span>   *  copy of the data.<a name="line.1447"></a>
+<span class="sourceLineNo">1448</span>   *  - If RS fails between 2 and 3, the region will have a redundant copy of the data. The<a name="line.1448"></a>
+<span class="sourceLineNo">1449</span>   *  RS that failed won't be able to finish snyc() for WAL because of lease recovery in WAL.<a name="line.1449"></a>
+<span class="sourceLineNo">1450</span>   *  - If RS fails after 3, the region region server who opens the region will pick up the<a name="line.1450"></a>
+<span class="sourceLineNo">1451</span>   *  the compaction marker from the WAL and replay it by removing the compaction input files.<a name="line.1451"></a>
+<span class="sourceLineNo">1452</span>   *  Failed RS can also attempt to delete those files, but the operation will be idempotent<a name="line.1452"></a>
+<span class="sourceLineNo">1453</span>   *<a name="line.1453"></a>
+<span class="sourceLineNo">1454</span>   * See HBASE-2231 for details.<a name="line.1454"></a>
+<span class="sourceLineNo">1455</span>   *<a name="line.1455"></a>
+<span class="sourceLineNo">1456</span>   * @param compaction compaction details obtained from requestCompaction()<a name="line.1456"></a>
+<span class="sourceLineNo">1457</span>   * @throws IOException<a name="line.1457"></a>
+<span class="sourceLineNo">1458</span>   * @return Storefile we compacted into or null if we failed or opted out early.<a name="line.1458"></a>
+<span class="sourceLineNo">1459</span>   */<a name="line.1459"></a>
+<span class="sourceLineNo">1460</span>  public List&lt;HStoreFile&gt; compact(CompactionContext compaction,<a name="line.1460"></a>
+<span class="sourceLineNo">1461</span>    ThroughputController throughputController, User user) throws IOException {<a name="line.1461"></a>
+<span class="sourceLineNo">1462</span>    assert compaction != null;<a name="line.1462"></a>
+<span class="sourceLineNo">1463</span>    CompactionRequestImpl cr = compaction.getRequest();<a name="line.1463"></a>
+<span class="sourceLineNo">1464</span>    try {<a name="line.1464"></a>
+<span class="sourceLineNo">1465</span>      // Do all sanity checking in here if we have a valid CompactionRequestImpl<a name="line.1465"></a>
+<span class="sourceLineNo">1466</span>      // because we need to clean up after it on the way out in a finally<a name="line.1466"></a>
+<span class="sourceLineNo">1467</span>      // block below<a name="line.1467"></a>
+<span class="sourceLineNo">1468</span>      long compactionStartTime = EnvironmentEdgeManager.currentTime();<a name="line.1468"></a>
+<span class="sourceLineNo">1469</span>      assert compaction.hasSelection();<a name="line.1469"></a>
+<span class="sourceLineNo">1470</span>      Collection&lt;HStoreFile&gt; filesToCompact = cr.getFiles();<a name="line.1470"></a>
+<span class="sourceLineNo">1471</span>      assert !filesToCompact.isEmpty();<a name="line.1471"></a>
+<span class="sourceLineNo">1472</span>      synchronized (filesCompacting) {<a name="line.1472"></a>
+<span class="sourceLineNo">1473</span>        // sanity check: we're compacting files that this store knows about<a name="line.1473"></a>
+<span class="sourceLineNo">1474</span>        // TODO: change this to LOG.error() after more debugging<a name="line.1474"></a>
+<span class="sourceLineNo">1475</span>        Preconditions.checkArgument(filesCompacting.containsAll(filesToCompact));<a name="line.1475"></a>
+<span class="sourceLineNo">1476</span>      }<a name="line.1476"></a>
+<span class="sourceLineNo">1477</span><a name="line.1477"></a>
+<span class="sourceLineNo">1478</span>      // Ready to go. Have list of files to compact.<a name="line.1478"></a>
+<span class="sourceLineNo">1479</span>      LOG.info("Starting compaction of " + filesToCompact +<a name="line.1479"></a>
+<span class="sourceLineNo">1480</span>        " into tmpdir=" + fs.getTempDir() + ", totalSize=" +<a name="line.1480"></a>
+<span class="sourceLineNo">1481</span>          TraditionalBinaryPrefix.long2String(cr.getSize(), "", 1));<a name="line.1481"></a>
+<span class="sourceLineNo">1482</span><a name="line.1482"></a>
+<span class="sourceLineNo">1483</span>      return doCompaction(cr, filesToCompact, user, compactionStartTime,<a name="line.1483"></a>
+<span class="sourceLineNo">1484</span>          compaction.compact(throughputController, user));<a name="line.1484"></a>
+<span class="sourceLineNo">1485</span>    } finally {<a name="line.1485"></a>
+<span class="sourceLineNo">1486</span>      finishCompactionRequest(cr);<a name="line.1486"></a>
+<span class="sourceLineNo">1487</span>    }<a name="line.1487"></a>
+<span class="sourceLineNo">1488</span>  }<a name="line.1488"></a>
+<span class="sourceLineNo">1489</span><a name="line.1489"></a>
+<span class="sourceLineNo">1490</span>  @VisibleForTesting<a name="line.1490"></a>
+<span class="sourceLineNo">1491</span>  protected List&lt;HStoreFile&gt; doCompaction(CompactionRequestImpl cr,<a name="line.1491"></a>
+<span class="sourceLineNo">1492</span>      Collection&lt;HStoreFile&gt; filesToCompact, User user, long compactionStartTime,<a name="line.1492"></a>
+<span class="sourceLineNo">1493</span>      List&lt;Path&gt; newFiles) throws IOException {<a name="line.1493"></a>
+<span class="sourceLineNo">1494</span>    // Do the steps necessary to complete the compaction.<a name="line.1494"></a>
+<span class="sourceLineNo">1495</span>    List&lt;HStoreFile&gt; sfs = moveCompactedFilesIntoPlace(cr, newFiles, user);<a name="line.1495"></a>
+<span class="sourceLineNo">1496</span>    writeCompactionWalRecord(filesToCompact, sfs);<a name="line.1496"></a>
+<span class="sourceLineNo">1497</span>    replaceStoreFiles(filesToCompact, sfs);<a name="line.1497"></a>
+<span class="sourceLineNo">1498</span>    if (cr.isMajor()) {<a name="line.1498"></a>
+<span class="sourceLineNo">1499</span>      majorCompactedCellsCount.addAndGet(getCompactionProgress().getTotalCompactingKVs());<a name="line.1499"></a>
+<span class="sourceLineNo">1500</span>      majorCompactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);<a name="line.1500"></a>
+<span class="sourceLineNo">1501</span>    } else {<a name="line.1501"></a>
+<span class="sourceLineNo">1502</span>      compactedCellsCount.addAndGet(getCompactionProgress().getTotalCompactingKVs());<a name="line.1502"></a>
+<span class="sourceLineNo">1503</span>      compactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);<a name="line.1503"></a>
+<span class="sourceLineNo">1504</span>    }<a name="line.1504"></a>
+<span class="sourceLineNo">1505</span>    long outputBytes = getTotalSize(sfs);<a name="line.1505"></a>
+<span class="sourceLineNo">1506</span><a name="line.1506"></a>
+<span class="sourceLineNo">1507</span>    // At this point the store will use new files for all new scanners.<a name="line.1507"></a>
+<span class="sourceLineNo">1508</span>    completeCompaction(filesToCompact); // update store size.<a name="line.1508"></a>
 <span class="sourceLineNo">1509</span><a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>    logCompactionEndMessage(cr, sfs, now, compactionStartTime);<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>    return sfs;<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span>  }<a name="line.1512"></a>
-<span class="sourceLineNo">1513</span><a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>  private List&lt;HStoreFile&gt; moveCompactedFilesIntoPlace(CompactionRequestImpl cr, List&lt;Path&gt; newFiles,<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>      User user) throws IOException {<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>    List&lt;HStoreFile&gt; sfs = new ArrayList&lt;&gt;(newFiles.size());<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>    for (Path newFile : newFiles) {<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      assert newFile != null;<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>      HStoreFile sf = moveFileIntoPlace(newFile);<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>      if (this.getCoprocessorHost() != null) {<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>        getCoprocessorHost().postCompact(this, sf, cr.getTracker(), cr, user);<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span>      }<a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>      assert sf != null;<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>      sfs.add(sf);<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    }<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span>    return sfs;<a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  // Package-visible for tests<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>  HStoreFile moveFileIntoPlace(Path newFile) throws IOException {<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>    validateStoreFile(newFile);<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>    // Move the file into the right spot<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>    Path destPath = fs.commitStoreFile(getColumnFamilyName(), newFile);<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>    return createStoreFileAndReader(destPath);<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>  }<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span><a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>  /**<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>   * Writes the compaction WAL record.<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>   * @param filesCompacted Files compacted (input).<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>   * @param newFiles Files from compaction.<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>   */<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>  private void writeCompactionWalRecord(Collection&lt;HStoreFile&gt; filesCompacted,<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>      Collection&lt;HStoreFile&gt; newFiles) throws IOException {<a name="line.1543"></a>
-<span class="sourceLineNo">1544</span>    if (region.getWAL() == null) {<a name="line.1544"></a>
-<span class="sourceLineNo">1545</span>      return;<a name="line.1545"></a>
-<span class="sourceLineNo">1546</span>    }<a name="line.1546"></a>
-<span class="sourceLineNo">1547</span>    List&lt;Path&gt; inputPaths =<a name="line.1547"></a>
-<span class="sourceLineNo">1548</span>        filesCompacted.stream().map(HStoreFile::getPath).collect(Collectors.toList());<a name="line.1548"></a>
-<span class="sourceLineNo">1549</span>    List&lt;Path&gt; outputPaths =<a name="line.1549"></a>
-<span class="sourceLineNo">1550</span>        newFiles.stream().map(HStoreFile::getPath).collect(Collectors.toList());<a name="line.1550"></a>
-<span class="sourceLineNo">1551</span>    RegionInfo info = this.region.getRegionInfo();<a name="line.1551"></a>
-<span class="sourceLineNo">1552</span>    CompactionDescriptor compactionDescriptor = ProtobufUtil.toCompactionDescriptor(info,<a name="line.1552"></a>
-<span class="sourceLineNo">1553</span>        family.getName(), inputPaths, outputPaths, fs.getStoreDir(getColumnFamilyDescriptor().getNameAsString()));<a name="line.1553"></a>
-<span class="sourceLineNo">1554</span>    // Fix reaching into Region to get the maxWaitForSeqId.<a name="line.1554"></a>
-<span class="sourceLineNo">1555</span>    // Does this method belong in Region altogether given it is making so many references up there?<a name="line.1555"></a>
-<span class="sourceLineNo">1556</span>    // Could be Region#writeCompactionMarker(compactionDescriptor);<a name="line.1556"></a>
-<span class="sourceLineNo">1557</span>    WALUtil.writeCompactionMarker(this.region.getWAL(), this.region.getReplicationScope(),<a name="line.1557"></a>
-<span class="sourceLineNo">1558</span>        this.region.getRegionInfo(), compactionDescriptor, this.region.getMVCC());<a name="line.1558"></a>
-<span class="sourceLineNo">1559</span>  }<a name="line.1559"></a>
-<span class="sourceLineNo">1560</span><a name="line.1560"></a>
-<span class="sourceLineNo">1561</span>  @VisibleForTesting<a name="line.1561"></a>
-<span class="sourceLineNo">1562</span>  void replaceStoreFiles(Collection&lt;HStoreFile&gt; compactedFiles, Collection&lt;HStoreFile&gt; result)<a name="line.1562"></a>
-<span class="sourceLineNo">1563</span>      throws IOException {<a name="line.1563"></a>
-<span class="sourceLineNo">1564</span>    this.lock.writeLock().lock();<a name="line.1564"></a>
-<span class="sourceLineNo">1565</span>    try {<a name="line.1565"></a>
-<span class="sourceLineNo">1566</span>      this.storeEngine.getStoreFileManager().addCompactionResults(compactedFiles, result);<a name="line.1566"></a>
-<span class="sourceLineNo">1567</span>      synchronized (filesCompacting) {<a name="line.1567"></a>
-<span class="sourceLineNo">1568</span>        filesCompacting.removeAll(compactedFiles);<a name="line.1568"></a>
-<span class="sourceLineNo">1569</span>      }<a name="line.1569"></a>
+<span class="sourceLineNo">1510</span>    long now = EnvironmentEdgeManager.currentTime();<a name="line.1510"></a>
+<span class="sourceLineNo">1511</span>    if (region.getRegionServerServices() != null<a name="line.1511"></a>
+<span class="sourceLineNo">1512</span>        &amp;&amp; region.getRegionServerServices().getMetrics() != null) {<a name="line.1512"></a>
+<span class="sourceLineNo">1513</span>      region.getRegionServerServices().getMetrics().updateCompaction(<a name="line.1513"></a>
+<span class="sourceLineNo">1514</span>          region.getTableDescriptor().getTableName().getNameAsString(),<a name="line.1514"></a>
+<span class="sourceLineNo">1515</span>          cr.isMajor(), now - compactionStartTime, cr.getFiles().size(),<a name="line.1515"></a>
+<span class="sourceLineNo">1516</span>          newFiles.size(), cr.getSize(), outputBytes);<a name="line.1516"></a>
+<span class="sourceLineNo">1517</span><a name="line.1517"></a>
+<span class="sourceLineNo">1518</span>    }<a name="line.1518"></a>
+<span class="sourceLineNo">1519</span><a name="line.1519"></a>
+<span class="sourceLineNo">1520</span>    logCompactionEndMessage(cr, sfs, now, compactionStartTime);<a name="line.1520"></a>
+<span class="sourceLineNo">1521</span>    return sfs;<a name="line.1521"></a>
+<span class="sourceLineNo">1522</span>  }<a name="line.1522"></a>
+<span class="sourceLineNo">1523</span><a name="line.1523"></a>
+<span class="sourceLineNo">1524</span>  private List&lt;HStoreFile&gt; moveCompactedFilesIntoPlace(CompactionRequestImpl cr, List&lt;Path&gt; newFiles,<a name="line.1524"></a>
+<span class="sourceLineNo">1525</span>      User user) throws IOException {<a name="line.1525"></a>
+<span class="sourceLineNo">1526</span>    List&lt;HStoreFile&gt; sfs = new ArrayList&lt;&gt;(newFiles.size());<a name="line.1526"></a>
+<span class="sourceLineNo">1527</span>    for (Path newFile : newFiles) {<a name="line.1527"></a>
+<span class="sourceLineNo">1528</span>      assert newFile != null;<a name="line.1528"></a>
+<span class="sourceLineNo">1529</span>      HStoreFile sf = moveFileIntoPlace(newFile);<a name="line.1529"></a>
+<span class="sourceLineNo">1530</span>      if (this.getCoprocessorHost() != null) {<a name="line.1530"></a>
+<span class="sourceLineNo">1531</span>        getCoprocessorHost().postCompact(this, sf, cr.getTracker(), cr, user);<a name="line.1531"></a>
+<span class="sourceLineNo">1532</span>      }<a name="line.1532"></a>
+<span class="sourceLineNo">1533</span>      assert sf != null;<a name="line.1533"></a>
+<span class="sourceLineNo">1534</span>      sfs.add(sf);<a name="line.1534"></a>
+<span class="sourceLineNo">1535</span>    }<a name="line.1535"></a>
+<span class="sourceLineNo">1536</span>    return sfs;<a name="line.1536"></a>
+<span class="sourceLineNo">1537</span>  }<a name="line.1537"></a>
+<span class="sourceLineNo">1538</span><a name="line.1538"></a>
+<span class="sourceLineNo">1539</span>  // Package-visible for tests<a name="line.1539"></a>
+<span class="sourceLineNo">1540</span>  HStoreFile moveFileIntoPlace(Path newFile) throws IOException {<a name="line.1540"></a>
+<span class="sourceLineNo">1541</span>    validateStoreFile(newFile);<a name="line.1541"></a>
+<span class="sourceLineNo">1542</span>    // Move the file into the right spot<a name="line.1542"></a>
+<span class="sourceLineNo">1543</span>    Path destPath = fs.commitStoreFile(getColumnFamilyName(), newFile);<a name="line.1543"></a>
+<span class="sourceLineNo">1544</span>    return createStoreFileAndReader(destPath);<a name="line.1544"></a>
+<span class="sourceLineNo">1545</span>  }<a name="line.1545"></a>
+<span class="sourceLineNo">1546</span><a name="line.1546"></a>
+<span class="sourceLineNo">1547</span>  /**<a name="line.1547"></a>
+<span class="sourceLineNo">1548</span>   * Writes the compaction WAL record.<a name="line.1548"></a>
+<span class="sourceLineNo">1549</span>   * @param filesCompacted Files compacted (input).<a name="line.1549"></a>
+<span class="sourceLineNo">1550</span>   * @param newFiles Files from compaction.<a name="line.1550"></a>
+<span class="sourceLineNo">1551</span>   */<a name="line.1551"></a>
+<span class="sourceLineNo">1552</span>  private void writeCompactionWalRecord(Collection&lt;HStoreFile&gt; filesCompacted,<a name="line.1552"></a>
+<span class="sourceLineNo">1553</span>      Collection&lt;HStoreFile&gt; newFiles) throws IOException {<a name="line.1553"></a>
+<span class="sourceLineNo">1554</span>    if (region.getWAL() == null) {<a name="line.1554"></a>
+<span class="sourceLineNo">1555</span>      return;<a name="line.1555"></a>
+<span class="sourceLineNo">1556</span>    }<a name="line.1556"></a>
+<span class="sourceLineNo">1557</span>    List&lt;Path&gt; inputPaths =<a name="line.1557"></a>
+<span class="sourceLineNo">1558</span>        filesCompacted.stream().map(HStoreFile::getPath).collect(Collectors.toList());<a name="line.1558"></a>
+<span class="sourceLineNo">1559</span>    List&lt;Path&gt; outputPaths =<a name="line.1559"></a>
+<span class="sourceLineNo">1560</span>        newFiles.stream().map(HStoreFile::getPath).collect(Collectors.toList());<a name="line.1560"></a>
+<span class="sourceLineNo">1561</span>    RegionInfo info = this.region.getRegionInfo();<a name="line.1561"></a>
+<span class="sourceLineNo">1562</span>    CompactionDescriptor compactionDescriptor = ProtobufUtil.toCompactionDescriptor(info,<a name="line.1562"></a>
+<span class="sourceLineNo">1563</span>        family.getName(), inputPaths, outputPaths, fs.getStoreDir(getColumnFamilyDescriptor().getNameAsString()));<a name="line.1563"></a>
+<span class="sourceLineNo">1564</span>    // Fix reaching into Region to get the maxWaitForSeqId.<a name="line.1564"></a>
+<span class="sourceLineNo">1565</span>    // Does this method belong in Region altogether given it is making so many references up there?<a name="line.1565"></a>
+<span class="sourceLineNo">1566</span>    // Could be Region#writeCompactionMarker(compactionDescriptor);<a name="line.1566"></a>
+<span class="sourceLineNo">1567</span>    WALUtil.writeCompactionMarker(this.region.getWAL(), this.region.getReplicationScope(),<a name="line.1567"></a>
+<span class="sourceLineNo">1568</span>        this.region.getRegionInfo(), compactionDescriptor, this.region.getMVCC());<a name="line.1568"></a>
+<span class="sourceLineNo">1569</span>  }<a name="line.1569"></a>
 <span class="sourceLineNo">1570</span><a name="line.1570"></a>
-<span class="sourceLineNo">1571</span>      // These may be null when the RS is shutting down. The space quota Chores will fix the Region<a name="line.1571"></a>
-<span class="sourceLineNo">1572</span>      // sizes later so it's not super-critical if we miss these.<a name="line.1572"></a>
-<span class="sourceLineNo">1573</span>      RegionServerServices rsServices = region.getRegionServerServices();<a name="line.1573"></a>
-<span class="sourceLineNo">1574</span>      if (rsServices != null &amp;&amp; rsServices.getRegionServerSpaceQuotaManager() != null) {<a name="line.1574"></a>
-<span class="sourceLineNo">1575</span>        updateSpaceQuotaAfterFileReplacement(<a name="line.1575"></a>
-<span class="sourceLineNo">1576</span>            rsServices.getRegionServerSpaceQuotaManager().getRegionSizeStore(), getRegionInfo(),<a name="line.1576"></a>
-<span class="sourceLineNo">1577</span>            compactedFiles, result);<a name="line.1577"></a>
-<span class="sourceLineNo">1578</span>      }<a name="line.1578"></a>
-<span class="sourceLineNo">1579</span>    } finally {<a name="line.1579"></a>
-<span class="sourceLineNo">1580</span>      this.lock.writeLock().unlock();<a name="line.1580"></a>
-<span class="sourceLineNo">1581</span>    }<a name="line.1581"></a>
-<span class="sourceLineNo">1582</span>  }<a name="line.1582"></a>
-<span class="sourceLineNo">1583</span><a name="line.1583"></a>
-<span class="sourceLineNo">1584</span>  /**<a name="line.1584"></a>
-<span class="sourceLineNo">1585</span>   * Updates the space quota usage for this region, removing the size for files compacted away<a name="line.1585"></a>
-<span class="sourceLineNo">1586</span>   * and adding in the size for new files.<a name="line.1586"></a>
-<span class="sourceLineNo">1587</span>   *<a name="line.1587"></a>
-<span class="sourceLineNo">1588</span>   * @param sizeStore The object tracking changes in region size for space quotas.<a name="line.1588"></a>
-<span class="sourceLineNo">1589</span>   * @param regionInfo The identifier for the region whose size is being updated.<a name="line.1589"></a>
-<span class="sourceLineNo">1590</span>   * @param oldFiles Files removed from this store's region.<a name="line.1590"></a>
-<span class="sourceLineNo">1591</span>   * @param newFiles Files added to this store's region.<a name="line.1591"></a>
-<span class="sourceLineNo">1592</span>   */<a name="line.1592"></a>
-<span class="sourceLineNo">1593</span>  void updateSpaceQuotaAfterFileReplacement(<a name="line.1593"></a>
-<span class="sourceLineNo">1594</span>      RegionSizeStore sizeStore, RegionInfo regionInfo, Collection&lt;HStoreFile&gt; oldFiles,<a name="line.1594"></a>
-<span class="sourceLineNo">1595</span>      Collection&lt;HStoreFile&gt; newFiles) {<a name="line.1595"></a>
-<span class="sourceLineNo">1596</span>    long delta = 0;<a name="line.1596"></a>
-<span class="sourceLineNo">1597</span>    if (oldFiles != null) {<a name="line.1597"></a>
-<span class="sourceLineNo">1598</span>      for (HStoreFile compactedFile : oldFiles) {<a name="line.1598"></a>
-<span class="sourceLineNo">1599</span>        if (compactedFile.isHFile()) {<a name="line.1599"></a>
-<span class="sourceLineNo">1600</span>          delta -= compactedFile.getReader().length();<a name="line.1600"></a>
-<span class="sourceLineNo">1601</span>        }<a name="line.1601"></a>
-<span class="sourceLineNo">1602</span>      }<a name="line.1602"></a>
-<span class="sourceLineNo">1603</span>    }<a name="line.1603"></a>
-<span class="sourceLineNo">1604</span>    if (newFiles != null) {<a name="line.1604"></a>
-<span class="sourceLineNo">1605</span>      for (HStoreFile newFile : newFiles) {<a name="line.1605"></a>
-<span class="sourceLineNo">1606</span>        if (newFile.isHFile()) {<a name="line.1606"></a>
-<span class="sourceLineNo">1607</span>          delta += newFile.getReader().length();<a name="line.1607"></a>
-<span class="sourceLineNo">1608</span>        }<a name="line.1608"></a>
-<span class="sourceLineNo">1609</span>      }<a name="line.1609"></a>
-<span class="sourceLineNo">1610</span>    }<a name="line.1610"></a>
-<span class="sourceLineNo">1611</span>    sizeStore.incrementRegionSize(regionInfo, delta);<a name="line.1611"></a>
-<span class="sourceLineNo">1612</span>  }<a name="line.1612"></a>
-<span class="sourceLineNo">1613</span><a name="line.1613"></a>
-<span class="sourceLineNo">1614</span>  /**<a name="line.1614"></a>
-<span class="sourceLineNo">1615</span>   * Log a very elaborate compaction completion message.<a name="line.1615"></a>
-<span class="sourceLineNo">1616</span>   * @param cr Request.<a name="line.1616"></a>
-<span class="sourceLineNo">1617</span>   * @param sfs Resulting files.<a name="line.1617"></a>
-<span class="sourceLineNo">1618</span>   * @param compactionStartTime Start time.<a name="line.1618"></a>
-<span class="sourceLineNo">1619</span>   */<a name="line.1619"></a>
-<span class="sourceLineNo">1620</span>  private void logCompactionEndMessage(<a name="line.1620"></a>
-<span class="sourceLineNo">1621</span>      CompactionRequestImpl cr, List&lt;HStoreFile&gt; sfs, long now, long compactionStartTime) {<a name="line.1621"></a>
-<span class="sourceLineNo">1622</span>    StringBuilder message = new StringBuilder(<a name="line.1622"></a>
-<span class="sourceLineNo">1623</span>      "Completed" + (cr.isMajor() ? " major" : "") + " compaction of "<a name="line.1623"></a>
-<span class="sourceLineNo">1624</span>      + cr.getFiles().size() + (cr.isAllFiles() ? " (all)" : "") + " file(s) in "<a name="line.1624"></a>
-<span class="sourceLineNo">1625</span>      + this + " of " + this.getRegionInfo().getShortNameToLog() + " into ");<a name="line.1625"></a>
-<span class="sourceLineNo">1626</span>    if (sfs.isEmpty()) {<a name="line.1626"></a>
-<span class="sourceLineNo">1627</span>      message.append("none, ");<a name="line.1627"></a>
-<span class="sourceLineNo">1628</span>    } else {<a name="line.1628"></a>
-<span class="sourceLineNo">1629</span>      for (HStoreFile sf: sfs) {<a name="line.1629"></a>
-<span class="sourceLineNo">1630</span>        message.append(sf.getPath().getName());<a name="line.1630"></a>
-<span class="sourceLineNo">1631</span>        message.append("(size=");<a name="line.1631"></a>
-<span class="sourceLineNo">1632</span>        message.append(TraditionalBinaryPrefix.long2String(sf.getReader().length(), "", 1));<a name="line.1632"></a>
-<span class="sourceLineNo">1633</span>        message.append("), ");<a name="line.1633"></a>
-<span class="sourceLineNo">1634</span>      }<a name="line.1634"></a>
-<span class="sourceLineNo">1635</span>    }<a name="line.1635"></a>
-<span class="sourceLineNo">1636</span>    message.append("total size for store is ")<a name="line.1636"></a>
-<span class="sourceLineNo">1637</span>      .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1))<a name="line.1637"></a>
-<span class="sourceLineNo">1638</span>      .append(". This selection was in queue for ")<a name="line.1638"></a>
-<span class="sourceLineNo">1639</span>      .append(StringUtils.formatTimeDiff(compactionStartTime, cr.getSelectionTime()))<a name="line.1639"></a>
-<span class="sourceLineNo">1640</span>      .append(", and took ").append(StringUtils.formatTimeDiff(now, compactionStartTime))<a name="line.1640"></a>
-<span class="sourceLineNo">1641</span>      .append(" to execute.");<a name="line.1641"></a>
-<span class="sourceLineNo">1642</span>    LOG.info(message.toString());<a name="line.1642"></a>
-<span class="sourceLineNo">1643</span>    if (LOG.isTraceEnabled()) {<a name="line.1643"></a>
-<span class="sourceLineNo">1644</span>      int fileCount = storeEngine.getStoreFileManager().getStorefileCount();<a name="line.1644"></a>
-<span class="sourceLineNo">1645</span>      long resultSize = getTotalSize(sfs);<a name="line.1645"></a>
-<span class="sourceLineNo">1646</span>      String traceMessage = "COMPACTION start,end,size out,files in,files out,store size,"<a name="line.1646"></a>
-<span class="sourceLineNo">1647</span>        + "store files [" + compactionStartTime + "," + now + "," + resultSize + ","<a name="line.1647"></a>
-<span class="sourceLineNo">1648</span>          + cr.getFiles().size() + "," + sfs.size() + "," +  storeSize + "," + fileCount + "]";<a name="line.1648"></a>
-<span class="sourceLineNo">1649</span>      LOG.trace(traceMessage);<a name="line.1649"></a>
-<span class="sourceLineNo">1650</span>    }<a name="line.1650"></a>
-<span class="sourceLineNo">1651</span>  }<a name="line.1651"></a>
-<span class="sourceLineNo">1652</span><a name="line.1652"></a>
-<span class="sourceLineNo">1653</span>  /**<a name="line.1653"></a>
-<span class="sourceLineNo">1654</span>   * Call to complete a compaction. Its for the case where we find in the WAL a compaction<a name="line.1654"></a>
-<span class="sourceLineNo">1655</span>   * that was not finished.  We could find one recovering a WAL after a regionserver crash.<a name="line.1655"></a>
-<span class="sourceLineNo">1656</span>   * See HBASE-2231.<a name="line.1656"></a>
-<span class="sourceLineNo">1657</span>   * @param compaction<a name="line.1657"></a>
-<span class="sourceLineNo">1658</span>   */<a name="line.1658"></a>
-<span class="sourceLineNo">1659</span>  public void replayCompactionMarker(CompactionDescriptor compaction, boolean pickCompactionFiles,<a name="line.1659"></a>
-<span class="sourceLineNo">1660</span>      boolean removeFiles) throws IOException {<a name="line.1660"></a>
-<span class="sourceLineNo">1661</span>    LOG.debug("Completing compaction from the WAL marker");<a name="line.1661"></a>
-<span class="sourceLineNo">1662</span>    List&lt;String&gt; compactionInputs = compaction.getCompactionInputList();<a name="line.1662"></a>
-<span class="sourceLineNo">1663</span>    List&lt;String&gt; compactionOutputs = Lists.newArrayList(compaction.getCompactionOutputList());<a name="line.1663"></a>
-<span class="sourceLineNo">1664</span><a name="line.1664"></a>
-<span class="sourceLineNo">1665</span>    // The Compaction Marker is written after the compaction is completed,<a name="line.1665"></a>
-<span class="sourceLineNo">1666</span>    // and the files moved into the region/family folder.<a name="line.1666"></a>
-<span class="sourceLineNo">1667</span>    //<a name="line.1667"></a>
-<span class="sourceLineNo">1668</span>    // If we crash after the entry is written, we may not have removed the<a name="line.1668"></a>
-<span class="sourceLineNo">1669</span>    // input files, but the output file is present.<a name="line.1669"></a>
-<span class="sourceLineNo">1670</span>    // (The unremoved input files will be removed by this function)<a name="line.1670"></a>
-<span class="sourceLineNo">1671</span>    //<a name="line.1671"></a>
-<span class="sourceLineNo">1672</span>    // If we scan the directory and the file is not present, it can mean that:<a name="line.1672"></a>
-<span class="sourceLineNo">1673</span>    //   - The file was manually removed by the user<a name="line.1673"></a>
-<span class="sourceLineNo">1674</span>    //   - The file was removed as consequence of subsequent compaction<a name="line.1674"></a>
-<span class="sourceLineNo">1675</span>    // so, we can't do anything with the "compaction output list" because those<a name="line.1675"></a>
-<span class="sourceLineNo">1676</span>    // files have already been loaded when opening the region (by virtue of<a name="line.1676"></a>
-<span class="sourceLineNo">1677</span>    // being in the store's folder) or they may be missing due to a compaction.<a name="line.1677"></a>
-<span class="sourceLineNo">1678</span><a name="line.1678"></a>
-<span class="sourceLineNo">1679</span>    String familyName = this.getColumnFamilyName();<a name="line.1679"></a>
-<span class="sourceLineNo">1680</span>    Set&lt;String&gt; inputFiles = new HashSet&lt;&gt;();<a name="line.1680"></a>
-<span class="sourceLineNo">1681</span>    for (String compactionInput : compactionInputs) {<a name="line.1681"></a>
-<span class="sourceLineNo">1682</span>      Path inputPath = fs.getStoreFilePath(familyName, compactionInput);<a name="line.1682"></a>
-<span class="sourceLineNo">1683</span>      inputFiles.add(inputPath.getName());<a name="line.1683"></a>
-<span class="sourceLineNo">1684</span>    }<a name="line.1684"></a>
-<span class="sourceLineNo">1685</span><a name="line.1685"></a>
-<span class="sourceLineNo">1686</span>    //some of the input files might already be deleted<a name="line.1686"></a>
-<span class="sourceLineNo">1687</span>    List&lt;HStoreFile&gt; inputStoreFiles = new ArrayList&lt;&gt;(compactionInputs.size());<a name="line.1687"></a>
-<span class="sourceLineNo">1688</span>    for (HStoreFile sf : this.getStorefiles()) {<a name="line.1688"></a>
-<span class="sourceLineNo">1689</span>      if (inputFiles.contains(sf.getPath().getName())) {<a name="line.1689"></a>
-<span class="sourceLineNo">1690</span>        inputStoreFiles.add(sf);<a name="line.1690"></a>
-<span class="sourceLineNo">1691</span>      }<a name="line.1691"></a>
-<span class="sourceLineNo">1692</span>    }<a name="line.1692"></a>
-<span class="sourceLineNo">1693</span><a name="line.1693"></a>
-<span class="sourceLineNo">1694</span>    // check whether we need to pick up the new files<a name="line.1694"></a>
-<span class="sourceLineNo">1695</span>    List&lt;HStoreFile&gt; outputStoreFiles = new ArrayList&lt;&gt;(compactionOutputs.size());<a name="line.1695"></a>
-<span class="sourceLineNo">1696</span><a name="line.1696"></a>
-<span class="sourceLineNo">1697</span>    if (pickCompactionFiles) {<a name="line.1697"></a>
-<span class="sourceLineNo">1698</span>      for (HStoreFile sf : this.getStorefiles()) {<a name="line.1698"></a>
-<span class="sourceLineNo">1699</span>        compactionOutputs.remove(sf.getPath().getName());<a name="line.1699"></a>
-<span class="sourceLineNo">1700</span>      }<a name="line.1700"></a>
-<span class="sourceLineNo">1701</span>      for (String compactionOutput : compactionOutputs) {<a name="line.1701"></a>
-<span class="sourceLineNo">1702</span>        StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), compactionOutput);<a name="line.1702"></a>
-<span class="sourceLineNo">1703</span>        HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);<a name="line.1703"></a>
-<span class="sourceLineNo">1704</span>        outputStoreFiles.add(storeFile);<a name="line.1704"></a>
-<span class="sourceLineNo">1705</span>      }<a name="line.1705"></a>
-<span class="sourceLineNo">1706</span>    }<a name="line.1706"></a>
-<span class="sourceLineNo">1707</span><a name="line.1707"></a>
-<span class="sourceLineNo">1708</span>    if (!inputStoreFiles.isEmpty() || !outputStoreFiles.isEmpty()) {<a name="line.1708"></a>
-<span class="sourceLineNo">1709</span>      LOG.info("Replaying compaction marker, replacing input files: " +<a name="line.1709"></a>
-<span class="sourceLineNo">1710</span>          inputStoreFiles + " with output files : " + outputStoreFiles);<a name="line.1710"></a>
-<span class="sourceLineNo">1711</span>      this.replaceStoreFiles(inputStoreFiles, outputStoreFiles);<a name="line.1711"></a>
-<span class="sourceLineNo">1712</span>      this.completeCompaction(inputStoreFiles);<a name="line.1712"></a>
-<span class="sourceLineNo">1713</span>    }<a name="line.1713"></a>
-<span class="sourceLineNo">1714</span>  }<a name="line.1714"></a>
-<span class="sourceLineNo">1715</span><a name="line.1715"></a>
-<span class="sourceLineNo">1716</span>  /**<a name="line.1716"></a>
-<span class="sourceLineNo">1717</span>   * This method tries to compact N recent files for testing.<a name="line.1717"></a>
-<span class="sourceLineNo">1718</span>   * Note that because compacting "recent" files only makes sense for some policies,<a name="line.1718"></a>
-<span class="sourceLineNo">1719</span>   * e.g. the default one, it assumes default policy is used. It doesn't use policy,<a name="line.1719"></a>
-<span class="sourceLineNo">1720</span>   * but instead makes a compaction candidate list by itself.<a name="line.1720"></a>
-<span class="sourceLineNo">1721</span>   * @param N Number of files.<a name="line.1721"></a>
-<span class="sourceLineNo">1722</span>   */<a name="line.1722"></a>
-<span class="sourceLineNo">1723</span>  @VisibleForTesting<a name="line.1723"></a>
-<span class="sourceLineNo">1724</span>  public void compactRecentForTestingAssumingDefaultPolicy(int N) throws IOException {<a name="line.1724"></a>
-<span class="sourceLineNo">1725</span>    List&lt;HStoreFile&gt; filesToCompact;<a name="line.1725"></a>
-<span class="sourceLineNo">1726</span>    boolean isMajor;<a name="line.1726"></a>
-<span class="sourceLineNo">1727</span><a name="line.1727"></a>
-<span class="sourceLineNo">1728</span>    this.lock.readLock().lock();<a name="line.1728"></a>
-<span class="sourceLineNo">1729</span>    try {<a name="line.1729"></a>
-<span class="sourceLineNo">1730</span>      synchronized (filesCompacting) {<a name="line.1730"></a>
-<span class="sourceLineNo">1731</span>        filesToCompact = Lists.newArrayList(storeEngine.getStoreFileManager().getStorefiles());<a name="line.1731"></a>
-<span class="sourceLineNo">1732</span>        if (!filesCompacting.isEmpty()) {<a name="line.1732"></a>
-<span class="sourceLineNo">1733</span>          // exclude all files older than the newest file we're currently<a name="line.1733"></a>
-<span class="sourceLineNo">1734</span>          // compacting. this allows us to preserve contiguity (HBASE-2856)<a name="line.1734"></a>
-<span class="sourceLineNo">1735</span>          HStoreFile last = filesCompacting.get(filesCompacting.size() - 1);<a name="line.1735"></a>
-<span class="sourceLineNo">1736</span>          int idx = filesToCompact.indexOf(last);<a name="line.1736"></a>
-<span class="sourceLineNo">1737</span>          Preconditions.checkArgument(idx != -1);<a name="line.1737"></a>
-<span class="sourceLineNo">1738</span>          filesToCompact.subList(0, idx + 1).clear();<a name="line.1738"></a>
-<span class="sourceLineNo">1739</span>        }<a name="line.1739"></a>
-<span class="sourceLineNo">1740</span>        int count = filesToCompact.size();<a name="line.1740"></a>
-<span class="sourceLineNo">1741</span>        if (N &gt; count) {<a name="line.1741"></a>
-<span class="sourceLineNo">1742</span>          throw new RuntimeException("Not enough files");<a name="line.1742"></a>
-<span class="sourceLineNo">1743</span>        }<a name="line.1743"></a>
-<span class="sourceLineNo">1744</span><a name="line.1744"></a>
-<span class="sourceLineNo">1745</span>        filesToCompact = filesToCompact.subList(count - N, count);<a name="line.1745"></a>
-<span class="sourceLineNo">1746</span>        isMajor = (filesToCompact.size() == storeEngine.getStoreFileManager().getStorefileCount());<a name="line.1746"></a>
-<span class="sourceLineNo">1747</span>        filesCompacting.addAll(filesToCompact);<a name="line.1747"></a>
-<span class="sourceLineNo">1748</span>        Collections.sort(filesCompacting, storeEngine.getStoreFileManager()<a name="line.1748"></a>
-<span class="sourceLineNo">1749</span>            .getStoreFileComparator());<a name="line.1749"></a>
-<span class="sourceLineNo">1750</span>      }<a name="line.1750"></a>
-<span class="sourceLineNo">1751</span>    } finally {<a name="line.1751"></a>
-<span class="sourceLineNo">1752</span>      this.lock.readLock().unlock();<a name="line.1752"></a>
-<span class="sourceLineNo">1753</span>    }<a name="line.1753"></a>
+<span class="sourceLineNo">1571</span>  @VisibleForTesting<a name="line.1571"></a>
+<span class="sourceLineNo">1572</span>  void replaceStoreFiles(Collection&lt;HStoreFile&gt; compactedFiles, Collection&lt;HStoreFile&gt; result)<a name="line.1572"></a>
+<span class="sourceLineNo">1573</span>      throws IOException {<a name="line.1573"></a>
+<span class="sourceLineNo">1574</span>    this.lock.writeLock().lock();<a name="line.1574"></a>
+<span class="sourceLineNo">1575</span>    try {<a name="line.1575"></a>
+<span class="sourceLineNo">1576</span>      this.storeEngine.getStoreFileManager().addCompactionResults(compactedFiles, result);<a name="line.1576"></a>
+<span class="sourceLineNo">1577</span>      synchronized (filesCompacting) {<a name="line.1577"></a>
+<span class="sourceLineNo">1578</span>        filesCompacting.removeAll(compactedFiles);<a name="line.1578"></a>
+<span class="sourceLineNo">1579</span>      }<a name="line.1579"></a>
+<span class="sourceLineNo">1580</span><a name="line.1580"></a>
+<span class="sourceLineNo">1581</span>      // These may be null when the RS is shutting down. The space quota Chores will fix the Region<a name="line.1581"></a>
+<span class="sourceLineNo">1582</span>      // sizes later so it's not super-critical if we miss these.<a name="line.1582"></a>
+<span class="sourceLineNo">1583</span>      RegionServerServices rsServices = region.getRegionServerServices();<a name="line.1583"></a>
+<span class="sourceLineNo">1584</span>      if (rsServices != null &amp;&amp; rsServices.getRegionServerSpaceQuotaManager() != null) {<a name="line.1584"></a>
+<span class="sourceLineNo">1585</span>        updateSpaceQuotaAfterFileReplacement(<a name="line.1585"></a>
+<span class="sourceLineNo">1586</span>            rsServices.getRegionServerSpaceQuotaManager().getRegionSizeStore(), getRegionInfo(),<a name="line.1586"></a>
+<span class="sourceLineNo">1587</span>            compactedFiles, result);<a name="line.1587"></a>
+<span class="sourceLineNo">1588</span>      }<a name="line.1588"></a>
+<span class="sourceLineNo">1589</span>    } finally {<a name="line.1589"></a>
+<span class="sourceLineNo">1590</span>      this.lock.writeLock().unlock();<a name="line.1590"></a>
+<span class="sourceLineNo">1591</span>    }<a name="line.1591"></a>
+<span class="sourceLineNo">1592</span>  }<a name="line.1592"></a>
+<span class="sourceLineNo">1593</span><a name="line.1593"></a>
+<span class="sourceLineNo">1594</span>  /**<a name="line.1594"></a>
+<span class="sourceLineNo">1595</span>   * Updates the space quota usage for this region, removing the size for files compacted away<a name="line.1595"></a>
+<span class="sourceLineNo">1596</span>   * and adding in the size for new files.<a name="line.1596"></a>
+<span class="sourceLineNo">1597</span>   *<a name="line.1597"></a>
+<span class="sourceLineNo">1598</span>   * @param sizeStore The object tracking changes in region size for space quotas.<a name="line.1598"></a>
+<span class="sourceLineNo">1599</span>   * @param regionInfo The identifier for the region whose size is being updated.<a name="line.1599"></a>
+<span class="sourceLineNo">1600</span>   * @param oldFiles Files removed from this store's region.<a name="line.1600"></a>
+<span class="sourceLineNo">1601</span>   * @param newFiles Files added to this store's region.<a name="line.1601"></a>
+<span class="sourceLineNo">1602</span>   */<a name="line.1602"></a>
+<span class="sourceLineNo">1603</span>  void updateSpaceQuotaAfterFileReplacement(<a name="line.1603"></a>
+<span class="sourceLineNo">1604</span>      RegionSizeStore sizeStore, RegionInfo regionInfo, Collection&lt;HStoreFile&gt; oldFiles,<a name="line.1604"></a>
+<span class="sourceLineNo">1605</span>      Collection&lt;HStoreFile&gt; newFiles) {<a name="line.1605"></a>
+<span class="sourceLineNo">1606</span>    long delta = 0;<a name="line.1606"></a>
+<span class="sourceLineNo">1607</span>    if (oldFiles != null) {<a name="line.1607"></a>
+<span class="sourceLineNo">1608</span>      for (HStoreFile compactedFile : oldFiles) {<a name="line.1608"></a>
+<span class="sourceLineNo">1609</span>        if (compactedFile.isHFile()) {<a name="line.1609"></a>
+<span class="sourceLineNo">1610</span>          delta -= compactedFile.getReader().length();<a name="line.1610"></a>
+<span class="sourceLineNo">1611</span>        }<a name="line.1611"></a>
+<span class="sourceLineNo">1612</span>      }<a name="line.1612"></a>
+<span class="sourceLineNo">1613</span>    }<a name="line.1613"></a>
+<span class="sourceLineNo">1614</span>    if (newFiles != null) {<a name="line.1614"></a>
+<span class="sourceLineNo">1615</span>      for (HStoreFile newFile : newFiles) {<a name="line.1615"></a>
+<span class="sourceLineNo">1616</span>        if (newFile.isHFile()) {<a name="line.1616"></a>
+<span class="sourceLineNo">1617</span>          delta += newFile.getReader().length();<a name="line.1617"></a>
+<span class="sourceLineNo">1618</span>        }<a name="line.1618"></a>
+<span class="sourceLineNo">1619</span>      }<a name="line.1619"></a>
+<span class="sourceLineNo">1620</span>    }<a name="line.1620"></a>
+<span class="sourceLineNo">1621</span>    sizeStore.incrementRegionSize(regionInfo, delta);<a name="line.1621"></a>
+<span class="sourceLineNo">1622</span>  }<a name="line.1622"></a>
+<span class="sourceLineNo">1623</span><a name="line.1623"></a>
+<span class="sourceLineNo">1624</span>  /**<a name="line.1624"></a>
+<span class="sourceLineNo">1625</span>   * Log a very elaborate compaction completion message.<a name="line.1625"></a>
+<span class="sourceLineNo">1626</span>   * @param cr Request.<a name="line.1626"></a>
+<span class="sourceLineNo">1627</span>   * @param sfs Resulting files.<a name="line.1627"></a>
+<span class="sourceLineNo">1628</span>   * @param compactionStartTime Start time.<a name="line.1628"></a>
+<span class="sourceLineNo">1629</span>   */<a name="line.1629"></a>
+<span class="sourceLineNo">1630</span>  private void logCompactionEndMessage(<a name="line.1630"></a>
+<span class="sourceLineNo">1631</span>      CompactionRequestImpl cr, List&lt;HStoreFile&gt; sfs, long now, long compactionStartTime) {<a name="line.1631"></a>
+<span class="sourceLineNo">1632</span>    StringBuilder message = new StringBuilder(<a name="line.1632"></a>
+<span class="sourceLineNo">1633</span>      "Completed" + (cr.isMajor() ? " major" : "") + " compaction of "<a name="line.1633"></a>
+<span class="sourceLineNo">1634</span>      + cr.getFiles().size() + (cr.isAllFiles() ? " (all)" : "") + " file(s) in "<a name="line.1634"></a>
+<span class="sourceLineNo">1635</span>      + this + " of " + this.getRegionInfo().getShortNameToLog() + " into ");<a name="line.1635"></a>
+<span class="sourceLineNo">1636</span>    if (sfs.isEmpty()) {<a name="line.1636"></a>
+<span class="sourceLineNo">1637</span>      message.append("none, ");<a name="line.1637"></a>
+<span class="sourceLineNo">1638</span>    } else {<a name="line.1638"></a>
+<span class="sourceLineNo">1639</span>      for (HStoreFile sf: sfs) {<a name="line.1639"></a>
+<span class="sourceLineNo">1640</span>        message.append(sf.getPath().getName());<a name="line.1640"></a>
+<span class="sourceLineNo">1641</span>        message.append("(size=");<a name="line.1641"></a>
+<span class="sourceLineNo">1642</span>        message.append(TraditionalBinaryPrefix.long2String(sf.getReader().length(), "", 1));<a name="line.1642"></a>
+<span class="sourceLineNo">1643</span>        message.append("), ");<a name="line.1643"></a>
+<span class="sourceLineNo">1644</span>      }<a name="line.1644"></a>
+<span class="sourceLineNo">1645</span>    }<a name="line.1645"></a>
+<span class="sourceLineNo">1646</span>    message.append("total size for store is ")<a name="line.1646"></a>
+<span class="sourceLineNo">1647</span>      .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1))<a name="line.1647"></a>
+<span class="sourceLineNo">1648</span>      .append(". This selection was in queue for ")<a name="line.1648"></a>
+<span class="sourceLineNo">1649</span>      .append(StringUtils.formatTimeDiff(compactionStartTime, cr.getSelectionTime()))<a name="line.1649"></a>
+<span class="sourceLineNo">1650</span>      .append(", and took ").append(StringUtils.formatTimeDiff(now, compactionStartTime))<a name="line.1650"></a>
+<span class="sourceLineNo">1651</span>      .append(" to execute.");<a name="line.1651"></a>
+<span class="sourceLineNo">1652</span>    LOG.info(message.toString());<a name="line.1652"></a>
+<span class="sourceLineNo">1653</span>    if (LOG.isTraceEnabled()) {<a name="line.1653"></a>
+<span class="sourceLineNo">1654</span>      int fileCount = storeEngine.getStoreFileManager().getStorefileCount();<a name="line.1654"></a>
+<span class="sourceLineNo">1655</span>      long resultSize = getTotalSize(sfs);<a name="line.1655"></a>
+<span class="sourceLineNo">1656</span>      String traceMessage = "COMPACTION start,end,size out,files in,files out,store size,"<a name="line.1656"></a>
+<span class="sourceLineNo">1657</span>        + "store files [" + compactionStartTime + "," + now + "," + resultSize + ","<a name="line.1657"></a>
+<span class="sourceLineNo">1658</span>          + cr.getFiles().size() + "," + sfs.size() + "," +  storeSize + "," + fileCount + "]";<a name="line.1658"></a>
+<span class="sourceLineNo">1659</span>      LOG.trace(traceMessage);<a name="line.1659"></a>
+<span class="sourceLineNo">1660</span>    }<a name="line.1660"></a>
+<span class="sourceLineNo">1661</span>  }<a name="line.1661"></a>
+<span class="sourceLineNo">1662</span><a name="line.1662"></a>
+<span class="sourceLineNo">1663</span>  /**<a name="line.1663"></a>
+<span class="sourceLineNo">1664</span>   * Call to complete a compaction. Its for the case where we find in the WAL a compaction<a name="line.1664"></a>
+<span class="sourceLineNo">1665</span>   * that was not finished.  We could find one recovering a WAL after a regionserver crash.<a name="line.1665"></a>
+<span class="sourceLineNo">1666</span>   * See HBASE-2231.<a name="line.1666"></a>
+<span class="sourceLineNo">1667</span>   * @param compaction<a name="line.1667"></a>
+<span class="sourceLineNo">1668</span>   */<a name="line.1668"></a>
+<span class="sourceLineNo">1669</span>  public void replayCompactionMarker(CompactionDescriptor compaction, boolean pickCompactionFiles,<a name="line.1669"></a>
+<span class="sourceLineNo">1670</span>      boolean removeFiles) throws IOException {<a name="line.1670"></a>
+<span class="sourceLineNo">1671</span>    LOG.debug("Completing compaction from the WAL marker");<a name="line.1671"></a>
+<span class="sourceLineNo">1672</span>    List&lt;String&gt; compactionInputs = compaction.getCompactionInputList();<a name="line.1672"></a>
+<span class="sourceLineNo">1673</span>    List&lt;String&gt; compactionOutputs = Lists.newArrayList(compaction.getCompactionOutputList());<a name="line.1673"></a>
+<span class="sourceLineNo">1674</span><a name="line.1674"></a>
+<span class="sourceLineNo">1675</span>    // The Compaction Marker is written after the compaction is completed,<a name="line.1675"></a>
+<span class="sourceLineNo">1676</span>    // and the files moved into the region/family folder.<a name="line.1676"></a>
+<span class="sourceLineNo">1677</span>    //<a name="line.1677"></a>
+<span class="sourceLineNo">1678</span>    // If we crash after the entry is written, we may not have removed the<a name="line.1678"></a>
+<span class="sourceLineNo">1679</span>    // input files, but the output file is present.<a name="line.1679"></a>
+<span class="sourceLineNo">1680</span>    // (The unremoved input files will be removed by this function)<a name="line.1680"></a>
+<span class="sourceLineNo">1681</span>    //<a name="line.1681"></a>
+<span class="sourceLineNo">1682</span>    // If we scan the directory and the file is not present, it can mean that:<a name="line.1682"></a>
+<span class="sourceLineNo">1683</span>    //   - The file was manually removed by the user<a name="line.1683"></a>
+<span class="sourceLineNo">1684</span>    //   - The file was removed as consequence of subsequent compaction<a name="line.1684"></a>
+<span class="sourceLineNo">1685</span>    // so, we can't do anything with the "compaction output list" because those<a name="line.1685"></a>
+<span class="sourceLineNo">1686</span>    // files have already been loaded when opening the region (by virtue of<a name="line.1686"></a>
+<span class="sourceLineNo">1687</span>    // being in the store's folder) or they may be missing due to a compaction.<a name="line.1687"></a>
+<span class="sourceLineNo">1688</span><a name="line.1688"></a>
+<span class="sourceLineNo">1689</span>    String familyName = this.getColumnFamilyName();<a name="line.1689"></a>
+<span class="sourceLineNo">1690</span>    Set&lt;String&gt; inputFiles = new HashSet&lt;&gt;();<a name="line.1690"></a>
+<span class="sourceLineNo">1691</span>    for (String compactionInput : compactionInputs) {<a name="line.1691"></a>
+<span class="sourceLineNo">1692</span>      Path inputPath = fs.getStoreFilePath(familyName, compactionInput);<a name="line.1692"></a>
+<span class="sourceLineNo">1693</span>      inputFiles.add(inputPath.getName());<a name="line.1693"></a>
+<span class="sourceLineNo">1694</span>    }<a name="line.1694"></a>
+<span class="sourceLineNo">1695</span><a name="line.1695"></a>
+<span class="sourceLineNo">1696</span>    //some of the input files might already be deleted<a name="line.1696"></a>
+<span class="sourceLineNo">1697</span>    List&lt;HStoreFile&gt; inputStoreFiles = new ArrayList&lt;&gt;(compactionInputs.size());<a name="line.1697"></a>
+<span class="sourceLineNo">1698</span>    for (HStoreFile sf : this.getStorefiles()) {<a name="line.1698"></a>
+<span class="sourceLineNo">1699</span>      if (inputFiles.contains(sf.getPath().getName())) {<a name="line.1699"></a>
+<span class="sourceLineNo">1700</span>        inputStoreFiles.add(sf);<a name="line.1700"></a>
+<span class="sourceLineNo">1701</span>      }<a name="line.1701"></a>
+<span class="sourceLineNo">1702</span>    }<a name="line.1702"></a>
+<span class="sourceLineNo">1703</span><a name="line.1703"></a>
+<span class="sourceLineNo">1704</span>    // check whether we need to pick up the new files<a name="line.1704"></a>
+<span class="sourceLineNo">1705</span>    List&lt;HStoreFile&gt; outputStoreFiles = new ArrayList&lt;&gt;(compactionOutputs.size());<a name="line.1705"></a>
+<span class="sourceLineNo">1706</span><a name="line.1706"></a>
+<span class="sourceLineNo">1707</span>    if (pickCompactionFiles) {<a name="line.1707"></a>
+<span class="sourceLineNo">1708</span>      for (HStoreFile sf : this.getStorefiles()) {<a name="line.1708"></a>
+<span class="sourceLineNo">1709</span>        compactionOutputs.remove(sf.getPath().getName());<a name="line.1709"></a>
+<span class="sourceLineNo">1710</span>      }<a name="line.1710"></a>
+<span class="sourceLineNo">1711</span>      for (String compactionOutput : compactionOutputs) {<a name="line.1711"></a>
+<span class="sourceLineNo">1712</span>        StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), compactionOutput);<a name="line.1712"></a>
+<span class="sourceLineNo">1713</span>        HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);<a name="line.1713"></a>
+<span class="sourceLineNo">1714</span>        outputStoreFiles.add(storeFile);<a name="line.1714"></a>
+<span class="sourceLineNo">1715</span>      }<a name="line.1715"></a>
+<span class="sourceLineNo">1716</span>    }<a name="line.1716"></a>
+<span class="sourceLineNo">1717</span><a name="line.1717"></a>
+<span class="sourceLineNo">1718</span>    if (!inputStoreFiles.isEmpty() || !outputStoreFiles.isEmpty()) {<a name="line.1718"></a>
+<span class="sourceLineNo">1719</span>      LOG.info("Replaying compaction marker, replacing input files: " +<a name="line.1719"></a>
+<span class="sourceLineNo">1720</span>          inputStoreFiles + " with output files : " + outputStoreFiles);<a name="line.1720"></a>
+<span class="sourceLineNo">1721</span>      this.replaceStoreFiles(inputStoreFiles, outputStoreFiles);<a name="line.1721"></a>
+<span class="sourceLineNo">1722</span>      this.completeCompaction(inputStoreFiles);<a name="line.1722"></a>
+<span class="sourceLineNo">1723</span>    }<a name="line.1723"></a>
+<span class="sourceLineNo">1724</span>  }<a name="line.1724"></a>
+<span class="sourceLineNo">1725</span><a name="line.1725"></a>
+<span class="sourceLineNo">1726</span>  /**<a name="line.1726"></a>
+<span class="sourceLineNo">1727</span>   * This method tries to compact N recent files for testing.<a name="line.1727"></a>
+<span class="sourceLineNo">1728</span>   * Note that because compacting "recent" files only makes sense for some policies,<a name="line.1728"></a>
+<span class="sourceLineNo">1729</span>   * e.g. the default one, it assumes default policy is used. It doesn't use policy,<a name="line.1729"></a>
+<span class="sourceLineNo">1730</span>   * but instead makes a compaction candidate list by itself.<a name="line.1730"></a>
+<span class="sourceLineNo">1731</span>   * @param N Number of files.<a name="line.1731"></a>
+<span class="sourceLineNo">1732</span>   */<a name="line.1732"></a>
+<span class="sourceLineNo">1733</span>  @VisibleForTesting<a name="line.1733"></a>
+<span class="sourceLineNo">1734</span>  public void compactRecentForTestingAssumingDefaultPolicy(int N) throws IOException {<a name="line.1734"></a>
+<span class="sourceLineNo">1735</span>    List&lt;HStoreFile&gt; filesToCompact;<a name="line.1735"></a>
+<span class="sourceLineNo">1736</span>    boolean isMajor;<a name="line.1736"></a>
+<span class="sourceLineNo">1737</span><a name="line.1737"></a>
+<span class="sourceLineNo">1738</span>    this.lock.readLock().lock();<a name="line.1738"></a>
+<span class="sourceLineNo">1739</span>    try {<a name="line.1739"></a>
+<span class="sourceLineNo">1740</span>      synchronized (filesCompacting) {<a name="line.1740"></a>
+<span class="sourceLineNo">1741</span>        filesToCompact = Lists.newArrayList(storeEngine.getStoreFileManager().getStorefiles());<a name="line.1741"></a>
+<span class="sourceLineNo">1742</span>        if (!filesCompacting.isEmpty()) {<a name="line.1742"></a>
+<span class="sourceLineNo">1743</span>          // exclude all files older than the newest file we're currently<a name="line.1743"></a>
+<span class="sourceLineNo">1744</span>          // compacting. this allows us to preserve contiguity (HBASE-2856)<a name="line.1744"></a>
+<span class="sourceLineNo">1745</span>          HStoreFile last = filesCompacting.get(filesCompacting.size() - 1);<a name="line.1745"></a>
+<span class="sourceLineNo">1746</span>          int idx = filesToCompact.indexOf(last);<a name="line.1746"></a>
+<span class="sourceLineNo">1747</span>          Preconditions.checkArgument(idx != -1);<a name="line.1747"></a>
+<span class="sourceLineNo">1748</span>          filesToCompact.subList(0, idx + 1).clear();<a name="line.1748"></a>
+<span class="sourceLineNo">1749</span>        }<a name="line.1749"></a>
+<span class="sourceLineNo">1750</span>        int count = filesToCompact.size();<a name="line.1750"></a>
+<span class="sourceLineNo">1751</span>        if (N &gt; count) {<a name="line.1751"></a>
+<span class="sourceLineNo">1752</span>          throw new RuntimeException("Not enough files");<a name="line.1752"></a>
+<span class="sourceLineNo">1753</span>        }<a name="line.1753"></a>
 <span class="sourceLineNo">1754</span><a name="line.1754"></a>
-<span class="sourceLineNo">1755</span>    try {<a name="line.1755"></a>
-<span class="sourceLineNo">1756</span>      // Ready to go. Have list of files to compact.<a name="line.1756"></a>
-<span class="sourceLineNo">1757</span>      List&lt;Path&gt; newFiles = ((DefaultCompactor)this.storeEngine.getCompactor())<a name="line.1757"></a>
-<span class="sourceLineNo">1758</span>          .compactForTesting(filesToCompact, isMajor);<a name="line.1758"></a>
-<span class="sourceLineNo">1759</span>      for (Path newFile: newFiles) {<a name="line.1759"></a>
-<span class="sourceLineNo">1760</span>        // Move the compaction into place.<a name="line.1760"></a>
-<span class="sourceLineNo">1761</span>        HStoreFile sf = moveFileIntoPlace(newFile);<a name="line.1761"></a>
-<span class="sourceLineNo">1762</span>        if (this.getCoprocessorHost() != null) {<a name="line.1762"></a>
-<span class="sourceLineNo">1763</span>          this.getCoprocessorHost().postCompact(this, sf, null, null, null);<a name="line.1763"></a>
-<span class="sourceLineNo">1764</span>        }<a name="line.1764"></a>
-<span class="sourceLineNo">1765</span>        replaceStoreFiles(filesToCompact, Collections.singletonList(sf));<a name="line.1765"></a>
-<span class="sourceLineNo">1766</span>        completeCompaction(filesToCompact);<a name="line.1766"></a>
-<span class="sourceLineNo">1767</span>      }<a name="line.1767"></a>
-<span class="sourceLineNo">1768</span>    } finally {<a name="line.1768"></a>
-<span class="sourceLineNo">1769</span>      synchronized (filesCompacting) {<a name="line.1769"></a>
-<span class="sourceLineNo">1770</span>        filesCompacting.removeAll(filesToCompact);<a name="line.1770"></a>
-<span class="sourceLineNo">1771</span>      }<a name="line.1771"></a>
-<span class="sourceLineNo">1772</span>    }<a name="line.1772"></a>
-<span class="sourceLineNo">1773</span>  }<a name="line.1773"></a>
-<span class="sourceLineNo">1774</span><a name="line.1774"></a>
-<span class="sourceLineNo">1775</span>  @Override<a name="line.1775"></a>
-<span class="sourceLineNo">1776</span>  public boolean hasReferences() {<a name="line.1776"></a>
-<span class="sourceLineNo">1777</span>    // Grab the read lock here, because we need to ensure that: only when the atomic<a name="line.1777"></a>
-<span class="sourceLineNo">1778</span>    // replaceStoreFiles(..) finished, we can get all the complete store file list.<a name="line.1778"></a>
-<span class="sourceLineNo">1779</span>    this.lock.readLock().lock();<a name="line.1779"></a>
-<span class="sourceLineNo">1780</span>    try {<a name="line.1780"></a>
-<span class="sourceLineNo">1781</span>      // Merge the current store files with compacted files here due to HBASE-20940.<a name="line.1781"></a>
-<span class="sourceLineNo">1782</span>      Collection&lt;HStoreFile&gt; allStoreFiles = new ArrayList&lt;&gt;(getStorefiles());<a name="line.1782"></a>
-<span class="sourceLineNo">1783</span>      allStoreFiles.addAll(getCompactedFiles());<a name="line.1783"></a>
-<span class="sourceLineNo">1784</span>      return StoreUtils.hasReferences(allStoreFiles);<a name="line.1784"></a>
-<span class="sourceLineNo">1785</span>    } finally {<a name="line.1785"></a>
-<span class="sourceLineNo">1786</span>      this.lock.readLock().unlock();<a name="line.1786"></a>
-<span class="sourceLineNo">1787</span>    }<a name="line.1787"></a>
-<span class="sourceLineNo">1788</span>  }<a name="line.1788"></a>
-<span class="sourceLineNo">1789</span><a name="line.1789"></a>
-<span class="sourceLineNo">1790</span>  /**<a name="line.1790"></a>
-<span class="sourceLineNo">1791</span>   * getter for CompactionProgress object<a name="line.1791"></a>
-<span class="sourceLineNo">1792</span>   * @return CompactionProgress object; can be null<a name="line.1792"></a>
-<span class="sourceLineNo">1793</span>   */<a name="line.1793"></a>
-<span class="sourceLineNo">1794</span>  public CompactionProgress getCompactionProgress() {<a name="line.1794"></a>
-<span class="sourceLineNo">1795</span>    return this.storeEngine.getCompactor().getProgress();<a name="line.1795"></a>
-<span class="sourceLineNo">1796</span>  }<a name="line.1796"></a>
-<span class="sourceLineNo">1797</span><a name="line.1797"></a>
-<span class="sourceLineNo">1798</span>  @Override<a name="line.1798"></a>
-<span class="sourceLineNo">1799</span>  public boolean shouldPerformMajorCompaction() throws IOException {<a name="line.1799"></a>
-<span class="sourceLineNo">1800</span>    for (HStoreFile sf : this.storeEngine.getStoreFileManager().getStorefiles()) {<a name="line.1800"></a>
-<span class="sourceLineNo">1801</span>      // TODO: what are these reader checks all over the place?<a name="line.1801"></a>
-<span class="sourceLineNo">1802</span>      if (sf.getReader() == null) {<a name="line.1802"></a>
-<span class="sourceLineNo">1803</span>        LOG.debug("StoreFile {} has null Reader", sf);<a name="line.1803"></a>
-<span class="sourceLineNo">1804</span>        return false;<a name="line.1804"></a>
-<span class="sourceLineNo">1805</span>      }<a name="line.1805"></a>
-<span class="sourceLineNo">1806</span>    }<a name="line.1806"></a>
-<span class="sourceLineNo">1807</span>    return storeEngine.getCompactionPolicy().shouldPerformMajorCompaction(<a name="line.1807"></a>
-<span class="sourceLineNo">1808</span>        this.storeEngine.getStoreFileManager().getStorefiles());<a name="line.1808"></a>
-<span class="sourceLineNo">1809</span>  }<a name="line.1809"></a>
-<span class="sourceLineNo">1810</span><a name="line.1810"></a>
-<span class="sourceLineNo">1811</span>  public Optional&lt;CompactionContext&gt; requestCompaction() throws IOException {<a name="line.1811"></a>
-<span class="sourceLineNo">1812</span>    return requestCompaction(NO_PRIORITY, CompactionLifeCycleTracker.DUMMY, null);<a name="line.1812"></a>
-<span class="sourceLineNo">1813</span>  }<a name="line.1813"></a>
-<span class="sourceLineNo">1814</span><a name="line.1814"></a>
-<span class="sourceLineNo">1815</span>  public Optional&lt;CompactionContext&gt; requestCompaction(int priority,<a name="line.1815"></a>
-<span class="sourceLineNo">1816</span>      CompactionLifeCycleTracker tracker, User user) throws IOException {<a name="line.1816"></a>
-<span class="sourceLineNo">1817</span>    // don't even select for compaction if writes are disabled<a name="line.1817"></a>
-<span class="sourceLineNo">1818</span>    if (!this.areWritesEnabled()) {<a name="line.1818"></a>
-<span class="sourceLineNo">1819</span>      return Optional.empty();<a name="line.1819"></a>
-<span class="sourceLineNo">1820</span>    }<a name="line.1820"></a>
-<span class="sourceLineNo">1821</span>    // Before we do compaction, try to get rid of unneeded files to simplify things.<a name="line.1821"></a>
-<span class="sourceLineNo">1822</span>    removeUnneededFiles();<a name="line.1822"></a>
-<span class="sourceLineNo">1823</span><a name="line.1823"></a>
-<span class="sourceLineNo">1824</span>    final CompactionContext compaction = storeEngine.createCompaction();<a name="line.1824"></a>
-<span class="sourceLineNo">1825</span>    CompactionRequestImpl request = null;<a name="line.1825"></a>
-<span class="sourceLineNo">1826</span>    this.lock.readLock().lock();<a name="line.1826"></a>
-<span class="sourceLineNo">1827</span>    try {<a name="line.1827"></a>
-<span class="sourceLineNo">1828</span>      synchronized (filesCompacting) {<a name="line.1828"></a>
-<span class="sourceLineNo">1829</span>        // First, see if coprocessor would want to override selection.<a name="line.1829"></a>
-<span class="sourceLineNo">1830</span>        if (this.getCoprocessorHost() != null) {<a name="line.1830"></a>
-<span class="sourceLineNo">1831</span>          final List&lt;HStoreFile&gt; candidatesForCoproc = compaction.preSelect(this.filesCompacting);<a name="line.1831"></a>
-<span class="sourceLineNo">1832</span>          boolean override = getCoprocessorHost().preCompactSelection(this,<a name="line.1832"></a>
-<span class="sourceLineNo">1833</span>              candidatesForCoproc, tracker, user);<a name="line.1833"></a>
-<span class="sourceLineNo">1834</span>          if (override) {<a name="line.1834"></a>
-<span class="sourceLineNo">1835</span>            // Coprocessor is overriding normal file selection.<a name="line.1835"></a>
-<span class="sourceLineNo">1836</span>            compaction.forceSelect(new CompactionRequestImpl(candidatesForCoproc));<a name="line.1836"></a>
-<span class="sourceLineNo">1837</span>          }<a name="line.1837"></a>
-<span class="sourceLineNo">1838</span>        }<a name="line.1838"></a>
-<span class="sourceLineNo">1839</span><a name="line.1839"></a>
-<span class="sourceLineNo">1840</span>        // Normal case - coprocessor is not overriding file selection.<a name="line.1840"></a>
-<span class="sourceLineNo">1841</span>        if (!compaction.hasSelection()) {<a name="line.1841"></a>
-<span class="sourceLineNo">1842</span>          boolean isUserCompaction = priority == Store.PRIORITY_USER;<a name="line.1842"></a>
-<span class="sourceLineNo">1843</span>          boolean mayUseOffPeak = offPeakHours.isOffPeakHour() &amp;&amp;<a name="line.1843"></a>
-<span class="sourceLineNo">1844</span>              offPeakCompactionTracker.compareAndSet(false, true);<a name="line.1844"></a>
-<span class="sourceLineNo">1845</span>          try {<a name="line.1845"></a>
-<span class="sourceLineNo">1846</span>            compaction.select(this.filesCompacting, isUserCompaction,<a name="line.1846"></a>
-<span class="sourceLineNo">1847</span>              mayUseOffPeak, forceMajor &amp;&amp; filesCompacting.isEmpty());<a name="line.1847"></a>
-<span class="sourceLineNo">1848</span>          } catch (IOException e) {<a name="line.1848"></a>
-<span class="sourceLineNo">1849</span>            if (mayUseOffPeak) {<a name="line.1849"></a>
-<span class="sourceLineNo">1850</span>              offPeakCompactionTracker.set(false);<a name="line.1850"></a>
-<span class="sourceLineNo">1851</span>            }<a name="line.1851"></a>
-<span class="sourceLineNo">1852</span>            throw e;<a name="line.1852"></a>
-<span class="sourceLineNo">1853</span>          }<a name="line.1853"></a>
-<span class="sourceLineNo">1854</span>          assert compaction.hasSelection();<a name="line.1854"></a>
-<span class="sourceLineNo">1855</span>          if (mayUseOffPeak &amp;&amp; !compaction.getRequest().isOffPeak()) {<a name="line.1855"></a>
-<span class="sourceLineNo">1856</span>            // Compaction policy doesn't want to take advantage of off-peak.<a name="line.1856"></a>
-<span class="sourceLineNo">1857</span>            offPeakCompactionTracker.set(false);<a name="line.1857"></a>
-<span class="sourceLineNo">1858</span>          }<a name="line.1858"></a>
-<span class="sourceLineNo">1859</span>        }<a name="line.1859"></a>
-<span class="sourceLineNo">1860</span>        if (this.getCoprocessorHost() != null) {<a name="line.1860"></a>
-<span class="sourceLineNo">1861</span>          this.getCoprocessorHost().postCompactSelection(<a name="line.1861"></a>
-<span class="sourceLineNo">1862</span>              this, ImmutableList.copyOf(compaction.getRequest().getFiles()), tracker,<a name="line.1862"></a>
-<span class="sourceLineNo">1863</span>              compaction.getRequest(), user);<a name="line.1863"></a>
-<span class="sourceLineNo">1864</span>        }<a name="line.1864"></a>
-<span class="sourceLineNo">1865</span>        // Finally, we have the resulting files list. Check if we have any files at all.<a name="line.1865"></a>
-<span class="sourceLineNo">1866</span>        request = compaction.getRequest();<a name="line.1866"></a>
-<span class="sourceLineNo">1867</span>        Collection&lt;HStoreFile&gt; selectedFiles = request.getFiles();<a name="line.1867"></a>
-<span class="sourceLineNo">1868</span>        if (selectedFiles.isEmpty()) {<a name="line.1868"></a>
-<span class="sourceLineNo">1869</span>          return Optional.empty();<a name="line.1869"></a>
-<span class="sourceLineNo">1870</span>        }<a name="line.1870"></a>
-<span class="sourceLineNo">1871</span><a name="line.1871"></a>
-<span class="sourceLineNo">1872</span>        addToCompactingFiles(selectedFiles);<a name="line.1872"></a>
-<span class="sourceLineNo">1873</span><a name="line.1873"></a>
-<span class="sourceLineNo">1874</span>        // If we're enqueuing a major, clear the force flag.<a name="line.1874"></a>
-<span class="sourceLineNo">1875</span>        this.forceMajor = this.forceMajor &amp;&amp; !request.isMajor();<a name="line.1875"></a>
-<span class="sourceLineNo">1876</span><a name="line.1876"></a>
-<span class="sourceLineNo">1877</span>        // Set common request properties.<a name="line.1877"></a>
-<span class="sourceLineNo">1878</span>        // Set priority, either override value supplied by caller or from store.<a name="line.1878"></a>
-<span class="sourceLineNo">1879</span>        request.setPriority((priority != Store.NO_PRIORITY) ? priority : getCompactPriority());<a name="line.1879"></a>
-<span class="sourceLineNo">1880</span>        request.setDescription(getRegionInfo().getRegionNameAsString(), getColumnFamilyName());<a name="line.1880"></a>
-<span class="sourceLineNo">1881</span>        request.setTracker(tracker);<a name="line.1881"></a>
-<span class="sourceLineNo">1882</span>      }<a name="line.1882"></a>
-<span class="sourceLineNo">1883</span>    } finally {<a name="line.1883"></a>
-<span class="sourceLineNo">1884</span>      this.lock.readLock().unlock();<a name="line.1884"></a>
-<span class="sourceLineNo">1885</span>    }<a name="line.1885"></a>
+<span class="sourceLineNo">1755</span>        filesToCompact = filesToCompact.subList(count - N, count);<a name="line.1755"></a>
+<span class="sourceLineNo">1756</span>        isMajor = (filesToCompact.size() == storeEngine.getStoreFileManager().getStorefileCount());<a name="line.1756"></a>
+<span class="sourceLineNo">1757</span>        filesCompacting.addAll(filesToCompact);<a name="line.1757"></a>
+<span class="sourceLineNo">1758</span>        Collections.sort(filesCompacting, storeEngine.getStoreFileManager()<a name="line.1758"></a>
+<span class="sourceLineNo">1759</span>            .getStoreFileComparator());<a name="line.1759"></a>
+<span class="sourceLineNo">1760</span>      }<a name="line.1760"></a>
+<span class="sourceLineNo">1761</span>    } finally {<a name="line.1761"></a>
+<span class="sourceLineNo">1762</span>      this.lock.readLock().unlock();<a name="line.1762"></a>
+<span class="sourceLineNo">1763</span>    }<a name="line.1763"></a>
+<span class="sourceLineNo">1764</span><a name="line.1764"></a>
+<span class="sourceLineNo">1765</span>    try {<a name="line.1765"></a>
+<span class="sourceLineNo">1766</span>      // Ready to go. Have list of files to compact.<a name="line.1766"></a>
+<span class="sourceLineNo">1767</span>      List&lt;Path&gt; newFiles = ((DefaultCompactor)this.storeEngine.getCompactor())<a name="line.1767"></a>
+<span class="sourceLineNo">1768</span>          .compactForTesting(filesToCompact, isMajor);<a name="line.1768"></a>
+<span class="sourceLineNo">1769</span>      for (Path newFile: newFiles) {<a name="line.1769"></a>
+<span class="sourceLineNo">1770</span>        // Move the compaction into place.<a name="line.1770"></a>
+<span class="sourceLineNo">1771</span>        HStoreFile sf = moveFileIntoPlace(newFile);<a name="line.1771"></a>
+<span class="sourceLineNo">1772</span>        if (this.getCoprocessorHost() != null) {<a name="line.1772"></a>
+<span class="sourceLineNo">1773</span>          this.getCoprocessorHost().postCompact(this, sf, null, null, null);<a name="line.1773"></a>
+<span class="sourceLineNo">1774</span>        }<a name="line.1774"></a>
+<span class="sourceLineNo">1775</span>        replaceStoreFiles(filesToCompact, Collections.singletonList(sf));<a name="line.1775"></a>
+<span class="sourceLineNo">1776</span>        completeCompaction(filesToCompact);<a name="line.1776"></a>
+<span class="sourceLineNo">1777</span>      }<a name="line.1777"></a>
+<span class="sourceLineNo">1778</span>    } finally {<a name="line.1778"></a>
+<span class="sourceLineNo">1779</span>      synchronized (filesCompacting) {<a name="line.1779"></a>
+<span class="sourceLineNo">1780</span>        filesCompacting.removeAll(filesToCompact);<a name="line.1780"></a>
+<span class="sourceLineNo">1781</span>      }<a name="line.1781"></a>
+<span class="sourceLineNo">1782</span>    }<a name="line.1782"></a>
+<span class="sourceLineNo">1783</span>  }<a name="line.1783"></a>
+<span class="sourceLineNo">1784</span><a name="line.1784"></a>
+<span class="sourceLineNo">1785</span>  @Override<a name="line.1785"></a>
+<span class="sourceLineNo">1786</span>  public boolean hasReferences() {<a name="line.1786"></a>
+<span class="sourceLineNo">1787</span>    // Grab the read lock here, because we need to ensure that: only when the atomic<a name="line.1787"></a>
+<span class="sourceLineNo">1788</span>    // replaceStoreFiles(..) finished, we can get all the complete store file list.<a name="line.1788"></a>
+<span class="sourceLineNo">1789</span>    this.lock.readLock().lock();<a name="line.1789"></a>
+<span class="sourceLineNo">1790</span>    try {<a name="line.1790"></a>
+<span class="sourceLineNo">1791</span>      // Merge the current store files with compacted files here due to HBASE-20940.<a name="line.1791"></a>
+<span class="sourceLineNo">1792</span>      Collection&lt;HStoreFile&gt; allStoreFiles = new ArrayList&lt;&gt;(getStorefiles());<a name="line.1792"></a>
+<span class="sourceLineNo">1793</span>      allStoreFiles.addAll(getCompactedFiles());<a name="line.1793"></a>
+<span class="sourceLineNo">1794</span>      return StoreUtils.hasReferences(allStoreFiles);<a name="line.1794"></a>
+<span class="sourceLineNo">1795</span>    } finally {<a name="line.1795"></a>
+<span class="sourceLineNo">1796</span>      this.lock.readLock().unlock();<a name="line.1796"></a>
+<span class="sourceLineNo">1797</span>    }<a name="line.1797"></a>
+<span class="sourceLineNo">1798</span>  }<a name="line.1798"></a>
+<span class="sourceLineNo">1799</span><a name="line.1799"></a>
+<span class="sourceLineNo">1800</span>  /**<a name="line.1800"></a>
+<span class="sourceLineNo">1801</span>   * getter for CompactionProgress object<a name="line.1801"></a>
+<span class="sourceLineNo">1802</span>   * @return CompactionProgress object; can be null<a name="line.1802"></a>
+<span class="sourceLineNo">1803</span>   */<a name="line.1803"></a>
+<span class="sourceLineNo">1804</span>  public CompactionProgress getCompactionProgress() {<a name="line.1804"></a>
+<span class="sourceLineNo">1805</span>    return this.storeEngine.getCompactor().getProgress();<a name="line.1805"></a>
+<span class="sourceLineNo">1806</span>  }<a name="line.1806"></a>
+<span class="sourceLineNo">1807</span><a name="line.1807"></a>
+<span class="sourceLineNo">1808</span>  @Override<a name="line.1808"></a>
+<span class="sourceLineNo">1809</span>  public boolean shouldPerformMajorCompaction() throws IOException {<a name="line.1809"></a>
+<span class="sourceLineNo">1810</span>    for (HStoreFile sf : this.storeEngine.getStoreFileManager().getStorefiles()) {<a name="line.1810"></a>
+<span class="sourceLineNo">1811</span>      // TODO: what are these reader checks all over the place?<a name="line.1811"></a>
+<span class="sourceLineNo">1812</span>      if (sf.getReader() == null) {<a name="line.1812"></a>
+<span class="sourceLineNo">1813</span>        LOG.debug("StoreFile {} has null Reader", sf);<a name="line.1813"></a>
+<span class="sourceLineNo">1814</span>        return false;<a name="line.1814"></a>
+<span class="sourceLineNo">1815</span>      }<a name="line.1815"></a>
+<span class="sourceLineNo">1816</span>    }<a name="line.1816"></a>
+<span class="sourceLineNo">1817</span>    return storeEngine.getCompactionPolicy().shouldPerformMajorCompaction(<a name="line.1817"></a>
+<span class="sourceLineNo">1818</span>        this.storeEngine.getStoreFileManager().getStorefiles());<a name="line.1818"></a>
+<span class="sourceLineNo">1819</span>  }<a name="line.1819"></a>
+<span class="sourceLineNo">1820</span><a name="line.1820"></a>
+<span class="sourceLineNo">1821</span>  public Optional&lt;CompactionContext&gt; requestCompaction() throws IOException {<a name="line.1821"></a>
+<span class="sourceLineNo">1822</span>    return requestCompaction(NO_PRIORITY, CompactionLifeCycleTracker.DUMMY, null);<a name="line.1822"></a>
+<span class="sourceLineNo">1823</span>  }<a name="line.1823"></a>
+<span class="sourceLineNo">1824</span><a name="line.1824"></a>
+<span class="sourceLineNo">1825</span>  public Optional&lt;CompactionContext&gt; requestCompaction(int priority,<a name="line.1825"></a>
+<span class="sourceLineNo">1826</span>      CompactionLifeCycleTracker tracker, User user) throws IOException {<a name="line.1826"></a>
+<span class="sourceLineNo">1827</span>    // don't even select for compaction if writes are disabled<a name="line.1827"></a>
+<span class="sourceLineNo">1828</span>    if (!this.areWritesEnabled()) {<a name="line.1828"></a>
+<span class="sourceLineNo">1829</span>      return Optional.empty();<a name="line.1829"></a>
+<span class="sourceLineNo">1830</span>    }<a name="line.1830"></a>
+<span class="sourceLineNo">1831</span>    // Before we do compaction, try to get rid of unneeded files to simplify things.<a name="line.1831"></a>
+<span class="sourceLineNo">1832</span>    removeUnneededFiles();<a name="line.1832"></a>
+<span class="sourceLineNo">1833</span><a name="line.1833"></a>
+<span class="sourceLineNo">1834</span>    final CompactionContext compaction = storeEngine.createCompaction();<a name="line.1834"></a>
+<span class="sourceLineNo">1835</span>    CompactionRequestImpl request = null;<a name="line.1835"></a>
+<span class="sourceLineNo">1836</span>    this.lock.readLock().lock();<a name="line.1836"></a>
+<span class="sourceLineNo">1837</span>    try {<a name="line.1837"></a>
+<span class="sourceLineNo">1838</span>      synchronized (filesCompacting) {<a name="line.1838"></a>
+<span class="sourceLineNo">1839</span>        // First, see if coprocessor would want to override selection.<a name="line.1839"></a>
+<span class="sourceLineNo">1840</span>        if (this.getCoprocessorHost() != null) {<a name="line.1840"></a>
+<span class="sourceLineNo">1841</span>          final List&lt;HStoreFile&gt; candidatesForCoproc = compaction.preSelect(this.filesCompacting);<a name="line.1841"></a>
+<span class="sourceLineNo">1842</span>          boolean override = getCoprocessorHost().preCompactSelection(this,<a name="line.1842"></a>
+<span class="sourceLineNo">1843</span>              candidatesForCoproc, tracker, user);<a name="line.1843"></a>
+<span class="sourceLineNo">1844</span>          if (override) {<a name="line.1844"></a>
+<span class="sourceLineNo">1845</span>            // Coprocessor is overriding normal file selection.<a name="line.1845"></a>
+<span class="sourceLineNo">1846</span>            compaction.forceSelect(new CompactionRequestImpl(candidatesForCoproc));<a name="line.1846"></a>
+<span class="sourceLineNo">1847</span>          }<a name="line.1847"></a>
+<span class="sourceLineNo">1848</span>        }<a name="line.1848"></a>
+<span class="sourceLineNo">1849</span><a name="line.1849"></a>
+<span class="sourceLineNo">1850</span>        // Normal case - coprocessor is not overriding file selection.<a name="line.1850"></a>
+<span class="sourceLineNo">1851</span>        if (!compaction.hasSelection()) {<a name="line.1851"></a>
+<span class="sourceLineNo">1852</span>          boolean isUserCompaction = priority == Store.PRIORITY_USER;<a name="line.1852"></a>
+<span class="sourceLineNo">1853</span>          boolean mayUseOffPeak = offPeakHours.isOffPeakHour() &amp;&amp;<a name="line.1853"></a>
+<span class="sourceLineNo">1854</span>              offPeakCompactionTracker.compareAndSet(false, true);<a name="line.1854"></a>
+<span class="sourceLineNo">1855</span>          try {<a name="line.1855"></a>
+<span class="sourceLineNo">1856</span>            compaction.select(this.filesCompacting, isUserCompaction,<a name="line.1856"></a>
+<span class="sourceLineNo">1857</span>              mayUseOffPeak, forceMajor &amp;&amp; filesCompacting.isEmpty());<a name="line.1857"></a>
+<span class="sourceLineNo">1858</span>          } catch (IOException e) {<a name="line.1858"></a>
+<span class="sourceLineNo">1859</span>            if (mayUseOffPeak) {<a name="line.1859"></a>
+<span class="sourceLineNo">1860</span>              offPeakCompactionTracker.set(false);<a name="line.1860"></a>
+<span class="sourceLineNo">1861</span>            }<a name="line.1861"></a>
+<span class="sourceLineNo">1862</span>            throw e;<a name="line.1862"></a>
+<span class="sourceLineNo">1863</span>          }<a name="line.1863"></a>
+<span class="sourceLineNo">1864</span>          assert compaction.hasSelection();<a name="line.1864"></a>
+<span class="sourceLineNo">1865</span>          if (mayUseOffPeak &amp;&amp; !compaction.getRequest().isOffPeak()) {<a name="line.1865"></a>
+<span class="sourceLineNo">1866</span>            // Compaction policy doesn't want to take advantage of off-peak.<a name="line.1866"></a>
+<span class="sourceLineNo">1867</span>            offPeakCompactionTracker.set(false);<a name="line.1867"></a>
+<span class="sourceLineNo">1868</span>          }<a name="line.1868"></a>
+<span class="sourceLineNo">1869</span>        }<a name="line.1869"></a>
+<span class="sourceLineNo">1870</span>        if (this.getCoprocessorHost() != null) {<a name="line.1870"></a>
+<span class="sourceLineNo">1871</span>          this.getCoprocessorHost().postCompactSelection(<a name="line.1871"></a>
+<span class="sourceLineNo">1872</span>              this, ImmutableList.copyOf(compaction.getRequest().getFiles()), tracker,<a name="line.1872"></a>
+<span class="sourceLineNo">1873</span>              compaction.getRequest(), user);<a name="line.1873"></a>
+<span class="sourceLineNo">1874</span>        }<a name="line.1874"></a>
+<span class="sourceLineNo">1875</span>        // Finally, we have the resulting files list. Check if we have any files at all.<a name="line.1875"></a>
+<span class="sourceLineNo">1876</span>        request = compaction.getRequest();<a name="line.1876"></a>
+<span class="sourceLineNo">1877</span>        Collection&lt;HStoreFile&gt; selectedFiles = request.getFiles();<a name="line.1877"></a>
+<span class="sourceLineNo">1878</span>        if (selectedFiles.isEmpty()) {<a name="line.1878"></a>
+<span class="sourceLineNo">1879</span>          return Optional.empty();<a name="line.1879"></a>
+<span class="sourceLineNo">1880</span>        }<a name="line.1880"></a>
+<span class="sourceLineNo">1881</span><a name="line.1881"></a>
+<span class="sourceLineNo">1882</span>        addToCompactingFiles(selectedFiles);<a name="line.1882"></a>
+<span class="sourceLineNo">1883</span><a name="line.1883"></a>
+<span class="sourceLineNo">1884</span>        // If we're enqueuing a major, clear the force flag.<a name="line.1884"></a>
+<span class="sourceLineNo">1885</span>        this.forceMajor = this.forceMajor &amp;&amp; !request.isMajor();<a name="line.1885"></a>
 <span class="sourceLineNo">1886</span><a name="line.1886"></a>
-<span class="sourceLineNo">1887</span>    if (LOG.isDebugEnabled()) {<a name="line.1887"></a>
-<span class="sourceLineNo">1888</span>      LOG.debug(getRegionInfo().getEncodedName() + " - " + getColumnFamilyName()<a name="line.1888"></a>
-<span class="sourceLineNo">1889</span>          + ": Initiating " + (request.isMajor() ? "major" : "minor") + " compaction"<a name="line.1889"></a>
-<span class="sourceLineNo">1890</span>          + (request.isAllFiles() ? " (all files)" : ""));<a name="line.1890"></a>
-<span class="sourceLineNo">1891</span>    }<a name="line.1891"></a>
-<span class="sourceLineNo">1892</span>    this.region.reportCompactionRequestStart(request.isMajor());<a name="line.1892"></a>
-<span class="sourceLineNo">1893</span>    return Optional.of(compaction);<a name="line.1893"></a>
-<span class="sourceLineNo">1894</span>  }<a name="line.1894"></a>
-<span class="sourceLineNo">1895</span><a name="line.1895"></a>
-<span class="sourceLineNo">1896</span>  /** Adds the files to compacting files. filesCompacting must be locked. */<a name="line.1896"></a>
-<span class="sourceLineNo">1897</span>  private void addToCompactingFiles(Collection&lt;HStoreFile&gt; filesToAdd) {<a name="line.1897"></a>
-<span class="sourceLineNo">1898</span>    if (CollectionUtils.isEmpty(filesToAdd)) {<a name="line.1898"></a>
-<span class="sourceLineNo">1899</span>      return;<a name="line.1899"></a>
-<span class="sourceLineNo">1900</span>    }<a name="line.1900"></a>
-<span class="sourceLineNo">1901</span>    // Check that we do not try to compact the same StoreFile twice.<a name="line.1901"></a>
-<span class="sourceLineNo">1902</span>    if (!Collections.disjoint(filesCompacting, filesToAdd)) {<a name="line.1902"></a>
-<span class="sourceLineNo">1903</span>      Preconditions.checkArgument(false, "%s overlaps with %s", filesToAdd, filesCompacting);<a name="line.1903"></a>
-<span class="sourceLineNo">1904</span>    }<a name="line.1904"></a>
-<span class="sourceLineNo">1905</span>    filesCompacting.addAll(filesToAdd);<a name="line.1905"></a>
-<span class="sourceLineNo">1906</span>    Collections.sort(filesCompacting, storeEngine.getStoreFileManager().getStoreFileComparator());<a name="line.1906"></a>
-<span class="sourceLineNo">1907</span>  }<a name="line.1907"></a>
-<span class="sourceLineNo">1908</span><a name="line.1908"></a>
-<span class="sourceLineNo">1909</span>  private void removeUnneededFiles() throws IOException {<a name="line.1909"></a>
-<span class="sourceLineNo">1910</span>    if (!conf.getBoolean("hbase.store.delete.expired.storefile", true)) return;<a name="line.1910"></a>
-<span class="sourceLineNo">1911</span>    if (getColumnFamilyDescriptor().getMinVersions() &gt; 0) {<a name="line.1911"></a>
-<span class="sourceLineNo">1912</span>      LOG.debug("Skipping expired store file removal due to min version being {}",<a name="line.1912"></a>
-<span class="sourceLineNo">1913</span>          getColumnFamilyDescriptor().getMinVersions());<a name="line.1913"></a>
-<span class="sourceLineNo">1914</span>      return;<a name="line.1914"></a>
-<span class="sourceLineNo">1915</span>    }<a name="line.1915"></a>
-<span class="sourceLineNo">1916</span>    this.lock.readLock().lock();<a name="line.1916"></a>
-<span class="sourceLineNo">1917</span>    Collection&lt;HStoreFile&gt; delSfs = null;<a name="line.1917"></a>
-<span class="sourceLineNo">1918</span>    try {<a name="line.1918"></a>
-<span class="sourceLineNo">1919</span>      synchronized (filesCompacting) {<a name="line.1919"></a>
-<span class="sourceLineNo">1920</span>        long cfTtl = getStoreFileTtl();<a name="line.1920"></a>
-<span class="sourceLineNo">1921</span>        if (cfTtl != Long.MAX_VALUE) {<a name="line.1921"></a>
-<span class="sourceLineNo">1922</span>          delSfs = storeEngine.getStoreFileManager().getUnneededFiles(<a name="line.1922"></a>
-<span class="sourceLineNo">1923</span>              EnvironmentEdgeManager.currentTime() - cfTtl, filesCompacting);<a name="line.1923"></a>
-<span class="sourceLineNo">1924</span>          addToCompactingFiles(delSfs);<a name="line.1924"></a>
-<span class="sourceLineNo">1925</span>        }<a name="line.1925"></a>
-<span class="sourceLineNo">1926</span>      }<a name="line.1926"></a>
-<span class="sourceLineNo">1927</span>    } finally {<a name="line.1927"></a>
-<span class="sourceLineNo">1928</span>      this.lock.readLock().unlock();<a name="line.1928"></a>
-<span class="sourceLineNo">1929</span>    }<a name="line.1929"></a>
-<span class="sourceLineNo">1930</span><a name="line.1930"></a>
-<span class="sourceLineNo">1931</span>    if (CollectionUtils.isEmpty(delSfs)) {<a name="line.1931"></a>
-<span class="sourceLineNo">1932</span>      return;<a name="line.1932"></a>
-<span class="sourceLineNo">1933</span>    }<a name="line.1933"></a>
-<span class="sourceLineNo">1934</span><a name="line.1934"></a>
-<span class="sourceLineNo">1935</span>    Collection&lt;HStoreFile&gt; newFiles = Collections.emptyList(); // No new files.<a name="line.1935"></a>
-<span class="sourceLineNo">1936</span>    writeCompactionWalRecord(delSfs, newFiles);<a name="line.1936"></a>
-<span class="sourceLineNo">1937</span>    replaceStoreFiles(delSfs, newFiles);<a name="line.1937"></a>
-<span class="sourceLineNo">1938</span>    completeCompaction(delSfs);<a name="line.1938"></a>
-<span class="sourceLineNo">1939</span>    LOG.info("Completed removal of " + delSfs.size() + " unnecessary (expired) file(s) in "<a name="line.1939"></a>
-<span class="sourceLineNo">1940</span>        + this + " of " + this.getRegionInfo().getRegionNameAsString()<a name="line.1940"></a>
-<span class="sourceLineNo">1941</span>        + "; total size for store is "<a name="line.1941"></a>
-<span class="sourceLineNo">1942</span>        + TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1));<a name="line.1942"></a>
-<span class="sourceLineNo">1943</span>  }<a name="line.1943"></a>
+<span class="sourceLineNo">1887</span>        // Set common request properties.<a name="line.1887"></a>
+<span class="sourceLineNo">1888</span>        // Set priority, either override value supplied by caller or from store.<a name="line.1888"></a>
+<span class="sourceLineNo">1889</span>        request.setPriority((priority != Store.NO_PRIORITY) ? priority : getCompactPriority());<a name="line.1889"></a>
+<span class="sourceLineNo">1890</span>        request.setDescription(getRegionInfo().getRegionNameAsString(), getColumnFamilyName());<a name="line.1890"></a>
+<span class="sourceLineNo">1891</span>        request.setTracker(tracker);<a name="line.1891"></a>
+<span class="sourceLineNo">1892</span>      }<a name="line.1892"></a>
+<span class="sourceLineNo">1893</span>    } finally {<a name="line.1893"></a>
+<span class="sourceLineNo">1894</span>      this.lock.readLock().unlock();<a name="line.1894"></a>
+<span class="sourceLineNo">1895</span>    }<a name="line.1895"></a>
+<span class="sourceLineNo">1896</span><a name="line.1896"></a>
+<span class="sourceLineNo">1897</span>    if (LOG.isDebugEnabled()) {<a name="line.1897"></a>
+<span class="sourceLineNo">1898</span>      LOG.debug(getRegionInfo().getEncodedName() + " - " + getColumnFamilyName()<a name="line.1898"></a>
+<span class="sourceLineNo">1899</span>          + ": Initiating " + (request.isMajor() ? "major" : "minor") + " compaction"<a name="line.1899"></a>
+<span class="sourceLineNo">1900</span>          + (request.isAllFiles() ? " (all files)" : ""));<a name="line.1900"></a>
+<span class="sourceLineNo">1901</span>    }<a name="line.1901"></a>
+<span class="sourceLineNo">1902</span>    this.region.reportCompactionRequestStart(request.isMajor());<a name="line.1902"></a>
+<span class="sourceLineNo">1903</span>    return Optional.of(compaction);<a name="line.1903"></a>
+<span class="sourceLineNo">1904</span>  }<a name="line.1904"></a>
+<span class="sourceLineNo">1905</span><a name="line.1905"></a>
+<span class="sourceLineNo">1906</span>  /** Adds the files to compacting files. filesCompacting must be locked. */<a name="line.1906"></a>
+<span class="sourceLineNo">1907</span>  private void addToCompactingFiles(Collection&lt;HStoreFile&gt; filesToAdd) {<a name="line.1907"></a>
+<span class="sourceLineNo">1908</span>    if (CollectionUtils.isEmpty(filesToAdd)) {<a name="line.1908"></a>
+<span class="sourceLineNo">1909</span>      return;<a name="line.1909"></a>
+<span class="sourceLineNo">1910</span>    }<a name="line.1910"></a>
+<span class="sourceLineNo">1911</span>    // Check that we do not try to compact the same StoreFile twice.<a name="line.1911"></a>
+<span class="sourceLineNo">1912</span>    if (!Collections.disjoint(filesCompacting, filesToAdd)) {<a name="line.1912"></a>
+<span class="sourceLineNo">1913</span>      Preconditions.checkArgument(false, "%s overlaps with %s", filesToAdd, filesCompacting);<a name="line.1913"></a>
+<span class="sourceLineNo">1914</span>    }<a name="line.1914"></a>
+<span class="sourceLineNo">1915</span>    filesCompacting.addAll(filesToAdd);<a name="line.1915"></a>
+<span class="sourceLineNo">1916</span>    Collections.sort(filesCompacting, storeEngine.getStoreFileManager().getStoreFileComparator());<a name="line.1916"></a>
+<span class="sourceLineNo">1917</span>  }<a name="line.1917"></a>
+<span class="sourceLineNo">1918</span><a name="line.1918"></a>
+<span class="sourceLineNo">1919</span>  private void removeUnneededFiles() throws IOException {<a name="line.1919"></a>
+<span class="sourceLineNo">1920</span>    if (!conf.getBoolean("hbase.store.delete.expired.storefile", true)) return;<a name="line.1920"></a>
+<span class="sourceLineNo">1921</span>    if (getColumnFamilyDescriptor().getMinVersions() &gt; 0) {<a name="line.1921"></a>
+<span class="sourceLineNo">1922</span>      LOG.debug("Skipping expired store file removal due to min version being {}",<a name="line.1922"></a>
+<span class="sourceLineNo">1923</span>          getColumnFamilyDescriptor().getMinVersions());<a name="line.1923"></a>
+<span class="sourceLineNo">1924</span>      return;<a name="line.1924"></a>
+<span class="sourceLineNo">1925</span>    }<a name="line.1925"></a>
+<span class="sourceLineNo">1926</span>    this.lock.readLock().lock();<a name="line.1926"></a>
+<span class="sourceLineNo">1927</span>    Collection&lt;HStoreFile&gt; delSfs = null;<a name="line.1927"></a>
+<span class="sourceLineNo">1928</span>    try {<a name="line.1928"></a>
+<span class="sourceLineNo">1929</span>      synchronized (filesCompacting) {<a name="line.1929"></a>
+<span class="sourceLineNo">1930</span>        long cfTtl = getStoreFileTtl();<a name="line.1930"></a>
+<span class="sourceLineNo">1931</span>        if (cfTtl != Long.MAX_VALUE) {<a name="line.1931"></a>
+<span class="sourceLineNo">1932</span>          delSfs = storeEngine.getStoreFileManager().getUnneededFiles(<a name="line.1932"></a>
+<span class="sourceLineNo">1933</span>              EnvironmentEdgeManager.currentTime() - cfTtl, filesCompacting);<a name="line.1933"></a>
+<span class="sourceLineNo">1934</span>          addToCompactingFiles(delSfs);<a name="line.1934"></a>
+<span class="sourceLineNo">1935</span>        }<a name="line.1935"></a>
+<span class="sourceLineNo">1936</span>      }<a name="line.1936"></a>
+<span class="sourceLineNo">1937</span>    } finally {<a name="line.1937"></a>
+<span class="sourceLineNo">1938</span>      this.lock.readLock().unlock();<a name="line.1938"></a>
+<span class="sourceLineNo">1939</span>    }<a name="line.1939"></a>
+<span class="sourceLineNo">1940</span><a name="line.1940"></a>
+<span class="sourceLineNo">1941</span>    if (CollectionUtils.isEmpty(delSfs)) {<a name="line.1941"></a>
+<span class="sourceLineNo">1942</span>      return;<a name="line.1942"></a>
+<span class="sourceLineNo">1943</span>    }<a name="line.1943"></a>
 <span class="sourceLineNo">1944</span><a name="line.1944"></a>
-<span class="sourceLineNo">1945</span>  public void cancelRequestedCompaction(CompactionContext compaction) {<a name="line.1945"></a>
-<span class="sourceLineNo">1946</span>    finishCompactionRequest(compaction.getRequest());<a name="line.1946"></a>
-<span class="sourceLineNo">1947</span>  }<a name="line.1947"></a>
-<span class="sourceLineNo">1948</span><a name="line.1948"></a>
-<span class="sourceLineNo">1949</span>  private void finishCompactionRequest(CompactionRequestImpl cr) {<a name="line.1949"></a>
-<span class="sourceLineNo">1950</span>    this.region.reportCompactionRequestEnd(cr.isMajor(), cr.getFiles().size(), cr.getSize());<a name="line.1950"></a>
-<span class="sourceLineNo">1951</span>    if (cr.isOffPeak()) {<a name="line.1951"></a>
-<span class="sourceLineNo">1952</span>      offPeakCompactionTracker.set(false);<a name="line.1952"></a>
-<span class="sourceLineNo">1953</span>      cr.setOffPeak(false);<a name="line.1953"></a>
-<span class="sourceLineNo">1954</span>    }<a name="line.1954"></a>
-<span class="sourceLineNo">1955</span>    synchronized (filesCompacting) {<a name="line.1955"></a>
-<span class="sourceLineNo">1956</span>      filesCompacting.removeAll(cr.getFiles());<a name="line.1956"></a>
-<span class="sourceLineNo">1957</span>    }<a name="line.1957"></a>
-<span class="sourceLineNo">1958</span>  }<a name="line.1958"></a>
-<span class="sourceLineNo">1959</span><a name="line.1959"></a>
-<span class="sourceLineNo">1960</span>  /**<a name="line.1960"></a>
-<span class="sourceLineNo">1961</span>   * Validates a store file by opening and closing it. In HFileV2 this should not be an expensive<a name="line.1961"></a>
-<span class="sourceLineNo">1962</span>   * operation.<a name="line.1962"></a>
-<span class="sourceLineNo">1963</span>   * @param path the path to the store file<a name="line.1963"></a>
-<span class="sourceLineNo">1964</span>   */<a name="line.1964"></a>
-<span class="sourceLineNo">1965</span>  private void validateStoreFile(Path path) throws IOException {<a name="line.1965"></a>
-<span class="sourceLineNo">1966</span>    HStoreFile storeFile = null;<a name="line.1966"></a>
-<span class="sourceLineNo">1967</span>    try {<a name="line.1967"></a>
-<span class="sourceLineNo">1968</span>      storeFile = createStoreFileAndReader(path);<a name="line.1968"></a>
-<span class="sourceLineNo">1969</span>    } catch (IOException e) {<a name="line.1969"></a>
-<span class="sourceLineNo">1970</span>      LOG.error("Failed to open store file : {}, keeping it in tmp location", path, e);<a name="line.1970"></a>
-<span class="sourceLineNo">1971</span>      throw e;<a name="line.1971"></a>
-<span class="sourceLineNo">1972</span>    } finally {<a name="line.1972"></a>
-<span class="sourceLineNo">1973</span>      if (storeFile != null) {<a name="line.1973"></a>
-<span class="sourceLineNo">1974</span>        storeFile.closeStoreFile(false);<a name="line.1974"></a>
-<span class="sourceLineNo">1975</span>      }<a name="line.1975"></a>
-<span class="sourceLineNo">1976</span>    }<a name="line.1976"></a>
-<span class="sourceLineNo">1977</span>  }<a name="line.1977"></a>
-<span class="sourceLineNo">1978</span><a name="line.1978"></a>
-<span class="sourceLineNo">1979</span>  /**<a name="line.1979"></a>
-<span class="sourceLineNo">1980</span>   * Update counts.<a name="line.1980"></a>
-<span class="sourceLineNo">1981</span>   * @param compactedFiles list of files that were compacted<a name="line.1981"></a>
-<span class="sourceLineNo">1982</span>   */<a name="line.1982"></a>
-<span class="sourceLineNo">1983</span>  @VisibleForTesting<a name="line.1983"></a>
-<span class="sourceLineNo">1984</span>  protected void completeCompaction(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.1984"></a>
-<span class="sourceLineNo">1985</span>  // Rename this method! TODO.<a name="line.1985"></a>
-<span class="sourceLineNo">1986</span>    throws IOException {<a name="line.1986"></a>
-<span class="sourceLineNo">1987</span>    this.storeSize.set(0L);<a name="line.1987"></a>
-<span class="sourceLineNo">1988</span>    this.totalUncompressedBytes.set(0L);<a name="line.1988"></a>
-<span class="sourceLineNo">1989</span>    for (HStoreFile hsf : this.storeEngine.getStoreFileManager().getStorefiles()) {<a name="line.1989"></a>
-<span class="sourceLineNo">1990</span>      StoreFileReader r = hsf.getReader();<a name="line.1990"></a>
-<span class="sourceLineNo">1991</span>      if (r == null) {<a name="line.1991"></a>
-<span class="sourceLineNo">1992</span>        LOG.warn("StoreFile {} has a null Reader", hsf);<a name="line.1992"></a>
-<span class="sourceLineNo">1993</span>        continue;<a name="line.1993"></a>
-<span class="sourceLineNo">1994</span>      }<a name="line.1994"></a>
-<span class="sourceLineNo">1995</span>      this.storeSize.addAndGet(r.length());<a name="line.1995"></a>
-<span class="sourceLineNo">1996</span>      this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());<a name="line.1996"></a>
-<span class="sourceLineNo">1997</span>    }<a name="line.1997"></a>
-<span class="sourceLineNo">1998</span>  }<a name="line.1998"></a>
-<span class="sourceLineNo">1999</span><a name="line.1999"></a>
-<span class="sourceLineNo">2000</span>  /*<a name="line.2000"></a>
-<span class="sourceLineNo">2001</span>   * @param wantedVersions How many versions were asked for.<a name="line.2001"></a>
-<span class="sourceLineNo">2002</span>   * @return wantedVersions or this families' {@link HConstants#VERSIONS}.<a name="line.2002"></a>
-<span class="sourceLineNo">2003</span>   */<a name="line.2003"></a>
-<span class="sourceLineNo">2004</span>  int versionsToReturn(final int wantedVersions) {<a name="line.2004"></a>
-<span class="sourceLineNo">2005</span>    if (wantedVersions &lt;= 0) {<a name="line.2005"></a>
-<span class="sourceLineNo">2006</span>      throw new IllegalArgumentException("Number of versions must be &gt; 0");<a name="line.2006"></a>
+<span class="sourceLineNo">1945</span>    Collection&lt;HStoreFile&gt; newFiles = Collections.emptyList(); // No new files.<a name="line.1945"></a>
+<span class="sourceLineNo">1946</span>    writeCompactionWalRecord(delSfs, newFiles);<a name="line.1946"></a>
+<span class="sourceLineNo">1947</span>    replaceStoreFiles(delSfs, newFiles);<a name="line.1947"></a>
+<span class="sourceLineNo">1948</span>    completeCompaction(delSfs);<a name="line.1948"></a>
+<span class="sourceLineNo">1949</span>    LOG.info("Completed removal of " + delSfs.size() + " unnecessary (expired) file(s) in "<a name="line.1949"></a>
+<span class="sourceLineNo">1950</span>        + this + " of " + this.getRegionInfo().getRegionNameAsString()<a name="line.1950"></a>
+<span class="sourceLineNo">1951</span>        + "; total size for store is "<a name="line.1951"></a>
+<span class="sourceLineNo">1952</span>        + TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1));<a name="line.1952"></a>
+<span class="sourceLineNo">1953</span>  }<a name="line.1953"></a>
+<span class="sourceLineNo">1954</span><a name="line.1954"></a>
+<span class="sourceLineNo">1955</span>  public void cancelRequestedCompaction(CompactionContext compaction) {<a name="line.1955"></a>
+<span class="sourceLineNo">1956</span>    finishCompactionRequest(compaction.getRequest());<a name="line.1956"></a>
+<span class="sourceLineNo">1957</span>  }<a name="line.1957"></a>
+<span class="sourceLineNo">1958</span><a name="line.1958"></a>
+<span class="sourceLineNo">1959</span>  private void finishCompactionRequest(CompactionRequestImpl cr) {<a name="line.1959"></a>
+<span class="sourceLineNo">1960</span>    this.region.reportCompactionRequestEnd(cr.isMajor(), cr.getFiles().size(), cr.getSize());<a name="line.1960"></a>
+<span class="sourceLineNo">1961</span>    if (cr.isOffPeak()) {<a name="line.1961"></a>
+<span class="sourceLineNo">1962</span>      offPeakCompactionTracker.set(false);<a name="line.1962"></a>
+<span class="sourceLineNo">1963</span>      cr.setOffPeak(false);<a name="line.1963"></a>
+<span class="sourceLineNo">1964</span>    }<a name="line.1964"></a>
+<span class="sourceLineNo">1965</span>    synchronized (filesCompacting) {<a name="line.1965"></a>
+<span class="sourceLineNo">1966</span>      filesCompacting.removeAll(cr.getFiles());<a name="line.1966"></a>
+<span class="sourceLineNo">1967</span>    }<a name="line.1967"></a>
+<span class="sourceLineNo">1968</span>  }<a name="line.1968"></a>
+<span class="sourceLineNo">1969</span><a name="line.1969"></a>
+<span class="sourceLineNo">1970</span>  /**<a name="line.1970"></a>
+<span class="sourceLineNo">1971</span>   * Validates a store file by opening and closing it. In HFileV2 this should not be an expensive<a name="line.1971"></a>
+<span class="sourceLineNo">1972</span>   * operation.<a name="line.1972"></a>
+<span class="sourceLineNo">1973</span>   * @param path the path to the store file<a name="line.1973"></a>
+<span class="sourceLineNo">1974</span>   */<a name="line.1974"></a>
+<span class="sourceLineNo">1975</span>  private void validateStoreFile(Path path) throws IOException {<a name="line.1975"></a>
+<span class="sourceLineNo">1976</span>    HStoreFile storeFile = null;<a name="line.1976"></a>
+<span class="sourceLineNo">1977</span>    try {<a name="line.1977"></a>
+<span class="sourceLineNo">1978</span>      storeFile = createStoreFileAndReader(path);<a name="line.1978"></a>
+<span class="sourceLineNo">1979</span>    } catch (IOException e) {<a name="line.1979"></a>
+<span class="sourceLineNo">1980</span>      LOG.error("Failed to open store file : {}, keeping it in tmp location", path, e);<a name="line.1980"></a>
+<span class="sourceLineNo">1981</span>      throw e;<a name="line.1981"></a>
+<span class="sourceLineNo">1982</span>    } finally {<a name="line.1982"></a>
+<span class="sourceLineNo">1983</span>      if (storeFile != null) {<a name="line.1983"></a>
+<span class="sourceLineNo">1984</span>        storeFile.closeStoreFile(false);<a name="line.1984"></a>
+<span class="sourceLineNo">1985</span>      }<a name="line.1985"></a>
+<span class="sourceLineNo">1986</span>    }<a name="line.1986"></a>
+<span class="sourceLineNo">1987</span>  }<a name="line.1987"></a>
+<span class="sourceLineNo">1988</span><a name="line.1988"></a>
+<span class="sourceLineNo">1989</span>  /**<a name="line.1989"></a>
+<span class="sourceLineNo">1990</span>   * Update counts.<a name="line.1990"></a>
+<span class="sourceLineNo">1991</span>   * @param compactedFiles list of files that were compacted<a name="line.1991"></a>
+<span class="sourceLineNo">1992</span>   */<a name="line.1992"></a>
+<span class="sourceLineNo">1993</span>  @VisibleForTesting<a name="line.1993"></a>
+<span class="sourceLineNo">1994</span>  protected void completeCompaction(Collection&lt;HStoreFile&gt; compactedFiles)<a name="line.1994"></a>
+<span class="sourceLineNo">1995</span>  // Rename this method! TODO.<a name="line.1995"></a>
+<span class="sourceLineNo">1996</span>    throws IOException {<a name="line.1996"></a>
+<span class="sourceLineNo">1997</span>    this.storeSize.set(0L);<a name="line.1997"></a>
+<span class="sourceLineNo">1998</span>    this.totalUncompressedBytes.set(0L);<a name="line.1998"></a>
+<span class="sourceLineNo">1999</span>    for (HStoreFile hsf : this.storeEngine.getStoreFileManager().getStorefiles()) {<a name="line.1999"></a>
+<span class="sourceLineNo">2000</span>      StoreFileReader r = hsf.getReader();<a name="line.2000"></a>
+<span class="sourceLineNo">2001</span>      if (r == null) {<a name="line.2001"></a>
+<span class="sourceLineNo">2002</span>        LOG.warn("StoreFile {} has a null Reader", hsf);<a name="line.2002"></a>
+<span class="sourceLineNo">2003</span>        continue;<a name="line.2003"></a>
+<span class="sourceLineNo">2004</span>      }<a name="line.2004"></a>
+<span class="sourceLineNo">2005</span>      this.storeSize.addAndGet(r.length());<a name="line.2005"></a>
+<span class="sourceLineNo">2006</span>      this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());<a name="line.2006"></a>
 <span class="sourceLineNo">2007</span>    }<a name="line.2007"></a>
-<span class="sourceLineNo">2008</span>    // Make sure we do not return more than maximum versions for this store.<a name="line.2008"></a>
-<span class="sourceLineNo">2009</span>    int maxVersions = this.family.getMaxVersions();<a name="line.2009"></a>
-<span class="sourceLineNo">2010</span>    return wantedVersions &gt; maxVersions ? maxVersions: wantedVersions;<a name="line.2010"></a>
-<span class="sourceLineNo">2011</span>  }<a name="line.2011"></a>
-<span class="sourceLineNo">2012</span><a name="line.2012"></a>
-<span class="sourceLineNo">2013</span>  @Override<a name="line.2013"></a>
-<span class="sourceLineNo">2014</span>  public boolean canSplit() {<a name="line.2014"></a>
-<span class="sourceLineNo">2015</span>    this.lock.readLock().lock();<a name="line.2015"></a>
-<span class="sourceLineNo">2016</span>    try {<a name="line.2016"></a>
-<span class="sourceLineNo">2017</span>      // Not split-able if we find a reference store file present in the store.<a name="line.2017"></a>
-<span class="sourceLineNo">2018</span>      boolean result = !hasReferences();<a name="line.2018"></a>
-<span class="sourceLineNo">2019</span>      if (!result) {<a name="line.2019"></a>
-<span class="sourceLineNo">2020</span>        LOG.trace("Not splittable; has references: {}", this);<a name="line.2020"></a>
-<span class="sourceLineNo">2021</span>      }<a name="line.2021"></a>
-<span class="sourceLineNo">2022</span>      return result;<a name="line.2022"></a>
-<span class="sourceLineNo">2023</span>    } finally {<a name="line.2023"></a>
-<span class="sourceLineNo">2024</span>      this.lock.readLock().unlock();<a name="line.2024"></a>
-<span class="sourceLineNo">2025</span>    }<a name="line.2025"></a>
-<span class="sourceLineNo">2026</span>  }<a name="line.2026"></a>
-<span class="sourceLineNo">2027</span><a name="line.2027"></a>
-<span class="sourceLineNo">2028</span>  /**<a name="line.2028"></a>
-<span class="sourceLineNo">2029</span>   * Determines if Store should be split.<a name="line.2029"></a>
-<span class="sourceLineNo">2030</span>   */<a name="line.2030"></a>
-<span class="sourceLineNo">2031</span>  public Optional&lt;byte[]&gt; getSplitPoint() {<a name="line.2031"></a>
-<span class="sourceLineNo">2032</span>    this.lock.readLock().lock();<a name="line.2032"></a>
-<span class="sourceLineNo">2033</span>    try {<a name="line.2033"></a>
-<span class="sourceLineNo">2034</span>      // Should already be enforced by the split policy!<a name="line.2034"></a>
-<span class="sourceLineNo">2035</span>      assert !this.getRegionInfo().isMetaRegion();<a name="line.2035"></a>
-<span class="sourceLineNo">2036</span>      // Not split-able if we find a reference store file present in the store.<a name="line.2036"></a>
-<span class="sourceLineNo">2037</span>      if (hasReferences()) {<a name="line.2037"></a>
-<span class="sourceLineNo">2038</span>        LOG.trace("Not splittable; has references: {}", this);<a name="line.2038"></a>
-<span class="sourceLineNo">2039</span>        return Optional.empty();<a name="line.2039"></a>
-<span class="sourceLineNo">2040</span>      }<a name="line.2040"></a>
-<span class="sourceLineNo">2041</span>      return this.storeEngine.getStoreFileManager().getSplitPoint();<a name="line.2041"></a>
-<span class="sourceLineNo">2042</span>    } catch(IOException e) {<a name="line.2042"></a>
-<span class="sourceLineNo">2043</span>      LOG.warn("Failed getting store size for {}", this, e);<a name="line.2043"></a>
-<span class="sourceLineNo">2044</span>    } finally {<a name="line.2044"></a>
-<span class="sourceLineNo">2045</span>      this.lock.readLock().unlock();<a name="line.2045"></a>
-<span class="sourceLineNo">2046</span>    }<a name="line.2046"></a>
-<span class="sourceLineNo">2047</span>    return Optional.empty();<a name="line.2047"></a>
-<span class="sourceLineNo">2048</span>  }<a name="line.2048"></a>
-<span class="sourceLineNo">2049</span><a name="line.2049"></a>
-<span class="sourceLineNo">2050</span>  @Override<a name="line.2050"></a>
-<span class="sourceLineNo">2051</span>  public long getLastCompactSize() {<a name="line.2051"></a>
-<span class="sourceLineNo">2052</span>    return this.lastCompactSize;<a name="line.2052"></a>
-<span class="sourceLineNo">2053</span>  }<a name="line.2053"></a>
-<span class="sourceLineNo">2054</span><a name="line.2054"></a>
-<span class="sourceLineNo">2055</span>  @Override<a name="line.2055"></a>
-<span class="sourceLineNo">2056</span>  public long getSize() {<a name="line.2056"></a>
-<span class="sourceLineNo">2057</span>    return storeSize.get();<a name="line.2057"></a>
+<span class="sourceLineNo">2008</span>  }<a name="line.2008"></a>
+<span class="sourceLineNo">2009</span><a name="line.2009"></a>
+<span class="sourceLineNo">2010</span>  /*<a name="line.2010"></a>
+<span class="sourceLineNo">2011</span>   * @param wantedVersions How many versions were asked for.<a name="line.2011"></a>
+<span class="sourceLineNo">2012</span>   * @return wantedVersions or this families' {@link HConstants#VERSIONS}.<a name="line.2012"></a>
+<span class="sourceLineNo">2013</span>   */<a name="line.2013"></a>
+<span class="sourceLineNo">2014</span>  int versionsToReturn(final int wantedVersions) {<a name="line.2014"></a>
+<span class="sourceLineNo">2015</span>    if (wantedVersions &lt;= 0) {<a name="line.2015"></a>
+<span class="sourceLineNo">2016</span>      throw new IllegalArgumentException("Number of versions must be &gt; 0");<a name="line.2016"></a>
+<span class="sourceLineNo">2017</span>    }<a name="line.2017"></a>
+<span class="sourceLineNo">2018</span>    // Make sure we do not return more than maximum versions for this store.<a name="line.2018"></a>
+<span class="sourceLineNo">2019</span>    int maxVersions = this.family.getMaxVersions();<a name="line.2019"></a>
+<span class="sourceLineNo">2020</span>    return wantedVersions &gt; maxVersions ? maxVersions: wantedVersions;<a name="line.2020"></a>
+<span class="sourceLineNo">2021</span>  }<a name="line.2021"></a>
+<span class="sourceLineNo">2022</span><a name="line.2022"></a>
+<span class="sourceLineNo">2023</span>  @Override<a name="line.2023"></a>
+<span class="sourceLineNo">2024</span>  public boolean canSplit() {<a name="line.2024"></a>
+<span class="sourceLineNo">2025</span>    this.lock.readLock().lock();<a name="line.2025"></a>
+<span class="sourceLineNo">2026</span>    try {<a name="line.2026"></a>
+<span class="sourceLineNo">2027</span>      // Not split-able if we find a reference store file present in the store.<a name="line.2027"></a>
+<span class="sourceLineNo">2028</span>      boolean result = !hasReferences();<a name="line.2028"></a>
+<span class="sourceLineNo">2029</span>      if (!result) {<a name="line.2029"></a>
+<span class="sourceLineNo">2030</span>        LOG.trace("Not splittable; has references: {}", this);<a name="line.2030"></a>
+<span class="sourceLineNo">2031</span>      }<a name="line.2031"></a>
+<span class="sourceLineNo">2032</span>      return result;<a name="line.2032"></a>
+<span class="sourceLineNo">2033</span>    } finally {<a name="line.2033"></a>
+<span class="sourceLineNo">2034</span>      this.lock.readLock().unlock();<a name="line.2034"></a>
+<span class="sourceLineNo">2035</span>    }<a name="line.2035"></a>
+<span class="sourceLineNo">2036</span>  }<a name="line.2036"></a>
+<span class="sourceLineNo">2037</span><a name="line.2037"></a>
+<span class="sourceLineNo">2038</span>  /**<a name="line.2038"></a>
+<span class="sourceLineNo">2039</span>   * Determines if Store should be split.<a name="line.2039"></a>
+<span class="sourceLineNo">2040</span>   */<a name="line.2040"></a>
+<span class="sourceLineNo">2041</span>  public Optional&lt;byte[]&gt; getSplitPoint() {<a name="line.2041"></a>
+<span class="sourceLineNo">2042</span>    this.lock.readLock().lock();<a name="line.2042"></a>
+<span class="sourceLineNo">2043</span>    try {<a name="line.2043"></a>
+<span class="sourceLineNo">2044</span>      // Should already be enforced by the split policy!<a name="line.2044"></a>
+<span class="sourceLineNo">2045</span>      assert !this.getRegionInfo().isMetaRegion();<a name="line.2045"></a>
+<span class="sourceLineNo">2046</span>      // Not split-able if we find a reference store file present in the store.<a name="line.2046"></a>
+<span class="sourceLineNo">2047</span>      if (hasReferences()) {<a name="line.2047"></a>
+<span class="sourceLineNo">2048</span>        LOG.trace("Not splittable; has references: {}", this);<a name="line.2048"></a>
+<span class="sourceLineNo">2049</span>        return Optional.empty();<a name="line.2049"></a>
+<span class="sourceLineNo">2050</span>      }<a name="line.2050"></a>
+<span class="sourceLineNo">2051</span>      return this.storeEngine.getStoreFileManager().getSplitPoint();<a name="line.2051"></a>
+<span class="sourceLineNo">2052</span>    } catch(IOException e) {<a name="line.2052"></a>
+<span class="sourceLineNo">2053</span>      LOG.warn("Failed getting store size for {}", this, e);<a name="line.2053"></a>
+<span class="sourceLineNo">2054</span>    } finally {<a name="line.2054"></a>
+<span class="sourceLineNo">2055</span>      this.lock.readLock().unlock();<a name="line.2055"></a>
+<span class="sourceLineNo">2056</span>    }<a name="line.2056"></a>
+<span class="sourceLineNo">2057</span>    return Optional.empty();<a name="line.2057"></a>
 <span class="sourceLineNo">2058</span>  }<a name="line.2058"></a>
 <span class="sourceLineNo">2059</span><a name="line.2059"></a>
-<span class="sourceLineNo">2060</span>  public void triggerMajorCompaction() {<a name="line.2060"></a>
-<span class="sourceLineNo">2061</span>    this.forceMajor = true;<a name="line.2061"></a>
-<span class="sourceLineNo">2062</span>  }<a name="line.2062"></a>
-<span class="sourceLineNo">2063</span><a name="line.2063"></a>
-<span class="sourceLineNo">2064</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.2064"></a>
-<span class="sourceLineNo">2065</span>  // File administration<a name="line.2065"></a>
-<span class="sourceLineNo">2066</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.2066"></a>
-<span class="sourceLineNo">2067</span><a name="line.2067"></a>
-<span class="sourceLineNo">2068</span>  /**<a name="line.2068"></a>
-<span class="sourceLineNo">2069</span>   * Return a scanner for both the memstore and the HStore files. Assumes we are not in a<a name="line.2069"></a>
-<span class="sourceLineNo">2070</span>   * compaction.<a name="line.2070"></a>
-<span class="sourceLineNo">2071</span>   * @param scan Scan to apply when scanning the stores<a name="line.2071"></a>
-<span class="sourceLineNo">2072</span>   * @param targetCols columns to scan<a name="line.2072"></a>
-<span class="sourceLineNo">2073</span>   * @return a scanner over the current key values<a name="line.2073"></a>
-<span class="sourceLineNo">2074</span>   * @throws IOException on failure<a name="line.2074"></a>
-<span class="sourceLineNo">2075</span>   */<a name="line.2075"></a>
-<span class="sourceLineNo">2076</span>  public KeyValueScanner getScanner(Scan scan, final NavigableSet&lt;byte[]&gt; targetCols, long readPt)<a name="line.2076"></a>
-<span class="sourceLineNo">2077</span>      throws IOException {<a name="line.2077"></a>
-<span class="sourceLineNo">2078</span>    lock.readLock().lock();<a name="line.2078"></a>
-<span class="sourceLineNo">2079</span>    try {<a name="line.2079"></a>
-<span class="sourceLineNo">2080</span>      ScanInfo scanInfo;<a name="line.2080"></a>
-<span class="sourceLineNo">2081</span>      if (this.getCoprocessorHost() != null) {<a name="line.2081"></a>
-<span class="sourceLineNo">2082</span>        scanInfo = this.getCoprocessorHost().preStoreScannerOpen(this);<a name="line.2082"></a>
-<span class="sourceLineNo">2083</span>      } else {<a name="line.2083"></a>
-<span class="sourceLineNo">2084</span>        scanInfo = getScanInfo();<a name="line.2084"></a>
-<span class="sourceLineNo">2085</span>      }<a name="line.2085"></a>
-<span class="sourceLineNo">2086</span>      return createScanner(scan, scanInfo, targetCols, readPt);<a name="line.2086"></a>
-<span class="sourceLineNo">2087</span>    } finally {<a name="line.2087"></a>
-<span class="sourceLineNo">2088</span>      lock.readLock().unlock();<a name="line.2088"></a>
-<span class="sourceLineNo">2089</span>    }<a name="line.2089"></a>
-<span class="sourceLineNo">2090</span>  }<a name="line.2090"></a>
-<span class="sourceLineNo">2091</span><a name="line.2091"></a>
-<span class="sourceLineNo">2092</span>  // HMobStore will override this method to return its own implementation.<a name="line.2092"></a>
-<span class="sourceLineNo">2093</span>  protected KeyValueScanner createScanner(Scan scan, ScanInfo scanInfo,<a name="line.2093"></a>
-<span class="sourceLineNo">2094</span>      NavigableSet&lt;byte[]&gt; targetCols, long readPt) throws IOException {<a name="line.2094"></a>
-<span class="sourceLineNo">2095</span>    return scan.isReversed() ? new ReversedStoreScanner(this, scanInfo, scan, targetCols, readPt)<a name="line.2095"></a>
-<span class="sourceLineNo">2096</span>        : new StoreScanner(this, scanInfo, scan, targetCols, readPt);<a name="line.2096"></a>
-<span class="sourceLineNo">2097</span>  }<a name="line.2097"></a>
-<span class="sourceLineNo">2098</span><a name="line.2098"></a>
-<span class="sourceLineNo">2099</span>  /**<a name="line.2099"></a>
-<span class="sourceLineNo">2100</span>   * Recreates the scanners on the current list of active store file scanners<a name="line.2100"></a>
-<span class="sourceLineNo">2101</span>   * @param currentFileScanners the current set of active store file scanners<a name="line.2101"></a>
-<span class="sourceLineNo">2102</span>   * @param cacheBlocks cache the blocks or not<a name="line.2102"></a>
-<span class="sourceLineNo">2103</span>   * @param usePread use pread or not<a name="line.2103"></a>
-<span class="sourceLineNo">2104</span>   * @param isCompaction is the scanner for compaction<a name="line.2104"></a>
-<span class="sourceLineNo">2105</span>   * @param matcher the scan query matcher<a name="line.2105"></a>
-<span class="sourceLineNo">2106</span>   * @param startRow the scan's start row<a name="line.2106"></a>
-<span class="sourceLineNo">2107</span>   * @param includeStartRow should the scan include the start row<a name="line.2107"></a>
-<span class="sourceLineNo">2108</span>   * @param stopRow the scan's stop row<a name="line.2108"></a>
-<span class="sourceLineNo">2109</span>   * @param includeStopRow should the scan include the stop row<a name="line.2109"></a>
-<span class="sourceLineNo">2110</span>   * @param readPt the read point of the current scane<a name="line.2110"></a>
-<span class="sourceLineNo">2111</span>   * @param includeMemstoreScanner whether the current scanner should include memstorescanner<a name="line.2111"></a>
-<span class="sourceLineNo">2112</span>   * @return list of scanners recreated on the current Scanners<a name="line.2112"></a>
-<span class="sourceLineNo">2113</span>   * @throws IOException<a name="line.2113"></a>
-<span class="sourceLineNo">2114</span>   */<a name="line.2114"></a>
-<span class="sourceLineNo">2115</span>  public List&lt;KeyValueScanner&gt; recreateScanners(List&lt;KeyValueScanner&gt; currentFileScanners,<a name="line.2115"></a>
-<span class="sourceLineNo">2116</span>      boolean cacheBlocks, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.2116"></a>
-<span class="sourceLineNo">2117</span>      byte[] startRow, boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.2117"></a>
-<span class="sourceLineNo">2118</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.2118"></a>
-<span class="sourceLineNo">2119</span>    this.lock.readLock().lock();<a name="line.2119"></a>
-<span class="sourceLineNo">2120</span>    try {<a name="line.2120"></a>
-<span class="sourceLineNo">2121</span>      Map&lt;String, HStoreFile&gt; name2File =<a name="line.2121"></a>
-<span class="sourceLineNo">2122</span>          new HashMap&lt;&gt;(getStorefilesCount() + getCompactedFilesCount());<a name="line.2122"></a>
-<span class="sourceLineNo">2123</span>      for (HStoreFile file : getStorefiles()) {<a name="line.2123"></a>
-<span class="sourceLineNo">2124</span>        name2File.put(file.getFileInfo().getActiveFileName(), file);<a name="line.2124"></a>
-<span class="sourceLineNo">2125</span>      }<a name="line.2125"></a>
-<span class="sourceLineNo">2126</span>      Collection&lt;HStoreFile&gt; compactedFiles = getCompactedFiles();<a name="line.2126"></a>
-<span class="sourceLineNo">2127</span>      for (HStoreFile file : IterableUtils.emptyIfNull(compactedFiles)) {<a name="line.2127"></a>
-<span class="sourceLineNo">2128</span>        name2File.put(file.getFileInfo().getActiveFileName(), file);<a name="line.2128"></a>
-<span class="sourceLineNo">2129</span>      }<a name="line.2129"></a>
-<span class="sourceLineNo">2130</span>      List&lt;HStoreFile&gt; filesToReopen = new ArrayList&lt;&gt;();<a name="line.2130"></a>
-<span class="sourceLineNo">2131</span>      for (KeyValueScanner kvs : currentFileScanners) {<a name="line.2131"></a>
-<span class="sourceLineNo">2132</span>        assert kvs.isFileScanner();<a name="line.2132"></a>
-<span class="sourceLineNo">2133</span>        if (kvs.peek() == null) {<a name="line.2133"></a>
-<span class="sourceLineNo">2134</span>          continue;<a name="line.2134"></a>
-<span class="sourceLineNo">2135</span>        }<a name="line.2135"></a>
-<span class="sourceLineNo">2136</span>        filesToReopen.add(name2File.get(kvs.getFilePath().getName()));<a name="line.2136"></a>
-<span class="sourceLineNo">2137</span>      }<a name="line.2137"></a>
-<span class="sourceLineNo">2138</span>      if (filesToReopen.isEmpty()) {<a name="line.2138"></a>
-<span class="sourceLineNo">2139</span>        return null;<a name="line.2139"></a>
-<span class="sourceLineNo">2140</span>      }<a name="line.2140"></a>
-<span class="sourceLineNo">2141</span>      return getScanners(filesToReopen, cacheBlocks, false, false, matcher, startRow,<a name="line.2141"></a>
-<span class="sourceLineNo">2142</span>        includeStartRow, stopRow, includeStopRow, readPt, false);<a name="line.2142"></a>
-<span class="sourceLineNo">2143</span>    } finally {<a name="line.2143"></a>
-<span class="sourceLineNo">2144</span>      this.lock.readLock().unlock();<a name="line.2144"></a>
-<span class="sourceLineNo">2145</span>    }<a name="line.2145"></a>
-<span class="sourceLineNo">2146</span>  }<a name="line.2146"></a>
-<span class="sourceLineNo">2147</span><a name="line.2147"></a>
-<span class="sourceLineNo">2148</span>  @Override<a name="line.2148"></a>
-<span class="sourceLineNo">2149</span>  public String toString() {<a name="line.2149"></a>
-<span class="sourceLineNo">2150</span>    return this.getColumnFamilyName();<a name="line.2150"></a>
-<span class="sourceLineNo">2151</span>  }<a name="line.2151"></a>
-<span class="sourceLineNo">2152</span><a name="line.2152"></a>
-<span class="sourceLineNo">2153</span>  @Override<a name="line.2153"></a>
-<span class="sourceLineNo">2154</span>  public int getStorefilesCount() {<a name="line.2154"></a>
-<span class="sourceLineNo">2155</span>    return this.storeEngine.getStoreFileManager().getStorefileCount();<a name="line.2155"></a>
+<span class="sourceLineNo">2060</span>  @Override<a name="line.2060"></a>
+<span class="sourceLineNo">2061</span>  public long getLastCompactSize() {<a name="line.2061"></a>
+<span class="sourceLineNo">2062</span>    return this.lastCompactSize;<a name="line.2062"></a>
+<span class="sourceLineNo">2063</span>  }<a name="line.2063"></a>
+<span class="sourceLineNo">2064</span><a name="line.2064"></a>
+<span class="sourceLineNo">2065</span>  @Override<a name="line.2065"></a>
+<span class="sourceLineNo">2066</span>  public long getSize() {<a name="line.2066"></a>
+<span class="sourceLineNo">2067</span>    return storeSize.get();<a name="line.2067"></a>
+<span class="sourceLineNo">2068</span>  }<a name="line.2068"></a>
+<span class="sourceLineNo">2069</span><a name="line.2069"></a>
+<span class="sourceLineNo">2070</span>  public void triggerMajorCompaction() {<a name="line.2070"></a>
+<span class="sourceLineNo">2071</span>    this.forceMajor = true;<a name="line.2071"></a>
+<span class="sourceLineNo">2072</span>  }<a name="line.2072"></a>
+<span class="sourceLineNo">2073</span><a name="line.2073"></a>
+<span class="sourceLineNo">2074</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.2074"></a>
+<span class="sourceLineNo">2075</span>  // File administration<a name="line.2075"></a>
+<span class="sourceLineNo">2076</span>  //////////////////////////////////////////////////////////////////////////////<a name="line.2076"></a>
+<span class="sourceLineNo">2077</span><a name="line.2077"></a>
+<span class="sourceLineNo">2078</span>  /**<a name="line.2078"></a>
+<span class="sourceLineNo">2079</span>   * Return a scanner for both the memstore and the HStore files. Assumes we are not in a<a name="line.2079"></a>
+<span class="sourceLineNo">2080</span>   * compaction.<a name="line.2080"></a>
+<span class="sourceLineNo">2081</span>   * @param scan Scan to apply when scanning the stores<a name="line.2081"></a>
+<span class="sourceLineNo">2082</span>   * @param targetCols columns to scan<a name="line.2082"></a>
+<span class="sourceLineNo">2083</span>   * @return a scanner over the current key values<a name="line.2083"></a>
+<span class="sourceLineNo">2084</span>   * @throws IOException on failure<a name="line.2084"></a>
+<span class="sourceLineNo">2085</span>   */<a name="line.2085"></a>
+<span class="sourceLineNo">2086</span>  public KeyValueScanner getScanner(Scan scan, final NavigableSet&lt;byte[]&gt; targetCols, long readPt)<a name="line.2086"></a>
+<span class="sourceLineNo">2087</span>      throws IOException {<a name="line.2087"></a>
+<span class="sourceLineNo">2088</span>    lock.readLock().lock();<a name="line.2088"></a>
+<span class="sourceLineNo">2089</span>    try {<a name="line.2089"></a>
+<span class="sourceLineNo">2090</span>      ScanInfo scanInfo;<a name="line.2090"></a>
+<span class="sourceLineNo">2091</span>      if (this.getCoprocessorHost() != null) {<a name="line.2091"></a>
+<span class="sourceLineNo">2092</span>        scanInfo = this.getCoprocessorHost().preStoreScannerOpen(this);<a name="line.2092"></a>
+<span class="sourceLineNo">2093</span>      } else {<a name="line.2093"></a>
+<span class="sourceLineNo">2094</span>        scanInfo = getScanInfo();<a name="line.2094"></a>
+<span class="sourceLineNo">2095</span>      }<a name="line.2095"></a>
+<span class="sourceLineNo">2096</span>      return createScanner(scan, scanInfo, targetCols, readPt);<a name="line.2096"></a>
+<span class="sourceLineNo">2097</span>    } finally {<a name="line.2097"></a>
+<span class="sourceLineNo">2098</span>      lock.readLock().unlock();<a name="line.2098"></a>
+<span class="sourceLineNo">2099</span>    }<a name="line.2099"></a>
+<span class="sourceLineNo">2100</span>  }<a name="line.2100"></a>
+<span class="sourceLineNo">2101</span><a name="line.2101"></a>
+<span class="sourceLineNo">2102</span>  // HMobStore will override this method to return its own implementation.<a name="line.2102"></a>
+<span class="sourceLineNo">2103</span>  protected KeyValueScanner createScanner(Scan scan, ScanInfo scanInfo,<a name="line.2103"></a>
+<span class="sourceLineNo">2104</span>      NavigableSet&lt;byte[]&gt; targetCols, long readPt) throws IOException {<a name="line.2104"></a>
+<span class="sourceLineNo">2105</span>    return scan.isReversed() ? new ReversedStoreScanner(this, scanInfo, scan, targetCols, readPt)<a name="line.2105"></a>
+<span class="sourceLineNo">2106</span>        : new StoreScanner(this, scanInfo, scan, targetCols, readPt);<a name="line.2106"></a>
+<span class="sourceLineNo">2107</span>  }<a name="line.2107"></a>
+<span class="sourceLineNo">2108</span><a name="line.2108"></a>
+<span class="sourceLineNo">2109</span>  /**<a name="line.2109"></a>
+<span class="sourceLineNo">2110</span>   * Recreates the scanners on the current list of active store file scanners<a name="line.2110"></a>
+<span class="sourceLineNo">2111</span>   * @param currentFileScanners the current set of active store file scanners<a name="line.2111"></a>
+<span class="sourceLineNo">2112</span>   * @param cacheBlocks cache the blocks or not<a name="line.2112"></a>
+<span class="sourceLineNo">2113</span>   * @param usePread use pread or not<a name="line.2113"></a>
+<span class="sourceLineNo">2114</span>   * @param isCompaction is the scanner for compaction<a name="line.2114"></a>
+<span class="sourceLineNo">2115</span>   * @param matcher the scan query matcher<a name="line.2115"></a>
+<span class="sourceLineNo">2116</span>   * @param startRow the scan's start row<a name="line.2116"></a>
+<span class="sourceLineNo">2117</span>   * @param includeStartRow should the scan include the start row<a name="line.2117"></a>
+<span class="sourceLineNo">2118</span>   * @param stopRow the scan's stop row<a name="line.2118"></a>
+<span class="sourceLineNo">2119</span>   * @param includeStopRow should the scan include the stop row<a name="line.2119"></a>
+<span class="sourceLineNo">2120</span>   * @param readPt the read point of the current scane<a name="line.2120"></a>
+<span class="sourceLineNo">2121</span>   * @param includeMemstoreScanner whether the current scanner should include memstorescanner<a name="line.2121"></a>
+<span class="sourceLineNo">2122</span>   * @return list of scanners recreated on the current Scanners<a name="line.2122"></a>
+<span class="sourceLineNo">2123</span>   * @throws IOException<a name="line.2123"></a>
+<span class="sourceLineNo">2124</span>   */<a name="line.2124"></a>
+<span class="sourceLineNo">2125</span>  public List&lt;KeyValueScanner&gt; recreateScanners(List&lt;KeyValueScanner&gt; currentFileScanners,<a name="line.2125"></a>
+<span class="sourceLineNo">2126</span>      boolean cacheBlocks, boolean usePread, boolean isCompaction, ScanQueryMatcher matcher,<a name="line.2126"></a>
+<span class="sourceLineNo">2127</span>      byte[] startRow, boolean includeStartRow, byte[] stopRow, boolean includeStopRow, long readPt,<a name="line.2127"></a>
+<span class="sourceLineNo">2128</span>      boolean includeMemstoreScanner) throws IOException {<a name="line.2128"></a>
+<span class="sourceLineNo">2129</span>    this.lock.readLock().lock();<a name="line.2129"></a>
+<span class="sourceLineNo">2130</span>    try {<a name="line.2130"></a>
+<span class="sourceLineNo">2131</span>      Map&lt;String, HStoreFile&gt; name2File =<a name="line.2131"></a>
+<span class="sourceLineNo">2132</span>          new HashMap&lt;&gt;(getStorefilesCount() + getCompactedFilesCount());<a name="line.2132"></a>
+<span class="sourceLineNo">2133</span>      for (HStoreFile file : getStorefiles()) {<a name="line.2133"></a>
+<span class="sourceLineNo">2134</span>        name2File.put(file.getFileInfo().getActiveFileName(), file);<a name="line.2134"></a>
+<span class="sourceLineNo">2135</span>      }<a name="line.2135"></a>
+<span class="sourceLineNo">2136</span>      Collection&lt;HStoreFile&gt; compactedFiles = getCompactedFiles();<a name="line.2136"></a>
+<span class="sourceLineNo">2137</span>      for (HStoreFile file : IterableUtils.emptyIfNull(compactedFiles)) {<a name="line.2137"></a>
+<span class="sourceLineNo">2138</span>        name2File.put(file.getFileInfo().getActiveFileName(), file);<a name="line.2138"></a>
+<span class="sourceLineNo">2139</span>      }<a name="line.2139"></a>
+<span class="sourceLineNo">2140</span>      List&lt;HStoreFile&gt; filesToReopen = new ArrayList&lt;&gt;();<a name="line.2140"></a>
+<span class="sourceLineNo">2141</span>      for (KeyValueScanner kvs : currentFileScanners) {<a name="line.2141"></a>
+<span class="sourceLineNo">2142</span>        assert kvs.isFileScanner();<a name="line.2142"></a>
+<span class="sourceLineNo">2143</span>        if (kvs.peek() == null) {<a name="line.2143"></a>
+<span class="sourceLineNo">2144</span>          continue;<a name="line.2144"></a>
+<span class="sourceLineNo">2145</span>        }<a name="line.2145"></a>
+<span class="sourceLineNo">2146</span>        filesToReopen.add(name2File.get(kvs.getFilePath().getName()));<a name="line.2146"></a>
+<span class="sourceLineNo">2147</span>      }<a name="line.2147"></a>
+<span class="sourceLineNo">2148</span>      if (filesToReopen.isEmpty()) {<a name="line.2148"></a>
+<span class="sourceLineNo">2149</span>        return null;<a name="line.2149"></a>
+<span class="sourceLineNo">2150</span>      }<a name="line.2150"></a>
+<span class="sourceLineNo">2151</span>      return getScanners(filesToReopen, cacheBlocks, false, false, matcher, startRow,<a name="line.2151"></a>
+<span class="sourceLineNo">2152</span>        includeStartRow, stopRow, includeStopRow, readPt, false);<a name="line.2152"></a>
+<span class="sourceLineNo">2153</span>    } finally {<a name="line.2153"></a>
+<span class="sourceLineNo">2154</span>      this.lock.readLock().unlock();<a name="line.2154"></a>
+<span class="sourceLineNo">2155</span>    }<a name="line.2155"></a>
 <span class="sourceLineNo">2156</span>  }<a name="line.2156"></a>
 <span class="sourceLineNo">2157</span><a name="line.2157"></a>
 <span class="sourceLineNo">2158</span>  @Override<a name="line.2158"></a>
-<span class="sourceLineNo">2159</span>  public int getCompactedFilesCount() {<a name="line.2159"></a>
-<span class="sourceLineNo">2160</span>    return this.storeEngine.getStoreFileManager().getCompactedFilesCount();<a name="line.2160"></a>
+<span class="sourceLineNo">2159</span>  public String toString() {<a name="line.2159"></a>
+<span class="sourceLineNo">2160</span>    return this.getColumnFamilyName();<a name="line.2160"></a>
 <span class="sourceLineNo">2161</span>  }<a name="line.2161"></a>
 <span class="sourceLineNo">2162</span><a name="line.2162"></a>
-<span class="sourceLineNo">2163</span>  private LongStream getStoreFileAgeStream() {<a name="line.2163"></a>
-<span class="sourceLineNo">2164</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream().filter(sf -&gt; {<a name="line.2164"></a>
-<span class="sourceLineNo">2165</span>      if (sf.getReader() == null) {<a name="line.2165"></a>
-<span class="sourceLineNo">2166</span>        LOG.warn("StoreFile {} has a null Reader", sf);<a name="line.2166"></a>
-<span class="sourceLineNo">2167</span>        return false;<a name="line.2167"></a>
-<span class="sourceLineNo">2168</span>      } else {<a name="line.2168"></a>
-<span class="sourceLineNo">2169</span>        return true;<a name="line.2169"></a>
-<span class="sourceLineNo">2170</span>      }<a name="line.2170"></a>
-<span class="sourceLineNo">2171</span>    }).filter(HStoreFile::isHFile).mapToLong(sf -&gt; sf.getFileInfo().getCreatedTimestamp())<a name="line.2171"></a>
-<span class="sourceLineNo">2172</span>        .map(t -&gt; EnvironmentEdgeManager.currentTime() - t);<a name="line.2172"></a>
-<span class="sourceLineNo">2173</span>  }<a name="line.2173"></a>
-<span class="sourceLineNo">2174</span><a name="line.2174"></a>
-<span class="sourceLineNo">2175</span>  @Override<a name="line.2175"></a>
-<span class="sourceLineNo">2176</span>  public OptionalLong getMaxStoreFileAge() {<a name="line.2176"></a>
-<span class="sourceLineNo">2177</span>    return getStoreFileAgeStream().max();<a name="line.2177"></a>
-<span class="sourceLineNo">2178</span>  }<a name="line.2178"></a>
-<span class="sourceLineNo">2179</span><a name="line.2179"></a>
-<span class="sourceLineNo">2180</span>  @Override<a name="line.2180"></a>
-<span class="sourceLineNo">2181</span>  public OptionalLong getMinStoreFileAge() {<a name="line.2181"></a>
-<span class="sourceLineNo">2182</span>    return getStoreFileAgeStream().min();<a name="line.2182"></a>
+<span class="sourceLineNo">2163</span>  @Override<a name="line.2163"></a>
+<span class="sourceLineNo">2164</span>  public int getStorefilesCount() {<a name="line.2164"></a>
+<span class="sourceLineNo">2165</span>    return this.storeEngine.getStoreFileManager().getStorefileCount();<a name="line.2165"></a>
+<span class="sourceLineNo">2166</span>  }<a name="line.2166"></a>
+<span class="sourceLineNo">2167</span><a name="line.2167"></a>
+<span class="sourceLineNo">2168</span>  @Override<a name="line.2168"></a>
+<span class="sourceLineNo">2169</span>  public int getCompactedFilesCount() {<a name="line.2169"></a>
+<span class="sourceLineNo">2170</span>    return this.storeEngine.getStoreFileManager().getCompactedFilesCount();<a name="line.2170"></a>
+<span class="sourceLineNo">2171</span>  }<a name="line.2171"></a>
+<span class="sourceLineNo">2172</span><a name="line.2172"></a>
+<span class="sourceLineNo">2173</span>  private LongStream getStoreFileAgeStream() {<a name="line.2173"></a>
+<span class="sourceLineNo">2174</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream().filter(sf -&gt; {<a name="line.2174"></a>
+<span class="sourceLineNo">2175</span>      if (sf.getReader() == null) {<a name="line.2175"></a>
+<span class="sourceLineNo">2176</span>        LOG.warn("StoreFile {} has a null Reader", sf);<a name="line.2176"></a>
+<span class="sourceLineNo">2177</span>        return false;<a name="line.2177"></a>
+<span class="sourceLineNo">2178</span>      } else {<a name="line.2178"></a>
+<span class="sourceLineNo">2179</span>        return true;<a name="line.2179"></a>
+<span class="sourceLineNo">2180</span>      }<a name="line.2180"></a>
+<span class="sourceLineNo">2181</span>    }).filter(HStoreFile::isHFile).mapToLong(sf -&gt; sf.getFileInfo().getCreatedTimestamp())<a name="line.2181"></a>
+<span class="sourceLineNo">2182</span>        .map(t -&gt; EnvironmentEdgeManager.currentTime() - t);<a name="line.2182"></a>
 <span class="sourceLineNo">2183</span>  }<a name="line.2183"></a>
 <span class="sourceLineNo">2184</span><a name="line.2184"></a>
 <span class="sourceLineNo">2185</span>  @Override<a name="line.2185"></a>
-<span class="sourceLineNo">2186</span>  public OptionalDouble getAvgStoreFileAge() {<a name="line.2186"></a>
-<span class="sourceLineNo">2187</span>    return getStoreFileAgeStream().average();<a name="line.2187"></a>
+<span class="sourceLineNo">2186</span>  public OptionalLong getMaxStoreFileAge() {<a name="line.2186"></a>
+<span class="sourceLineNo">2187</span>    return getStoreFileAgeStream().max();<a name="line.2187"></a>
 <span class="sourceLineNo">2188</span>  }<a name="line.2188"></a>
 <span class="sourceLineNo">2189</span><a name="line.2189"></a>
 <span class="sourceLineNo">2190</span>  @Override<a name="line.2190"></a>
-<span class="sourceLineNo">2191</span>  public long getNumReferenceFiles() {<a name="line.2191"></a>
-<span class="sourceLineNo">2192</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2192"></a>
-<span class="sourceLineNo">2193</span>        .filter(HStoreFile::isReference).count();<a name="line.2193"></a>
-<span class="sourceLineNo">2194</span>  }<a name="line.2194"></a>
-<span class="sourceLineNo">2195</span><a name="line.2195"></a>
-<span class="sourceLineNo">2196</span>  @Override<a name="line.2196"></a>
-<span class="sourceLineNo">2197</span>  public long getNumHFiles() {<a name="line.2197"></a>
-<span class="sourceLineNo">2198</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2198"></a>
-<span class="sourceLineNo">2199</span>        .filter(HStoreFile::isHFile).count();<a name="line.2199"></a>
-<span class="sourceLineNo">2200</span>  }<a name="line.2200"></a>
-<span class="sourceLineNo">2201</span><a name="line.2201"></a>
-<span class="sourceLineNo">2202</span>  @Override<a name="line.2202"></a>
-<span class="sourceLineNo">2203</span>  public long getStoreSizeUncompressed() {<a name="line.2203"></a>
-<span class="sourceLineNo">2204</span>    return this.totalUncompressedBytes.get();<a name="line.2204"></a>
-<span class="sourceLineNo">2205</span>  }<a name="line.2205"></a>
-<span class="sourceLineNo">2206</span><a name="line.2206"></a>
-<span class="sourceLineNo">2207</span>  @Override<a name="line.2207"></a>
-<span class="sourceLineNo">2208</span>  public long getStorefilesSize() {<a name="line.2208"></a>
-<span class="sourceLineNo">2209</span>    // Include all StoreFiles<a name="line.2209"></a>
-<span class="sourceLineNo">2210</span>    return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(), sf -&gt; true);<a name="line.2210"></a>
-<span class="sourceLineNo">2211</span>  }<a name="line.2211"></a>
-<span class="sourceLineNo">2212</span><a name="line.2212"></a>
-<span class="sourceLineNo">2213</span>  @Override<a name="line.2213"></a>
-<span class="sourceLineNo">2214</span>  public long getHFilesSize() {<a name="line.2214"></a>
-<span class="sourceLineNo">2215</span>    // Include only StoreFiles which are HFiles<a name="line.2215"></a>
-<span class="sourceLineNo">2216</span>    return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(),<a name="line.2216"></a>
-<span class="sourceLineNo">2217</span>      HStoreFile::isHFile);<a name="line.2217"></a>
-<span class="sourceLineNo">2218</span>  }<a name="line.2218"></a>
-<span class="sourceLineNo">2219</span><a name="line.2219"></a>
-<span class="sourceLineNo">2220</span>  private long getTotalUncompressedBytes(List&lt;HStoreFile&gt; files) {<a name="line.2220"></a>
-<span class="sourceLineNo">2221</span>    return files.stream()<a name="line.2221"></a>
-<span class="sourceLineNo">2222</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, StoreFileReader::getTotalUncompressedBytes))<a name="line.2222"></a>
-<span class="sourceLineNo">2223</span>      .sum();<a name="line.2223"></a>
-<span class="sourceLineNo">2224</span>  }<a name="line.2224"></a>
-<span class="sourceLineNo">2225</span><a name="line.2225"></a>
-<span class="sourceLineNo">2226</span>  private long getStorefilesSize(Collection&lt;HStoreFile&gt; files, Predicate&lt;HStoreFile&gt; predicate) {<a name="line.2226"></a>
-<span class="sourceLineNo">2227</span>    return files.stream().filter(predicate)<a name="line.2227"></a>
-<span class="sourceLineNo">2228</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, StoreFileReader::length)).sum();<a name="line.2228"></a>
-<span class="sourceLineNo">2229</span>  }<a name="line.2229"></a>
-<span class="sourceLineNo">2230</span><a name="line.2230"></a>
-<span class="sourceLineNo">2231</span>  private long getStorefileFieldSize(HStoreFile file, ToLongFunction&lt;StoreFileReader&gt; f) {<a name="line.2231"></a>
-<span class="sourceLineNo">2232</span>    if (file == null) {<a name="line.2232"></a>
-<span class="sourceLineNo">2233</span>      return 0L;<a name="line.2233"></a>
-<span class="sourceLineNo">2234</span>    }<a name="line.2234"></a>
-<span class="sourceLineNo">2235</span>    StoreFileReader reader = file.getReader();<a name="line.2235"></a>
-<span class="sourceLineNo">2236</span>    if (reader == null) {<a name="line.2236"></a>
-<span class="sourceLineNo">2237</span>      return 0L;<a name="line.2237"></a>
-<span class="sourceLineNo">2238</span>    }<a name="line.2238"></a>
-<span class="sourceLineNo">2239</span>    return f.applyAsLong(reader);<a name="line.2239"></a>
-<span class="sourceLineNo">2240</span>  }<a name="line.2240"></a>
-<span class="sourceLineNo">2241</span><a name="line.2241"></a>
-<span class="sourceLineNo">2242</span>  private long getStorefilesFieldSize(ToLongFunction&lt;StoreFileReader&gt; f) {<a name="line.2242"></a>
-<span class="sourceLineNo">2243</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2243"></a>
-<span class="sourceLineNo">2244</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, f)).sum();<a name="line.2244"></a>
-<span class="sourceLineNo">2245</span>  }<a name="line.2245"></a>
-<span class="sourceLineNo">2246</span><a name="line.2246"></a>
-<span class="sourceLineNo">2247</span>  @Override<a name="line.2247"></a>
-<span class="sourceLineNo">2248</span>  public long getStorefilesRootLevelIndexSize() {<a name="line.2248"></a>
-<span class="sourceLineNo">2249</span>    return getStorefilesFieldSize(StoreFileReader::indexSize);<a name="line.2249"></a>
+<span class="sourceLineNo">2191</span>  public OptionalLong getMinStoreFileAge() {<a name="line.2191"></a>
+<span class="sourceLineNo">2192</span>    return getStoreFileAgeStream().min();<a name="line.2192"></a>
+<span class="sourceLineNo">2193</span>  }<a name="line.2193"></a>
+<span class="sourceLineNo">2194</span><a name="line.2194"></a>
+<span class="sourceLineNo">2195</span>  @Override<a name="line.2195"></a>
+<span class="sourceLineNo">2196</span>  public OptionalDouble getAvgStoreFileAge() {<a name="line.2196"></a>
+<span class="sourceLineNo">2197</span>    return getStoreFileAgeStream().average();<a name="line.2197"></a>
+<span class="sourceLineNo">2198</span>  }<a name="line.2198"></a>
+<span class="sourceLineNo">2199</span><a name="line.2199"></a>
+<span class="sourceLineNo">2200</span>  @Override<a name="line.2200"></a>
+<span class="sourceLineNo">2201</span>  public long getNumReferenceFiles() {<a name="line.2201"></a>
+<span class="sourceLineNo">2202</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2202"></a>
+<span class="sourceLineNo">2203</span>        .filter(HStoreFile::isReference).count();<a name="line.2203"></a>
+<span class="sourceLineNo">2204</span>  }<a name="line.2204"></a>
+<span class="sourceLineNo">2205</span><a name="line.2205"></a>
+<span class="sourceLineNo">2206</span>  @Override<a name="line.2206"></a>
+<span class="sourceLineNo">2207</span>  public long getNumHFiles() {<a name="line.2207"></a>
+<span class="sourceLineNo">2208</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2208"></a>
+<span class="sourceLineNo">2209</span>        .filter(HStoreFile::isHFile).count();<a name="line.2209"></a>
+<span class="sourceLineNo">2210</span>  }<a name="line.2210"></a>
+<span class="sourceLineNo">2211</span><a name="line.2211"></a>
+<span class="sourceLineNo">2212</span>  @Override<a name="line.2212"></a>
+<span class="sourceLineNo">2213</span>  public long getStoreSizeUncompressed() {<a name="line.2213"></a>
+<span class="sourceLineNo">2214</span>    return this.totalUncompressedBytes.get();<a name="line.2214"></a>
+<span class="sourceLineNo">2215</span>  }<a name="line.2215"></a>
+<span class="sourceLineNo">2216</span><a name="line.2216"></a>
+<span class="sourceLineNo">2217</span>  @Override<a name="line.2217"></a>
+<span class="sourceLineNo">2218</span>  public long getStorefilesSize() {<a name="line.2218"></a>
+<span class="sourceLineNo">2219</span>    // Include all StoreFiles<a name="line.2219"></a>
+<span class="sourceLineNo">2220</span>    return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(), sf -&gt; true);<a name="line.2220"></a>
+<span class="sourceLineNo">2221</span>  }<a name="line.2221"></a>
+<span class="sourceLineNo">2222</span><a name="line.2222"></a>
+<span class="sourceLineNo">2223</span>  @Override<a name="line.2223"></a>
+<span class="sourceLineNo">2224</span>  public long getHFilesSize() {<a name="line.2224"></a>
+<span class="sourceLineNo">2225</span>    // Include only StoreFiles which are HFiles<a name="line.2225"></a>
+<span class="sourceLineNo">2226</span>    return getStorefilesSize(this.storeEngine.getStoreFileManager().getStorefiles(),<a name="line.2226"></a>
+<span class="sourceLineNo">2227</span>      HStoreFile::isHFile);<a name="line.2227"></a>
+<span class="sourceLineNo">2228</span>  }<a name="line.2228"></a>
+<span class="sourceLineNo">2229</span><a name="line.2229"></a>
+<span class="sourceLineNo">2230</span>  private long getTotalUncompressedBytes(List&lt;HStoreFile&gt; files) {<a name="line.2230"></a>
+<span class="sourceLineNo">2231</span>    return files.stream()<a name="line.2231"></a>
+<span class="sourceLineNo">2232</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, StoreFileReader::getTotalUncompressedBytes))<a name="line.2232"></a>
+<span class="sourceLineNo">2233</span>      .sum();<a name="line.2233"></a>
+<span class="sourceLineNo">2234</span>  }<a name="line.2234"></a>
+<span class="sourceLineNo">2235</span><a name="line.2235"></a>
+<span class="sourceLineNo">2236</span>  private long getStorefilesSize(Collection&lt;HStoreFile&gt; files, Predicate&lt;HStoreFile&gt; predicate) {<a name="line.2236"></a>
+<span class="sourceLineNo">2237</span>    return files.stream().filter(predicate)<a name="line.2237"></a>
+<span class="sourceLineNo">2238</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, StoreFileReader::length)).sum();<a name="line.2238"></a>
+<span class="sourceLineNo">2239</span>  }<a name="line.2239"></a>
+<span class="sourceLineNo">2240</span><a name="line.2240"></a>
+<span class="sourceLineNo">2241</span>  private long getStorefileFieldSize(HStoreFile file, ToLongFunction&lt;StoreFileReader&gt; f) {<a name="line.2241"></a>
+<span class="sourceLineNo">2242</span>    if (file == null) {<a name="line.2242"></a>
+<span class="sourceLineNo">2243</span>      return 0L;<a name="line.2243"></a>
+<span class="sourceLineNo">2244</span>    }<a name="line.2244"></a>
+<span class="sourceLineNo">2245</span>    StoreFileReader reader = file.getReader();<a name="line.2245"></a>
+<span class="sourceLineNo">2246</span>    if (reader == null) {<a name="line.2246"></a>
+<span class="sourceLineNo">2247</span>      return 0L;<a name="line.2247"></a>
+<span class="sourceLineNo">2248</span>    }<a name="line.2248"></a>
+<span class="sourceLineNo">2249</span>    return f.applyAsLong(reader);<a name="line.2249"></a>
 <span class="sourceLineNo">2250</span>  }<a name="line.2250"></a>
 <span class="sourceLineNo">2251</span><a name="line.2251"></a>
-<span class="sourceLineNo">2252</span>  @Override<a name="line.2252"></a>
-<span class="sourceLineNo">2253</span>  public long getTotalStaticIndexSize() {<a name="line.2253"></a>
-<span class="sourceLineNo">2254</span>    return getStorefilesFieldSize(StoreFileReader::getUncompressedDataIndexSize);<a name="line.2254"></a>
+<span class="sourceLineNo">2252</span>  private long getStorefilesFieldSize(ToLongFunction&lt;StoreFileReader&gt; f) {<a name="line.2252"></a>
+<span class="sourceLineNo">2253</span>    return this.storeEngine.getStoreFileManager().getStorefiles().stream()<a name="line.2253"></a>
+<span class="sourceLineNo">2254</span>      .mapToLong(file -&gt; getStorefileFieldSize(file, f)).sum();<a name="line.2254"></a>
 <span class="sourceLineNo">2255</span>  }<a name="line.2255"></a>
 <span class="sourceLineNo">2256</span><a name="line.2256"></a>
 <span class="sourceLineNo">2257</span>  @Override<a name="line.2257"></a>
-<span class="sourceLineNo">2258</span>  public long getTotalStaticBloomSize() {<a name="line.2258"></a>
-<span class="sourceLineNo">2259</span>    return getStorefilesFieldSize(StoreFileReader::getTotalBloomSize);<a name="line.2259"></a>
+<span class="sourceLineNo">2258</span>  public long getStorefilesRootLevelIndexSize() {<a name="line.2258"></a>
+<span class="sourceLineNo">2259</span>    return getStorefilesFieldSize(StoreFileReader::indexSize);<a name="line.2259"></a>
 <span class="sourceLineNo">2260</span>  }<a name="line.2260"></a>
 <span class="sourceLineNo">2261</span><a name="line.2261"></a>
 <span class="sourceLineNo">2262</span>  @Override<a name="line.2262"></a>
-<span class="sourceLineNo">2263</span>  public MemStoreSize getMemStoreSize() {<a name="line.2263"></a>
-<span class="sourceLineNo">2264</span>    return this.memstore.size();<a name="line.2264"></a>
+<span class="sourceLineNo">2263</span>  public long getTotalStaticIndexSize() {<a name="line.2263"></a>
+<span class="sourceLineNo">2264</span>    return getStorefilesFieldSize(StoreFileReader::getUncompressedDataIndexSize);<a name="line.2264"></a>
 <span class="sourceLineNo">2265</span>  }<a name="line.2265"></a>
 <span class="sourceLineNo">2266</span><a name="line.2266"></a>
 <span class="sourceLineNo">2267</span>  @Override<a name="line.2267"></a>
-<span class="sourceLineNo">2268</span>  public int getCompactPriority() {<a name="line.2268"></a>
-<span class="sourceLineNo">2269</span>    int priority = this.storeEngine.getStoreFileManager().getStoreCompactionPriority();<a name="line.2269"></a>
-<span class="sourceLineNo">2270</span>    if (priority == PRIORITY_USER) {<a name="line.2270"></a>
-<span class="sourceLineNo">2271</span>      LOG.warn("Compaction priority is USER despite there being no user compaction");<a name="line.2271"></a>
-<span class="sourceLineNo">2272</span>    }<a name="line.2272"></a>
-<span class="sourceLineNo">2273</span>    return priority;<a name="line.2273"></a>
-<span class="sourceLineNo">2274</span>  }<a name="line.2274"></a>
-<span class="sourceLineNo">2275</span><a name="line.2275"></a>
-<span class="sourceLineNo">2276</span>  public boolean throttleCompaction(long compactionSize) {<a name="line.2276"></a>
-<span class="sourceLineNo">2277</span>    return storeEngine.getCompactionPolicy().throttleCompaction(compactionSize);<a name="line.2277"></a>
-<span class="sourceLineNo">2278</span>  }<a name="line.2278"></a>
-<span class="sourceLineNo">2279</span><a name="line.2279"></a>
-<span class="sourceLineNo">2280</span>  public HRegion getHRegion() {<a name="line.2280"></a>
-<span class="sourceLineNo">2281</span>    return this.region;<a name="line.2281"></a>
-<span class="sourceLineNo">2282</span>  }<a name="line.2282"></a>
-<span class="sourceLineNo">2283</span><a name="line.2283"></a>
-<span class="sourceLineNo">2284</span>  public RegionCoprocessorHost getCoprocessorHost() {<a name="line.2284"></a>
-<span class="sourceLineNo">2285</span>    return this.region.getCoprocessorHost();<a name="line.2285"></a>
-<span class="sourceLineNo">2286</span>  }<a name="line.2286"></a>
-<span class="sourceLineNo">2287</span><a name="line.2287"></a>
-<span class="sourceLineNo">2288</span>  @Override<a name="line.2288"></a>
-<span class="sourceLineNo">2289</span>  public RegionInfo getRegionInfo() {<a name="line.2289"></a>
-<span class="sourceLineNo">2290</span>    return this.fs.getRegionInfo();<a name="line.2290"></a>
-<span class="sourceLineNo">2291</span>  }<a name="line.2291"></a>
-<span class="sourceLineNo">2292</span><a name="line.2292"></a>
-<span class="sourceLineNo">2293</span>  @Override<a name="line.2293"></a>
-<span class="sourceLineNo">2294</span>  public boolean areWritesEnabled() {<a name="line.2294"></a>
-<span class="sourceLineNo">2295</span>    return this.region.areWritesEnabled();<a name="line.2295"></a>
+<span class="sourceLineNo">2268</span>  public long getTotalStaticBloomSize() {<a name="line.2268"></a>
+<span class="sourceLineNo">2269</span>    return getStorefilesFieldSize(StoreFileReader::getTotalBloomSize);<a name="line.2269"></a>
+<span class="sourceLineNo">2270</span>  }<a name="line.2270"></a>
+<span class="sourceLineNo">2271</span><a name="line.2271"></a>
+<span class="sourceLineNo">2272</span>  @Override<a name="line.2272"></a>
+<span class="sourceLineNo">2273</span>  public MemStoreSize getMemStoreSize() {<a name="line.2273"></a>
+<span class="sourceLineNo">2274</span>    return this.memstore.size();<a name="line.2274"></a>
+<span class="sourceLineNo">2275</span>  }<a name="line.2275"></a>
+<span class="sourceLineNo">2276</span><a name="line.2276"></a>
+<span class="sourceLineNo">2277</span>  @Override<a name="line.2277"></a>
+<span class="sourceLineNo">2278</span>  public int getCompactPriority() {<a name="line.2278"></a>
+<span class="sourceLineNo">2279</span>    int priority = this.storeEngine.getStoreFileManager().getStoreCompactionPriority();<a name="line.2279"></a>
+<span class="sourceLineNo">2280</span>    if (priority == PRIORITY_USER) {<a name="line.2280"></a>
+<span class="sourceLineNo">2281</span>      LOG.warn("Compaction priority is USER despite there being no user compaction");<a name="line.2281"></a>
+<span class="sourceLineNo">2282</span>    }<a name="line.2282"></a>
+<span class="sourceLineNo">2283</span>    return priority;<a name="line.2283"></a>
+<span class="sourceLineNo">2284</span>  }<a name="line.2284"></a>
+<span class="sourceLineNo">2285</span><a name="line.2285"></a>
+<span class="sourceLineNo">2286</span>  public boolean throttleCompaction(long compactionSize) {<a name="line.2286"></a>
+<span class="sourceLineNo">2287</span>    return storeEngine.getCompactionPolicy().throttleCompaction(compactionSize);<a name="line.2287"></a>
+<span class="sourceLineNo">2288</span>  }<a name="line.2288"></a>
+<span class="sourceLineNo">2289</span><a name="line.2289"></a>
+<span class="sourceLineNo">2290</span>  public HRegion getHRegion() {<a name="line.2290"></a>
+<span class="sourceLineNo">2291</span>    return this.region;<a name="line.2291"></a>
+<span class="sourceLineNo">2292</span>  }<a name="line.2292"></a>
+<span class="sourceLineNo">2293</span><a name="line.2293"></a>
+<span class="sourceLineNo">2294</span>  public RegionCoprocessorHost getCoprocessorHost() {<a name="line.2294"></a>
+<span class="sourceLineNo">2295</span>    return this.region.getCoprocessorHost();<a name="line.2295"></a>
 <span class="sourceLineNo">2296</span>  }<a name="line.2296"></a>
 <span class="sourceLineNo">2297</span><a name="line.2297"></a>
 <span class="sourceLineNo">2298</span>  @Override<a name="line.2298"></a>
-<span class="sourceLineNo">2299</span>  public long getSmallestReadPoint() {<a name="line.2299"></a>
-<span class="sourceLineNo">2300</span>    return this.region.getSmallestReadPoint();<a name="line.2300"></a>
+<span class="sourceLineNo">2299</span>  public RegionInfo getRegionInfo() {<a name="line.2299"></a>
+<span class="sourceLineNo">2300</span>    return this.fs.getRegionInfo();<a name="line.2300"></a>
 <span class="sourceLineNo">2301</span>  }<a name="line.2301"></a>
 <span class="sourceLineNo">2302</span><a name="line.2302"></a>
-<span class="sourceLineNo">2303</span>  /**<a name="line.2303"></a>
-<span class="sourceLineNo">2304</span>   * Adds or replaces the specified KeyValues.<a name="line.2304"></a>
-<span class="sourceLineNo">2305</span>   * &lt;p&gt;<a name="line.2305"></a>
-<span class="sourceLineNo">2306</span>   * For each KeyValue specified, if a cell with the same row, family, and qualifier exists in<a name="line.2306"></a>
-<span class="sourceLineNo">2307</span>   * MemStore, it will be replaced. Otherwise, it will just be inserted to MemStore.<a name="line.2307"></a>
-<span class="sourceLineNo">2308</span>   * &lt;p&gt;<a name="line.2308"></a>
-<span class="sourceLineNo">2309</span>   * This operation is atomic on each KeyValue (row/family/qualifier) but not necessarily atomic<a name="line.2309"></a>
-<span class="sourceLineNo">2310</span>   * across all of them.<a name="line.2310"></a>
-<span class="sourceLineNo">2311</span>   * @param readpoint readpoint below which we can safely remove duplicate KVs<a name="line.2311"></a>
-<span class="sourceLineNo">2312</span>   * @throws IOException<a name="line.2312"></a>
-<span class="sourceLineNo">2313</span>   */<a name="line.2313"></a>
-<span class="sourceLineNo">2314</span>  public void upsert(Iterable&lt;Cell&gt; cells, long readpoint, MemStoreSizing memstoreSizing)<a name="line.2314"></a>
-<span class="sourceLineNo">2315</span>      throws IOException {<a name="line.2315"></a>
-<span class="sourceLineNo">2316</span>    this.lock.readLock().lock();<a name="line.2316"></a>
-<span class="sourceLineNo">2317</span>    try {<a name="line.2317"></a>
-<span class="sourceLineNo">2318</span>      this.memstore.upsert(cells, readpoint, memstoreSizing);<a name="line.2318"></a>
-<span class="sourceLineNo">2319</span>    } finally {<a name="line.2319"></a>
-<span class="sourceLineNo">2320</span>      this.lock.readLock().unlock();<a name="line.2320"></a>
-<span class="sourceLineNo">2321</span>    }<a name="line.2321"></a>
-<span class="sourceLineNo">2322</span>  }<a name="line.2322"></a>
-<span class="sourceLineNo">2323</span><a name="line.2323"></a>
-<span class="sourceLineNo">2324</span>  public StoreFlushContext createFlushContext(long cacheFlushId, FlushLifeCycleTracker tracker) {<a name="line.2324"></a>
-<span class="sourceLineNo">2325</span>    return new StoreFlusherImpl(cacheFlushId, tracker);<a name="line.2325"></a>
-<span class="sourceLineNo">2326</span>  }<a name="line.2326"></a>
-<span class="sourceLineNo">2327</span><a name="line.2327"></a>
-<span class="sourceLineNo">2328</span>  private final class StoreFlusherImpl implements StoreFlushContext {<a name="line.2328"></a>
-<span class="sourceLineNo">2329</span><a name="line.2329"></a>
-<span class="sourceLineNo">2330</span>    private final FlushLifeCycleTracker tracker;<a name="line.2330"></a>
-<span class="sourceLineNo">2331</span>    private final long cacheFlushSeqNum;<a name="line.2331"></a>
-<span class="sourceLineNo">2332</span>    private MemStoreSnapshot snapshot;<a name="line.2332"></a>
-<span class="sourceLineNo">2333</span>    private List&lt;Path&gt; tempFiles;<a name="line.2333"></a>
-<span class="sourceLineNo">2334</span>    private List&lt;Path&gt; committedFiles;<a name="line.2334"></a>
-<span class="sourceLineNo">2335</span>    private long cacheFlushCount;<a name="line.2335"></a>
-<span class="sourceLineNo">2336</span>    private long cacheFlushSize;<a name="line.2336"></a>
-<span class="sourceLineNo">2337</span>    private long outputFileSize;<a name="line.2337"></a>
-<span class="sourceLineNo">2338</span><a name="line.2338"></a>
-<span class="sourceLineNo">2339</span>    private StoreFlusherImpl(long cacheFlushSeqNum, FlushLifeCycleTracker tracker) {<a name="line.2339"></a>
-<span class="sourceLineNo">2340</span>      this.cacheFlushSeqNum = cacheFlushSeqNum;<a name="line.2340"></a>
-<span class="sourceLineNo">2341</span>      this.tracker = tracker;<a name="line.2341"></a>
-<span class="sourceLineNo">2342</span>    }<a name="line.2342"></a>
-<span class="sourceLineNo">2343</span><a name="line.2343"></a>
-<span class="sourceLineNo">2344</span>    /**<a name="line.2344"></a>
-<span class="sourceLineNo">2345</span>     * This is not thread safe. The caller should have a lock on the region or the store.<a name="line.2345"></a>
-<span class="sourceLineNo">2346</span>     * If necessary, the lock can be added with the patch provided in HBASE-10087<a name="line.2346"></a>
-<span class="sourceLineNo">2347</span>     */<a name="line.2347"></a>
-<span class="sourceLineNo">2348</span>    @Override<a name="line.2348"></a>
-<span class="sourceLineNo">2349</span>    public MemStoreSize prepare() {<a name="line.2349"></a>
-<span class="sourceLineNo">2350</span>      // passing the current sequence number of the wal - to allow bookkeeping in the memstore<a name="line.2350"></a>
-<span class="sourceLineNo">2351</span>      this.snapshot = memstore.snapshot();<a name="line.2351"></a>
-<span class="sourceLineNo">2352</span>      this.cacheFlushCount = snapshot.getCellsCount();<a name="line.2352"></a>
-<span class="sourceLineNo">2353</span>      this.cacheFlushSize = snapshot.getDataSize();<a name="line.2353"></a>
-<span class="sourceLineNo">2354</span>      committedFiles = new ArrayList&lt;&gt;(1);<a name="line.2354"></a>
-<span class="sourceLineNo">2355</span>      return snapshot.getMemStoreSize();<a name="line.2355"></a>
-<span class="sourceLineNo">2356</span>    }<a name="line.2356"></a>
-<span class="sourceLineNo">2357</span><a name="line.2357"></a>
+<span class="sourceLineNo">2303</span>  @Override<a name="line.2303"></a>
+<span class="sourceLineNo">2304</span>  public boolean areWritesEnabled() {<a name="line.2304"></a>
+<span class="sourceLineNo">2305</span>    return this.region.areWritesEnabled();<a name="line.2305"></a>
+<span class="sourceLineNo">2306</span>  }<a name="line.2306"></a>
+<span class="sourceLineNo">2307</span><a name="line.2307"></a>
+<span class="sourceLineNo">2308</span>  @Override<a name="line.2308"></a>
+<span class="sourceLineNo">2309</span>  public long getSmallestReadPoint() {<a name="line.2309"></a>
+<span class="sourceLineNo">2310</span>    return this.region.getSmallestReadPoint();<a name="line.2310"></a>
+<span class="sourceLineNo">2311</span>  }<a name="line.2311"></a>
+<span class="sourceLineNo">2312</span><a name="line.2312"></a>
+<span class="sourceLineNo">2313</span>  /**<a name="line.2313"></a>
+<span class="sourceLineNo">2314</span>   * Adds or replaces the specified KeyValues.<a name="line.2314"></a>
+<span class="sourceLineNo">2315</span>   * &lt;p&gt;<a name="line.2315"></a>
+<span class="sourceLineNo">2316</span>   * For each KeyValue specified, if a cell with the same row, family, and qualifier exists in<a name="line.2316"></a>
+<span class="sourceLineNo">2317</span>   * MemStore, it will be replaced. Otherwise, it will just be inserted to MemStore.<a name="line.2317"></a>
+<span class="sourceLineNo">2318</span>   * &lt;p&gt;<a name="line.2318"></a>
+<span class="sourceLineNo">2319</span>   * This operation is atomic on each KeyValue (row/family/qualifier) but not necessarily atomic<a name="line.2319"></a>
+<span class="sourceLineNo">2320</span>   * across all of them.<a name="line.2320"></a>
+<span class="sourceLineNo">2321</span>   * @param readpoint readpoint below which we can safely remove duplicate KVs<a name="line.2321"></a>
+<span class="sourceLineNo">2322</span>   * @throws IOException<a name="line.2322"></a>
+<span class="sourceLineNo">2323</span>   */<a name="line.2323"></a>
+<span class="sourceLineNo">2324</span>  public void upsert(Iterable&lt;Cell&gt; cells, long readpoint, MemStoreSizing memstoreSizing)<a name="line.2324"></a>
+<span class="sourceLineNo">2325</span>      throws IOException {<a name="line.2325"></a>
+<span class="sourceLineNo">2326</span>    this.lock.readLock().lock();<a name="line.2326"></a>
+<span class="sourceLineNo">2327</span>    try {<a name="line.2327"></a>
+<span class="sourceLineNo">2328</span>      this.memstore.upsert(cells, readpoint, memstoreSizing);<a name="line.2328"></a>
+<span class="sourceLineNo">2329</span>    } finally {<a name="line.2329"></a>
+<span class="sourceLineNo">2330</span>      this.lock.readLock().unlock();<a name="line.2330"></a>
+<span class="sourceLineNo">2331</span>    }<a name="line.2331"></a>
+<span class="sourceLineNo">2332</span>  }<a name="line.2332"></a>
+<span class="sourceLineNo">2333</span><a name="line.2333"></a>
+<span class="sourceLineNo">2334</span>  public StoreFlushContext createFlushContext(long cacheFlushId, FlushLifeCycleTracker tracker) {<a name="line.2334"></a>
+<span class="sourceLineNo">2335</span>    return new StoreFlusherImpl(cacheFlushId, tracker);<a name="line.2335"></a>
+<span class="sourceLineNo">2336</span>  }<a name="line.2336"></a>
+<span class="sourceLineNo">2337</span><a name="line.2337"></a>
+<span class="sourceLineNo">2338</span>  private final class StoreFlusherImpl implements StoreFlushContext {<a name="line.2338"></a>
+<span class="sourceLineNo">2339</span><a name="line.2339"></a>
+<span class="sourceLineNo">2340</span>    private final FlushLifeCycleTracker tracker;<a name="line.2340"></a>
+<span class="sourceLineNo">2341</span>    private final long cacheFlushSeqNum;<a name="line.2341"></a>
+<span class="sourceLineNo">2342</span>    private MemStoreSnapshot snapshot;<a name="line.2342"></a>
+<span class="sourceLineNo">2343</span>    private List&lt;Path&gt; tempFiles;<a name="line.2343"></a>
+<span class="sourceLineNo">2344</span>    private List&lt;Path&gt; committedFiles;<a name="line.2344"></a>
+<span class="sourceLineNo">2345</span>    private long cacheFlushCount;<a name="line.2345"></a>
+<span class="sourceLineNo">2346</span>    private long cacheFlushSize;<a name="line.2346"></a>
+<span class="sourceLineNo">2347</span>    private long outputFileSize;<a name="line.2347"></a>
+<span class="sourceLineNo">2348</span><a name="line.2348"></a>
+<span class="sourceLineNo">2349</span>    private StoreFlusherImpl(long cacheFlushSeqNum, FlushLifeCycleTracker tracker) {<a name="line.2349"></a>
+<span class="sourceLineNo">2350</span>      this.cacheFlushSeqNum = cacheFlushSeqNum;<a name="line.2350"></a>
+<span class="sourceLineNo">2351</span>      this.tracker = tracker;<a name="line.2351"></a>
+<span class="sourceLineNo">2352</span>    }<a name="line.2352"></a>
+<span class="sourceLineNo">2353</span><a name="line.2353"></a>
+<span class="sourceLineNo">2354</span>    /**<a name="line.2354"></a>
+<span class="sourceLineNo">2355</span>     * This is not thread safe. The caller should have a lock on the region or the store.<a name="line.2355"></a>
+<span class="sourceLineNo">2356</span>     * If necessary, the lock can be added with the patch provided in HBASE-10087<a name="line.2356"></a>
+<span class="sourceLineNo">2357</span>     */<a name="line.2357"></a>
 <span class="sourceLineNo">2358</span>    @Override<a name="line.2358"></a>
-<span class="sourceLineNo">2359</span>    public void flushCache(MonitoredTask status) throws IOException {<a name="line.2359"></a>
-<span class="sourceLineNo">2360</span>      RegionServerServices rsService = region.getRegionServerServices();<a name="line.2360"></a>
-<span class="sourceLineNo">2361</span>      ThroughputController throughputController =<a name="line.2361"></a>
-<span class="sourceLineNo">2362</span>          rsService == null ? null : rsService.getFlushThroughputController();<a name="line.2362"></a>
-<span class="sourceLineNo">2363</span>      tempFiles =<a name="line.2363"></a>
-<span class="sourceLineNo">2364</span>          HStore.this.flushCache(cacheFlushSeqNum, snapshot, status, throughputController, tracker);<a name="line.2364"></a>
-<span class="sourceLineNo">2365</span>    }<a name="line.2365"></a>
-<span class="sourceLineNo">2366</span><a name="line.2366"></a>
-<span class="sourceLineNo">2367</span>    @Override<a name="line.2367"></a>
-<span class="sourceLineNo">2368</span>    public boolean commit(MonitoredTask status) throws IOException {<a name="line.2368"></a>
-<span class="sourceLineNo">2369</span>      if (CollectionUtils.isEmpty(this.tempFiles)) {<a name="line.2369"></a>
-<span class="sourceLineNo">2370</span>        return false;<a name="line.2370"></a>
-<span class="sourceLineNo">2371</span>      }<a name="line.2371"></a>
-<span class="sourceLineNo">2372</span>      List&lt;HStoreFile&gt; storeFiles = new ArrayList&lt;&gt;(this.tempFiles.size());<a name="line.2372"></a>
-<span class="sourceLineNo">2373</span>      for (Path storeFilePath : tempFiles) {<a name="line.2373"></a>
-<span class="sourceLineNo">2374</span>        try {<a name="line.2374"></a>
-<span class="sourceLineNo">2375</span>          HStoreFile sf = HStore.this.commitFile(storeFilePath, cacheFlushSeqNum, status);<a name="line.2375"></a>
-<span class="sourceLineNo">2376</span>          outputFileSize += sf.getReader().length();<a name="line.2376"></a>
-<span class="sourceLineNo">2377</span>          storeFiles.add(sf);<a name="line.2377"></a>
-<span class="sourceLineNo">2378</span>        } catch (IOException ex) {<a name="line.2378"></a>
-<span class="sourceLineNo">2379</span>          LOG.error("Failed to commit store file {}", storeFilePath, ex);<a name="line.2379"></a>
-<span class="sourceLineNo">2380</span>          // Try to delete the files we have committed before.<a name="line.2380"></a>
-<span class="sourceLineNo">2381</span>          for (HStoreFile sf : storeFiles) {<a name="line.2381"></a>
-<span class="sourceLineNo">2382</span>            Path pathToDelete = sf.getPath();<a name="line.2382"></a>
-<span class="sourceLineNo">2383</span>            try {<a name="line.2383"></a>
-<span class="sourceLineNo">2384</span>              sf.deleteStoreFile();<a name="line.2384"></a>
-<span class="sourceLineNo">2385</span>            } catch (IOException deleteEx) {<a name="line.2385"></a>
-<span class="sourceLineNo">2386</span>              LOG.error(HBaseMarkers.FATAL, "Failed to delete store file we committed, "<a name="line.2386"></a>
-<span class="sourceLineNo">2387</span>                  + "halting {}", pathToDelete, ex);<a name="line.2387"></a>
-<span class="sourceLineNo">2388</span>              Runtime.getRuntime().halt(1);<a name="line.2388"></a>
-<span class="sourceLineNo">2389</span>            }<a name="line.2389"></a>
-<span class="sourceLineNo">2390</span>          }<a name="line.2390"></a>
-<span class="sourceLineNo">2391</span>          throw new IOException("Failed to commit the flush", ex);<a name="line.2391"></a>
-<span class="sourceLineNo">2392</span>        }<a name="line.2392"></a>
-<span class="sourceLineNo">2393</span>      }<a name="line.2393"></a>
-<span class="sourceLineNo">2394</span><a name="line.2394"></a>
-<span class="sourceLineNo">2395</span>      for (HStoreFile sf : storeFiles) {<a name="line.2395"></a>
-<span class="sourceLineNo">2396</span>        if (HStore.this.getCoprocessorHost() != null) {<a name="line.2396"></a>
-<span class="sourceLineNo">2397</span>          HStore.this.getCoprocessorHost().postFlush(HStore.this, sf, tracker);<a name="line.2397"></a>
-<span class="sourceLineNo">2398</span>        }<a name="line.2398"></a>
-<span class="sourceLineNo">2399</span>        committedFiles.add(sf.getPath());<a name="line.2399"></a>
-<span class="sourceLineNo">2400</span>      }<a name="line.2400"></a>
-<span class="sourceLineNo">2401</span><a name="line.2401"></a>
-<span class="sourceLineNo">2402</span>      HStore.this.flushedCellsCount.addAndGet(cacheFlushCount);<a name="line.2402"></a>
-<span class="sourceLineNo">2403</span>      HStore.this.flushedCellsSize.addAndGet(cacheFlushSize);<a name="line.2403"></a>
-<span class="sourceLineNo">2404</span>      HStore.this.flushedOutputFileSize.addAndGet(outputFileSize);<a name="line.2404"></a>
-<span class="sourceLineNo">2405</span><a name="line.2405"></a>
-<span class="sourceLineNo">2406</span>      // Add new file to store files.  Clear snapshot too while we have the Store write lock.<a name="line.2406"></a>
-<span class="sourceLineNo">2407</span>      return HStore.this.updateStorefiles(storeFiles, snapshot.getId());<a name="line.2407"></a>
-<span class="sourceLineNo">2408</span>    }<a name="line.2408"></a>
-<span class="sourceLineNo">2409</span><a name="line.2409"></a>
-<span class="sourceLineNo">2410</span>    @Override<a name="line.2410"></a>
-<span class="sourceLineNo">2411</span>    public long getOutputFileSize() {<a name="line.2411"></a>
-<span class="sourceLineNo">2412</span>      return outputFileSize;<a name="line.2412"></a>
-<span class="sourceLineNo">2413</span>    }<a name="line.2413"></a>
-<span class="sourceLineNo">2414</span><a name="line.2414"></a>
-<span class="sourceLineNo">2415</span>    @Override<a name="line.2415"></a>
-<span class="sourceLineNo">2416</span>    public List&lt;Path&gt; getCommittedFiles() {<a name="line.2416"></a>
-<span class="sourceLineNo">2417</span>      return committedFiles;<a name="line.2417"></a>
+<span class="sourceLineNo">2359</span>    public MemStoreSize prepare() {<a name="line.2359"></a>
+<span class="sourceLineNo">2360</span>      // passing the current sequence number of the wal - to allow bookkeeping in the memstore<a name="line.2360"></a>
+<span class="sourceLineNo">2361</span>      this.snapshot = memstore.snapshot();<a name="line.2361"></a>
+<span class="sourceLineNo">2362</span>      this.cacheFlushCount = snapshot.getCellsCount();<a name="line.2362"></a>
+<span class="sourceLineNo">2363</span>      this.cacheFlushSize = snapshot.getDataSize();<a name="line.2363"></a>
+<span class="sourceLineNo">2364</span>      committedFiles = new ArrayList&lt;&gt;(1);<a name="line.2364"></a>
+<span class="sourceLineNo">2365</span>      return snapshot.getMemStoreSize();<a name="line.2365"></a>
+<span class="sourceLineNo">2366</span>    }<a name="line.2366"></a>
+<span class="sourceLineNo">2367</span><a name="line.2367"></a>
+<span class="sourceLineNo">2368</span>    @Override<a name="line.2368"></a>
+<span class="sourceLineNo">2369</span>    public void flushCache(MonitoredTask status) throws IOException {<a name="line.2369"></a>
+<span class="sourceLineNo">2370</span>      RegionServerServices rsService = region.getRegionServerServices();<a name="line.2370"></a>
+<span class="sourceLineNo">2371</span>      ThroughputController throughputController =<a name="line.2371"></a>
+<span class="sourceLineNo">2372</span>          rsService == null ? null : rsService.getFlushThroughputController();<a name="line.2372"></a>
+<span class="sourceLineNo">2373</span>      tempFiles =<a name="line.2373"></a>
+<span class="sourceLineNo">2374</span>          HStore.this.flushCache(cacheFlushSeqNum, snapshot, status, throughputController, tracker);<a name="line.2374"></a>
+<span class="sourceLineNo">2375</span>    }<a name="line.2375"></a>
+<span class="sourceLineNo">2376</span><a name="line.2376"></a>
+<span class="sourceLineNo">2377</span>    @Override<a name="line.2377"></a>
+<span class="sourceLineNo">2378</span>    public boolean commit(MonitoredTask status) throws IOException {<a name="line.2378"></a>
+<span class="sourceLineNo">2379</span>      if (CollectionUtils.isEmpty(this.tempFiles)) {<a name="line.2379"></a>
+<span class="sourceLineNo">2380</span>        return false;<a name="line.2380"></a>
+<span class="sourceLineNo">2381</span>      }<a name="line.2381"></a>
+<span class="sourceLineNo">2382</span>      List&lt;HStoreFile&gt; storeFiles = new ArrayList&lt;&gt;(this.tempFiles.size());<a name="line.2382"></a>
+<span class="sourceLineNo">2383</span>      for (Path storeFilePath : tempFiles) {<a name="line.2383"></a>
+<span class="sourceLineNo">2384</span>        try {<a name="line.2384"></a>
+<span class="sourceLineNo">2385</span>          HStoreFile sf = HStore.this.commitFile(storeFilePath, cacheFlushSeqNum, status);<a name="line.2385"></a>
+<span class="sourceLineNo">2386</span>          outputFileSize += sf.getReader().length();<a name="line.2386"></a>
+<span class="sourceLineNo">2387</span>          storeFiles.add(sf);<a name="line.2387"></a>
+<span class="sourceLineNo">2388</span>        } catch (IOException ex) {<a name="line.2388"></a>
+<span class="sourceLineNo">2389</span>          LOG.error("Failed to commit store file {}", storeFilePath, ex);<a name="line.2389"></a>
+<span class="sourceLineNo">2390</span>          // Try to delete the files we have committed before.<a name="line.2390"></a>
+<span class="sourceLineNo">2391</span>          for (HStoreFile sf : storeFiles) {<a name="line.2391"></a>
+<span class="sourceLineNo">2392</span>            Path pathToDelete = sf.getPath();<a name="line.2392"></a>
+<span class="sourceLineNo">2393</span>            try {<a name="line.2393"></a>
+<span class="sourceLineNo">2394</span>              sf.deleteStoreFile();<a name="line.2394"></a>
+<span class="sourceLineNo">2395</span>            } catch (IOException deleteEx) {<a name="line.2395"></a>
+<span class="sourceLineNo">2396</span>              LOG.error(HBaseMarkers.FATAL, "Failed to delete store file we committed, "<a name="line.2396"></a>
+<span class="sourceLineNo">2397</span>                  + "halting {}", pathToDelete, ex);<a name="line.2397"></a>
+<span class="sourceLineNo">2398</span>              Runtime.getRuntime().halt(1);<a name="line.2398"></a>
+<span class="sourceLineNo">2399</span>            }<a name="line.2399"></a>
+<span class="sourceLineNo">2400</span>          }<a name="line.2400"></a>
+<span class="sourceLineNo">2401</span>          throw new IOException("Failed to commit the flush", ex);<a name="line.2401"></a>
+<span class="sourceLineNo">2402</span>        }<a name="line.2402"></a>
+<span class="sourceLineNo">2403</span>      }<a name="line.2403"></a>
+<span class="sourceLineNo">2404</span><a name="line.2404"></a>
+<span class="sourceLineNo">2405</span>      for (HStoreFile sf : storeFiles) {<a name="line.2405"></a>
+<span class="sourceLineNo">2406</span>        if (HStore.this.getCoprocessorHost() != null) {<a name="line.2406"></a>
+<span class="sourceLineNo">2407</span>          HStore.this.getCoprocessorHost().postFlush(HStore.this, sf, tracker);<a name="line.2407"></a>
+<span class="sourceLineNo">2408</span>        }<a name="line.2408"></a>
+<span class="sourceLineNo">2409</span>        committedFiles.add(sf.getPath());<a name="line.2409"></a>
+<span class="sourceLineNo">2410</span>      }<a name="line.2410"></a>
+<span class="sourceLineNo">2411</span><a name="line.2411"></a>
+<span class="sourceLineNo">2412</span>      HStore.this.flushedCellsCount.addAndGet(cacheFlushCount);<a name="line.2412"></a>
+<span class="sourceLineNo">2413</span>      HStore.this.flushedCellsSize.addAndGet(cacheFlushSize);<a name="line.2413"></a>
+<span class="sourceLineNo">2414</span>      HStore.this.flushedOutputFileSize.addAndGet(outputFileSize);<a name="line.2414"></a>
+<span class="sourceLineNo">2415</span><a name="line.2415"></a>
+<span class="sourceLineNo">2416</span>      // Add new file to store files.  Clear snapshot too while we have the Store write lock.<a name="line.2416"></a>
+<span class="sourceLineNo">2417</span>      return HStore.this.updateStorefiles(storeFiles, snapshot.getId());<a name="line.2417"></a>
 <span class="sourceLineNo">2418</span>    }<a name="line.2418"></a>
 <span class="sourceLineNo">2419</span><a name="line.2419"></a>
-<span class="sourceLineNo">2420</span>    /**<a name="line.2420"></a>
-<span class="sourceLineNo">2421</span>     * Similar to commit, but called in secondary region replicas for replaying the<a name="line.2421"></a>
-<span class="sourceLineNo">2422</span>     * flush cache from primary region. Adds the new files to the store, and drops the<a name="line.2422"></a>
-<span class="sourceLineNo">2423</span>     * snapshot depending on dropMemstoreSnapshot argument.<a name="line.2423"></a>
-<span class="sourceLineNo">2424</span>     * @param fileNames names of the flushed files<a name="line.2424"></a>
-<span class="sourceLineNo">2425</span>     * @param dropMemstoreSnapshot whether to drop the prepared memstore snapshot<a name="line.2425"></a>
-<span class="sourceLineNo">2426</span>     * @throws IOException<a name="line.2426"></a>
-<span class="sourceLineNo">2427</span>     */<a name="line.2427"></a>
-<span class="sourceLineNo">2428</span>    @Override<a name="line.2428"></a>
-<span class="sourceLineNo">2429</span>    public void replayFlush(List&lt;String&gt; fileNames, boolean dropMemstoreSnapshot)<a name="line.2429"></a>
-<span class="sourceLineNo">2430</span>        throws IOException {<a name="line.2430"></a>
-<span class="sourceLineNo">2431</span>      List&lt;HStoreFile&gt; storeFiles = new ArrayList&lt;&gt;(fileNames.size());<a name="line.2431"></a>
-<span class="sourceLineNo">2432</span>      for (String file : fileNames) {<a name="line.2432"></a>
-<span class="sourceLineNo">2433</span>        // open the file as a store file (hfile link, etc)<a name="line.2433"></a>
-<span class="sourceLineNo">2434</span>        StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), file);<a name="line.2434"></a>
-<span class="sourceLineNo">2435</span>        HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);<a name="line.2435"></a>
-<span class="sourceLineNo">2436</span>        storeFiles.add(storeFile);<a name="line.2436"></a>
-<span class="sourceLineNo">2437</span>        HStore.this.storeSize.addAndGet(storeFile.getReader().length());<a name="line.2437"></a>
-<span class="sourceLineNo">2438</span>        HStore.this.totalUncompressedBytes<a name="line.2438"></a>
-<span class="sourceLineNo">2439</span>            .addAndGet(storeFile.getReader().getTotalUncompressedBytes());<a name="line.2439"></a>
-<span class="sourceLineNo">2440</span>        if (LOG.isInfoEnabled()) {<a name="line.2440"></a>
-<span class="sourceLineNo">2441</span>          LOG.info("Region: " + HStore.this.getRegionInfo().getEncodedName() +<a name="line.2441"></a>
-<span class="sourceLineNo">2442</span>            " added " + storeFile + ", entries=" + storeFile.getReader().getEntries() +<a name="line.2442"></a>
-<span class="sourceLineNo">2443</span>              ", sequenceid=" + storeFile.getReader().getSequenceID() + ", filesize="<a name="line.2443"></a>
-<span class="sourceLineNo">2444</span>              + TraditionalBinaryPrefix.long2String(storeFile.getReader().length(), "", 1));<a name="line.2444"></a>
-<span class="sourceLineNo">2445</span>        }<a name="line.2445"></a>
-<span class="sourceLineNo">2446</span>      }<a name="line.2446"></a>
-<span class="sourceLineNo">2447</span><a name="line.2447"></a>
-<span class="sourceLineNo">2448</span>      long snapshotId = -1; // -1 means do not drop<a name="line.2448"></a>
-<span class="sourceLineNo">2449</span>      if (dropMemstoreSnapshot &amp;&amp; snapshot != null) {<a name="line.2449"></a>
-<span class="sourceLineNo">2450</span>        snapshotId = snapshot.getId();<a name="line.2450"></a>
-<span class="sourceLineNo">2451</span>        snapshot.close();<a name="line.2451"></a>
-<span class="sourceLineNo">2452</span>      }<a name="line.2452"></a>
-<span class="sourceLineNo">2453</span>      HStore.this.updateStorefiles(storeFiles, snapshotId);<a name="line.2453"></a>
-<span class="sourceLineNo">2454</span>    }<a name="line.2454"></a>
-<span class="sourceLineNo">2455</span><a name="line.2455"></a>
-<span class="sourceLineNo">2456</span>    /**<a name="line.2456"></a>
-<span class="sourceLineNo">2457</span>     * Abort the snapshot preparation. Drops the snapshot if any.<a name="line.2457"></a>
-<span class="sourceLineNo">2458</span>     * @throws IOException<a name="line.2458"></a>
-<span class="sourceLineNo">2459</span>     */<a name="line.2459"></a>
-<span class="sourceLineNo">2460</span>    @Override<a name="line.2460"></a>
-<span class="sourceLineNo">2461</span>    public void abort() throws IOException {<a name="line.2461"></a>
-<span class="sourceLineNo">2462</span>      if (snapshot != null) {<a name="line.2462"></a>
-<span class="sourceLineNo">2463</span>        //We need to close the snapshot when aborting, otherwise, the segment scanner<a name="line.2463"></a>
-<span class="sourceLineNo">2464</span>        //won't be closed. If we are using MSLAB, the chunk referenced by those scanners<a name="line.2464"></a>
-<span class="sourceLineNo">2465</span>        //can't be released, thus memory leak<a name="line.2465"></a>
-<span class="sourceLineNo">2466</span>        snapshot.close();<a name="line.2466"></a>
-<span class="sourceLineNo">2467</span>        HStore.this.updateStorefiles(Collections.emptyList(), snapshot.getId());<a name="line.2467"></a>
-<span class="sourceLineNo">2468</span>      }<a name="line.2468"></a>
-<span class="sourceLineNo">2469</span>    }<a name="line.2469"></a>
-<span class="sourceLineNo">2470</span>  }<a name="line.2470"></a>
-<span class="sourceLineNo">2471</span><a name="line.2471"></a>
-<span class="sourceLineNo">2472</span>  @Override<a name="line.2472"></a>
-<span class="sourceLineNo">2473</span>  public boolean needsCompaction() {<a name="line.2473"></a>
-<span class="sourceLineNo">2474</span>    List&lt;HStoreFile&gt; filesCompactingClone = null;<a name="line.2474"></a>
-<span class="sourceLineNo">2475</span>    synchronized (filesCompacting) {<a name="line.2475"></a>
-<span class="sourceLineNo">2476</span>      filesCompactingClone = Lists.newArrayList(filesCompacting);<a name="line.2476"></a>
-<span class="sourceLineNo">2477</span>    }<a name="line.2477"></a>
-<span class="sourceLineNo">2478</span>    return this.storeEngine.needsCompaction(filesCompactingClone);<a name="line.2478"></a>
-<span class="sourceLineNo">2479</span>  }<a name="line.2479"></a>
-<span class="sourceLineNo">2480</span><a name="line.2480"></a>
-<span class="sourceLineNo">2481</span>  /**<a name="line.2481"></a>
-<span class="sourceLineNo">2482</span>   * Used for tests.<a name="line.2482"></a>
-<span class="sourceLineNo">2483</span>   * @return cache configuration for this Store.<a name="line.2483"></a>
-<span class="sourceLineNo">2484</span>   */<a name="line.2484"></a>
-<span class="sourceLineNo">2485</span>  @VisibleForTesting<a name="line.2485"></a>
-<span class="sourceLineNo">2486</span>  public CacheConfig getCacheConfig() {<a name="line.2486"></a>
-<span class="sourceLineNo">2487</span>    return this.cacheConf;<a name="line.2487"></a>
-<span class="sourceLineNo">2488</span>  }<a name="line.2488"></a>
-<span class="sourceLineNo">2489</span><a name="line.2489"></a>
-<span class="sourceLineNo">2490</span>  public static final long FIXED_OVERHEAD =<a name="line.2490"></a>
-<span class="sourceLineNo">2491</span>      ClassSize.align(ClassSize.OBJECT + (27 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)<a name="line.2491"></a>
-<span class="sourceLineNo">2492</span>              + (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));<a name="line.2492"></a>
-<span class="sourceLineNo">2493</span><a name="line.2493"></a>
-<span class="sourceLineNo">2494</span>  public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD<a name="line.2494"></a>
-<span class="sourceLineNo">2495</span>      + ClassSize.OBJECT + ClassSize.REENTRANT_LOCK<a name="line.2495"></a>
-<span class="sourceLineNo">2496</span>      + ClassSize.CONCURRENT_SKIPLISTMAP<a name="line.2496"></a>
-<span class="sourceLineNo">2497</span>      + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY + ClassSize.OBJECT<a name="line.2497"></a>
-<span class="sourceLineNo">2498</span>      + ScanInfo.FIXED_OVERHEAD);<a name="line.2498"></a>
+<span class="sourceLineNo">2420</span>    @Override<a name="line.2420"></a>
+<span class="sourceLineNo">2421</span>    public long getOutputFileSize() {<a name="line.2421"></a>
+<span class="sourceLineNo">2422</span>      return outputFileSize;<a name="line.2422"></a>
+<span class="sourceLineNo">2423</span>    }<a name="line.2423"></a>
+<span class="sourceLineNo">2424</span><a name="line.2424"></a>
+<span class="sourceLineNo">2425</span>    @Override<a name="line.2425"></a>
+<span class="sourceLineNo">2426</span>    public List&lt;Path&gt; getCommittedFiles() {<a name="line.2426"></a>
+<span class="sourceLineNo">2427</span>      return committedFiles;<a name="line.2427"></a>
+<span class="sourceLineNo">2428</span>    }<a name="line.2428"></a>
+<span class="sourceLineNo">2429</span><a name="line.2429"></a>
+<span class="sourceLineNo">2430</span>    /**<a name="line.2430"></a>
+<span class="sourceLineNo">2431</span>     * Similar to commit, but called in secondary region replicas for replaying the<a name="line.2431"></a>
+<span class="sourceLineNo">2432</span>     * flush cache from primary region. Adds the new files to the store, and drops the<a name="line.2432"></a>
+<span class="sourceLineNo">2433</span>     * snapshot depending on dropMemstoreSnapshot argument.<a name="line.2433"></a>
+<span class="sourceLineNo">2434</span>     * @param fileNames names of the flushed files<a name="line.2434"></a>
+<span class="sourceLineNo">2435</span>     * @param dropMemstoreSnapshot whether to drop the prepared memstore snapshot<a name="line.2435"></a>
+<span class="sourceLineNo">2436</span>     * @throws IOException<a name="line.2436"></a>
+<span class="sourceLineNo">2437</span>     */<a name="line.2437"></a>
+<span class="sourceLineNo">2438</span>    @Override<a name="line.2438"></a>
+<span class="sourceLineNo">2439</span>    public void replayFlush(List&lt;String&gt; fileNames, boolean dropMemstoreSnapshot)<a name="line.2439"></a>
+<span class="sourceLineNo">2440</span>        throws IOException {<a name="line.2440"></a>
+<span class="sourceLineNo">2441</span>      List&lt;HStoreFile&gt; storeFiles = new ArrayList&lt;&gt;(fileNames.size());<a name="line.2441"></a>
+<span class="sourceLineNo">2442</span>      for (String file : fileNames) {<a name="line.2442"></a>
+<span class="sourceLineNo">2443</span>        // open the file as a store file (hfile link, etc)<a name="line.2443"></a>
+<span class="sourceLineNo">2444</span>        StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), file);<a name="line.2444"></a>
+<span class="sourceLineNo">2445</span>        HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);<a name="line.2445"></a>
+<span class="sourceLineNo">2446</span>        storeFiles.add(storeFile);<a name="line.2446"></a>
+<span class="sourceLineNo">2447</span>        HStore.this.storeSize.addAndGet(storeFile.getReader().length());<a name="line.2447"></a>
+<span class="sourceLineNo">2448</span>        HStore.this.totalUncompressedBytes<a name="line.2448"></a>
+<span class="sourceLineNo">2449</span>            .addAndGet(storeFile.getReader().getTotalUncompressedBytes());<a name="line.2449"></a>
+<span class="sourceLineNo">2450</span>        if (LOG.isInfoEnabled()) {<a name="line.2450"></a>
+<span class="sourceLineNo">2451</span>          LOG.info("Region: " + HStore.this.getRegionInfo().getEncodedName() +<a name="line.2451"></a>
+<span class="sourceLineNo">2452</span>            " added " + storeFile + ", entries=" + storeFile.getReader().getEntries() +<a name="line.2452"></a>
+<span class="sourceLineNo">2453</span>              ", sequenceid=" + storeFile.getReader().getSequenceID() + ", filesize="<a name="line.2453"></a>
+<span class="sourceLineNo">2454</span>              + TraditionalBinaryPrefix.long2String(storeFile.getReader().length(), "", 1));<a name="line.2454"></a>
+<span class="sourceLineNo">2455</span>        }<a name="line.2455"></a>
+<span class="sourceLineNo">2456</span>      }<a name="line.2456"></a>
+<span class="sourceLineNo">2457</span><a name="line.2457"></a>
+<span class="sourceLineNo">2458</span>      long snapshotId = -1; // -1 means do not drop<a name="line.2458"></a>
+<span class="sourceLineNo">2459</span>      if (dropMemstoreSnapshot &amp;&amp; snapshot != null) {<a name="line.2459"></a>
+<span class="sourceLineNo">2460</span>        snapshotId = snapshot.getId();<a name="line.2460"></a>
+<span class="sourceLineNo">2461</span>        snapshot.close();<a name="line.2461"></a>
+<span class="sourceLineNo">2462</span>      }<a name="line.2462"></a>
+<span class="sourceLineNo">2463</span>      HStore.this.updateStorefiles(storeFiles, snapshotId);<a name="line.2463"></a>
+<span class="sourceLineNo">2464</span>    }<a name="line.2464"></a>
+<span class="sourceLineNo">2465</span><a name="line.2465"></a>
+<span class="sourceLineNo">2466</span>    /**<a name="line.2466"></a>
+<span class="sourceLineNo">2467</span>     * Abort the snapshot preparation. Drops the snapshot if any.<a name="line.2467"></a>
+<span class="sourceLineNo">2468</span>     * @throws IOException<a name="line.2468"></a>
+<span class="sourceLineNo">2469</span>     */<a name="line.2469"></a>
+<span class="sourceLineNo">2470</span>    @Override<a name="line.2470"></a>
+<span class="sourceLineNo">2471</span>    public void abort() throws IOException {<a name="line.2471"></a>
+<span class="sourceLineNo">2472</span>      if (snapshot != null) {<a name="line.2472"></a>
+<span class="sourceLineNo">2473</span>        //We need to close the snapshot when aborting, otherwise, the segment scanner<a name="line.2473"></a>
+<span class="sourceLineNo">2474</span>        //won't be closed. If we are using MSLAB, the chunk referenced by those scanners<a name="line.2474"></a>
+<span class="sourceLineNo">2475</span>        //can't be released, thus memory leak<a name="line.2475"></a>
+<span class="sourceLineNo">2476</span>        snapshot.close();<a name="line.2476"></a>
+<span class="sourceLineNo">2477</span>        HStore.this.updateStorefiles(Collections.emptyList(), snapshot.getId());<a name="line.2477"></a>
+<span class="sourceLineNo">2478</span>      }<a name="line.2478"></a>
+<span class="sourceLineNo">2479</span>    }<a name="line.2479"></a>
+<span class="sourceLineNo">2480</span>  }<a name="line.2480"></a>
+<span class="sourceLineNo">2481</span><a name="line.2481"></a>
+<span class="sourceLineNo">2482</span>  @Override<a name="line.2482"></a>
+<span class="sourceLineNo">2483</span>  public boolean needsCompaction() {<a name="line.2483"></a>
+<span class="sourceLineNo">2484</span>    List&lt;HStoreFile&gt; filesCompactingClone = null;<a name="line.2484"></a>
+<span class="sourceLineNo">2485</span>    synchronized (filesCompacting) {<a name="line.2485"></a>
+<span class="sourceLineNo">2486</span>      filesCompactingClone = Lists.newArrayList(filesCompacting);<a name="line.2486"></a>
+<span class="sourceLineNo">2487</span>    }<a name="line.2487"></a>
+<span class="sourceLineNo">2488</span>    return this.storeEngine.needsCompaction(filesCompactingClone);<a name="line.2488"></a>
+<span class="sourceLineNo">2489</span>  }<a name="line.2489"></a>
+<span class="sourceLineNo">2490</span><a name="line.2490"></a>
+<span class="sourceLineNo">2491</span>  /**<a name="line.2491"></a>
+<span class="sourceLineNo">2492</span>   * Used for tests.<a name="line.2492"></a>
+<span class="sourceLineNo">2493</span>   * @return cache configuration for this Store.<a name="line.2493"></a>
+<span class="sourceLineNo">2494</span>   */<a name="line.2494"></a>
+<span class="sourceLineNo">2495</span>  @VisibleForTesting<a name="line.2495"></a>
+<span class="sourceLineNo">2496</span>  public CacheConfig getCacheConfig() {<a name="line.2496"></a>
+<span class="sourceLineNo">2497</span>    return this.cacheConf;<a name="line.2497"></a>
+<span class="sourceLineNo">2498</span>  }<a name="line.2498"></a>
 <span class="sourceLineNo">2499</span><a name="line.2499"></a>
-<span class="sourceLineNo">2500</span>  @Override<a name="line.2500"></a>
-<span class="sourceLineNo">2501</span>  public long heapSize() {<a name="line.2501"></a>
-<span class="sourceLineNo">2502</span>    MemStoreSize memstoreSize = this.memstore.size();<a name="line.2502"></a>
-<span class="sourceLineNo">2503</span>    return DEEP_OVERHEAD + memstoreSize.getHeapSize();<a name="line.2503"></a>
-<span class="sourceLineNo">2504</span>  }<a name="line.2504"></a>
-<span class="sourceLineNo">2505</span><a name="line.2505"></a>
-<span class="sourceLineNo">2506</span>  @Override<a name="line.2506"></a>
-<span class="sourceLineNo">2507</span>  public CellComparator getComparator() {<a name="line.2507"></a>
-<span class="sourceLineNo">2508</span>    return comparator;<a name="line.2508"></a>
-<span class="sourceLineNo">2509</span>  }<a name="line.2509"></a>
-<span class="sourceLineNo">2510</span><a name="line.2510"></a>
-<span class="sourceLineNo">2511</span>  public ScanInfo getScanInfo() {<a name="line.2511"></a>
-<span class="sourceLineNo">2512</span>    return scanInfo;<a name="line.2512"></a>
-<span class="sourceLineNo">2513</span>  }<a name="line.2513"></a>
-<span class="sourceLineNo">2514</span><a name="line.2514"></a>
-<span class="sourceLineNo">2515</span>  /**<a name="line.2515"></a>
-<span class="sourceLineNo">2516</span>   * Set scan info, used by test<a name="line.2516"></a>
-<span class="sourceLineNo">2517</span>   * @param scanInfo new scan info to use for test<a name="line.2517"></a>
-<span class="sourceLineNo">2518</span>   */<a name="line.2518"></a>
-<span class="sourceLineNo">2519</span>  void setScanInfo(ScanInfo scanInfo) {<a name="line.2519"></a>
-<span class="sourceLineNo">2520</span>    this.scanInfo = scanInfo;<a name="line.2520"></a>
-<span class="sourceLineNo">2521</span>  }<a name="line.2521"></a>
-<span class="sourceLineNo">2522</span><a name="line.2522"></a>
-<span class="sourceLineNo">2523</span>  @Override<a name="line.2523"></a>
-<span class="sourceLineNo">2524</span>  public boolean hasTooManyStoreFiles() {<a name="line.2524"></a>
-<span class="sourceLineNo">2525</span>    return getStorefilesCount() &gt; this.blockingFileCount;<a name="line.2525"></a>
-<span class="sourceLineNo">2526</span>  }<a name="line.2526"></a>
-<span class="sourceLineNo">2527</span><a name="line.2527"></a>
-<span class="sourceLineNo">2528</span>  @Override<a name="line.2528"></a>
-<span class="sourceLineNo">2529</span>  public long getFlushedCellsCount() {<a name="line.2529"></a>
-<span class="sourceLineNo">2530</span>    return flushedCellsCount.get();<a name="line.2530"></a>
+<span class="sourceLineNo">2500</span>  public static final long FIXED_OVERHEAD =<a name="line.2500"></a>
+<span class="sourceLineNo">2501</span>      ClassSize.align(ClassSize.OBJECT + (27 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)<a name="line.2501"></a>
+<span class="sourceLineNo">2502</span>              + (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));<a name="line.2502"></a>
+<span class="sourceLineNo">2503</span><a name="line.2503"></a>
+<span class="sourceLineNo">2504</span>  public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD<a name="line.2504"></a>
+<span class="sourceLineNo">2505</span>      + ClassSize.OBJECT + ClassSize.REENTRANT_LOCK<a name="line.2505"></a>
+<span class="sourceLineNo">2506</span>      + ClassSize.CONCURRENT_SKIPLISTMAP<a name="line.2506"></a>
+<span class="sourceLineNo">2507</span>      + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY + ClassSize.OBJECT<a name="line.2507"></a>
+<span class="sourceLineNo">2508</span>      + ScanInfo.FIXED_OVERHEAD);<a name="line.2508"></a>
+<span class="sourceLineNo">2509</span><a name="line.2509"></a>
+<span class="sourceLineNo">2510</span>  @Override<a name="line.2510"></a>
+<span class="sourceLineNo">2511</span>  public long heapSize() {<a name="line.2511"></a>
+<span class="sourceLineNo">2512</span>    MemStoreSize memstoreSize = this.memstore.size();<a name="line.2512"></a>
+<span class="sourceLineNo">2513</span>    return DEEP_OVERHEAD + memstoreSize.getHeapSize();<a name="line.2513"></a>
+<span class="sourceLineNo">2514</span>  }<a name="line.2514"></a>
+<span class="sourceLineNo">2515</span><a name="line.2515"></a>
+<span class="sourceLineNo">2516</span>  @Override<a name="line.2516"></a>
+<span class="sourceLineNo">2517</span>  public CellComparator getComparator() {<a name="line.2517"></a>
+<span class="sourceLineNo">2518</span>    return comparator;<a name="line.2518"></a>
+<span class="sourceLineNo">2519</span>  }<a name="line.2519"></a>
+<span class="sourceLineNo">2520</span><a name="line.2520"></a>
+<span class="sourceLineNo">2521</span>  public ScanInfo getScanInfo() {<a name="line.2521"></a>
+<span class="sourceLineNo">2522</span>    return scanInfo;<a name="line.2522"></a>
+<span class="sourceLineNo">2523</span>  }<a name="line.2523"></a>
+<span class="sourceLineNo">2524</span><a name="line.2524"></a>
+<span class="sourceLineNo">2525</span>  /**<a name="line.2525"></a>
+<span class="sourceLineNo">2526</span>   * Set scan info, used by test<a name="line.2526"></a>
+<span class="sourceLineNo">2527</span>   * @param scanInfo new scan info to use for test<a name="line.2527"></a>
+<span class="sourceLineNo">2528</span>   */<a name="line.2528"></a>
+<span class="sourceLineNo">2529</span>  void setScanInfo(ScanInfo scanInfo) {<a name="line.2529"></a>
+<span class="sourceLineNo">2530</span>    this.scanInfo = scanInfo;<a name="line.2530"></a>
 <span class="sourceLineNo">2531</span>  }<a name="line.2531"></a>
 <span class="sourceLineNo">2532</span><a name="line.2532"></a>
 <span class="sourceLineNo">2533</span>  @Override<a name="line.2533"></a>
-<span class="sourceLineNo">2534</span>  public long getFlushedCellsSize() {<a name="line.2534"></a>
-<span class="sourceLineNo">2535</span>    return flushedCellsSize.get();<a name="line.2535"></a>
+<span class="sourceLineNo">2534</span>  public boolean hasTooManyStoreFiles() {<a name="line.2534"></a>
+<span class="sourceLineNo">2535</span>    return getStorefilesCount() &gt; this.blockingFileCount;<a name="line.2535"></a>
 <span class="sourceLineNo">2536</span>  }<a name="line.2536"></a>
 <span class="sourceLineNo">2537</span><a name="line.2537"></a>
 <span class="sourceLineNo">2538</span>  @Override<a name="line.2538"></a>
-<span class="sourceLineNo">2539</span>  public long getFlushedOutputFileSize() {<a name="line.2539"></a>
-<span class="sourceLineNo">2540</span>    return flushedOutputFileSize.get();<a name="line.2540"></a>
+<span class="sourceLineNo">2539</span>  public long getFlushedCellsCount() {<a name="line.2539"></a>
+<span class="sourceLineNo">2540</span>    return flushedCellsCount.get();<a name="line.2540"></a>
 <span class="sourceLineNo">2541</span>  }<a name="line.2541"></a>
 <span class="sourceLineNo">2542</span><a name="line.2542"></a>
 <span class="sourceLineNo">2543</span>  @Override<a name="line.2543"></a>
-<span class="sourceLineNo">2544</span>  public long getCompactedCellsCount() {<a name="line.2544"></a>
-<span class="sourceLineNo">2545</span>    return compactedCellsCount.get();<a name="line.2545"></a>
+<span class="sourceLineNo">2544</span>  public long getFlushedCellsSize() {<a name="line.2544"></a>
+<span class="sourceLineNo">2545</span>    return flushedCellsSize.get();<a name="line.2545"></a>
 <span class="sourceLineNo">2546</span>  }<a name="line.2546"></a>
 <span class="sourceLineNo">2547</span><a name="line.2547"></a>
 <span class="sourceLineNo">2548</span>  @Override<a name="line.2548"></a>
-<span class="sourceLineNo">2549</span>  public long getCompactedCellsSize() {<a name="line.2549"></a>
-<span class="sourceLineNo">2550</span>    return compactedCellsSize.get();<a name="line.2550"></a>
+<span class="sourceLineNo">2549</span>  public long getFlushedOutputFileSize() {<a name="line.2549"></a>
+<span class="sourceLineNo">2550</span>    return flushedOutputFileSize.get();<a name="line.2550"></a>
 <span class="sourceLineNo">2551</span>  }<a name="line.2551"></a>
 <span class="sourceLineNo">2552</span><a name="line.2552"></a>
 <span class="sourceLineNo">2553</span>  @Override<a name="line.2553"></a>
-<span class="sourceLineNo">2554</span>  public long getMajorCompactedCellsCount() {<a name="line.2554"></a>
-<span class="sourceLineNo">2555</span>    return majorCompactedCellsCount.get();<a name="line.2555"></a>
+<span class="sourceLineNo">2554</span>  public long getCompactedCellsCount() {<a name="line.2554"></a>
+<span class="sourceLineNo">2555</span>    return compactedCellsCount.get();<a name="line.2555"></a>
 <span class="sourceLineNo">2556</span>  }<a name="line.2556"></a>
 <span class="sourceLineNo">2557</span><a name="line.2557"></a>
 <span class="sourceLineNo">2558</span>  @Override<a name="line.2558"></a>
-<span class="sourceLineNo">2559</span>  public long getMajorCompactedCellsSize() {<a name="line.2559"></a>
-<span class="sourceLineNo">2560</span>    return majorCompactedCellsSize.get();<a name="line.2560"></a>
+<span class="sourceLineNo">2559</span>  public long getCompactedCellsSize() {<a name="line.2559"></a>
+<span class="sourceLineNo">2560</span>    return compactedCellsSize.get();<a name="line.2560"></a>
 <span class="sourceLineNo">2561</span>  }<a name="line.2561"></a>
 <span class="sourceLineNo">2562</span><a name="line.2562"></a>
-<span class="sourceLineNo">2563</span>  /**<a name="line.2563"></a>
-<span class="sourceLineNo">2564</span>   * Returns the StoreEngine that is backing this concrete implementation of Store.<a name="line.2564"></a>
-<span class="sourceLineNo">2565</span>   * @return Returns the {@link StoreEngine} object used internally inside this HStore object.<a name="line.2565"></a>
-<span class="sourceLineNo">2566</span>   */<a name="line.2566"></a>
-<span class="sourceLineNo">2567</span>  @VisibleForTesting<a name="line.2567"></a>
-<span class="sourceLineNo">2568</span>  public StoreEngine&lt;?, ?, ?, ?&gt; getStoreEngine() {<a name="line.2568"></a>
-<span class="sourceLineNo">2569</span>    return this.storeEngine;<a name="line.2569"></a>
-<span class="sourceLineNo">2570</span>  }<a name="line.2570"></a>
-<span class="sourceLineNo">2571</span><a name="line.2571"></a>
-<span class="sourceLineNo">2572</span>  protected OffPeakHours getOffPeakHours() {<a name="line.2572"></a>
-<span class="sourceLineNo">2573</span>    return this.offPeakHours;<a name="line.2573"></a>
-<span class="sourceLineNo">2574</span>  }<a name="line.2574"></a>
-<span class="sourceLineNo">2575</span><a name="line.2575"></a>
-<span class="sourceLineNo">2576</span>  /**<a name="line.2576"></a>
-<span class="sourceLineNo">2577</span>   * {@inheritDoc}<a name="line.2577"></a>
-<span class="sourceLineNo">2578</span>   */<a name="line.2578"></a>
-<span class="sourceLineNo">2579</span>  @Override<a name="line.2579"></a>
-<span class="sourceLineNo">2580</span>  public void onConfigurationChange(Configuration conf) {<a name="line.2580"></a>
-<span class="sourceLineNo">2581</span>    this.conf = new CompoundConfiguration()<a name="line.2581"></a>
-<span class="sourceLineNo">2582</span>            .add(conf)<a name="line.2582"></a>
-<span class="sourceLineNo">2583</span>            .addBytesMap(family.getValues());<a name="line.2583"></a>
-<span class="sourceLineNo">2584</span>    this.storeEngine.compactionPolicy.setConf(conf);<a name="line.2584"></a>
-<span class="sourceLineNo">2585</span>    this.offPeakHours = OffPeakHours.getInstance(conf);<a name="line.2585"></a>
-<span class="sourceLineNo">2586</span>  }<a name="line.2586"></a>
-<span class="sourceLineNo">2587</span><a name="line.2587"></a>
-<span class="sourceLineNo">2588</span>  /**<a name="line.2588"></a>
-<span class="sourceLineNo">2589</span>   * {@inheritDoc}<a name="line.2589"></a>
-<span class="sourceLineNo">2590</span>   */<a name="line.2590"></a>
-<span class="sourceLineNo">2591</span>  @Override<a name="line.2591"></a>
-<span class="sourceLineNo">2592</span>  public void registerChildren(ConfigurationManager manager) {<a name="line.2592"></a>
-<span class="sourceLineNo">2593</span>    // No children to register<a name="line.2593"></a>
-<span class="sourceLineNo">2594</span>  }<a name="line.2594"></a>
-<span class="sourceLineNo">2595</span><a name="line.2595"></a>
-<span class="sourceLineNo">2596</span>  /**<a name="line.2596"></a>
-<span class="sourceLineNo">2597</span>   * {@inheritDoc}<a name="line.2597"></a>
-<span class="sourceLineNo">2598</span>   */<a name="line.2598"></a>
-<span class="sourceLineNo">2599</span>  @Override<a name="line.2599"></a>
-<span class="sourceLineNo">2600</span>  public void deregisterChildren(ConfigurationManager manager) {<a name="line.2600"></a>
-<span class="sourceLineNo">2601</span>    // No children to deregister<a name="line.2601"></a>
-<span class="sourceLineNo">2602</span>  }<a name="line.2602"></a>
-<span class="sourceLineNo">2603</span><a name="line.2603"></a>
-<span class="sourceLineNo">2604</span>  @Override<a name="line.2604"></a>
-<span class="sourceLineNo">2605</span>  public double getCompactionPressure() {<a name="line.2605"></a>
-<span class="sourceLineNo">2606</span>    return storeEngine.getStoreFileManager().getCompactionPressure();<a name="line.2606"></a>
-<span class="sourceLineNo">2607</span>  }<a name="line.2607"></a>
-<span class="sourceLineNo">2608</span><a name="line.2608"></a>
+<span class="sourceLineNo">2563</span>  @Override<a name="line.2563"></a>
+<span class="sourceLineNo">2564</span>  public long getMajorCompactedCellsCount() {<a name="line.2564"></a>
+<span class="sourceLineNo">2565</span>    return majorCompactedCellsCount.get();<a name="line.2565"></a>
+<span class="sourceLineNo">2566</span>  }<a name="line.2566"></a>
+<span class="sourceLineNo">2567</span><a name="line.2567"></a>
+<span class="sourceLineNo">2568</span>  @Override<a name="line.2568"></a>
+<span class="sourceLineNo">2569</span>  public long getMajorCompactedCellsSize() {<a name="line.2569"></a>
+<span class="sourceLineNo">2570</span>    return majorCompactedCellsSize.get();<a name="line.2570"></a>
+<span class="sourceLineNo">2571</span>  }<a name="line.2571"></a>
+<span class="sourceLineNo">2572</span><a name="line.2572"></a>
+<span class="sourceLineNo">2573</span>  /**<a name="line.2573"></a>
+<span class="sourceLineNo">2574</span>   * Returns the StoreEngine that is backing this concrete implementation of Store.<a name="line.2574"></a>
+<span class="sourceLineNo">2575</span>   * @return Returns the {@link StoreEngine} object used internally inside this HStore object.<a name="line.2575"></a>
+<span class="sourceLineNo">2576</span>   */<a name="line.2576"></a>
+<span class="sourceLineNo">2577</span>  @VisibleForTesting<a name="line.2577"></a>
+<span class="sourceLineNo">2578</span>  public StoreEngine&lt;?, ?, ?, ?&gt; getStoreEngine() {<a name="line.2578"></a>
+<span class="sourceLineNo">2579</span>    return this.storeEngine;<a name="line.2579"></a>
+<span class="sourceLineNo">2580</span>  }<a name="line.2580"></a>
+<span class="sourceLineNo">2581</span><a name="line.2581"></a>
+<span class="sourceLineNo">2582</span>  protected OffPeakHours getOffPeakHours() {<a name="line.2582"></a>
+<span class="sourceLineNo">2583</span>    return this.offPeakHours;<a name="line.2583"></a>
+<span class="sourceLineNo">2584</span>  }<a name="line.2584"></a>
+<span class="sourceLineNo">2585</span><a name="line.2585"></a>
+<span class="sourceLineNo">2586</span>  /**<a name="line.2586"></a>
+<span class="sourceLineNo">2587</span>   * {@inheritDoc}<a name="line.2587"></a>
+<span class="sourceLineNo">2588</span>   */<a name="line.2588"></a>
+<span class="sourceLineNo">2589</span>  @Override<a name="line.2589"></a>
+<span class="sourceLineNo">2590</span>  public void onConfigurationChange(Configuration conf) {<a name="line.2590"></a>
+<span class="sourceLineNo">2591</span>    this.conf = new CompoundConfiguration()<a name="line.2591"></a>
+<span class="sourceLineNo">2592</span>            .add(conf)<a name="line.2592"></a>
+<span class="sourceLineNo">2593</span>            .addBytesMap(family.getValues());<a name="line.2593"></a>
+<span class="sourceLineNo">2594</span>    this.storeEngine.compactionPolicy.setConf(conf);<a name="line.2594"></a>
+<span class="sourceLineNo">2595</span>    this.offPeakHours = OffPeakHours.getInstance(conf);<a name="line.2595"></a>
+<span class="sourceLineNo">2596</span>  }<a name="line.2596"></a>
+<span class="sourceLineNo">2597</span><a name="line.2597"></a>
+<span class="sourceLineNo">2598</span>  /**<a name="line.2598"></a>
+<span class="sourceLineNo">2599</span>   * {@inheritDoc}<a name="line.2599"></a>
+<span class="sourceLineNo">2600</span>   */<a name="line.2600"></a>
+<span class="sourceLineNo">2601</span>  @Override<a name="line.2601"></a>
+<span class="sourceLineNo">2602</span>  public void registerChildren(ConfigurationManager manager) {<a name="line.2602"></a>
+<span class="sourceLineNo">2603</span>    // No children to register<a name="line.2603"></a>
+<span class="sourceLineNo">2604</span>  }<a name="line.2604"></a>
+<span class="sourceLineNo">2605</span><a name="line.2605"></a>
+<span class="sourceLineNo">2606</span>  /**<a name="line.2606"></a>
+<span class="sourceLineNo">2607</span>   * {@inheritDoc}<a name="line.2607"></a>
+<span class="sourceLineNo">2608</span>   */<a name="line.2608"></a>
 <span class="sourceLineNo">2609</span>  @Override<a name="line.2609"></a>
-<span class="sourceLineNo">2610</span>  public boolean isPrimaryReplicaStore() {<a name="line.2610"></a>
-<span class="sourceLineNo">2611</span>    return getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID;<a name="line.2611"></a>
+<span class="sourceLineNo">2610</span>  public void deregisterChildren(ConfigurationManager manager) {<a name="line.2610"></a>
+<span class="sourceLineNo">2611</span>    // No children to deregister<a name="line.2611"></a>
 <span class="sourceLineNo">2612</span>  }<a name="line.2612"></a>
 <span class="sourceLineNo">2613</span><a name="line.2613"></a>
-<span class="sourceLineNo">2614</span>  /**<a name="line.2614"></a>
-<span class="sourceLineNo">2615</span>   * Sets the store up for a region level snapshot operation.<a name="line.2615"></a>
-<span class="sourceLineNo">2616</span>   * @see #postSnapshotOperation()<a name="line.2616"></a>
-<span class="sourceLineNo">2617</span>   */<a name="line.2617"></a>
-<span class="sourceLineNo">2618</span>  public void preSnapshotOperation() {<a name="line.2618"></a>
-<span class="sourceLineNo">2619</span>    archiveLock.lock();<a name="line.2619"></a>
-<span class="sourceLineNo">2620</span>  }<a name="line.2620"></a>
-<span class="sourceLineNo">2621</span><a name="line.2621"></a>
-<span class="sourceLineNo">2622</span>  /**<a name="line.2622"></a>
-<span class="sourceLineNo">2623</span>   * Perform tasks needed after the completion of snapshot operation.<a name="line.2623"></a>
-<span class="sourceLineNo">2624</span>   * @see #preSnapshotOperation()<a name="line.2624"></a>
-<span class="sourceLineNo">2625</span>   */<a name="line.2625"></a>
-<span class="sourceLineNo">2626</span>  public void postSnapshotOperation() {<a name="line.2626"></a>
-<span class="sourceLineNo">2627</span>    archiveLock.unlock();<a name="line.2627"></a>
-<span class="sourceLineNo">2628</span>  }<a name="line.2628"></a>
-<span class="sourceLineNo">2629</span><a name="line.2629"></a>
-<span class="sourceLineNo">2630</span>  /**<a name="line.2630"></a>
-<span class="sourceLineNo">2631</span>   * Closes and archives the compacted files under this store<a name="line.2631"></a>
-<span class="sourceLineNo">2632</span>   */<a name="line.2632"></a>
-<span class="sourceLineNo">2633</span>  public synchronized void closeAndArchiveCompactedFiles() throws IOException {<a name="line.2633"></a>
-<span class="sourceLineNo">2634</span>    // ensure other threads do not attempt to archive the same files on close()<a name="line.2634"></a>
-<span class="sourceLineNo">2635</span>    archiveLock.lock();<a name="line.2635"></a>
-<span class="sourceLineNo">2636</span>    try {<a name="line.2636"></a>
-<span class="sourceLineNo">2637</span>      lock.readLock().lock();<a name="line.2637"></a>
-<span class="sourceLineNo">2638</span>      Collection&lt;HStoreFile&gt; copyCompactedfiles = null;<a name="line.2638"></a>
-<span class="sourceLineNo">2639</span>      try {<a name="line.2639"></a>
-<span class="sourceLineNo">2640</span>        Collection&lt;HStoreFile&gt; compactedfiles =<a name="line.2640"></a>
-<span class="sourceLineNo">2641</span>            this.getStoreEngine().getStoreFileManager().getCompactedfiles();<a name="line.2641"></a>
-<span class="sourceLineNo">2642</span>        if (CollectionUtils.isNotEmpty(compactedfiles)) {<a name="line.2642"></a>
-<span class="sourceLineNo">2643</span>          // Do a copy under read lock<a name="line.2643"></a>
-<span class="sourceLineNo">2644</span>          copyCompactedfiles = new ArrayList&lt;&gt;(compactedfiles);<a name="line.2644"></a>
-<span class="sourceLineNo">2645</span>        } else {<a name="line.2645"></a>
-<span class="sourceLineNo">2646</span>          LOG.trace("No compacted files to archive");<a name="line.2646"></a>
-<span class="sourceLineNo">2647</span>        }<a name="line.2647"></a>
-<span class="sourceLineNo">2648</span>      } finally {<a name="line.2648"></a>
-<span class="sourceLineNo">2649</span>        lock.readLock().unlock();<a name="line.2649"></a>
-<span class="sourceLineNo">2650</span>      }<a name="line.2650"></a>
-<span class="sourceLineNo">2651</span>      if (CollectionUtils.isNotEmpty(copyCompactedfiles)) {<a name="line.2651"></a>
-<span class="sourceLineNo">2652</span>        removeCompactedfiles(copyCompactedfiles);<a name="line.2652"></a>
-<span class="sourceLineNo">2653</span>      }<a name="line.2653"></a>
-<span class="sourceLineNo">2654</span>    } finally {<a name="line.2654"></a>
-<span class="sourceLineNo">2655</span>      archiveLock.unlock();<a name="line.2655"></a>
-<span class="sourceLineNo">2656</span>    }<a name="line.2656"></a>
-<span class="sourceLineNo">2657</span>  }<a name="line.2657"></a>
-<span class="sourceLineNo">2658</span><a name="line.2658"></a>
-<span class="sourceLineNo">2659</span>  /**<a name="line.2659"></a>
-<span class="sourceLineNo">2660</span>   * Archives and removes the compacted files<a name="line.2660"></a>
-<span class="sourceLineNo">2661</span>   * @param compactedfiles The compacted files in this store that are not active in reads<a name="line.2661"></a>
-<span class="sourceLineNo">2662</span>   */<a name="line.2662"></a>
-<span class="sourceLineNo">2663</span>  private void removeCompactedfiles(Collection&lt;HStoreFile&gt; compactedfiles)<a name="line.2663"></a>
-<span class="sourceLineNo">2664</span>      throws IOException {<a name="line.2664"></a>
-<span class="sourceLineNo">2665</span>    final List&lt;HStoreFile&gt; filesToRemove = new ArrayList&lt;&gt;(compactedfiles.size());<a name="line.2665"></a>
-<span class="sourceLineNo">2666</span>    final List&lt;Long&gt; storeFileSizes = new ArrayList&lt;&gt;(compactedfiles.size());<a name="line.2666"></a>
-<span class="sourceLineNo">2667</span>    for (final HStoreFile file : compactedfiles) {<a name="line.2667"></a>
-<span class="sourceLineNo">2668</span>      synchronized (file) {<a name="line.2668"></a>
-<span class="sourceLineNo">2669</span>        try {<a name="line.2669"></a>
-<span class="sourceLineNo">2670</span>          StoreFileReader r = file.getReader();<a name="line.2670"></a>
-<span class="sourceLineNo">2671</span>          if (r == null) {<a name="line.2671"></a>
-<span class="sourceLineNo">2672</span>            LOG.debug("The file {} was closed but still not archived", file);<a name="line.2672"></a>
-<span class="sourceLineNo">2673</span>            // HACK: Temporarily re-open the reader so we can get the size of the file. Ideally,<a name="line.2673"></a>
-<span class="sourceLineNo">2674</span>            // we should know the size of an HStoreFile without having to ask the HStoreFileReader<a name="line.2674"></a>
-<span class="sourceLineNo">2675</span>            // for that.<a name="line.2675"></a>
-<span class="sourceLineNo">2676</span>            long length = getStoreFileSize(file);<a name="line.2676"></a>
-<span class="sourceLineNo">2677</span>            filesToRemove.add(file);<a name="line.2677"></a>
-<span class="sourceLineNo">2678</span>            storeFileSizes.add(length);<a name="line.2678"></a>
-<span class="sourceLineNo">2679</span>            continue;<a name="line.2679"></a>
-<span class="sourceLineNo">2680</span>          }<a name="line.2680"></a>
-<span class="sourceLineNo">2681</span><a name="line.2681"></a>
-<span class="sourceLineNo">2682</span>          if (file.isCompactedAway() &amp;&amp; !file.isReferencedInReads()) {<a name="line.2682"></a>
-<span class="sourceLineNo">2683</span>            // Even if deleting fails we need not bother as any new scanners won't be<a name="line.2683"></a>
-<span class="sourceLineNo">2684</span>            // able to use the compacted file as the status is already compactedAway<a name="line.2684"></a>
-<span class="sourceLineNo">2685</span>            LOG.trace("Closing and archiving the file {}", file);<a name="line.2685"></a>
-<span class="sourceLineNo">2686</span>            // Copy the file size before closing the reader<a name="line.2686"></a>
-<span class="sourceLineNo">2687</span>            final long length = r.length();<a name="line.2687"></a>
-<span class="sourceLineNo">2688</span>            r.close(true);<a name="line.2688"></a>
-<span class="sourceLineNo">2689</span>            // Just close and return<a name="line.2689"></a>
-<span class="sourceLineNo">2690</span>            filesToRemove.add(file);<a name="line.2690"></a>
-<span class="sourceLineNo">2691</span>            // Only add the length if we successfully added the file to `filesToRemove`<a name="line.2691"></a>
-<span class="sourceLineNo">2692</span>            storeFileSizes.add(length);<a name="line.2692"></a>
-<span class="sourceLineNo">2693</span>          } else {<a name="line.2693"></a>
-<span class="sourceLineNo">2694</span>            LOG.info("Can't archive compacted file " + file.getPath()<a name="line.2694"></a>
-<span class="sourceLineNo">2695</span>                + " because of either isCompactedAway=" + file.isCompactedAway()<a name="line.2695"></a>
-<span class="sourceLineNo">2696</span>                + " or file has reference, isReferencedInReads=" + file.isReferencedInReads()<a name="line.2696"></a>
-<span class="sourceLineNo">2697</span>                + ", refCount=" + r.getRefCount() + ", skipping for now.");<a name="line.2697"></a>
-<span class="sourceLineNo">2698</span>          }<a name="line.2698"></a>
-<span class="sourceLineNo">2699</span>        } catch (Exception e) {<a name="line.2699"></a>
-<span class="sourceLineNo">2700</span>          LOG.error("Exception while trying to close the compacted store file {}", file.getPath(),<a name="line.2700"></a>
-<span class="sourceLineNo">2701</span>              e);<a name="line.2701"></a>
-<span class="sourceLineNo">2702</span>        }<a name="line.2702"></a>
-<span class="sourceLineNo">2703</span>      }<a name="line.2703"></a>
-<span class="sourceLineNo">2704</span>    }<a name="line.2704"></a>
-<span class="sourceLineNo">2705</span>    if (this.isPrimaryReplicaStore()) {<a name="line.2705"></a>
-<span class="sourceLineNo">2706</span>      // Only the primary region is allowed to move the file to archive.<a name="line.2706"></a>
-<span class="sourceLineNo">2707</span>      // The secondary region does not move the files to archive. Any active reads from<a name="line.2707"></a>
-<span class="sourceLineNo">2708</span>      // the secondary region will still work because the file as such has active readers on it.<a name="line.2708"></a>
-<span class="sourceLineNo">2709</span>      if (!filesToRemove.isEmpty()) {<a name="line.2709"></a>
-<span class="sourceLineNo">2710</span>        LOG.debug("Moving the files {} to archive", filesToRemove);<a name="line.2710"></a>
-<span class="sourceLineNo">2711</span>        // Only if this is successful it has to be removed<a name="line.2711"></a>
-<span class="sourceLineNo">2712</span>        try {<a name="line.2712"></a>
-<span class="sourceLineNo">2713</span>          this.fs.removeStoreFiles(this.getColumnFamilyDescriptor().getNameAsString(), filesToRemove);<a name="line.2713"></a>
-<span class="sourceLineNo">2714</span>        } catch (FailedArchiveException fae) {<a name="line.2714"></a>
-<span class="sourceLineNo">2715</span>          // Even if archiving some files failed, we still need to clear out any of the<a name="line.2715"></a>
-<span class="sourceLineNo">2716</span>          // files which were successfully archived.  Otherwise we will receive a<a name="line.2716"></a>
-<span class="sourceLineNo">2717</span>          // FileNotFoundException when we attempt to re-archive them in the next go around.<a name="line.2717"></a>
-<span class="sourceLineNo">2718</span>          Collection&lt;Path&gt; failedFiles = fae.getFailedFiles();<a name="line.2718"></a>
-<span class="sourceLineNo">2719</span>          Iterator&lt;HStoreFile&gt; iter = filesToRemove.iterator();<a name="line.2719"></a>
-<span class="sourceLineNo">2720</span>          Iterator&lt;Long&gt; sizeIter = storeFileSizes.iterator();<a name="line.2720"></a>
-<span class="sourceLineNo">2721</span>          while (iter.hasNext()) {<a name="line.2721"></a>
-<span class="sourceLineNo">2722</span>            sizeIter.next();<a name="line.2722"></a>
-<span class="sourceLineNo">2723</span>            if (failedFiles.contains(iter.next().getPath())) {<a name="line.2723"></a>
-<span class="sourceLineNo">2724</span>              iter.remove();<a name="line.2724"></a>
-<span class="sourceLineNo">2725</span>              sizeIter.remove();<a name="line.2725"></a>
-<span class="sourceLineNo">2726</span>            }<a name="line.2726"></a>
-<span class="sourceLineNo">2727</span>          }<a name="line.2727"></a>
-<span class="sourceLineNo">2728</span>          if (!filesToRemove.isEmpty()) {<a name="line.2728"></a>
-<span class="sourceLineNo">2729</span>            clearCompactedfiles(filesToRemove);<a name="line.2729"></a>
-<span class="sourceLineNo">2730</span>          }<a name="line.2730"></a>
-<span class="sourceLineNo">2731</span>          throw fae;<a name="line.2731"></a>
-<span class="sourceLineNo">2732</span>        }<a name="line.2732"></a>
-<span class="sourceLineNo">2733</span>      }<a name="line.2733"></a>
-<span class="sourceLineNo">2734</span>    }<a name="line.2734"></a>
-<span class="sourceLineNo">2735</span>    if (!filesToRemove.isEmpty()) {<a name="line.2735"></a>
-<span class="sourceLineNo">2736</span>      // Clear the compactedfiles from the store file manager<a name="line.2736"></a>
-<span class="sourceLineNo">2737</span>      clearCompactedfiles(filesToRemove);<a name="line.2737"></a>
-<span class="sourceLineNo">2738</span>      // Try to send report of this archival to the Master for updating quota usage faster<a name="line.2738"></a>
-<span class="sourceLineNo">2739</span>      reportArchivedFilesForQuota(filesToRemove, storeFileSizes);<a name="line.2739"></a>
-<span class="sourceLineNo">2740</span>    }<a name="line.2740"></a>
-<span class="sourceLineNo">2741</span>  }<a name="line.2741"></a>
-<span class="sourceLineNo">2742</span><a name="line.2742"></a>
-<span class="sourceLineNo">2743</span>  /**<a name="line.2743"></a>
-<span class="sourceLineNo">2744</span>   * Computes the length of a store file without succumbing to any errors along the way. If an<a name="line.2744"></a>
-<span class="sourceLineNo">2745</span>   * error is encountered, the implementation returns {@code 0} instead of the actual size.<a name="line.2745"></a>
-<span class="sourceLineNo">2746</span>   *<a name="line.2746"></a>
-<span class="sourceLineNo">2747</span>   * @param file The file to compute the size of.<a name="line.2747"></a>
-<span class="sourceLineNo">2748</span>   * @return The size in bytes of the provided {@code file}.<a name="line.2748"></a>
-<span class="sourceLineNo">2749</span>   */<a name="line.2749"></a>
-<span class="sourceLineNo">2750</span>  long getStoreFileSize(HStoreFile file) {<a name="line.2750"></a>
-<span class="sourceLineNo">2751</span>    long length = 0;<a name="line.2751"></a>
-<span class="sourceLineNo">2752</span>    try {<a name="line.2752"></a>
-<span class="sourceLineNo">2753</span>      file.initReader();<a name="line.2753"></a>
-<span class="sourceLineNo">2754</span>      length = file.getReader().length();<a name="line.2754"></a>
-<span class="sourceLineNo">2755</span>    } catch (IOException e) {<a name="line.2755"></a>
-<span class="sourceLineNo">2756</span>      LOG.trace("Failed to open reader when trying to compute store file size, ignoring", e);<a name="line.2756"></a>
-<span class="sourceLineNo">2757</span>    } finally {<a name="line.2757"></a>
-<span class="sourceLineNo">2758</span>      try {<a name="line.2758"></a>
-<span class="sourceLineNo">2759</span>        file.closeStoreFile(<a name="line.2759"></a>
-<span class="sourceLineNo">2760</span>            file.getCacheConf() != null ? file.getCacheConf().shouldEvictOnClose() : true);<a name="line.2760"></a>
-<span class="sourceLineNo">2761</span>      } catch (IOException e) {<a name="line.2761"></a>
-<span class="sourceLineNo">2762</span>        LOG.trace("Failed to close reader after computing store file size, ignoring", e);<a name="line.2762"></a>
-<span class="sourceLineNo">2763</span>      }<a name="line.2763"></a>
-<span class="sourceLineNo">2764</span>    }<a name="line.2764"></a>
-<span class="sourceLineNo">2765</span>    return length;<a name="line.2765"></a>
-<span class="sourceLineNo">2766</span>  }<a name="line.2766"></a>
-<span class="sourceLineNo">2767</span><a name="line.2767"></a>
-<span class="sourceLineNo">2768</span>  public Long preFlushSeqIDEstimation() {<a name="line.2768"></a>
-<span class="sourceLineNo">2769</span>    return memstore.preFlushSeqIDEstimation();<a name="line.2769"></a>
-<span class="sourceLineNo">2770</span>  }<a name="line.2770"></a>
-<span class="sourceLineNo">2771</span><a name="line.2771"></a>
-<span class="sourceLineNo">2772</span>  @Override<a name="line.2772"></a>
-<span class="sourceLineNo">2773</span>  public boolean isSloppyMemStore() {<a name="line.2773"></a>
-<span class="sourceLineNo">2774</span>    return this.memstore.isSloppy();<a name="line.2774"></a>
-<span class="sourceLineNo">2775</span>  }<a name="line.2775"></a>
-<span class="sourceLineNo">2776</span><a name="line.2776"></a>
-<span class="sourceLineNo">2777</span>  private void clearCompactedfiles(List&lt;HStoreFile&gt; filesToRemove) throws IOException {<a name="line.2777"></a>
-<span class="sourceLineNo">2778</span>    LOG.trace("Clearing the compacted file {} from this store", filesToRemove);<a name="line.2778"></a>
-<span class="sourceLineNo">2779</span>    try {<a name="line.2779"></a>
-<span class="sourceLineNo">2780</span>      lock.writeLock().lock();<a name="line.2780"></a>
-<span class="sourceLineNo">2781</span>      this.getStoreEngine().getStoreFileManager().removeCompactedFiles(filesToRemove);<a name="line.2781"></a>
-<span class="sourceLineNo">2782</span>    } finally {<a name="line.2782"></a>
-<span class="sourceLineNo">2783</span>      lock.writeLock().unlock();<a name="line.2783"></a>
-<span class="sourceLineNo">2784</span>    }<a name="line.2784"></a>
+<span class="sourceLineNo">2614</span>  @Override<a name="line.2614"></a>
+<span class="sourceLineNo">2615</span>  public double getCompactionPressure() {<a name="line.2615"></a>
+<span class="sourceLineNo">2616</span>    return storeEngine.getStoreFileManager().getCompactionPressure();<a name="line.2616"></a>
+<span class="sourceLineNo">2617</span>  }<a name="line.2617"></a>
+<span class="sourceLineNo">2618</span><a name="line.2618"></a>
+<span class="sourceLineNo">2619</span>  @Override<a name="line.2619"></a>
+<span class="sourceLineNo">2620</span>  public boolean isPrimaryReplicaStore() {<a name="line.2620"></a>
+<span class="sourceLineNo">2621</span>    return getRegionInfo().getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID;<a name="line.2621"></a>
+<span class="sourceLineNo">2622</span>  }<a name="line.2622"></a>
+<span class="sourceLineNo">2623</span><a name="line.2623"></a>
+<span class="sourceLineNo">2624</span>  /**<a name="line.2624"></a>
+<span class="sourceLineNo">2625</span>   * Sets the store up for a region level snapshot operation.<a name="line.2625"></a>
+<span class="sourceLineNo">2626</span>   * @see #postSnapshotOperation()<a name="line.2626"></a>
+<span class="sourceLineNo">2627</span>   */<a name="line.2627"></a>
+<span class="sourceLineNo">2628</span>  public void preSnapshotOperation() {<a name="line.2628"></a>
+<span class="sourceLineNo">2629</span>    archiveLock.lock();<a name="line.2629"></a>
+<span class="sourceLineNo">2630</span>  }<a name="line.2630"></a>
+<span class="sourceLineNo">2631</span><a name="line.2631"></a>
+<span class="sourceLineNo">2632</span>  /**<a name="line.2632"></a>
+<span class="sourceLineNo">2633</span>   * Perform tasks needed after the completion of snapshot operation.<a name="line.2633"></a>
+<span class="sourceLineNo">2634</span>   * @see #preSnapshotOperation()<a name="line.2634"></a>
+<span class="sourceLineNo">2635</span>   */<a name="line.2635"></a>
+<span class="sourceLineNo">2636</span>  public void postSnapshotOperation() {<a name="line.2636"></a>
+<span class="sourceLineNo">2637</span>    archiveLock.unlock();<a name="line.2637"></a>
+<span class="sourceLineNo">2638</span>  }<a name="line.2638"></a>
+<span class="sourceLineNo">2639</span><a name="line.2639"></a>
+<span class="sourceLineNo">2640</span>  /**<a name="line.2640"></a>
+<span class="sourceLineNo">2641</span>   * Closes and archives the compacted files under this store<a name="line.2641"></a>
+<span class="sourceLineNo">2642</span>   */<a name="line.2642"></a>
+<span class="sourceLineNo">2643</span>  public synchronized void closeAndArchiveCompactedFiles() throws IOException {<a name="line.2643"></a>
+<span class="sourceLineNo">2644</span>    // ensure other threads do not attempt to archive the same files on close()<a name="line.2644"></a>
+<span class="sourceLineNo">2645</span>    archiveLock.lock();<a name="line.2645"></a>
+<span class="sourceLineNo">2646</span>    try {<a name="line.2646"></a>
+<span class="sourceLineNo">2647</span>      lock.readLock().lock();<a name="line.2647"></a>
+<span class="sourceLineNo">2648</span>      Collection&lt;HStoreFile&gt; copyCompactedfiles = null;<a name="line.2648"></a>
+<span class="sourceLineNo">2649</span>      try {<a name="line.2649"></a>
+<span class="sourceLineNo">2650</span>        Collection&lt;HStoreFile&gt; compactedfiles =<a name="line.2650"></a>
+<span class="sourceLineNo">2651</span>            this.getStoreEngine().getStoreFileManager().getCompactedfiles();<a name="line.2651"></a>
+<span class="sourceLineNo">2652</span>        if (CollectionUtils.isNotEmpty(compactedfiles)) {<a name="line.2652"></a>
+<span class="sourceLineNo">2653</span>          // Do a copy under read lock<a name="line.2653"></a>
+<span class="sourceLineNo">2654</span>          copyCompactedfiles = new ArrayList&lt;&gt;(compactedfiles);<a name="line.2654"></a>
+<span class="sourceLineNo">2655</span>        } else {<a name="line.2655"></a>
+<span class="sourceLineNo">2656</span>          LOG.trace("No compacted files to archive");<a name="line.2656"></a>
+<span class="sourceLineNo">2657</span>        }<a name="line.2657"></a>
+<span class="sourceLineNo">2658</span>      } finally {<a name="line.2658"></a>
+<span class="sourceLineNo">2659</span>        lock.readLock().unlock();<a name="line.2659"></a>
+<span class="sourceLineNo">2660</span>      }<a name="line.2660"></a>
+<span class="sourceLineNo">2661</span>      if (CollectionUtils.isNotEmpty(copyCompactedfiles)) {<a name="line.2661"></a>
+<span class="sourceLineNo">2662</span>        removeCompactedfiles(copyCompactedfiles);<a name="line.2662"></a>
+<span class="sourceLineNo">2663</span>      }<a name="line.2663"></a>
+<span class="sourceLineNo">2664</span>    } finally {<a name="line.2664"></a>
+<span class="sourceLineNo">2665</span>      archiveLock.unlock();<a name="line.2665"></a>
+<span class="sourceLineNo">2666</span>    }<a name="line.2666"></a>
+<span class="sourceLineNo">2667</span>  }<a name="line.2667"></a>
+<span class="sourceLineNo">2668</span><a name="line.2668"></a>
+<span class="sourceLineNo">2669</span>  /**<a name="line.2669"></a>
+<span class="sourceLineNo">2670</span>   * Archives and removes the compacted files<a name="line.2670"></a>
+<span class="sourceLineNo">2671</span>   * @param compactedfiles The compacted files in this store that are not active in reads<a name="line.2671"></a>
+<span class="sourceLineNo">2672</span>   */<a name="line.2672"></a>
+<span class="sourceLineNo">2673</span>  private void removeCompactedfiles(Collection&lt;HStoreFile&gt; compactedfiles)<a name="line.2673"></a>
+<span class="sourceLineNo">2674</span>      throws IOException {<a name="line.2674"></a>
+<span class="sourceLineNo">2675</span>    final List&lt;HStoreFile&gt; filesToRemove = new ArrayList&lt;&gt;(compactedfiles.size());<a name="line.2675"></a>
+<span class="sourceLineNo">2676</span>    final List&lt;Long&gt; storeFileSizes = new ArrayList&lt;&gt;(compactedfiles.size());<a name="line.2676"></a>
+<span class="sourceLineNo">2677</span>    for (final HStoreFile file : compactedfiles) {<a name="line.2677"></a>
+<span class="sourceLineNo">2678</span>      synchronized (file) {<a name="line.2678"></a>
+<span class="sourceLineNo">2679</span>        try {<a name="line.2679"></a>
+<span class="sourceLineNo">2680</span>          StoreFileReader r = file.getReader();<a name="line.2680"></a>
+<span class="sourceLineNo">2681</span>          if (r == null) {<a name="line.2681"></a>
+<span class="sourceLineNo">2682</span>            LOG.debug("The file {} was closed but still not archived", file);<a name="line.2682"></a>
+<span class="sourceLineNo">2683</span>            // HACK: Temporarily re-open the reader so we can get the size of the file. Ideally,<a name="line.2683"></a>
+<span class="sourceLineNo">2684</span>            // we should know the size of an HStoreFile without having to ask the HStoreFileReader<a name="line.2684"></a>
+<span class="sourceLineNo">2685</span>            // for that.<a name="line.2685"></a>
+<span class="sourceLineNo">2686</span>            long length = getStoreFileSize(file);<a name="line.2686"></a>
+<span class="sourceLineNo">2687</span>            filesToRemove.add(file);<a name="line.2687"></a>
+<span class="sourceLineNo">2688</span>            storeFileSizes.add(