hbase-commits mailing list archives

From git-site-r...@apache.org
Subject [01/41] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
Date Sat, 29 Apr 2017 14:58:38 GMT
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 40526c106 -> 6fbeb5549


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6fbeb554/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.html
index f6e8919..b42acba 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestMobSecureExportSnapshot.html
@@ -56,14 +56,13 @@
     // setup configuration
     SecureTestUtil.enableSecurity(TEST_UTIL.getConfiguration());
 
-    TEST_UTIL.startMiniZKCluster();
+    TEST_UTIL.startMiniCluster(1, 3);
     TEST_UTIL.startMiniMapReduceCluster();
-    TEST_UTIL.startMiniHBaseCluster(1, 3);
-
-    // Wait for the ACL table to become available
-    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
-  }
-}
+
+    // Wait for the ACL table to become available
+    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
+  }
+}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6fbeb554/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.html
index 91b1c2a..4f466fa 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/snapshot/TestSecureExportSnapshot.html
@@ -61,14 +61,13 @@
     // setup configuration
     SecureTestUtil.enableSecurity(TEST_UTIL.getConfiguration());
 
-    TEST_UTIL.startMiniZKCluster();
+    TEST_UTIL.startMiniCluster(1, 3);
     TEST_UTIL.startMiniMapReduceCluster();
-    TEST_UTIL.startMiniHBaseCluster(1, 3);
-
-    // Wait for the ACL table to become available
-    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
-  }
-}
+
+    // Wait for the ACL table to become available
+    TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
+  }
+}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6fbeb554/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html
index b367564..7d0d6c1 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html
@@ -108,64 +108,77 @@
     Mockito.verify(dfs, Mockito.times(1)).isFileClosed(FILE);
   }
 
-  @Test
-  public void testIsSameHdfs() throws IOException {
-    try {
-      Class dfsUtilClazz = Class.forName("org.apache.hadoop.hdfs.DFSUtil");
-      dfsUtilClazz.getMethod("getNNServiceRpcAddresses", Configuration.class);
-    } catch (Exception e) {
-      LOG.info("Skip testIsSameHdfs test case because of the no-HA hadoop version.");
-      return;
-    }
-
-    Configuration conf = HBaseConfiguration.create();
-    Path srcPath = new Path("hdfs://localhost:8020/");
-    Path desPath = new Path("hdfs://127.0.0.1/");
-    FileSystem srcFs = srcPath.getFileSystem(conf);
-    FileSystem desFs = desPath.getFileSystem(conf);
-
-    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-
-    desPath = new Path("hdfs://127.0.0.1:8070/");
-    desFs = desPath.getFileSystem(conf);
-    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-
-    desPath = new Path("hdfs://127.0.1.1:8020/");
-    desFs = desPath.getFileSystem(conf);
-    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-
-    conf.set("fs.defaultFS", "hdfs://haosong-hadoop");
-    conf.set("dfs.nameservices", "haosong-hadoop");
-    conf.set("dfs.ha.namenodes.haosong-hadoop", "nn1,nn2");
-    conf.set("dfs.client.failover.proxy.provider.haosong-hadoop",
-            "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
-
-    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.0.0.1:8020");
-    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.10.2.1:8000");
-    desPath = new Path("/");
-    desFs = desPath.getFileSystem(conf);
-    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-
-    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.10.2.1:8020");
-    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.0.0.1:8000");
-    desPath = new Path("/");
-    desFs = desPath.getFileSystem(conf);
-    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-  }
-
-  /**
-   * Version of DFS that has HDFS-4525 in it.
-   */
-  class IsFileClosedDistributedFileSystem extends DistributedFileSystem {
-    /**
-     * Close status of a file. Copied over from HDFS-4525
-     * @return true if file is already closed
-     **/
-    public boolean isFileClosed(Path f) throws IOException{
-      return false;
+  void testIsSameHdfs(int nnport) throws IOException {
+    try {
+      Class dfsUtilClazz = Class.forName("org.apache.hadoop.hdfs.DFSUtil");
+      dfsUtilClazz.getMethod("getNNServiceRpcAddresses", Configuration.class);
+    } catch (Exception e) {
+      LOG.info("Skip testIsSameHdfs test case because of the no-HA hadoop version.");
+      return;
+    }
+
+    Configuration conf = HBaseConfiguration.create();
+    Path srcPath = new Path("hdfs://localhost:" + nnport + "/");
+    Path desPath = new Path("hdfs://127.0.0.1/");
+    FileSystem srcFs = srcPath.getFileSystem(conf);
+    FileSystem desFs = desPath.getFileSystem(conf);
+
+    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    desPath = new Path("hdfs://127.0.0.1:8070/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    desPath = new Path("hdfs://127.0.1.1:" + nnport + "/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    conf.set("fs.defaultFS", "hdfs://haosong-hadoop");
+    conf.set("dfs.nameservices", "haosong-hadoop");
+    conf.set("dfs.ha.namenodes.haosong-hadoop", "nn1,nn2");
+    conf.set("dfs.client.failover.proxy.provider.haosong-hadoop",
+        "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
+
+    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.0.0.1:"+ nnport);
+    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.10.2.1:8000");
+    desPath = new Path("/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.10.2.1:"+nnport);
+    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.0.0.1:8000");
+    desPath = new Path("/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+  }
+
+  @Test
+  public void testIsSameHdfs() throws IOException {
+    String hadoopVersion = org.apache.hadoop.util.VersionInfo.getVersion();
+    LOG.info("hadoop version is: " + hadoopVersion);
+    boolean isHadoop3 = hadoopVersion.startsWith("3.");
+    if (isHadoop3) {
+      // Hadoop 3.0.0 alpha1+ change default nn port to 9820. See HDFS-9427
+      testIsSameHdfs(9820);
+    } else {
+      // pre hadoop 3.0.0 defaults to port 8020
+      testIsSameHdfs(8020);
     }
   }
-}
+
+  /**
+   * Version of DFS that has HDFS-4525 in it.
+   */
+  class IsFileClosedDistributedFileSystem extends DistributedFileSystem {
+    /**
+     * Close status of a file. Copied over from HDFS-4525
+     * @return true if file is already closed
+     **/
+    public boolean isFileClosed(Path f) throws IOException{
+      return false;
+    }
+  }
+}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6fbeb554/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.html
index b367564..7d0d6c1 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSHDFSUtils.html
@@ -108,64 +108,77 @@
     Mockito.verify(dfs, Mockito.times(1)).isFileClosed(FILE);
   }
 
-  @Test
-  public void testIsSameHdfs() throws IOException {
-    try {
-      Class dfsUtilClazz = Class.forName("org.apache.hadoop.hdfs.DFSUtil");
-      dfsUtilClazz.getMethod("getNNServiceRpcAddresses", Configuration.class);
-    } catch (Exception e) {
-      LOG.info("Skip testIsSameHdfs test case because of the no-HA hadoop version.");
-      return;
-    }
-
-    Configuration conf = HBaseConfiguration.create();
-    Path srcPath = new Path("hdfs://localhost:8020/");
-    Path desPath = new Path("hdfs://127.0.0.1/");
-    FileSystem srcFs = srcPath.getFileSystem(conf);
-    FileSystem desFs = desPath.getFileSystem(conf);
-
-    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-
-    desPath = new Path("hdfs://127.0.0.1:8070/");
-    desFs = desPath.getFileSystem(conf);
-    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-
-    desPath = new Path("hdfs://127.0.1.1:8020/");
-    desFs = desPath.getFileSystem(conf);
-    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-
-    conf.set("fs.defaultFS", "hdfs://haosong-hadoop");
-    conf.set("dfs.nameservices", "haosong-hadoop");
-    conf.set("dfs.ha.namenodes.haosong-hadoop", "nn1,nn2");
-    conf.set("dfs.client.failover.proxy.provider.haosong-hadoop",
-            "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
-
-    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.0.0.1:8020");
-    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.10.2.1:8000");
-    desPath = new Path("/");
-    desFs = desPath.getFileSystem(conf);
-    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-
-    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.10.2.1:8020");
-    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.0.0.1:8000");
-    desPath = new Path("/");
-    desFs = desPath.getFileSystem(conf);
-    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
-  }
-
-  /**
-   * Version of DFS that has HDFS-4525 in it.
-   */
-  class IsFileClosedDistributedFileSystem extends DistributedFileSystem {
-    /**
-     * Close status of a file. Copied over from HDFS-4525
-     * @return true if file is already closed
-     **/
-    public boolean isFileClosed(Path f) throws IOException{
-      return false;
+  void testIsSameHdfs(int nnport) throws IOException {
+    try {
+      Class dfsUtilClazz = Class.forName("org.apache.hadoop.hdfs.DFSUtil");
+      dfsUtilClazz.getMethod("getNNServiceRpcAddresses", Configuration.class);
+    } catch (Exception e) {
+      LOG.info("Skip testIsSameHdfs test case because of the no-HA hadoop version.");
+      return;
+    }
+
+    Configuration conf = HBaseConfiguration.create();
+    Path srcPath = new Path("hdfs://localhost:" + nnport + "/");
+    Path desPath = new Path("hdfs://127.0.0.1/");
+    FileSystem srcFs = srcPath.getFileSystem(conf);
+    FileSystem desFs = desPath.getFileSystem(conf);
+
+    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    desPath = new Path("hdfs://127.0.0.1:8070/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    desPath = new Path("hdfs://127.0.1.1:" + nnport + "/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    conf.set("fs.defaultFS", "hdfs://haosong-hadoop");
+    conf.set("dfs.nameservices", "haosong-hadoop");
+    conf.set("dfs.ha.namenodes.haosong-hadoop", "nn1,nn2");
+    conf.set("dfs.client.failover.proxy.provider.haosong-hadoop",
+        "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
+
+    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.0.0.1:"+ nnport);
+    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.10.2.1:8000");
+    desPath = new Path("/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.10.2.1:"+nnport);
+    conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.0.0.1:8000");
+    desPath = new Path("/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+  }
+
+  @Test
+  public void testIsSameHdfs() throws IOException {
+    String hadoopVersion = org.apache.hadoop.util.VersionInfo.getVersion();
+    LOG.info("hadoop version is: " + hadoopVersion);
+    boolean isHadoop3 = hadoopVersion.startsWith("3.");
+    if (isHadoop3) {
+      // Hadoop 3.0.0 alpha1+ change default nn port to 9820. See HDFS-9427
+      testIsSameHdfs(9820);
+    } else {
+      // pre hadoop 3.0.0 defaults to port 8020
+      testIsSameHdfs(8020);
     }
   }
-}
+
+  /**
+   * Version of DFS that has HDFS-4525 in it.
+   */
+  class IsFileClosedDistributedFileSystem extends DistributedFileSystem {
+    /**
+     * Close status of a file. Copied over from HDFS-4525
+     * @return true if file is already closed
+     **/
+    public boolean isFileClosed(Path f) throws IOException{
+      return false;
+    }
+  }
+}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6fbeb554/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.WALPutBenchmark.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.WALPutBenchmark.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.WALPutBenchmark.html
index c44d7a1..a15e884 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.WALPutBenchmark.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.WALPutBenchmark.html
@@ -164,7 +164,7 @@
 <span class="sourceLineNo">156</span>          loopSampler = Sampler.ALWAYS;<a
name="line.156"></a>
 <span class="sourceLineNo">157</span>          if (numIterations &gt; 1000)
{<a name="line.157"></a>
 <span class="sourceLineNo">158</span>            LOG.warn("Full tracing of all
iterations will produce a lot of data. Be sure your"<a name="line.158"></a>
-<span class="sourceLineNo">159</span>              + " SpanReciever can keep
up.");<a name="line.159"></a>
+<span class="sourceLineNo">159</span>              + " SpanReceiver can keep
up.");<a name="line.159"></a>
 <span class="sourceLineNo">160</span>          }<a name="line.160"></a>
 <span class="sourceLineNo">161</span>        } else {<a name="line.161"></a>
 <span class="sourceLineNo">162</span>          getConf().setDouble("hbase.sampler.fraction",
traceFreq);<a name="line.162"></a>
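
Note: this one-line fix corrects the misspelled "SpanReciever" in the warning WALPerformanceEvaluation emits when full tracing is requested for a long run (the same fix is applied to the outer-class page below). A hedged sketch of the surrounding decision, with the HTrace sampler reduced to a boolean so the snippet stands alone; the branch condition is an assumption, while the threshold and message come from the hunk:

public class TraceWarnSketch {
  /** True when every iteration should be traced; assumed to mean traceFreq >= 1. */
  static boolean traceEveryIteration(double traceFreq, long numIterations) {
    if (traceFreq >= 1.0) {
      if (numIterations > 1000) {
        // The corrected spelling: "SpanReceiver", not "SpanReciever".
        System.err.println("Full tracing of all iterations will produce a lot of data."
            + " Be sure your SpanReceiver can keep up.");
      }
      return true; // the real code sets loopSampler = Sampler.ALWAYS here
    }
    return false;  // the real code sets "hbase.sampler.fraction" to traceFreq instead
  }

  public static void main(String[] args) {
    System.out.println(traceEveryIteration(1.0, 5000)); // warns, then prints true
  }
}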

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6fbeb554/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.html
index c44d7a1..a15e884 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.html
@@ -164,7 +164,7 @@
 <span class="sourceLineNo">156</span>          loopSampler = Sampler.ALWAYS;<a
name="line.156"></a>
 <span class="sourceLineNo">157</span>          if (numIterations &gt; 1000)
{<a name="line.157"></a>
 <span class="sourceLineNo">158</span>            LOG.warn("Full tracing of all
iterations will produce a lot of data. Be sure your"<a name="line.158"></a>
-<span class="sourceLineNo">159</span>              + " SpanReciever can keep
up.");<a name="line.159"></a>
+<span class="sourceLineNo">159</span>              + " SpanReceiver can keep
up.");<a name="line.159"></a>
 <span class="sourceLineNo">160</span>          }<a name="line.160"></a>
 <span class="sourceLineNo">161</span>        } else {<a name="line.161"></a>
 <span class="sourceLineNo">162</span>          getConf().setDouble("hbase.sampler.fraction",
traceFreq);<a name="line.162"></a>

