hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jiten...@apache.org
Subject hadoop git commit: HDFS-10579. HDFS web interfaces lack configs for X-FRAME-OPTIONS protection. Contributed by Anu Engineer.
Date Tue, 12 Jul 2016 00:59:47 GMT
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 02b037f62 -> be1a11c9c


HDFS-10579. HDFS web interfaces lack configs for X-FRAME-OPTIONS protection. Contributed by
Anu Engineer.

Conflicts:
	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/be1a11c9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/be1a11c9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/be1a11c9

Branch: refs/heads/branch-2
Commit: be1a11c9c8255ab71f3f8dcb1b83c638ba11025a
Parents: 02b037f
Author: Jitendra Pandey <jitendra@apache.org>
Authored: Mon Jul 11 14:55:33 2016 -0700
Committer: Jitendra Pandey <jitendra@apache.org>
Committed: Mon Jul 11 17:54:18 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |  6 ++
 .../server/datanode/web/DatanodeHttpServer.java | 10 ++
 .../server/namenode/NameNodeHttpServer.java     | 20 ++++
 .../src/main/resources/hdfs-default.xml         | 24 +++++
 .../datanode/web/TestDatanodeHttpXFrame.java    | 90 ++++++++++++++++++
 .../namenode/TestNameNodeHttpServerXFrame.java  | 97 ++++++++++++++++++++
 6 files changed, 247 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/be1a11c9/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index f3a4dcb..f31eb0a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -702,6 +702,12 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   // Security-related configs
   public static final String DFS_ENCRYPT_DATA_TRANSFER_KEY = "dfs.encrypt.data.transfer";
   public static final boolean DFS_ENCRYPT_DATA_TRANSFER_DEFAULT = false;
+  public static final String DFS_XFRAME_OPTION_ENABLED = "dfs.xframe.enabled";
+  public static final boolean DFS_XFRAME_OPTION_ENABLED_DEFAULT = true;
+
+  public static final String DFS_XFRAME_OPTION_VALUE = "dfs.xframe.value";
+  public static final String DFS_XFRAME_OPTION_VALUE_DEFAULT = "SAMEORIGIN";
+
   @Deprecated
   public static final String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY =
       HdfsClientConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/be1a11c9/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
index 0477028..caee6cc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
@@ -107,6 +107,16 @@ public class DatanodeHttpServer implements Closeable {
         .addEndpoint(URI.create("http://localhost:0"))
         .setFindPort(true);
 
+    final boolean xFrameEnabled = conf.getBoolean(
+        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
+        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
+
+    final String xFrameOptionValue = conf.getTrimmed(
+        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
+        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
+
+    builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
+
     this.infoServer = builder.build();
 
     this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/be1a11c9/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index a66eb96..fa93089 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -29,6 +29,7 @@ import java.util.Map.Entry;
 
 import javax.servlet.ServletContext;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -141,6 +142,16 @@ public class NameNodeHttpServer {
         DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
         DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
 
+    final boolean xFrameEnabled = conf.getBoolean(
+        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
+        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
+
+    final String xFrameOptionValue = conf.getTrimmed(
+        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
+        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
+
+    builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
+
     httpServer = builder.build();
 
     if (policy.isHttpsEnabled()) {
@@ -326,4 +337,13 @@ public class NameNodeHttpServer {
       ServletContext context) {
     return (StartupProgress)context.getAttribute(STARTUP_PROGRESS_ATTRIBUTE_KEY);
   }
+
+  /**
+   * Returns the httpServer.
+   * @return HttpServer2
+   */
+  @VisibleForTesting
+  public HttpServer2 getHttpServer() {
+    return httpServer;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/be1a11c9/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
index 7a2128b..1692d18 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
@@ -2931,6 +2931,30 @@
   </description>
 </property>
 
+  <property>
+    <name>dfs.xframe.enabled</name>
+    <value>true</value>
+    <description>
+      If true, then enables protection against clickjacking by returning
+      X-FRAME-OPTIONS header value set to SAMEORIGIN.
+      Clickjacking protection prevents an attacker from using transparent or
+      opaque layers to trick a user into clicking on a button
+      or link on another page.
+    </description>
+  </property>
+
+  <property>
+    <name>dfs.xframe.value</name>
+    <value>SAMEORIGIN</value>
+    <description>
+      This configuration value allows a user to specify the value for the
+      X-FRAME-OPTIONS header. The possible values for this field are
+      DENY, SAMEORIGIN and ALLOW-FROM. Any other value will throw an
+      exception when namenode and datanodes are starting up.
+    </description>
+  </property>
+
+
 <property>
   <name>dfs.http.client.retry.policy.enabled</name>
   <value>false</value>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/be1a11c9/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
new file mode 100644
index 0000000..9ecd8ea
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.web;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.http.HttpServer2;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+/**
+ * Test that X-Frame-Options works correctly with DatanodeHttpServer.
+ */
+public class TestDatanodeHttpXFrame {
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @Test
+  public void testDataNodeXFrameOptionsEnabled() throws Exception {
+    boolean xFrameEnabled = true;
+    MiniDFSCluster cluster = createCluster(xFrameEnabled, null);
+    HttpURLConnection conn = getConn(cluster);
+    String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
+    Assert.assertTrue("X-FRAME-OPTIONS is absent in the header",
+        xfoHeader != null);
+    Assert.assertTrue(xfoHeader.endsWith(HttpServer2.XFrameOption
+        .SAMEORIGIN.toString()));
+  }
+
+  @Test
+  public void testNameNodeXFrameOptionsDisabled() throws Exception {
+    boolean xFrameEnabled = false;
+    MiniDFSCluster cluster = createCluster(xFrameEnabled, null);
+    HttpURLConnection conn = getConn(cluster);
+    String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
+    Assert.assertTrue("unexpected X-FRAME-OPTION in header", xfoHeader == null);
+  }
+
+  @Test
+  public void testDataNodeXFramewithInvalidOptions() throws Exception {
+    exception.expect(IllegalArgumentException.class);
+    createCluster(false, "Hadoop");
+  }
+
+  private MiniDFSCluster createCluster(boolean enabled, String
+      value) throws IOException {
+    Configuration conf = new HdfsConfiguration();
+    conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, enabled);
+    if (value != null) {
+      conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, value);
+    }
+    MiniDFSCluster cluster =
+        new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+    cluster.waitActive();
+    return cluster;
+  }
+
+  private HttpURLConnection getConn(MiniDFSCluster cluster)
+      throws IOException {
+    DataNode datanode = cluster.getDataNodes().get(0);
+    URL newURL = new URL("http://localhost:" + datanode.getInfoPort());
+    HttpURLConnection conn = (HttpURLConnection) newURL.openConnection();
+    conn.connect();
+    return conn;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/be1a11c9/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java
new file mode 100644
index 0000000..947e951
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdfs.server.namenode;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+/**
+ * A class to test the X-Frame-Options header of the NameNode HTTP server.
+ * We are not reusing TestNameNodeHttpServer since it is a parameterized class
+ * and the following tests would run multiple times doing the same thing if we
+ * had the code in that class.
+ */
+public class TestNameNodeHttpServerXFrame {
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  public static URL getServerURL(HttpServer2 server)
+      throws MalformedURLException {
+    Assert.assertNotNull("No server", server);
+    return new URL("http://"
+        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
+  }
+
+  @Test
+  public void testNameNodeXFrameOptionsEnabled() throws Exception {
+    HttpURLConnection conn = createServerwithXFrame(true, null);
+    String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
+    Assert.assertTrue("X-FRAME-OPTIONS is absent in the header",
+        xfoHeader != null);
+    Assert.assertTrue(xfoHeader.endsWith(HttpServer2.XFrameOption
+        .SAMEORIGIN.toString()));
+  }
+
+  @Test
+  public void testNameNodeXFrameOptionsDisabled() throws Exception {
+    HttpURLConnection conn = createServerwithXFrame(false, null);
+    String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
+    Assert.assertTrue("unexpected X-FRAME-OPTION in header", xfoHeader == null);
+  }
+
+  @Test
+  public void testNameNodeXFrameOptionsIllegalOption() throws Exception {
+    exception.expect(IllegalArgumentException.class);
+    createServerwithXFrame(true, "hadoop");
+  }
+
+  private HttpURLConnection createServerwithXFrame(boolean enabled, String
+      value) throws IOException {
+    Configuration conf = new HdfsConfiguration();
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, enabled);
+    if (value != null) {
+      conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, value);
+
+    }
+    InetSocketAddress addr = InetSocketAddress.createUnresolved("localhost", 0);
+    NameNodeHttpServer server = null;
+
+    server = new NameNodeHttpServer(conf, null, addr);
+    server.start();
+
+    URL url = getServerURL(server.getHttpServer());
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    conn.connect();
+    return conn;
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message