ambari-commits mailing list archives

From aonis...@apache.org
Subject [2/2] ambari git commit: HADOOP-11764. NodeManager should use directory other than tmp for extracting and loading leveldbjni (aonishuk)
Date Wed, 29 Jul 2015 10:52:38 GMT
HADOOP-11764. NodeManager should use directory other than tmp for extracting and loading leveldbjni (aonishuk)
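
The gist of the fix: leveldbjni extracts its native library (libleveldbjni.so) into java.io.tmpdir and loads it from there, which fails on hosts where /tmp is mounted noexec. The patch therefore creates an Ambari-managed directory and points JAVA_LIBRARY_PATH and java.io.tmpdir at it. As an aside, one illustrative way to probe a mount for the noexec flag (a sketch, not part of this patch; os.ST_NOEXEC is Linux-specific and needs Python 3.2+):

import os

# statvfs f_flag carries mount flags on Linux; ST_NOEXEC is set for
# filesystems mounted with -o noexec.
st = os.statvfs("/tmp")
print("/tmp is noexec" if st.f_flag & os.ST_NOEXEC else "/tmp allows exec")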


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8acac954
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8acac954
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8acac954

Branch: refs/heads/branch-2.1
Commit: 8acac954e43e321a6c446beb3ad6cb73c8856c28
Parents: c5d9fb1
Author: Andrew Onishuk <aonishuk@hortonworks.com>
Authored: Wed Jul 29 13:52:29 2015 +0300
Committer: Andrew Onishuk <aonishuk@hortonworks.com>
Committed: Wed Jul 29 13:52:29 2015 +0300

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog211.java       | 40 ++++++++++++++++++++
 .../HDFS/2.1.0.2.0/configuration/hadoop-env.xml |  4 ++
 .../2.1.0.2.0/package/scripts/params_linux.py   |  1 +
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  2 +
 .../before-ANY/scripts/shared_initialization.py |  8 ++++
 .../services/HDFS/configuration/hadoop-env.xml  |  4 ++
 .../services/HDFS/configuration/hadoop-env.xml  |  4 ++
 .../server/upgrade/UpgradeCatalog211Test.java   |  9 +++++
 .../2.0.6/hooks/before-ANY/test_before_any.py   |  5 +++
 9 files changed, 77 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
index 616d4c4..c59c6c7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
@@ -26,9 +26,13 @@ import org.apache.ambari.server.orm.dao.DaoUtils;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.commons.lang.StringUtils;
 
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.sql.SQLException;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 
@@ -94,6 +98,7 @@ public class UpgradeCatalog211 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     addNewConfigurationsFromXml();
+    addMissingConfigs();
     updateExistingConfigurations();
   }
 
@@ -149,4 +154,39 @@ public class UpgradeCatalog211 extends AbstractUpgradeCatalog {
       updateConfigurationPropertiesForCluster(cluster, "kerberos-env", updates, removes, true, false);
     }
   }
+
+  protected void addMissingConfigs() throws AmbariException {
+    updateHdfsConfigs();
+  }
+
+  protected void updateHdfsConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(
+        AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+      Map<String, String> prop = new HashMap<String, String>();
+      String content = null;
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          content = null;
+          if (cluster.getDesiredConfigByType("hadoop-env") != null) {
+            content = cluster.getDesiredConfigByType(
+                "hadoop-env").getProperties().get("content");
+          }
+
+          if (content != null) {
+            content += "\nexport JAVA_LIBRARY_PATH=\"${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}\"";
+            content += "\nexport _JAVA_OPTIONS=\"${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}\"\n";
+
+            prop.put("content", content);
+            updateConfigurationPropertiesForCluster(cluster, "hadoop-env",
+                prop, true, false);
+          }
+        }
+      }
+    }
+  }
 }
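
A note on the upgrade logic above: updateHdfsConfigs only touches clusters that already define a hadoop-env "content" property, and for those it appends the same two export lines the stack templates gain further down, then writes the merged config back via updateConfigurationPropertiesForCluster. A minimal Python sketch of that string append (illustrative only; the real merge goes through the Ambari config API):

# Placeholder standing in for the cluster's current hadoop-env content.
content = "# ...existing hadoop-env.sh template..."
content += '\nexport JAVA_LIBRARY_PATH="${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}"'
content += '\nexport _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"\n'
print(content)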

http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
index 1b5ddfa..d33cfda 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
@@ -182,6 +182,10 @@ export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}
 # Path to jsvc required by secure HDP 2.0 datanode
 export JSVC_HOME={{jsvc_path}}
 
+# Handle a situation when /tmp is set to noexec
+export JAVA_LIBRARY_PATH="${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}"
+export _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
+
 
 # The maximum amount of heap to use, in MB. Default is 1000.
 export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 1abfd45..c53508b 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -74,6 +74,7 @@ hadoop_secure_dn_user = hdfs_user
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
 hadoop_lib_home = hdp_select.get_hadoop_dir("lib")
+hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
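
hadoop_java_io_tmpdir is derived from the agent-supplied tmp_dir, so the {{hadoop_java_io_tmpdir}} placeholder in the hadoop-env templates resolves per host. A sketch of the substitution, assuming tmp_dir is /tmp as in the test fixture at the end of this patch (a live agent supplies its own tmp_dir):

import os

tmp_dir = "/tmp"  # fixture value; assumption for illustration
hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
line = 'export _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"'
print(line.replace("{{hadoop_java_io_tmpdir}}", hadoop_java_io_tmpdir))
# -> export _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir=/tmp/hadoop_java_io_tmpdir"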

http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index d3b3580..35a7832 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 import collections
 import re
+import os
 
 import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
 
@@ -92,6 +93,7 @@ hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
 versioned_hdp_root = '/usr/hdp/current'
+hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 
 # HDP 2.2+ params
 if Script.is_hdp_stack_greater_or_equal("2.2"):

http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
index 699fe5f..43c3a54 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
@@ -186,3 +186,11 @@ def setup_hadoop_env():
       File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'), owner=tc_owner,
         group=params.user_group,
         content=InlineTemplate(params.hadoop_env_sh_template))
+
+    # Create tmp dir for java.io.tmpdir
+    # Handle a situation when /tmp is set to noexec
+    Directory(params.hadoop_java_io_tmpdir,
+              owner=params.hdfs_user,
+              group=params.user_group,
+              mode=0777
+    )
\ No newline at end of file
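
The Directory resource above makes sure the extraction directory exists with permissive enough rights before any JVM references it. In plain Python the effect is roughly the following (a sketch; the hook itself uses Ambari's resource_management DSL, and os.chown requires root):

import os, pwd, grp

path = "/tmp/hadoop_java_io_tmpdir"   # params.hadoop_java_io_tmpdir with the fixture's tmp_dir
if not os.path.isdir(path):
    os.makedirs(path)
os.chmod(path, 0o777)                 # world-writable so every service user can extract here
os.chown(path, pwd.getpwnam("hdfs").pw_uid, grp.getgrnam("hadoop").gr_gid)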

http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 31dadeb..e7dc00c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -64,6 +64,10 @@ export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
 # Path to jsvc required by secure HDP 2.0 datanode
 export JSVC_HOME={{jsvc_path}}
 
+# Handle a situation when /tmp is set to noexec
+export JAVA_LIBRARY_PATH="${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}"
+export _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
+
 
 # The maximum amount of heap to use, in MB. Default is 1000.
 export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
index 3b332ad..3b8a492 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
@@ -46,6 +46,10 @@ export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
 # Path to jsvc required by secure HDP 2.0 datanode
 export JSVC_HOME={{jsvc_path}}
 
+# Handle a situation when /tmp is set to noexec
+export JAVA_LIBRARY_PATH="${JAVA_LIBRARY_PATH}:{{hadoop_java_io_tmpdir}}"
+export _JAVA_OPTIONS="${_JAVA_OPTIONS} -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
+
 
 # The maximum amount of heap to use, in MB. Default is 1000.
 export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
index 2ba44bf..8ba3a37 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
@@ -37,6 +37,7 @@ import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.EasyMockSupport;
@@ -51,9 +52,11 @@ import java.sql.ResultSet;
 import java.sql.Statement;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 
 import static org.easymock.EasyMock.*;
+import static org.easymock.EasyMock.createNiceMock;
 
 
 /**
@@ -163,9 +166,12 @@ public class UpgradeCatalog211Test extends EasyMockSupport {
     Method updateKerberosConfigurations =
         UpgradeCatalog211.class.getDeclaredMethod("updateKerberosConfigurations", Cluster.class);
 
+    Method addMissingConfigs = UpgradeCatalog211.class.getDeclaredMethod("addMissingConfigs");
+
     UpgradeCatalog211 upgradeCatalog211 = createMockBuilder(UpgradeCatalog211.class)
         .addMockedMethod(addNewConfigurationsFromXml)
         .addMockedMethod(updateKerberosConfigurations)
+        .addMockedMethod(addMissingConfigs)
         .createMock();
 
     setInjector(upgradeCatalog211, injector);
@@ -176,6 +182,9 @@ public class UpgradeCatalog211Test extends EasyMockSupport {
     upgradeCatalog211.updateKerberosConfigurations(anyObject(Cluster.class));
     expectLastCall().once();
 
+    upgradeCatalog211.addMissingConfigs();
+    expectLastCall().once();
+
     replayAll();
 
     upgradeCatalog211.executeDMLUpdates();

http://git-wip-us.apache.org/repos/asf/ambari/blob/8acac954/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
index 129907f..f1ada18 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
@@ -184,4 +184,9 @@ class TestHookBeforeInstall(RMFTestCase):
         owner = 'hdfs',
         group = 'hadoop'
     )
+    self.assertResourceCalled('Directory', '/tmp/hadoop_java_io_tmpdir',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0777
+    )
     self.assertNoMoreResources()

