ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From fba...@apache.org
Subject ambari git commit: AMBARI-11890 [WinTP2] [Test] Cluster install via blueprint fails
Date Tue, 16 Jun 2015 08:15:21 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 9b8b308fe -> 345f99588


AMBARI-11890 [WinTP2] [Test] Cluster install via blueprint fails

Strengthened the parameter assignments originating from environment vars, to handle out-of-order
installations


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/345f9958
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/345f9958
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/345f9958

Branch: refs/heads/trunk
Commit: 345f99588c924b3f9c271cee13ae5e53b3e69261
Parents: 9b8b308
Author: Florian Barca <fbarca@hortonworks.com>
Authored: Tue Jun 16 01:15:03 2015 -0700
Committer: Florian Barca <fbarca@hortonworks.com>
Committed: Tue Jun 16 01:15:03 2015 -0700

----------------------------------------------------------------------
 .../libraries/functions/install_hdp_msi.py      |  2 +-
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |  4 +-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |  5 ++-
 .../2.1.0.2.0/package/scripts/install_params.py | 39 ++++++++++++++++++++
 .../2.1.0.2.0/package/scripts/journalnode.py    |  3 ++
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |  5 +++
 .../2.1.0.2.0/package/scripts/params_windows.py |  5 ++-
 .../package/scripts/params_windows.py           | 31 +++++++++++-----
 .../0.5.0.2.2/package/scripts/params_windows.py | 37 +++++++++++++------
 .../package/scripts/params_windows.py           | 14 +++++--
 .../package/scripts/params_windows.py           | 19 +++++++---
 .../0.4.0.2.1/package/scripts/params_windows.py |  7 +++-
 .../3.4.5.2.0/package/scripts/params_windows.py | 14 +++++--
 13 files changed, 146 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
b/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
index 0e06eb0..7e94b5d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
@@ -97,7 +97,7 @@ INSTALL_MSI_CMD = 'cmd /C start /wait msiexec /qn /i  {hdp_msi_path} /lv
{hdp_lo
                   'HDP_LAYOUT={hdp_layout_path} DESTROY_DATA=yes HDP_USER={hadoop_user} HDP_USER_PASSWORD={hadoop_password_arg} HDP=yes ' \
                   'KNOX=yes KNOX_MASTER_SECRET="AmbariHDP2Windows" FALCON=yes STORM=yes HBase=yes STORM=yes FLUME=yes SLIDER=yes PHOENIX=no RANGER=no'
 CREATE_SERVICE_SCRIPT = os.path.abspath("sbin\createservice.ps1")
-CREATE_SERVICE_CMD = 'cmd /C powershell -File "{script}" -username {username} -password "{password}" -servicename ' \
+CREATE_SERVICE_CMD = 'cmd /C powershell -ExecutionPolicy Bypass -File "{script}" -username {username} -password "{password}" -servicename ' \
                      '{servicename} -hdpresourcesdir "{resourcedir}" -servicecmdpath "{servicecmd}"'
 INSTALL_MARKER_OK = "msi.installed"
 INSTALL_MARKER_FAILED = "msi.failed"

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
index fc54f73..d8b8835 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
@@ -143,7 +143,9 @@ class DataNodeDefault(DataNode):
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class DataNodeWindows(DataNode):
-  pass
+  def install(self, env):
+    import install_params
+    self.install_packages(env, install_params.exclude_packages)
 
 if __name__ == "__main__":
   DataNode().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
index 961e644..dd0dca4 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
@@ -111,7 +111,10 @@ class HdfsClientDefault(HdfsClient):
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class HdfsClientWindows(HdfsClient):
-  pass
+  def install(self, env):
+    import install_params
+    self.install_packages(env, install_params.exclude_packages)
+    self.configure(env)
 
 if __name__ == "__main__":
   HdfsClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
new file mode 100644
index 0000000..fe488c3
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
@@ -0,0 +1,39 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+from ambari_commons import OSCheck
+
+# These parameters are supposed to be referenced at installation time, before the Hadoop environment variables have been set
+if OSCheck.is_windows_family():
+  exclude_packages = []
+else:
+  from resource_management.libraries.functions.default import default
+  from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
+  from resource_management.libraries.script.script import Script
+
+  _config = Script.get_config()
+  stack_version_unformatted = str(_config['hostLevelParams']['stack_version'])
+
+  # The logic for LZO also exists in OOZIE's params.py
+  io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
+  lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
+  lzo_packages = get_lzo_packages(stack_version_unformatted)
+
+  exclude_packages = []
+  if not lzo_enabled:
+    exclude_packages += lzo_packages

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
index 0cae36f..46c7272 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
@@ -159,6 +159,9 @@ class JournalNodeDefault(JournalNode):
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class JournalNodeWindows(JournalNode):
+  def install(self, env):
+    import install_params
+    self.install_packages(env, install_params.exclude_packages)
 
   def start(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
index 578d994..9865af9 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
@@ -260,6 +260,11 @@ class NameNodeDefault(NameNode):
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class NameNodeWindows(NameNode):
+  def install(self, env):
+    import install_params
+    self.install_packages(env, install_params.exclude_packages)
+    #TODO we need this for HA because of manual steps
+    self.configure(env)
 
   def rebalancehdfs(self, env):
     from ambari_commons.os_windows import UserHelper, run_os_command_impersonated

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_windows.py
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_windows.py
index c3ee304..60f4a74 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_windows.py
@@ -17,8 +17,10 @@ limitations under the License.
 
 """
 
-from resource_management import *
 import os
+
+#Used in subsequent imports from params
+from install_params import exclude_packages
 from status_params import *
 
 config = Script.get_config()
@@ -64,7 +66,6 @@ hdfs_user = hadoop_user
 grep_exe = "findstr"
 
 name_node_params = default("/commandParams/namenode", None)
-exclude_packages = []
 
 service_map = {
   "datanode" : datanode_win_service_name,

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
index 506e869..f2524b2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
@@ -28,14 +28,28 @@ config = Script.get_config()
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
-hive_conf_dir = os.environ["HIVE_CONF_DIR"]
-hive_home = os.environ["HIVE_HOME"]
-hive_lib_dir = os.environ["HIVE_LIB_DIR"]
-hive_log_dir = os.environ["HIVE_LOG_DIR"]
-hive_opts = os.environ["HIVE_OPTS"]
-hcat_home = os.environ["HCAT_HOME"]
-hcat_config_dir = os.environ["WEBHCAT_CONF_DIR"]
+hdp_root = None
+hive_conf_dir = None
+hive_home = None
+hive_lib_dir = None
+hive_log_dir = None
+hive_opts = None
+hcat_home = None
+hcat_config_dir = None
+hive_bin = None
+
+try:
+  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  hive_conf_dir = os.environ["HIVE_CONF_DIR"]
+  hive_home = os.environ["HIVE_HOME"]
+  hive_lib_dir = os.environ["HIVE_LIB_DIR"]
+  hive_log_dir = os.environ["HIVE_LOG_DIR"]
+  hive_opts = os.environ["HIVE_OPTS"]
+  hcat_home = os.environ["HCAT_HOME"]
+  hcat_config_dir = os.environ["WEBHCAT_CONF_DIR"]
+  hive_bin = os.path.join(hive_home, "bin")
+except:
+  pass
 
 hive_env_sh_template = config['configurations']['hive-env']['content']
 hive_warehouse_dir = config['configurations']['hive-site']['hive.metastore.warehouse.dir']
@@ -43,7 +57,6 @@ hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
 hive_user = hadoop_user
 hcat_user = hadoop_user
 
-hive_bin = os.path.join(hive_home, "bin")
 hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_windows.py
b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_windows.py
index 8e89052..50acbe7 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_windows.py
@@ -25,17 +25,32 @@ from status_params import *
 # server configurations
 config = Script.get_config()
 
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
-knox_home = os.environ['KNOX_HOME']
-knox_conf_dir = os.environ['KNOX_CONF_DIR']
-knox_logs_dir = os.environ['KNOX_LOG_DIR']
-knox_bin = os.path.join(knox_home, 'bin', 'gateway.exe')
-ldap_bin = os.path.join(knox_home, 'bin', 'ldap.exe')
-knox_client_bin = os.path.join(knox_home, 'bin', 'knoxcli.cmd')
-knox_data_dir = os.path.join(knox_home, 'data')
-
-knox_master_secret_path = os.path.join(knox_data_dir, 'security', 'master')
-knox_cert_store_path = os.path.join(knox_data_dir, 'security', 'keystores', 'gateway.jks')
+hdp_root = None
+knox_home = None
+knox_conf_dir = None
+knox_logs_dir = None
+knox_bin = None
+ldap_bin = None
+knox_client_bin = None
+knox_data_dir = None
+
+knox_master_secret_path = None
+knox_cert_store_path = None
+
+try:
+  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  knox_home = os.environ['KNOX_HOME']
+  knox_conf_dir = os.environ['KNOX_CONF_DIR']
+  knox_logs_dir = os.environ['KNOX_LOG_DIR']
+  knox_bin = os.path.join(knox_home, 'bin', 'gateway.exe')
+  ldap_bin = os.path.join(knox_home, 'bin', 'ldap.exe')
+  knox_client_bin = os.path.join(knox_home, 'bin', 'knoxcli.cmd')
+  knox_data_dir = os.path.join(knox_home, 'data')
+
+  knox_master_secret_path = os.path.join(knox_data_dir, 'security', 'master')
+  knox_cert_store_path = os.path.join(knox_data_dir, 'security', 'keystores', 'gateway.jks')
+except:
+  pass
 
 knox_host_port = config['configurations']['gateway-site']['gateway.port']
 knox_host_name = config['clusterHostInfo']['knox_gateway_hosts'][0]

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_windows.py
b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_windows.py
index 7bb7ef8..8a0a519 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_windows.py
@@ -22,9 +22,17 @@ from resource_management import *
 
 # server configurations
 config = Script.get_config()
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
-pig_home = os.environ['PIG_HOME']
-pig_conf_dir = os.path.join(pig_home,'conf')
+
+hdp_root = None
+pig_home = None
+pig_conf_dir = None
+try:
+  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  pig_home = os.environ['PIG_HOME']
+  pig_conf_dir = os.path.join(pig_home,'conf')
+except:
+  pass
+
 pig_properties = config['configurations']['pig-properties']['content']
 
 if (('pig-log4j' in config['configurations']) and ('content' in config['configurations']['pig-log4j'])):

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
index 52106a7..366a1c9 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
@@ -24,11 +24,20 @@ import os
 # server configurations
 config = Script.get_config()
 
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
-slider_home = os.environ['SLIDER_HOME']
-slider_bin_dir = os.path.join(slider_home, 'bin')
-slider_conf_dir = os.path.join(slider_home, 'conf')
-storm_slider_conf_dir = os.path.join(os.environ['STORM_HOME'], 'conf')
+hdp_root = None
+slider_home = None
+slider_bin_dir = None
+slider_conf_dir = None
+storm_slider_conf_dir = None
+try:
+  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  slider_home = os.environ['SLIDER_HOME']
+  slider_bin_dir = os.path.join(slider_home, 'bin')
+  slider_conf_dir = os.path.join(slider_home, 'conf')
+  storm_slider_conf_dir = os.path.join(os.environ['STORM_HOME'], 'conf')
+except:
+  pass
+
 slider_home_dir = slider_home
 
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_windows.py
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_windows.py
index 2f9ee30..636d092 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_windows.py
@@ -36,8 +36,11 @@ except KeyError:
 
 hdp_stack_version = ""
 
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
-
+hdp_root = None
+try:
+  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
+except:
+  pass
 
 def refresh_tez_state_dependent_params():
   global tez_home_dir, tez_conf_dir, hdp_stack_version

http://git-wip-us.apache.org/repos/asf/ambari/blob/345f9958/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_windows.py
b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_windows.py
index e5ce948..480fc8b 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_windows.py
@@ -25,9 +25,15 @@ import status_params
 # server configurations
 config = Script.get_config()
 
-# notused zookeeper_home_dir = os.environ["ZOOKEEPER_HOME"]
-config_dir = os.environ["ZOOKEEPER_CONF_DIR"]
-hdp_root = os.environ["HADOOP_NODE_INSTALL_ROOT"]
+config_dir = None
+hdp_root = None
+try:
+  # not used zookeeper_home_dir = os.environ["ZOOKEEPER_HOME"]
+  config_dir = os.environ["ZOOKEEPER_CONF_DIR"]
+  hdp_root = os.environ["HADOOP_NODE_INSTALL_ROOT"]
+except:
+  pass
+
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
 zk_user = hadoop_user
 
@@ -59,4 +65,4 @@ zookeeper_win_service_name = status_params.zookeeper_win_service_name
 if (('zookeeper-log4j' in config['configurations']) and ('content' in config['configurations']['zookeeper-log4j'])):
   log4j_props = config['configurations']['zookeeper-log4j']['content']
 else:
-  log4j_props = None
\ No newline at end of file
+  log4j_props = None


Mime
View raw message