trafodion-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From lium...@apache.org
Subject [1/4] incubator-trafodion git commit: [TRAFODION-2393] python installer - reorganize script directories for better user experience
Date Tue, 20 Dec 2016 08:54:42 GMT
Repository: incubator-trafodion
Updated Branches:
  refs/heads/master c5568f2a9 -> f9eda0a7c


http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/scripts/traf_user.py
----------------------------------------------------------------------
diff --git a/install/python-installer/scripts/traf_user.py b/install/python-installer/scripts/traf_user.py
new file mode 100755
index 0000000..01b80b3
--- /dev/null
+++ b/install/python-installer/scripts/traf_user.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+
+# @@@ START COPYRIGHT @@@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# @@@ END COPYRIGHT @@@
+
+### this script should be run on all nodes with sudo user ###
+
+import os
+import sys
+import json
+from common import ParseXML, run_cmd, append_file, mod_file, \
+                   cmd_output, run_cmd_as_user, err, TMP_DIR
+
+def run():
+    """ create trafodion user, bashrc, setup passwordless SSH """
+    dbcfgs = json.loads(dbcfgs_json)
+
+    DISTRO = dbcfgs['distro']
+    if 'CDH' in DISTRO:
+        hadoop_type = 'cloudera'
+    elif 'HDP' in DISTRO:
+        hadoop_type = 'hortonworks'
+    elif 'APACHE' in DISTRO:
+        hadoop_type = 'apache'
+
+    TRAF_USER = dbcfgs['traf_user']
+    TRAF_PWD = dbcfgs['traf_pwd']
+    TRAF_GROUP = TRAF_USER
+    HOME_DIR = cmd_output('cat /etc/default/useradd |grep HOME |cut -d "=" -f 2').strip()
+    # customize trafodion home dir
+    if dbcfgs.has_key('home_dir') and dbcfgs['home_dir']:
+        HOME_DIR = dbcfgs['home_dir']
+
+    TRAF_USER_DIR = '%s/%s' % (HOME_DIR, TRAF_USER)
+    TRAF_DIRNAME = dbcfgs['traf_dirname']
+    TRAF_HOME = '%s/%s' % (TRAF_USER_DIR, TRAF_DIRNAME)
+
+    HBASE_XML_FILE = dbcfgs['hbase_xml_file']
+    KEY_FILE = '/tmp/id_rsa'
+    AUTH_KEY_FILE = '%s/.ssh/authorized_keys' % TRAF_USER_DIR
+    SSH_CFG_FILE = '%s/.ssh/config' % TRAF_USER_DIR
+    BASHRC_TEMPLATE = '%s/templates/bashrc.template' % TMP_DIR
+    BASHRC_FILE = '%s/.bashrc' % TRAF_USER_DIR
+    ULIMITS_FILE = '/etc/security/limits.d/%s.conf' % TRAF_USER
+    HSPERFDATA_FILE = '/tmp/hsperfdata_trafodion'
+
+    # create trafodion user and group
+    if not cmd_output('getent group %s' % TRAF_GROUP):
+        run_cmd('groupadd %s > /dev/null 2>&1' % TRAF_GROUP)
+
+    if not cmd_output('getent passwd %s' % TRAF_USER):
+        run_cmd('useradd --shell /bin/bash -m %s -g %s --home %s --password "$(openssl passwd %s)"' % (TRAF_USER, TRAF_GROUP, TRAF_USER_DIR, TRAF_PWD))
+    elif not os.path.exists(TRAF_USER_DIR):
+        run_cmd('mkdir -p %s' % TRAF_USER_DIR)
+        run_cmd('chmod 700 %s' % TRAF_USER_DIR)
+
+    # set ssh key
+    run_cmd_as_user(TRAF_USER, 'echo -e "y" | ssh-keygen -t rsa -N "" -f ~/.ssh/id_rsa')
+    # the key is generated in copy_file script running on the installer node
+    run_cmd('cp %s{,.pub} %s/.ssh/' % (KEY_FILE, TRAF_USER_DIR))
+
+    run_cmd_as_user(TRAF_USER, 'cat ~/.ssh/id_rsa.pub > %s' % AUTH_KEY_FILE)
+    run_cmd('chmod 644 %s' % AUTH_KEY_FILE)
+
+    ssh_cfg = 'StrictHostKeyChecking=no\nNoHostAuthenticationForLocalhost=yes\n'
+    with open(SSH_CFG_FILE, 'w') as f:
+        f.write(ssh_cfg)
+    run_cmd('chmod 600 %s' % SSH_CFG_FILE)
+
+    run_cmd('chown -R %s:%s %s/.ssh/' % (TRAF_USER, TRAF_GROUP, TRAF_USER_DIR))
+
+    hb = ParseXML(HBASE_XML_FILE)
+    zk_hosts = hb.get_property('hbase.zookeeper.quorum')
+    zk_port = hb.get_property('hbase.zookeeper.property.clientPort')
+    # set bashrc
+    nodes = dbcfgs['node_list'].split(',')
+    change_items = {
+        '{{ java_home }}': dbcfgs['java_home'],
+        '{{ traf_home }}': TRAF_HOME,
+        '{{ hadoop_type }}': hadoop_type,
+        '{{ node_list }}': ' '.join(nodes),
+        '{{ node_count }}': str(len(nodes)),
+        '{{ enable_ha }}': dbcfgs['enable_ha'],
+        '{{ zookeeper_nodes }}': zk_hosts,
+        '{{ zookeeper_port }}': zk_port,
+        '{{ my_nodes }}': ' -w ' + ' -w '.join(nodes)
+    }
+
+    mod_file(BASHRC_TEMPLATE, change_items)
+
+    if 'APACHE' in DISTRO:
+        bashrc_content = """
+export HADOOP_PREFIX=%s
+export HBASE_HOME=%s
+export PATH=$PATH:$HADOOP_PREFIX/bin:$HADOOP_PREFIX/sbin:$HBASE_HOME/bin
+        """ % (dbcfgs['hadoop_home'], dbcfgs['hbase_home'])
+        append_file(BASHRC_TEMPLATE, bashrc_content, position='HADOOP_TYPE')
+
+    # backup bashrc if exsits
+    if os.path.exists(BASHRC_FILE):
+        run_cmd('cp %s %s.bak' % ((BASHRC_FILE,) *2))
+
+    # copy bashrc to trafodion's home
+    run_cmd('cp %s %s' % (BASHRC_TEMPLATE, BASHRC_FILE))
+    run_cmd('chown -R %s:%s %s*' % (TRAF_USER, TRAF_GROUP, BASHRC_FILE))
+
+    # set ulimits for trafodion user
+    ulimits_config = '''
+# Trafodion settings
+%s   soft   core unlimited
+%s   hard   core unlimited
+%s   soft   memlock unlimited
+%s   hard   memlock unlimited
+%s   soft   nofile 32768
+%s   hard   nofile 65536
+%s   soft   nproc 100000
+%s   hard   nproc 100000
+%s   soft nofile 8192
+%s   hard nofile 65535
+hbase soft nofile 8192
+''' % ((TRAF_USER,) * 10)
+
+    with open(ULIMITS_FILE, 'w') as f:
+        f.write(ulimits_config)
+
+    # change permission for hsperfdata
+    if os.path.exists(HSPERFDATA_FILE):
+        run_cmd('chown -R %s:%s %s' % (TRAF_USER, TRAF_GROUP, HSPERFDATA_FILE))
+
+    # clean up unused key file at the last step
+    run_cmd('rm -rf %s{,.pub}' % KEY_FILE)
+
+    print 'Setup trafodion user successfully!'
+
# main
# expects the db config as a json string in the first command line argument
try:
    dbcfgs_json = sys.argv[1]
except IndexError:
    err('No db config found')
run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/scripts/wrapper.py
----------------------------------------------------------------------
diff --git a/install/python-installer/scripts/wrapper.py b/install/python-installer/scripts/wrapper.py
new file mode 100644
index 0000000..8847698
--- /dev/null
+++ b/install/python-installer/scripts/wrapper.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python
+
+# @@@ START COPYRIGHT @@@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# @@@ END COPYRIGHT @@@
+
+import os
+import time
+import json
+import subprocess
+from glob import glob
+from threading import Thread
+from common import err_m, run_cmd, time_elapse, get_logger, Remote, \
+                   ParseJson, INSTALLER_LOC, TMP_DIR, SCRCFG_FILE, \
+                   CONFIG_DIR, SCRIPTS_DIR, TEMPLATES_DIR
+
class RemoteRun(Remote):
    """ run commands or scripts remotely using ssh """

    def __init__(self, host, logger, user='', pwd='', quiet=False):
        """ Prepare the remote host: create the temp dir, copy the
            config/scripts/templates folders there and make the scripts
            executable.

            host:   remote hostname handed to the Remote base class
            logger: logger used to record per-script results
            user:   ssh user (empty means Remote's default)
            pwd:    ssh password (empty means key-based auth is assumed)
            quiet:  suppress the on-screen OK/FAIL status lines
        """
        super(RemoteRun, self).__init__(host, user, pwd)

        self.quiet = quiet # no output
        self.logger = logger

        # create tmp folder
        self.execute('mkdir -p %s' % TMP_DIR)

        # copy all needed files to remote host
        all_files = [CONFIG_DIR, SCRIPTS_DIR, TEMPLATES_DIR]

        self.copy(all_files, remote_folder=TMP_DIR)

        # set permission
        self.execute('chmod a+rx %s/scripts/*.py' % TMP_DIR)

    def __del__(self):
        # clean up the copied files on the remote host; chkerr=False because
        # cleanup failure at teardown should not abort the installer
        self.execute('sudo -n rm -rf %s' % TMP_DIR, chkerr=False)

    def run_script(self, script, run_user, json_string, verbose=False):
        """ Run one sub-script on the remote host and exit(1) on failure.

            script:      script file name under TMP_DIR/scripts
            run_user:    run the script with this user (empty: run via sudo)
            json_string: db config serialized as json, passed as argv[1]
        """
        if run_user:
            # format string in order to run with 'sudo -n su $user -c $cmd'
            # (multi-level escaping: the string passes through the local
            # shell, ssh, and the remote su -c shell -- do not simplify)
            json_string = json_string.replace('"', '\\\\\\"').replace(' ', '').replace('{', '\\{').replace('$', '\\\\\\$')
            # this command only works with shell=True
            script_cmd = '"sudo -n su - %s -c \'%s/scripts/%s %s\'"' % (run_user, TMP_DIR, script, json_string)
            self.execute(script_cmd, verbose=verbose, shell=True, chkerr=False)
        else:
            script_cmd = 'sudo -n %s/scripts/%s \'%s\'' % (TMP_DIR, script, json_string)
            self.execute(script_cmd, verbose=verbose, chkerr=False)

        # full output goes to the log; the short form is shown on screen
        format1 = 'Host [%s]: Script [%s]: %s' % (self.host, script, self.stdout)
        format2 = 'Host [%s]: Script [%s]' % (self.host, script)

        self.logger.info(format1)

        # self.rc/self.stdout/self.stderr are set by Remote.execute --
        # presumably the remote exit code and captured streams; confirm
        # against common.Remote
        if self.rc == 0:
            if not self.quiet: state_ok(format2)
            self.logger.info(format2 + ' ran successfully!')
        else:
            if not self.quiet: state_fail(format2)
            msg = 'Host [%s]: Failed to run \'%s\'' % (self.host, script)
            if self.stderr:
                msg += ': ' + self.stderr
                print '\n ' + self.stderr
            self.logger.error(msg)
            exit(1)
+
+
def state_ok(msg):
    """ print a green [ OK ] status line for msg """
    state(32, ' OK ', msg)
+
def state_fail(msg):
    """ print a red [ FAIL ] status line for msg """
    state(31, 'FAIL', msg)
+
def state_skip(msg):
    """ print a yellow [ SKIP ] status line for msg """
    state(33, 'SKIP', msg)
+
+def state(color, result, msg):
+    WIDTH = 80
+    print '\n\33[%dm%s %s [ %s ]\33[0m\n' % (color, msg, (WIDTH - len(msg))*'.', result)
+
class Status(object):
    """ Track per-script completion in a text status file so a failed
        install can be resumed without re-running finished scripts.

        Each completed script is recorded as a line '<name> OK'.
    """
    def __init__(self, stat_file, name):
        # stat_file: path of the status file shared by all scripts of a run
        # name: the script name this instance tracks
        self.stat_file = stat_file
        self.name = name

    def get_status(self):
        """ return True if this script is already recorded as done """
        if not os.path.exists(self.stat_file):
            # create an empty status file; open() is portable, unlike the
            # original os.mknod which can fail on non-Linux filesystems
            open(self.stat_file, 'w').close()
        with open(self.stat_file, 'r') as f:
            lines = f.readlines()
        for line in lines:
            fields = line.split()
            # skip blank/malformed lines instead of raising IndexError
            if fields and fields[0] == self.name:
                return True
        return False

    def set_status(self):
        """ append a completion record for this script """
        with open(self.stat_file, 'a+') as f:
            f.write('%s OK\n' % self.name)
+
@time_elapse
def run(dbcfgs, options, mode='install', pwd=''):
    """ main entry
        mode: install/discover

        dbcfgs:  dict of db config values (must contain 'node_list',
                 'distro', 'secure_hadoop', 'traf_start', 'ldap_security',
                 'first_rsnode', 'traf_user')
        options: parsed command line options; 'verbose', 'upgrade', 'user'
                 and 'fork' are honored when present
        pwd:     ssh password, forwarded to remote runs and to local
                 scripts that declare req_pwd
        returns: list of {host: stdout} dicts collected from 'all'-node
                 scripts
    """
    STAT_FILE = '%s/%s.status' % (INSTALLER_LOC, mode)
    LOG_FILE = '%s/logs/%s_%s.log' % (INSTALLER_LOC, mode, time.strftime('%Y%m%d_%H%M'))
    logger = get_logger(LOG_FILE)

    # options attributes are optional; fall back to defaults when missing
    verbose = True if hasattr(options, 'verbose') and options.verbose else False
    upgrade = True if hasattr(options, 'upgrade') and options.upgrade else False
    user = options.user if hasattr(options, 'user') and options.user else ''
    threshold = options.fork if hasattr(options, 'fork') and options.fork else 10

    script_output = [] # script output array
    conf = ParseJson(SCRCFG_FILE).load()
    script_cfgs = conf[mode]

    dbcfgs_json = json.dumps(dbcfgs)
    hosts = dbcfgs['node_list'].split(',')

    # handle skipped scripts, skip them if no need to run
    skipped_scripts = []
    if upgrade:
        skipped_scripts += ['hadoop_mods', 'apache_mods', 'apache_restart', 'traf_dep', 'traf_kerberos']

    if dbcfgs['secure_hadoop'] == 'N':
        skipped_scripts += ['traf_kerberos']

    if dbcfgs['traf_start'].upper() == 'N':
        skipped_scripts += ['traf_start']

    if dbcfgs['ldap_security'].upper() == 'N':
        skipped_scripts += ['traf_ldap']

    if 'APACHE' in dbcfgs['distro']:
        skipped_scripts += ['hadoop_mods']
    else:
        skipped_scripts += ['apache_mods', 'apache_restart']

    # set ssh config file to avoid known hosts verify on current installer node
    # NOTE(review): this overwrites any existing ~/.ssh/config of the
    # installer user -- confirm that is acceptable
    SSH_CFG_FILE = os.environ['HOME'] + '/.ssh/config'
    ssh_cfg = 'StrictHostKeyChecking=no\nNoHostAuthenticationForLocalhost=yes\n'
    with open(SSH_CFG_FILE, 'w') as f:
        f.write(ssh_cfg)
    run_cmd('chmod 600 %s' % SSH_CFG_FILE)

    def run_local_script(script, json_string, req_pwd):
        # run one script on the installer node itself; stderr is captured,
        # stdout is left attached to the terminal via shell inheritance
        cmd = '%s/%s \'%s\'' % (SCRIPTS_DIR, script, json_string)

        # pass the ssh password to sub scripts which need SSH password
        if req_pwd: cmd += ' ' + pwd

        if verbose: print cmd

        # stdout on screen
        p = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=True)
        stdout, stderr = p.communicate()

        rc = p.returncode
        if rc != 0:
            msg = 'Failed to run \'%s\'' % script
            if stderr:
                msg += ': ' + stderr
                print stderr
            logger.error(msg)
            state_fail('localhost: Script [%s]' % script)
            exit(rc)
        else:
            state_ok('Script [%s]' % script)
            logger.info('Script [%s] ran successfully!' % script)

        return stdout

    # run sub scripts
    try:
        # constructing RemoteRun also copies the installer files to each host
        remote_instances = []
        if mode == 'discover':
            remote_instances = [RemoteRun(host, logger, user=user, pwd=pwd, quiet=True) for host in hosts]
        else:
            remote_instances = [RemoteRun(host, logger, user=user, pwd=pwd) for host in hosts]
        first_instance = remote_instances[0]
        # NOTE(review): first_rs_instance stays unbound if no host matches
        # dbcfgs['first_rsnode'] -- a later 'first_rs' script would then
        # raise NameError; confirm first_rsnode is always in node_list
        for instance in remote_instances:
            if instance.host == dbcfgs['first_rsnode']:
                first_rs_instance = instance
                break

        logger.info(' ***** %s Start *****' % mode)
        for cfg in script_cfgs:
            script = cfg['script']
            node = cfg['node']
            desc = cfg['desc']
            # optional per-script flags from the script config file
            run_user = ''
            if not 'run_as_traf' in cfg.keys():
                pass
            elif cfg['run_as_traf'] == 'yes':
                run_user = dbcfgs['traf_user']

            if not 'req_pwd' in cfg.keys():
                req_pwd = False
            elif cfg['req_pwd'] == 'yes':
                req_pwd = True

            # resume support: skip scripts already recorded as done
            status = Status(STAT_FILE, script)
            if status.get_status():
                msg = 'Script [%s] had already been executed' % script
                state_skip(msg)
                logger.info(msg)
                continue

            if script.split('.')[0] in skipped_scripts:
                continue
            else:
                print '\nTASK: %s %s' % (desc, (83 - len(desc))*'*')

            #TODO: timeout exit
            if node == 'local':
                run_local_script(script, dbcfgs_json, req_pwd)
            elif node == 'first':
                first_instance.run_script(script, run_user, dbcfgs_json, verbose=verbose)
            elif node == 'first_rs':
                first_rs_instance.run_script(script, run_user, dbcfgs_json, verbose=verbose)
            elif node == 'all':
                # fan out to all nodes, at most 'threshold' hosts in
                # parallel per batch (python2 integer division)
                l = len(remote_instances)
                if l > threshold:
                    piece = (l - (l % threshold)) / threshold
                    parted_remote_instances = [remote_instances[threshold*i:threshold*(i+1)] for i in range(piece)]
                    parted_remote_instances.append(remote_instances[threshold*piece:])
                else:
                    parted_remote_instances = [remote_instances]

                for parted_remote_inst in parted_remote_instances:
                    threads = [Thread(target=r.run_script, args=(script, run_user, dbcfgs_json, verbose)) for r in parted_remote_inst]
                    for t in threads: t.start()
                    for t in threads: t.join()

                    # any nonzero rc in the batch aborts the whole run
                    if sum([r.rc for r in parted_remote_inst]) != 0:
                        err_m('Script failed to run on one or more nodes, exiting ...\nCheck log file %s for details.' % LOG_FILE)

                    script_output += [{r.host:r.stdout.strip()} for r in parted_remote_inst]

            else:
                # should not go to here
                err_m('Invalid configuration for %s' % SCRCFG_FILE)

            status.set_status()
    except KeyboardInterrupt:
        err_m('User quit')

    # remove status file if all scripts run successfully
    os.remove(STAT_FILE)

    return script_output
+
if __name__ == '__main__':
    # this module is only meant to be imported by the installer entry
    # point; running it directly is a deliberate no-op
    exit(0)

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/templates/bashrc.template
----------------------------------------------------------------------
diff --git a/install/python-installer/templates/bashrc.template b/install/python-installer/templates/bashrc.template
new file mode 100644
index 0000000..010d282
--- /dev/null
+++ b/install/python-installer/templates/bashrc.template
@@ -0,0 +1,81 @@
+#!/bin/sh
+
+# @@@ START COPYRIGHT @@@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# @@@ END COPYRIGHT @@@
+
+
# This is the .bashrc for the Trafodion environment
#
# NOTE: the {{ name }} placeholders below are substituted by the python
# installer (mod_file in scripts/traf_user.py) before this template is
# copied to the trafodion user's ~/.bashrc
#
#-------------------------------------------
# Execute the system's default .bashrc first
#-------------------------------------------
if [ -f /etc/bashrc ]; then
	. /etc/bashrc
fi

cd $HOME

#-------------------------------------------
# full path of your Trafodion installation
#-------------------------------------------
export TRAF_HOME="{{ traf_home }}"
export MY_SQROOT="{{ traf_home }}" # for compatibility

#-------------------------------------------
# other env vars needed by Trafodion
#-------------------------------------------

# These env vars define all nodes in the cluster
export JAVA_HOME="{{ java_home }}"
export NODE_LIST="{{ node_list }}"
export MY_NODES="{{ my_nodes }}"
export node_count="{{ node_count }}"
export HADOOP_TYPE="{{ hadoop_type }}"
export ENABLE_HA="{{ enable_ha }}"
export ZOOKEEPER_NODES="{{ zookeeper_nodes }}"
export ZOOKEEPER_PORT="{{ zookeeper_port }}"

#-------------------------------------------
# Execute the sqenv.sh script if it exists.
#-------------------------------------------
PATH=".:$PATH"
if [ -f $TRAF_HOME/sqenv.sh ]; then
	pushd . >/dev/null
	cd $TRAF_HOME
	source ./sqenv.sh
	popd >/dev/null
	export MANPATH=$MANPATH:$MPI_ROOT/share/man
fi

#-------------------------------------------
# additional settings for Trafodion environment
#-------------------------------------------
ETC_SECURITY_MSG="***ERROR: To fix this please configure /etc/security/limits.conf properly on $HOSTNAME."

# set core file size
ulimit -c unlimited

# set max open files
ulimit -n 32768
if [ $? -ne 0 ]; then
    echo "***ERROR: Unable to set max open files. Current value $(ulimit -n)"
    echo $ETC_SECURITY_MSG
fi

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/templates/traf_authentication_conf.template
----------------------------------------------------------------------
diff --git a/install/python-installer/templates/traf_authentication_conf.template b/install/python-installer/templates/traf_authentication_conf.template
new file mode 100644
index 0000000..f15dd70
--- /dev/null
+++ b/install/python-installer/templates/traf_authentication_conf.template
@@ -0,0 +1,71 @@
+# @@@ START COPYRIGHT @@@
+# #
+# # Licensed to the Apache Software Foundation (ASF) under one
+# # or more contributor license agreements.  See the NOTICE file
+# # distributed with this work for additional information
+# # regarding copyright ownership.  The ASF licenses this file
+# # to you under the Apache License, Version 2.0 (the
+# # "License"); you may not use this file except in compliance
+# # with the License.  You may obtain a copy of the License at
+# #
+# #   http://www.apache.org/licenses/LICENSE-2.0
+# #
+# # Unless required by applicable law or agreed to in writing,
+# # software distributed under the License is distributed on an
+# # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# # KIND, either express or implied.  See the License for the
+# # specific language governing permissions and limitations
+# # under the License.
+# #
+# # @@@ END COPYRIGHT @@@
+# #
+#
+# This is the template file for Trafodion database authentication
+# directory service configuration.
+#
+# To use authentication in Trafodion, this file must be configured
+# as described below and placed in $TRAF_HOME/sql/scripts and be named
+# .traf_authentication_config.  You must also enable authentication by
+# running the script traf_authentication_setup in $TRAF_HOME/sql/scripts.
+#
+# NOTE: the format of this configuration file is expected to change in the
+# next release of Trafodion.  Backward compatibility is not guaranteed.
+#
+SECTION: Defaults
+  DefaultSectionName: local
+  RefreshTime: 1800
+  TLS_CACERTFilename: {{ ldap_certpath }}
+SECTION: local
+# If one or more of the LDAPHostName values is a load balancing host, list
+# the name(s) here, one name: value pair for each host.
+  LoadBalanceHostName:
+
+# One or more identically configured hosts must be specified here,
+# one name: value pair for each host.
+  LdapHostname: {{ ldap_hosts }}
+
+# Default is port 389, change if using 636 or any other port
+  LdapPort: {{ ldap_port }}
+
+# Must specify one or more unique identifiers, one name: value pair for each
+  UniqueIdentifier: {{ ldap_identifiers }}
+
+# If the configured LDAP server requires a username and password
+# to perform name lookup, provide those here.
+  LDAPSearchDN: {{ ldap_user }}
+  LDAPSearchPwd: {{ ldap_pwd }}
+
+# If configured LDAP server requires TLS(1) or SSL (2), update this value
+  LDAPSSL: {{ ldap_encrypt }}
+
+# Default timeout values in seconds
+  LDAPNetworkTimeout: 30
+  LDAPTimeout: 30
+  LDAPTimeLimit: 30
+
+# Default values for retry logic algorithm
+  RetryCount: 5
+  RetryDelay: 2
+  PreserveConnection: No
+  ExcludeBadHosts: Yes
+  MaxExcludeListSize: 3

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_authentication_conf.template
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_authentication_conf.template b/install/python-installer/traf_authentication_conf.template
deleted file mode 100644
index f15dd70..0000000
--- a/install/python-installer/traf_authentication_conf.template
+++ /dev/null
@@ -1,71 +0,0 @@
-# @@@ START COPYRIGHT @@@
-# #
-# # Licensed to the Apache Software Foundation (ASF) under one
-# # or more contributor license agreements.  See the NOTICE file
-# # distributed with this work for additional information
-# # regarding copyright ownership.  The ASF licenses this file
-# # to you under the Apache License, Version 2.0 (the
-# # "License"); you may not use this file except in compliance
-# # with the License.  You may obtain a copy of the License at
-# #
-# #   http://www.apache.org/licenses/LICENSE-2.0
-# #
-# # Unless required by applicable law or agreed to in writing,
-# # software distributed under the License is distributed on an
-# # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# # KIND, either express or implied.  See the License for the
-# # specific language governing permissions and limitations
-# # under the License.
-# #
-# # @@@ END COPYRIGHT @@@
-# #
-#
-# This is the template file for Trafodion database authentication
-# directory service configuration.
-#
-# To use authentication in Trafodion, this file must be configured
-# as described below and placed in $TRAF_HOME/sql/scripts and be named
-# .traf_authentication_config.  You must also enable authentication by
-# running the script traf_authentication_setup in $TRAF_HOME/sql/scripts.
-#
-# NOTE: the format of this configuration file is expected to change in the
-# next release of Trafodion.  Backward compatilibity is not guaranteed.
-#
-SECTION: Defaults
-  DefaultSectionName: local
-  RefreshTime: 1800
-  TLS_CACERTFilename: {{ ldap_certpath }}
-SECTION: local
-# If one or more of the LDAPHostName values is a load balancing host, list
-# the name(s) here, one name: value pair for each host.
-  LoadBalanceHostName:
-
-# One or more identically configured hosts must be specified here,
-# one name: value pair for each host.
-  LdapHostname: {{ ldap_hosts }}
-
-# Default is port 389, change if using 636 or any other port
-  LdapPort: {{ ldap_port }}
-
-# Must specify one or more unique identifiers, one name: value pair for each
-  UniqueIdentifier: {{ ldap_identifiers }}
-
-# If the configured LDAP server requires a username and password to
-# to perform name lookup, provide those here.
-  LDAPSearchDN: {{ ldap_user }}
-  LDAPSearchPwd: {{ ldap_pwd }}
-
-# If configured LDAP server requires TLS(1) or SSL (2), update this value
-  LDAPSSL: {{ ldap_encrypt }}
-
-# Default timeout values in seconds
-  LDAPNetworkTimeout: 30
-  LDAPTimeout: 30
-  LDAPTimeLimit: 30
-
-# Default values for retry logic algorithm
-  RetryCount: 5
-  RetryDelay: 2
-  PreserveConnection: No
-  ExcludeBadHosts: Yes
-  MaxExcludeListSize: 3

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_check.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_check.py b/install/python-installer/traf_check.py
deleted file mode 100755
index 31d62a9..0000000
--- a/install/python-installer/traf_check.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on all nodes with sudo user ###
-
-import re
-import json
-import sys
-import os
-from common import run_cmd, cmd_output, err, Version
-
-class Check(object):
-    """ check system envs """
-
-    def __init__(self, dbcfgs_json):
-        self.dbcfgs = json.loads(dbcfgs_json)
-        self.version = Version()
-
-    def check_sudo(self):
-        """ check sudo access """
-        run_cmd('sudo -n echo -n "check sudo access" > /dev/null 2>&1')
-
-    def check_hbase_xml(self):
-        """ check if hbase-site.xml file exists """
-        hbase_xml_file = self.dbcfgs['hbase_xml_file']
-        if not os.path.exists(hbase_xml_file):
-            err('HBase xml file is not found')
-
-    def check_java(self):
-        """ check JDK version """
-        jdk_path = self.dbcfgs['java_home']
-        jdk_ver = cmd_output('%s/bin/javac -version' % jdk_path)
-        try:
-            jdk_ver, sub_ver = re.search(r'javac (\d\.\d).\d_(\d+)', jdk_ver).groups()
-        except AttributeError:
-            err('No JDK found')
-
-        if self.dbcfgs['req_java8'] == 'Y': # only allow JDK1.8
-            support_java = '1.8'
-        else:
-            support_java = self.version.get_version('java')
-
-        if jdk_ver == '1.7' and int(sub_ver) < 65:
-            err('Unsupported JDK1.7 version, sub version should be higher than 65')
-        if jdk_ver not in support_java:
-            err('Unsupported JDK version %s, supported version: %s' % (jdk_ver, support_java))
-
-    #def check_scratch_loc(self):
-    #    """ check if scratch file folder exists """
-    #    scratch_locs = self.dbcfgs['scratch_locs'].split(',')
-    #    for loc in scratch_locs:
-    #        if not os.path.exists(loc):
-    #            err('Scratch file location \'%s\' doesn\'t exist' % loc)
-
-def run():
-    PREFIX = 'check_'
-    check = Check(dbcfgs_json)
-
-    # call method
-    [getattr(check, m)() for m in dir(check) if m.startswith(PREFIX)]
-
-# main
-try:
-    dbcfgs_json = sys.argv[1]
-except IndexError:
-    err('No db config found')
-run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_dep.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_dep.py b/install/python-installer/traf_dep.py
deleted file mode 100755
index 81fb7e0..0000000
--- a/install/python-installer/traf_dep.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on all nodes with sudo user ###
-
-import re
-import os
-import sys
-import json
-import platform
-from common import run_cmd, cmd_output, err
-
-LOCAL_REPO_PTR = """
-[traflocal]
-baseurl=http://%s:%s/
-enabled=1
-gpgcheck=0
-"""
-
-REPO_FILE = '/etc/yum.repos.d/traflocal.repo'
-
-def run():
-    """ install Trafodion dependencies """
-
-    dbcfgs = json.loads(dbcfgs_json)
-
-    if dbcfgs['offline_mode'] == 'Y':
-        print 'Installing pdsh in offline mode ...'
-
-        # setup temp local repo
-        repo_content = LOCAL_REPO_PTR % (dbcfgs['repo_ip'], dbcfgs['repo_port'])
-        with open(REPO_FILE, 'w') as f:
-            f.write(repo_content)
-
-        run_cmd('yum install -y --disablerepo=\* --enablerepo=traflocal pdsh-rcmd-ssh pdsh')
-    else:
-        pdsh_installed = cmd_output('rpm -qa|grep -c pdsh')
-        if pdsh_installed == '0':
-            release = platform.release()
-            releasever, arch = re.search(r'el(\d).(\w+)', release).groups()
-
-            if releasever == '7':
-                pdsh_pkg = 'http://mirrors.neusoft.edu.cn/epel/7/%s/p/pdsh-2.31-1.el7.%s.rpm' % (arch, arch)
-            elif releasever == '6':
-                pdsh_pkg = 'http://mirrors.neusoft.edu.cn/epel/6/%s/pdsh-2.26-4.el6.%s.rpm' % (arch, arch)
-            else:
-                err('Unsupported Linux version')
-
-            print 'Installing pdsh ...'
-            run_cmd('yum install -y %s' % pdsh_pkg)
-
-    package_list = [
-        'apr',
-        'apr-util',
-        'expect',
-        'gzip',
-        'libiodbc-devel',
-        'lzo',
-        'lzop',
-        'openldap-clients',
-        'perl-DBD-SQLite',
-        'perl-Params-Validate',
-        'perl-Time-HiRes',
-        'sqlite',
-        'snappy',
-        'unixODBC-devel',
-        'unzip'
-    ]
-
-    all_pkg_list = run_cmd('rpm -qa')
-    for pkg in package_list:
-        if pkg in all_pkg_list:
-            print 'Package %s had already been installed' % pkg
-        else:
-            print 'Installing %s ...' % pkg
-            if dbcfgs['offline_mode'] == 'Y':
-                run_cmd('yum install -y --disablerepo=\* --enablerepo=traflocal %s' % pkg)
-            else:
-                run_cmd('yum install -y %s' % pkg)
-
-    # remove temp repo file
-    if dbcfgs['offline_mode'] == 'Y':
-        os.remove(REPO_FILE)
-
-# main
-try:
-    dbcfgs_json = sys.argv[1]
-except IndexError:
-    err('No db config found')
-run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_discover.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_discover.py b/install/python-installer/traf_discover.py
deleted file mode 100755
index d0c9936..0000000
--- a/install/python-installer/traf_discover.py
+++ /dev/null
@@ -1,253 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on all nodes with sudo user ###
-
-import re
-import json
-import sys
-import platform
-from glob import glob
-from common import cmd_output, err, Version, ParseXML
-
-PREFIX = 'get_'
-NA = 'N/A' # not available
-NS = 'N/S' # not supported
-OK = 'OK'
-
-def deco(func):
-    def wrapper(self):
-        if PREFIX in func.__name__:
-            name = func.__name__.replace(PREFIX, '')
-            return name, func(self)
-        else:
-            return
-    return wrapper
-
-
-class Discover(object):
-    """ discover functions, to add a new discover function,
-        simply add a new def with name get_xx and decorated
-        by 'deco', then return result in string format:
-
-        @deco
-        def get_xx(self):
-            # do something
-            return result
-    """
-
-    def __init__(self, dbcfgs):
-        self.CPUINFO = cmd_output('cat /proc/cpuinfo')
-        self.MEMINFO = cmd_output('cat /proc/meminfo')
-        self.SYSCTLINFO = cmd_output('sysctl -a')
-        self.version = Version()
-        self.dbcfgs = dbcfgs
-
-    def _parse_string(self, info, string):
-        try:
-            info = info.split('\n')
-            string_line = [line for line in info if string in line][0]
-        except IndexError:
-            err('Cannot get %s info' % string)
-
-        return string_line
-
-    def _get_cpu_info(self, string):
-        return self._parse_string(self.CPUINFO, string).split(':')[1].strip()
-
-    def _get_mem_info(self, string):
-        return self._parse_string(self.MEMINFO, string).split(':')[1].split()[0]
-
-    def _get_sysctl_info(self, string):
-        return self._parse_string(self.SYSCTLINFO, string).split('=')[1].strip()
-
-    @deco
-    def get_linux(self):
-        """ get linux version """
-        os_dist, os_ver = platform.dist()[:2]
-        if os_dist not in self.version.get_version('linux'):
-            return NA
-        else:
-            if not os_ver.split('.')[0] in self.version.get_version(os_dist):
-                return NA
-        return '%s-%s' % (os_dist, os_ver)
-
-    @deco
-    def get_firewall_status(self):
-        """ get firewall running status """
-        iptables_stat = cmd_output('iptables -nL|grep -vE "(Chain|target)"').strip()
-        if iptables_stat:
-            return 'Running'
-        else:
-            return 'Stopped'
-
-    @deco
-    def get_pidmax(self):
-        """ get kernel pid max setting """
-        return self._get_sysctl_info('kernel.pid_max')
-
-    @deco
-    def get_default_java(self):
-        """ get default java version """
-        jdk_path = glob('/usr/java/*') + \
-                   glob('/usr/jdk64/*') + \
-                   glob('/usr/lib/jvm/java-*-openjdk.x86_64')
-
-        jdk_list = {} # {jdk_version: jdk_path}
-        for path in jdk_path:
-            jdk_ver = cmd_output('%s/bin/javac -version' % path)
-
-            try:
-                main_ver, sub_ver = re.search(r'(\d\.\d\.\d)_(\d+)', jdk_ver).groups()
-                # don't support JDK version less than 1.7.0_65
-                if main_ver == '1.7.0' and int(sub_ver) < 65:
-                    continue
-                jdk_list[main_ver] = path
-            except AttributeError:
-                continue
-
-        if not jdk_list:
-            return NA
-        else:
-            # use JDK1.8 first
-            if jdk_list.has_key('1.8.0'):
-                return jdk_list['1.8.0']
-            elif jdk_list.has_key('1.7.0'):
-                return jdk_list['1.7.0']
-
-    @deco
-    def get_hive(self):
-        """ get Hive status """
-        hive_stat = cmd_output('which hive')
-        if 'no hive' in hive_stat:
-            return NA
-        else:
-            return OK
-
-    @deco
-    def get_secure_hadoop(self):
-        if self.dbcfgs.has_key('hadoop_home'): # apache distro
-            CORE_SITE_XML = '%s/etc/hadoop/core-site.xml' % self.dbcfgs['hadoop_home']
-        else:
-            CORE_SITE_XML = '/etc/hadoop/conf/core-site.xml'
-        p = ParseXML(CORE_SITE_XML)
-        return p.get_property('hadoop.security.authentication')
-
-    @deco
-    def get_hbase(self):
-        """ get HBase version """
-        if self.dbcfgs.has_key('hbase_home'): # apache distro
-            hbase_home = self.dbcfgs['hbase_home']
-            hbase_ver = cmd_output('%s/bin/hbase version | head -n1' % hbase_home)
-        else:
-            hbase_ver = cmd_output('hbase version | head -n1')
-
-        support_hbase_ver = self.version.get_version('hbase')
-        try:
-            hbase_ver = re.search(r'HBase (\d\.\d)', hbase_ver).groups()[0]
-        except AttributeError:
-            return NA
-        if hbase_ver not in support_hbase_ver:
-            return NS
-        return hbase_ver
-
-    @deco
-    def get_cpu_model(self):
-        """ get CPU model """
-        return self._get_cpu_info('model name')
-
-    @deco
-    def get_cpu_cores(self):
-        """ get CPU cores """
-        return self.CPUINFO.count('processor')
-
-    @deco
-    def get_arch(self):
-        """ get CPU architecture """
-        arch = platform.processor()
-        if not arch:
-            arch = 'Unknown'
-        return arch
-
-    @deco
-    def get_mem_total(self):
-        """ get total memory size """
-        mem = self._get_mem_info('MemTotal')
-        memsize = mem.split()[0]
-
-        return "%0.1f GB" % round(float(memsize) / (1024 * 1024), 2)
-
-    @deco
-    def get_mem_free(self):
-        """ get current free memory size """
-        free = self._get_mem_info('MemFree')
-        buffers = self._get_mem_info('Buffers')
-        cached = self._get_mem_info('Cached')
-        memfree = float(free) + float(buffers) + float(cached)
-
-        return "%0.1f GB" % round(memfree / (1024 * 1024), 2)
-
-    @deco
-    def get_ext_interface(self):
-        """ get external network interface """
-        return cmd_output('netstat -rn | grep "^0.0.0.0" | awk \'{print $8}\'').strip()
-
-    @deco
-    def get_rootdisk_free(self):
-        """ get root disk space left """
-        space = cmd_output('df -h|grep "\/$" | awk \'{print $4}\'')
-        return space.strip()
-
-    @deco
-    def get_python_ver(self):
-        """ get python version """
-        return platform.python_version()
-
-    @deco
-    def get_traf_status(self):
-        """ get trafodion running status """
-        mon_process = cmd_output('ps -ef|grep -v grep|grep -c "monitor COLD"')
-        if int(mon_process) > 0:
-            return 'Running'
-        else:
-            return 'Stopped'
-
-def run():
-    try:
-        dbcfgs_json = sys.argv[1]
-    except IndexError:
-        err('No db config found')
-    dbcfgs = json.loads(dbcfgs_json)
-    discover = Discover(dbcfgs)
-    methods = [m for m in dir(discover) if m.startswith(PREFIX)]
-    result = {}
-    for method in methods:
-        key, value = getattr(discover, method)() # call method
-        result[key] = value
-
-    print json.dumps(result)
-
-
-# main
-if __name__ == '__main__':
-    run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_kerberos.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_kerberos.py b/install/python-installer/traf_kerberos.py
deleted file mode 100755
index 2e89d2c..0000000
--- a/install/python-installer/traf_kerberos.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on all nodes with sudo user ###
-
-import sys
-import re
-import json
-import socket
-from common import run_cmd, cmd_output, err
-
-def run():
-    """ setup Kerberos security """
-    dbcfgs = json.loads(dbcfgs_json)
-
-    distro = dbcfgs['distro']
-    admin_principal = dbcfgs['admin_principal']
-    admin_passwd = dbcfgs['kdcadmin_pwd']
-    kdc_server = dbcfgs['kdc_server']
-    # maxlife = dbcfgs['max_lifetime']
-    # max_renewlife = dbcfgs['max_renew_lifetime']
-    maxlife = '24hours'
-    max_renewlife = '7days'
-    kadmin_cmd = 'kadmin -p %s -w %s -s %s -q' % (admin_principal, admin_passwd, kdc_server)
-
-    host_name = socket.getfqdn()
-    traf_user = dbcfgs['traf_user']
-    hdfs_user = 'hdfs'
-    hbase_user = 'hbase'
-    realm = re.match('.*@(.*)', admin_principal).groups()[0]
-    traf_keytab_dir = '/etc/%s/keytab' % traf_user
-    traf_keytab = '%s/%s.keytab' % (traf_keytab_dir, traf_user)
-    traf_principal = '%s/%s@%s' % (traf_user, host_name, realm)
-    hdfs_principal = '%s/%s@%s' % (hdfs_user, host_name, realm)
-    hbase_principal = '%s/%s@%s' % (hbase_user, host_name, realm)
-
-    ### setting start ###
-    print 'Checking KDC server connection'
-    run_cmd('%s listprincs' % kadmin_cmd)
-
-    # create principals and keytabs for trafodion user
-    principal_exists = cmd_output('%s listprincs | grep -c %s' % (kadmin_cmd, traf_principal))
-    if int(principal_exists) == 0: # not exist
-        run_cmd('%s \'addprinc -randkey %s\'' % (kadmin_cmd, traf_principal))
-        # Adjust principal's maxlife and maxrenewlife
-        run_cmd('%s \'modprinc -maxlife %s -maxrenewlife %s\' %s >/dev/null 2>&1' % (kadmin_cmd, maxlife, max_renewlife, traf_principal))
-
-    run_cmd('mkdir -p %s' % traf_keytab_dir)
-
-    # TODO: need skip add keytab if exist?
-    print 'Create keytab file for trafodion user'
-    run_cmd('%s \'ktadd -k %s %s\'' % (kadmin_cmd, traf_keytab, traf_principal))
-    run_cmd('chown %s %s' % (traf_user, traf_keytab))
-    run_cmd('chmod 400 %s' % traf_keytab)
-
-    # create principals for hdfs/hbase user
-    print 'Create principals for hdfs/hbase user'
-    if 'CDH' in distro:
-        hdfs_keytab = cmd_output('find /var/run/cloudera-scm-agent/process/ -name hdfs.keytab | head -n 1')
-        hbase_keytab = cmd_output('find /var/run/cloudera-scm-agent/process/ -name hbase.keytab | head -n 1')
-    elif 'HDP' in distro:
-        hdfs_keytab = '/etc/security/keytabs/hdfs.headless.keytab'
-        hbase_keytab = '/etc/security/keytabs/hbase.service.keytab'
-
-    run_cmd('sudo -u %s kinit -kt %s %s' % (hdfs_user, hdfs_keytab, hdfs_principal))
-    run_cmd('sudo -u %s kinit -kt %s %s' % (hbase_user, hbase_keytab, hbase_principal))
-
-    print 'Done creating principals and keytabs'
-
-    kinit_bashrc = """
-
-# ---------------------------------------------------------------
-# if needed obtain and cache the Kerberos ticket-granting ticket
-# start automatic ticket renewal process
-# ---------------------------------------------------------------
-klist -s >/dev/null 2>&1
-if [[ $? -eq 1 ]]; then
-    kinit -kt %s %s >/dev/null 2>&1
-fi
-
-# ---------------------------------------------------------------
-# Start trafodion kerberos ticket manager process
-# ---------------------------------------------------------------
-$TRAF_HOME/sql/scripts/krb5service start >/dev/null 2>&1
-""" % (traf_keytab, traf_principal)
-
-    traf_bashrc = '/home/%s/.bashrc' % traf_user
-    with open(traf_bashrc, 'a') as f:
-        f.write(kinit_bashrc)
-
-# main
-try:
-    dbcfgs_json = sys.argv[1]
-except IndexError:
-    err('No db config found')
-run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_ldap.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_ldap.py b/install/python-installer/traf_ldap.py
deleted file mode 100755
index e756426..0000000
--- a/install/python-installer/traf_ldap.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on all nodes with trafodion user ###
-
-import os
-import sys
-import json
-from common import run_cmd, mod_file, err, TMP_DIR
-
-def run():
-    """ setup LDAP security """
-    dbcfgs = json.loads(dbcfgs_json)
-
-    DB_ROOT_USER = dbcfgs['db_root_user']
-    TRAF_HOME = os.environ['TRAF_HOME']
-    SQENV_FILE = TRAF_HOME + '/sqenvcom.sh'
-    TRAF_AUTH_CONFIG = '%s/sql/scripts/.traf_authentication_config' % TRAF_HOME
-    TRAF_AUTH_TEMPLATE = '%s/traf_authentication_conf.template' % TMP_DIR
-
-    # set traf_authentication_config file
-    change_items = {
-        '{{ ldap_hosts }}': dbcfgs['ldap_hosts'],
-        '{{ ldap_port }}': dbcfgs['ldap_port'],
-        '{{ ldap_identifiers }}': dbcfgs['ldap_identifiers'],
-        '{{ ldap_encrypt }}': dbcfgs['ldap_encrypt'],
-        '{{ ldap_certpath }}': dbcfgs['ldap_certpath'],
-        '{{ ldap_user }}': dbcfgs['ldap_user'],
-        '{{ ldap_pwd }}': dbcfgs['ldap_pwd']
-    }
-
-    print 'Modify authentication config file'
-    run_cmd('cp %s %s' % (TRAF_AUTH_TEMPLATE, TRAF_AUTH_CONFIG))
-    mod_file(TRAF_AUTH_CONFIG, change_items)
-
-
-    print 'Check LDAP Configuration file for errors'
-    run_cmd('ldapconfigcheck -file %s' % TRAF_AUTH_CONFIG)
-
-    print 'Verify that LDAP user %s exists' % DB_ROOT_USER
-    run_cmd('ldapcheck --verbose --username=%s' % DB_ROOT_USER)
-    #if not 'Authentication successful' in ldapcheck_result:
-    #    err('Failed to access LDAP server with user %s' % DB_ROOT_USER)
-
-    print 'Modfiy sqenvcom.sh to turn on authentication'
-    mod_file(SQENV_FILE, {'TRAFODION_ENABLE_AUTHENTICATION=NO':'TRAFODION_ENABLE_AUTHENTICATION=YES'})
-
-# main
-try:
-    dbcfgs_json = sys.argv[1]
-except IndexError:
-    err('No db config found')
-run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_package.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_package.py b/install/python-installer/traf_package.py
deleted file mode 100755
index c7cc043..0000000
--- a/install/python-installer/traf_package.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-## This script should be run on all nodes with trafodion user ##
-
-import sys
-import json
-from common import run_cmd, err
-
-def run():
-    dbcfgs = json.loads(dbcfgs_json)
-
-    TRAF_DIR = '%s-%s' % (dbcfgs['traf_basename'], dbcfgs['traf_version'])
-
-    # untar traf package
-    TRAF_PACKAGE_FILE = '/tmp/' + dbcfgs['traf_package'].split('/')[-1]
-    run_cmd('mkdir -p %s' % TRAF_DIR)
-    run_cmd('tar xf %s -C %s' % (TRAF_PACKAGE_FILE, TRAF_DIR))
-
-    print 'Trafodion package extracted successfully!'
-
-# main
-try:
-    dbcfgs_json = sys.argv[1]
-except IndexError:
-    err('No db config found')
-run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_setup.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_setup.py b/install/python-installer/traf_setup.py
deleted file mode 100755
index 3925fd8..0000000
--- a/install/python-installer/traf_setup.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on all nodes with sudo user ###
-
-import os
-import sys
-import re
-import json
-from common import err, cmd_output, run_cmd
-
-def run():
-    dbcfgs = json.loads(dbcfgs_json)
-
-    HOME_DIR = cmd_output('cat /etc/default/useradd |grep HOME |cut -d "=" -f 2').strip()
-    TRAF_USER = dbcfgs['traf_user']
-    TRAF_HOME = '%s/%s/%s-%s' % (HOME_DIR, TRAF_USER, dbcfgs['traf_basename'], dbcfgs['traf_version'])
-
-    TRAF_VER = dbcfgs['traf_version']
-    DISTRO = dbcfgs['distro']
-    TRAF_LIB_PATH = TRAF_HOME + '/export/lib'
-    SCRATCH_LOCS = dbcfgs['scratch_locs'].split(',')
-
-    SUDOER_FILE = '/etc/sudoers.d/trafodion'
-    SUDOER_CFG = """
-## Allow trafodion id to run commands needed for backup and restore
-%%%s ALL =(hbase) NOPASSWD: /usr/bin/hbase"
-""" % TRAF_USER
-
-    ### kernel settings ###
-    run_cmd('sysctl -w kernel.pid_max=65535 2>&1 > /dev/null')
-    run_cmd('echo "kernel.pid_max=65535" >> /etc/sysctl.conf')
-    run_cmd('cp %s/sysinstall/etc/init.d/trafodion /etc/init.d' % TRAF_HOME)
-    run_cmd('chkconfig --add trafodion')
-    run_cmd('chkconfig --level 06 trafodion on')
-
-    ### create and set permission for scratch file dir ###
-    for loc in SCRATCH_LOCS:
-        # don't set permission for HOME folder
-        if not os.path.exists(loc):
-            run_cmd('mkdir -p %s' % loc)
-        if HOME_DIR not in loc:
-            run_cmd('chmod 777 %s' % loc)
-
-    ### copy jar files ###
-    hbase_lib_path = '/usr/lib/hbase/lib'
-    if 'CDH' in DISTRO:
-        parcel_lib = '/opt/cloudera/parcels/CDH/lib/hbase/lib'
-        if os.path.exists(parcel_lib): hbase_lib_path = parcel_lib
-    elif 'HDP' in DISTRO:
-        hbase_lib_path = '/usr/hdp/current/hbase-regionserver/lib'
-    elif 'APACHE' in DISTRO:
-        hbase_home = dbcfgs['hbase_home']
-        hbase_lib_path = hbase_home + '/lib'
-        # for apache distro, get hbase version from cmdline
-        hbase_ver = cmd_output('%s/bin/hbase version | head -n1' % hbase_home)
-        hbase_ver = re.search(r'HBase (\d\.\d)', hbase_ver).groups()[0]
-        DISTRO += hbase_ver
-
-    distro, v1, v2 = re.search(r'(\w+)-*(\d)\.(\d)', DISTRO).groups()
-    if distro == 'CDH':
-        if v2 == '6': v2 = '5'
-        if v2 == '8': v2 = '7'
-    elif distro == 'HDP':
-        if v2 == '4': v2 = '3'
-
-    hbase_trx_jar = 'hbase-trx-%s%s_%s-%s.jar' % (distro.lower(), v1, v2, TRAF_VER)
-    traf_hbase_trx_path = '%s/%s' % (TRAF_LIB_PATH, hbase_trx_jar)
-    hbase_trx_path = '%s/%s' % (hbase_lib_path, hbase_trx_jar)
-    if not os.path.exists(traf_hbase_trx_path):
-        err('Cannot find HBase trx jar \'%s\' for your Hadoop distribution' % hbase_trx_jar)
-
-    # upgrade mode, check if existing trx jar doesn't match the new trx jar file
-    if dbcfgs.has_key('upgrade') and dbcfgs['upgrade'].upper() == 'Y':
-        if not os.path.exists(hbase_trx_path):
-            err('The trx jar \'%s\' doesn\'t exist in hbase lib path, cannot do upgrade, please do regular install' % hbase_trx_jar)
-    else:
-        # remove old trx and trafodion-utility jar files
-        run_cmd('rm -rf %s/{hbase-trx-*,trafodion-utility-*}' % hbase_lib_path)
-
-        # copy new ones
-        run_cmd('cp %s %s' % (traf_hbase_trx_path, hbase_lib_path))
-        run_cmd('cp %s/trafodion-utility-* %s' % (TRAF_LIB_PATH, hbase_lib_path))
-
-    # set permission
-    run_cmd('chmod +r %s/{hbase-trx-*,trafodion-utility-*}' % hbase_lib_path)
-
-    if dbcfgs['dcs_ha'] == 'Y':
-        # set trafodion sudoer file for specific cmds
-        SUDOER_CFG += """
-## Trafodion Floating IP commands
-Cmnd_Alias IP = /sbin/ip
-Cmnd_Alias ARP = /sbin/arping
-
-## Allow Trafodion id to run commands needed to configure floating IP
-%%%s ALL = NOPASSWD: IP, ARP
-""" % TRAF_USER
-
-    ### write trafodion sudoer file ###
-    with open(SUDOER_FILE, 'w') as f:
-        f.write(SUDOER_CFG)
-
-# main
-try:
-    dbcfgs_json = sys.argv[1]
-except IndexError:
-    err('No db config found')
-run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_sqconfig.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_sqconfig.py b/install/python-installer/traf_sqconfig.py
deleted file mode 100755
index 6665afd..0000000
--- a/install/python-installer/traf_sqconfig.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on first node with trafodion user ###
-
-import os
-import sys
-import json
-from common import run_cmd, err
-
-def run():
-    dbcfgs = json.loads(dbcfgs_json)
-
-    nodes = dbcfgs['node_list'].split(',')
-    scratch_locs = dbcfgs['scratch_locs'].split(',')
-
-    # this script is running by trafodion user, so get sqroot from env
-    TRAF_HOME = os.environ['TRAF_HOME']
-    if TRAF_HOME == '': err('TRAF_HOME var is empty')
-    sqconfig_file = TRAF_HOME + '/sql/scripts/sqconfig'
-
-    core, processor = run_cmd("lscpu|grep -E '(^CPU\(s\)|^Socket\(s\))'|awk '{print $2}'").split('\n')[:2]
-    core = int(core)-1 if int(core) <= 256 else 255
-
-    lines = ['begin node\n']
-    for node_id, node in enumerate(nodes):
-        line = 'node-id=%s;node-name=%s;cores=0-%d;processors=%s;roles=connection,aggregation,storage\n' % (node_id, node, core, processor)
-        lines.append(line)
-
-    lines.append('end node\n')
-    lines.append('\n')
-    lines.append('begin overflow\n')
-
-    for scratch_loc in scratch_locs:
-        line = 'hdd %s\n' % scratch_loc
-        lines.append(line)
-
-    lines.append('end overflow\n')
-
-    with open(sqconfig_file, 'w') as f:
-        f.writelines(lines)
-
-    print 'sqconfig generated successfully!'
-
-    run_cmd('sqgen')
-
-    print 'sqgen ran successfully!'
-
-# main
-try:
-    dbcfgs_json = sys.argv[1]
-except IndexError:
-    err('No db config found')
-run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_start.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_start.py b/install/python-installer/traf_start.py
deleted file mode 100755
index 7546a7a..0000000
--- a/install/python-installer/traf_start.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on first node with trafodion user ###
-
-import sys
-import json
-from common import cmd_output, run_cmd, err
-
-def run():
-    """ start trafodion instance """
-    dbcfgs = json.loads(dbcfgs_json)
-
-    print 'Starting trafodion'
-    run_cmd('sqstart')
-
-    tmp_file = '/tmp/initialize.out'
-    if dbcfgs.has_key('upgrade') and dbcfgs['upgrade'].upper() == 'Y':
-        print 'Initialize trafodion upgrade'
-        run_cmd('echo "initialize trafodion, upgrade;" | sqlci > %s' % tmp_file)
-        init_output = cmd_output('cat %s' % tmp_file)
-        if 'ERROR' in init_output:
-            err('Failed to upgrade initialize trafodion:\n %s' % init_output)
-    else:
-        print 'Initialize trafodion'
-        run_cmd('echo "initialize trafodion;" | sqlci > %s' % tmp_file)
-        init_output = cmd_output('cat %s' % tmp_file)
-        # skip error 1392
-        # ERROR[1392] Trafodion is already initialized on this system. No action is needed.
-        if 'ERROR' in init_output and not '1392' in init_output:
-            err('Failed to initialize trafodion:\n %s' % init_output)
-
-    if dbcfgs['ldap_security'] == 'Y':
-        run_cmd('echo "initialize authorization; alter user DB__ROOT set external name \"%s\";" | sqlci > %s' % (dbcfgs['db_root_user'], tmp_file))
-        if dbcfgs.has_key('db_admin_user'):
-            run_cmd('echo "alter user DB__ADMIN set external name \"%s\";" | sqlci >> %s' % (dbcfgs['db_admin_user'], tmp_file))
-
-        secure_output = cmd_output('cat %s' % tmp_file)
-        if 'ERROR' in secure_output:
-            err('Failed to setup security for trafodion:\n %s' % secure_output)
-
-    run_cmd('rm %s' % tmp_file)
-    print 'Start trafodion successfully.'
-
-# main
-try:
-    dbcfgs_json = sys.argv[1]
-except IndexError:
-    err('No db config found')
-run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/traf_user.py
----------------------------------------------------------------------
diff --git a/install/python-installer/traf_user.py b/install/python-installer/traf_user.py
deleted file mode 100755
index bb2c944..0000000
--- a/install/python-installer/traf_user.py
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-### this script should be run on all nodes with sudo user ###
-
-import os
-import sys
-import json
-from common import run_cmd, append_file, mod_file, cmd_output, run_cmd_as_user, err, TMP_DIR
-
def run():
    """Create the trafodion user, install its bashrc, set resource limits
    and configure passwordless SSH.

    Runs on every node with root privileges.  Configuration comes from the
    module-global ``dbcfgs_json`` (a JSON string passed on the command
    line).  Terminates via ``err()`` on an unsupported Hadoop distro.
    """
    dbcfgs = json.loads(dbcfgs_json)

    # Map the detected Hadoop distro id to the flavor keyword consumed by
    # bashrc.template.
    DISTRO = dbcfgs['distro']
    if 'CDH' in DISTRO:
        hadoop_type = 'cloudera'
    elif 'HDP' in DISTRO:
        hadoop_type = 'hortonworks'
    elif 'APACHE' in DISTRO:
        hadoop_type = 'apache'
    else:
        # Fix: previously this chain fell through silently and hadoop_type
        # was referenced later while unbound, crashing with an unrelated
        # NameError.  Fail fast with a clear message instead.
        err('Unsupported Hadoop distro: %s' % DISTRO)

    TRAF_USER = dbcfgs['traf_user']
    TRAF_PWD = dbcfgs['traf_pwd']
    TRAF_GROUP = TRAF_USER
    # Base directory for new home dirs, read from /etc/default/useradd
    # (typically /home).
    HOME_DIR = cmd_output('cat /etc/default/useradd |grep HOME |cut -d "=" -f 2').strip()
    TRAF_USER_DIR = '%s/%s' % (HOME_DIR, TRAF_USER)
    TRAF_HOME = '%s/%s-%s' % (TRAF_USER_DIR, dbcfgs['traf_basename'], dbcfgs['traf_version'])

    KEY_FILE = '/tmp/id_rsa'
    AUTH_KEY_FILE = '%s/.ssh/authorized_keys' % TRAF_USER_DIR
    SSH_CFG_FILE = '%s/.ssh/config' % TRAF_USER_DIR
    BASHRC_TEMPLATE = '%s/bashrc.template' % TMP_DIR
    BASHRC_FILE = '%s/.bashrc' % TRAF_USER_DIR
    ULIMITS_FILE = '/etc/security/limits.d/%s.conf' % TRAF_USER
    HSPERFDATA_FILE = '/tmp/hsperfdata_trafodion'

    # create trafodion user and group only if they do not already exist
    if not cmd_output('getent group %s' % TRAF_GROUP):
        run_cmd('groupadd %s > /dev/null 2>&1' % TRAF_GROUP)

    if not cmd_output('getent passwd %s' % TRAF_USER):
        # NOTE(review): TRAF_PWD is interpolated straight into a shell
        # command line; a password containing shell metacharacters would
        # break (or inject into) this command.  useradd --password expects
        # the encrypted form, hence the openssl passwd wrapper.
        run_cmd('useradd --shell /bin/bash -m %s -g %s --password "$(openssl passwd %s)"' % (TRAF_USER, TRAF_GROUP, TRAF_PWD))
    elif not os.path.exists(TRAF_USER_DIR):
        # user exists but the home dir is missing (e.g. removed manually)
        run_cmd('mkdir -p %s' % TRAF_USER_DIR)
        run_cmd('chmod 700 %s' % TRAF_USER_DIR)

    # generate a key pair so ~/.ssh exists with proper layout ...
    run_cmd_as_user(TRAF_USER, 'echo -e "y" | ssh-keygen -t rsa -N "" -f ~/.ssh/id_rsa')
    # ... then overwrite it with the cluster-wide key pair created by the
    # copy_file script on the installer node, so all nodes share one identity
    run_cmd('cp %s{,.pub} %s/.ssh/' % (KEY_FILE, TRAF_USER_DIR))

    run_cmd_as_user(TRAF_USER, 'cat ~/.ssh/id_rsa.pub > %s' % AUTH_KEY_FILE)
    run_cmd('chmod 644 %s' % AUTH_KEY_FILE)

    # disable host-key prompts for intra-cluster ssh
    ssh_cfg = 'StrictHostKeyChecking=no\nNoHostAuthenticationForLocalhost=yes\n'
    with open(SSH_CFG_FILE, 'w') as f:
        f.write(ssh_cfg)
    run_cmd('chmod 600 %s' % SSH_CFG_FILE)

    run_cmd('chown -R %s:%s %s/.ssh/' % (TRAF_USER, TRAF_GROUP, TRAF_USER_DIR))

    # fill in the {{ placeholder }} tokens of the bashrc template
    nodes = dbcfgs['node_list'].split(',')
    change_items = {
        '{{ java_home }}': dbcfgs['java_home'],
        '{{ traf_home }}': TRAF_HOME,
        '{{ hadoop_type }}': hadoop_type,
        '{{ node_list }}': ' '.join(nodes),
        '{{ node_count }}': str(len(nodes)),
        '{{ enable_ha }}': dbcfgs['enable_ha'],
        '{{ my_nodes }}': ' -w ' + ' -w '.join(nodes)
    }

    mod_file(BASHRC_TEMPLATE, change_items)

    if 'APACHE' in DISTRO:
        # vanilla Apache clusters need hadoop/hbase binaries on PATH explicitly
        bashrc_content = """
export HADOOP_PREFIX=%s
export HBASE_HOME=%s
export PATH=$PATH:$HADOOP_PREFIX/bin:$HADOOP_PREFIX/sbin:$HBASE_HOME/bin
        """ % (dbcfgs['hadoop_home'], dbcfgs['hbase_home'])
        append_file(BASHRC_TEMPLATE, bashrc_content, position='HADOOP_TYPE')

    # backup bashrc if it exists
    if os.path.exists(BASHRC_FILE):
        run_cmd('cp %s %s.bak' % ((BASHRC_FILE,) * 2))

    # copy bashrc to trafodion's home
    run_cmd('cp %s %s' % (BASHRC_TEMPLATE, BASHRC_FILE))
    run_cmd('chown -R %s:%s %s*' % (TRAF_USER, TRAF_GROUP, BASHRC_FILE))

    # set ulimits for trafodion user (pam_limits format)
    ulimits_config = '''
# Trafodion settings
%s   soft   core unlimited
%s   hard   core unlimited
%s   soft   memlock unlimited
%s   hard   memlock unlimited
%s   soft   nofile 32768
%s   hard   nofile 65536
%s   soft   nproc 100000
%s   hard   nproc 100000
%s   soft nofile 8192
%s   hard nofile 65535
hbase soft nofile 8192
''' % ((TRAF_USER,) * 10)

    with open(ULIMITS_FILE, 'w') as f:
        f.write(ulimits_config)

    # hand the JVM perf-data directory to the trafodion user if present
    if os.path.exists(HSPERFDATA_FILE):
        run_cmd('chown -R %s:%s %s' % (TRAF_USER, TRAF_GROUP, HSPERFDATA_FILE))

    # clean up the shared private key material as the last step
    run_cmd('rm -rf %s{,.pub}' % KEY_FILE)

    print('Setup trafodion user successfully!')
-
# main entry: the installer passes the DB config as a JSON blob in argv[1]
if len(sys.argv) > 1:
    dbcfgs_json = sys.argv[1]
else:
    err('No db config found')
run()

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/version.json
----------------------------------------------------------------------
diff --git a/install/python-installer/version.json b/install/python-installer/version.json
deleted file mode 100644
index dfb20f5..0000000
--- a/install/python-installer/version.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-    "linux":  ["centos", "redhat"],
-    "hadoop": ["cloudera", "hortonworks", "apache"],
-    "java":   ["1.7"],
-    "centos": ["6"],
-    "redhat": ["6"],
-    "cdh":    ["5.4", "5.5", "5.6"],
-    "hdp":    ["2.3", "2.4"],
-    "hbase":  ["1.0", "1.1"]
-}

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/2886c023/install/python-installer/wrapper.py
----------------------------------------------------------------------
diff --git a/install/python-installer/wrapper.py b/install/python-installer/wrapper.py
deleted file mode 100644
index c7a81a7..0000000
--- a/install/python-installer/wrapper.py
+++ /dev/null
@@ -1,301 +0,0 @@
-#!/usr/bin/env python
-
-# @@@ START COPYRIGHT @@@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# @@@ END COPYRIGHT @@@
-
-import os
-import time
-import json
-import subprocess
-from glob import glob
-from threading import Thread
-from common import err_m, run_cmd, time_elapse, get_logger, ParseJson, Remote, INSTALLER_LOC, TMP_DIR, SCRCFG_FILE
-
-
class RemoteRun(Remote):
    """ run commands or scripts remotely using ssh

    On construction the remote TMP_DIR is created and every installer
    *.py / *.json / *.template file is copied there; scripts are then
    executed out of that directory via run_script().
    """

    def __init__(self, host, logger, user='', pwd='', quiet=False):
        # host/user/pwd are handled by the Remote base class (ssh transport)
        super(RemoteRun, self).__init__(host, user, pwd)

        self.quiet = quiet # no output
        self.logger = logger
        # create tmp folder
        self.__run_sshcmd('mkdir -p %s' % TMP_DIR)

        # copy all needed files to remote host
        all_files = glob(INSTALLER_LOC + '/*.py') + \
                    glob(INSTALLER_LOC + '/*.json') + \
                    glob(INSTALLER_LOC + '/*.template')

        self.copy(all_files, remote_folder=TMP_DIR)

        # set permission
        self.__run_sshcmd('chmod a+rx %s/*.py' % TMP_DIR)

    def __del__(self):
        # clean up the remote staging dir; NOTE(review): relies on the
        # interpreter actually finalizing the instance — not guaranteed on
        # abnormal exit
        self.__run_ssh('sudo -n rm -rf %s' % TMP_DIR)

    def run_script(self, script, run_user, json_string, verbose=False):
        """ @param run_user: run the script with this user

        Executes TMP_DIR/script on the remote host with the JSON config as
        its single argument, logs the outcome, and exits the whole process
        on failure.
        """

        if run_user:
            # format string in order to run with 'sudo -n su $user -c $cmd'
            # (triple-escaping survives the local shell, the remote shell,
            # and the su -c shell; whitespace is stripped so the JSON stays
            # a single argv element)
            json_string = json_string.replace('"', '\\\\\\"').replace(' ', '').replace('{', '\\{').replace('$', '\\\\\\$')
            # this command only works with shell=True
            script_cmd = '"sudo -n su - %s -c \'%s/%s %s\'"' % (run_user, TMP_DIR, script, json_string)
            self.__run_ssh(script_cmd, verbose=verbose, shell=True)
        else:
            # run as root (sudo) without a login shell; plain quoting suffices
            script_cmd = 'sudo -n %s/%s \'%s\'' % (TMP_DIR, script, json_string)
            self.__run_ssh(script_cmd, verbose=verbose)

        format1 = 'Host [%s]: Script [%s]: %s' % (self.host, script, self.stdout)
        format2 = 'Host [%s]: Script [%s]' % (self.host, script)

        self.logger.info(format1)

        # self.rc/self.stdout/self.stderr are set by Remote._execute
        if self.rc == 0:
            if not self.quiet: state_ok(format2)
            self.logger.info(format2 + ' ran successfully!')
        else:
            if not self.quiet: state_fail(format2)
            msg = 'Host [%s]: Failed to run \'%s\'' % (self.host, script)
            if self.stderr:
                msg += ': ' + self.stderr
                print '\n ' + self.stderr
            self.logger.error(msg)
            # abort the whole installer on the first failed script
            exit(1)

    def __run_ssh(self, user_cmd, verbose=False, shell=False):
        """ @params: user_cmd should be a string """
        cmd = self._commands('ssh')
        cmd += ['-tt'] # force tty allocation
        if self.user:
            cmd += ['%s@%s' % (self.user, self.host)]
        else:
            cmd += [self.host]

        # if shell=True, cmd should be a string not list
        if shell:
            cmd = ' '.join(cmd) + ' '
            cmd += user_cmd
        else:
            cmd += user_cmd.split()

        self._execute(cmd, verbose=verbose, shell=shell)

    def __run_sshcmd(self, int_cmd):
        """ run internal used ssh command """

        self.__run_ssh(int_cmd)
        # any failure here means ssh itself is broken, not the command
        if self.rc != 0:
            msg = 'Host [%s]: Failed to run ssh commands, check SSH password or connectivity' % self.host
            self.logger.error(msg)
            err_m(msg)
-
def state_ok(msg):
    """Print *msg* with a green '[ OK ]' status tag."""
    state(32, ' OK ', msg)
-
def state_fail(msg):
    """Print *msg* with a red '[ FAIL ]' status tag."""
    state(31, 'FAIL', msg)
-
def state_skip(msg):
    """Print *msg* with a yellow '[ SKIP ]' status tag."""
    state(33, 'SKIP', msg)
-
def state(color, result, msg):
    """Print one status line: the message, dot padding out to a fixed
    width, then the bracketed result, all wrapped in the given ANSI
    color code."""
    width = 80
    padding = (width - len(msg)) * '.'
    print('\n\33[%dm%s %s [ %s ]\33[0m\n' % (color, msg, padding, result))
-
class Status(object):
    """Persistent record of which install scripts already finished, so a
    re-run can skip them.  State lives in *stat_file*, one line per
    completed script ('<name> OK')."""

    def __init__(self, stat_file, name):
        self.stat_file = stat_file
        self.name = name

    def get_status(self):
        """Return True when this script is recorded as done (creating an
        empty status file on first use)."""
        if not os.path.exists(self.stat_file):
            os.mknod(self.stat_file)
        with open(self.stat_file, 'r') as fh:
            records = fh.readlines()
        return any(record.split()[0] == self.name for record in records)

    def set_status(self):
        """Append a completion record for this script."""
        with open(self.stat_file, 'a+') as fh:
            fh.write('%s OK\n' % self.name)
-
@time_elapse
def run(dbcfgs, options, mode='install', pwd=''):
    """ main entry
        mode: install/discover
        dbcfgs: installer configuration dict (JSON-serializable)
        options: parsed command-line options; verbose/upgrade/user/fork
                 attributes are consulted if present
        pwd: SSH password, forwarded to sub scripts flagged req_pwd
        returns: list of {host: stdout} dicts gathered from scripts that
                 ran on all nodes
    """
    # per-mode status file lets an interrupted run resume where it stopped
    STAT_FILE = mode + '.status'
    LOG_FILE = '%s/logs/%s_%s.log' % (INSTALLER_LOC, mode, time.strftime('%Y%m%d_%H%M'))
    logger = get_logger(LOG_FILE)

    # options is duck-typed: absent attributes fall back to defaults
    verbose = True if hasattr(options, 'verbose') and options.verbose else False
    upgrade = True if hasattr(options, 'upgrade') and options.upgrade else False
    user = options.user if hasattr(options, 'user') and options.user else ''
    threshold = options.fork if hasattr(options, 'fork') and options.fork else 10

    script_output = [] # script output array
    # SCRCFG_FILE maps each mode to an ordered list of script descriptors
    conf = ParseJson(SCRCFG_FILE).load()
    script_cfgs = conf[mode]

    dbcfgs_json = json.dumps(dbcfgs)
    hosts = dbcfgs['node_list'].split(',')

    # handle skipped scripts, skip them if no need to run
    skipped_scripts = []
    if upgrade:
        skipped_scripts += ['hadoop_mods', 'apache_mods', 'apache_restart', 'traf_dep', 'traf_kerberos']

    if dbcfgs['secure_hadoop'] == 'N':
        skipped_scripts += ['traf_kerberos']

    if dbcfgs['traf_start'].upper() == 'N':
        skipped_scripts += ['traf_start']

    if dbcfgs['ldap_security'].upper() == 'N':
        skipped_scripts += ['traf_ldap']

    # Apache clusters and managed (CDH/HDP) clusters take different mod paths
    if 'APACHE' in dbcfgs['distro']:
        skipped_scripts += ['hadoop_mods']
    else:
        skipped_scripts += ['apache_mods', 'apache_restart']


    # set ssh config file to avoid known hosts verify on current installer node
    SSH_CFG_FILE = os.environ['HOME'] + '/.ssh/config'
    ssh_cfg = 'StrictHostKeyChecking=no\nNoHostAuthenticationForLocalhost=yes\n'
    with open(SSH_CFG_FILE, 'w') as f:
        f.write(ssh_cfg)
    run_cmd('chmod 600 %s' % SSH_CFG_FILE)

    def run_local_script(script, json_string, req_pwd):
        # run an installer script on this node; closes over pwd/verbose/logger
        cmd = '%s/%s \'%s\'' % (INSTALLER_LOC, script, json_string)

        # pass the ssh password to sub scripts which need SSH password
        if req_pwd: cmd += ' ' + pwd

        if verbose: print cmd

        # stdout on screen (only stderr is captured)
        p = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=True)
        stdout, stderr = p.communicate()

        rc = p.returncode
        if rc != 0:
            msg = 'Failed to run \'%s\'' % script
            if stderr:
                msg += ': ' + stderr
                print stderr
            logger.error(msg)
            state_fail('localhost: Script [%s]' % script)
            exit(rc)
        else:
            state_ok('Script [%s]' % script)
            logger.info('Script [%s] ran successfully!' % script)

        return stdout

    # run sub scripts
    try:
        remote_instances = []
        # discover mode suppresses per-script console output
        if mode == 'discover':
            remote_instances = [RemoteRun(host, logger, user=user, pwd=pwd, quiet=True) for host in hosts]
        else:
            remote_instances = [RemoteRun(host, logger, user=user, pwd=pwd) for host in hosts]
        first_instance = remote_instances[0]
        # NOTE(review): if no host equals dbcfgs['first_rsnode'],
        # first_rs_instance stays unbound and a later 'first_rs' script
        # would raise NameError — confirm first_rsnode is always in node_list
        for instance in remote_instances:
            if instance.host == dbcfgs['first_rsnode']:
                first_rs_instance = instance
                break

        logger.info(' ***** %s Start *****' % mode)
        for cfg in script_cfgs:
            script = cfg['script']
            node = cfg['node']
            desc = cfg['desc']
            # optional per-script flags from the config file
            run_user = ''
            if not 'run_as_traf' in cfg.keys():
                pass
            elif cfg['run_as_traf'] == 'yes':
                run_user = dbcfgs['traf_user']

            if not 'req_pwd' in cfg.keys():
                req_pwd = False
            elif cfg['req_pwd'] == 'yes':
                req_pwd = True

            # resume support: skip scripts already recorded as done
            status = Status(STAT_FILE, script)
            if status.get_status():
                msg = 'Script [%s] had already been executed' % script
                state_skip(msg)
                logger.info(msg)
                continue

            if script.split('.')[0] in skipped_scripts:
                continue
            else:
                print '\nTASK: %s %s' % (desc, (83 - len(desc))*'*')

            #TODO: timeout exit
            if node == 'local':
                run_local_script(script, dbcfgs_json, req_pwd)
            elif node == 'first':
                first_instance.run_script(script, run_user, dbcfgs_json, verbose=verbose)
            elif node == 'first_rs':
                first_rs_instance.run_script(script, run_user, dbcfgs_json, verbose=verbose)
            elif node == 'all':
                # fan out to all nodes in batches of at most `threshold`
                # concurrent ssh sessions
                l = len(remote_instances)
                if l > threshold:
                    piece = (l - (l % threshold)) / threshold
                    parted_remote_instances = [remote_instances[threshold*i:threshold*(i+1)] for i in range(piece)]
                    parted_remote_instances.append(remote_instances[threshold*piece:])
                else:
                    parted_remote_instances = [remote_instances]

                for parted_remote_inst in parted_remote_instances:
                    threads = [Thread(target=r.run_script, args=(script, run_user, dbcfgs_json, verbose)) for r in parted_remote_inst]
                    for t in threads: t.start()
                    for t in threads: t.join()

                    # any non-zero rc in the batch aborts the whole run
                    if sum([r.rc for r in parted_remote_inst]) != 0:
                        err_m('Script failed to run on one or more nodes, exiting ...\nCheck log file %s for details.' % LOG_FILE)

                    script_output += [{r.host:r.stdout.strip()} for r in parted_remote_inst]

            else:
                # should not go to here
                err_m('Invalid configuration for %s' % SCRCFG_FILE)

            # record success only after the script completed on all targets
            status.set_status()
    except KeyboardInterrupt:
        err_m('User quit')

    # remove status file if all scripts run successfully
    os.remove(STAT_FILE)

    return script_output
-
if __name__ == '__main__':
    # wrapper.py is a library for the installer; running it directly is a no-op
    raise SystemExit(0)


Mime
View raw message