ambari-commits mailing list archives

From aonis...@apache.org
Subject ambari git commit: AMBARI-18950. Refactor hive() function in HIVE service (aonishuk)
Date Mon, 21 Nov 2016 16:29:05 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk c9a4881d9 -> cfc161299


AMBARI-18950. Refactor hive() function in HIVE service (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cfc16129
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cfc16129
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cfc16129

Branch: refs/heads/trunk
Commit: cfc161299031e5485f8bb96457b1d06acb1bacdc
Parents: c9a4881
Author: Andrew Onishuk <aonishuk@hortonworks.com>
Authored: Mon Nov 21 18:28:55 2016 +0200
Committer: Andrew Onishuk <aonishuk@hortonworks.com>
Committed: Mon Nov 21 18:28:55 2016 +0200

----------------------------------------------------------------------
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     | 497 ++++++++++---------
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    | 147 +++---
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 311 ++++++------
 .../stacks/2.1/HIVE/test_hive_metastore.py      | 170 +++++--
 4 files changed, 603 insertions(+), 522 deletions(-)
----------------------------------------------------------------------
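
The gist of the refactoring: the monolithic default-OS hive() function is split into
three focused helpers -- setup_hiveserver2(), setup_non_client() and
setup_metastore() -- while hive() itself keeps only the configuration shared by all
components plus a dispatch on the component name. A minimal sketch of the resulting
control flow (names taken from the diff below, bodies elided):

    def hive(name=None):
      import params
      # ... configuration common to all components (hive-site.xml, hive-env.sh, ...)
      if name != "client":
        setup_non_client()       # pid/log/lib directories, JDBC connector jars
      if name == 'hiveserver2':
        setup_hiveserver2()      # hiveserver2-site.xml, HDFS dirs, tarball copies
      if name == 'metastore':
        setup_metastore()        # hivemetastore-site.xml, schematool schema init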


http://git-wip-us.apache.org/repos/asf/ambari/blob/cfc16129/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index ec64200..f08c458 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -45,171 +45,10 @@ from ambari_commons.constants import SERVICE
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 
-
-
-@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
-def hive(name=None):
-  import params
-
-  XmlConfig("hive-site.xml",
-            conf_dir = params.hive_conf_dir,
-            configurations = params.config['configurations']['hive-site'],
-            owner=params.hive_user,
-            configuration_attributes=params.config['configuration_attributes']['hive-site']
-  )
-
-  if name in ["hiveserver2","metastore"]:
-    # Manually overriding service logon user & password set by the installation package
-    service_name = params.service_map[name]
-    ServiceConfig(service_name,
-                  action="change_user",
-                  username = params.hive_user,
-                  password = Script.get_password(params.hive_user))
-    Execute(format("cmd /c hadoop fs -mkdir -p {hive_warehouse_dir}"), logoutput=True, user=params.hadoop_user)
-
-  if name == 'metastore':
-    if params.init_metastore_schema:
-      check_schema_created_cmd = format('cmd /c "{hive_bin}\\hive.cmd --service schematool -info '
-                                        '-dbType {hive_metastore_db_type} '
-                                        '-userName {hive_metastore_user_name} '
-                                        '-passWord {hive_metastore_user_passwd!p}'
-                                        '&set EXITCODE=%ERRORLEVEL%&exit /B %EXITCODE%"', # cmd.exe quirk: propagate the process exit code manually
-                                        hive_bin=params.hive_bin,
-                                        hive_metastore_db_type=params.hive_metastore_db_type,
-                                        hive_metastore_user_name=params.hive_metastore_user_name,
-                                        hive_metastore_user_passwd=params.hive_metastore_user_passwd)
-      try:
-        Execute(check_schema_created_cmd)
-      except Fail:
-        create_schema_cmd = format('cmd /c {hive_bin}\\hive.cmd --service schematool -initSchema '
-                                   '-dbType {hive_metastore_db_type} '
-                                   '-userName {hive_metastore_user_name} '
-                                   '-passWord {hive_metastore_user_passwd!p}',
-                                   hive_bin=params.hive_bin,
-                                   hive_metastore_db_type=params.hive_metastore_db_type,
-                                   hive_metastore_user_name=params.hive_metastore_user_name,
-                                   hive_metastore_user_passwd=params.hive_metastore_user_passwd)
-        Execute(create_schema_cmd,
-                user = params.hive_user,
-                logoutput=True
-        )
-
-  if name == "hiveserver2":
-    if params.hive_execution_engine == "tez":
-      # Init the tez app dir in hadoop
-      script_file = __file__.replace('/', os.sep)
-      cmd_file = os.path.normpath(os.path.join(os.path.dirname(script_file), "..", "files", "hiveTezSetup.cmd"))
-
-      Execute("cmd /c " + cmd_file, logoutput=True, user=params.hadoop_user)
-
-
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hive(name=None):
   import params
-
-  if name == 'hiveserver2':
-    # If the copy-tarball-to-HDFS feature is not supported, create the webhcat apps dir explicitly.
-    if not (params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major)):  
-      params.HdfsResource(params.webhcat_apps_dir,
-                            type="directory",
-                            action="create_on_execute",
-                            owner=params.webhcat_user,
-                            mode=0755
-                          )
-    
-    # Create webhcat dirs.
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      params.HdfsResource(params.hcat_hdfs_user_dir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.webhcat_user,
-                           mode=params.hcat_hdfs_user_mode
-      )
-
-    params.HdfsResource(params.webhcat_hdfs_user_dir,
-                         type="directory",
-                         action="create_on_execute",
-                         owner=params.webhcat_user,
-                         mode=params.webhcat_hdfs_user_mode
-    )
-
-    # ****** Begin Copy Tarballs ******
-    # *********************************
-    # If the copy-tarball-to-HDFS feature is supported, copy mapreduce.tar.gz and tez.tar.gz to HDFS.
-    if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
-      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-      copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-
-    # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
-    # This can use a different source and dest location to account for customized tarball paths.
-    copy_to_hdfs("pig",
-                 params.user_group,
-                 params.hdfs_user,
-                 file_mode=params.tarballs_mode,
-                 custom_source_file=params.pig_tar_source,
-                 custom_dest_file=params.pig_tar_dest_file,
-                 skip=params.sysprep_skip_copy_tarballs_hdfs)
-    copy_to_hdfs("hive",
-                 params.user_group,
-                 params.hdfs_user,
-                 file_mode=params.tarballs_mode,
-                 custom_source_file=params.hive_tar_source,
-                 custom_dest_file=params.hive_tar_dest_file,
-                 skip=params.sysprep_skip_copy_tarballs_hdfs)
-
-    wildcard_tarballs = ["sqoop", "hadoop_streaming"]
-    for tarball_name in wildcard_tarballs:
-      source_file_pattern = eval("params." + tarball_name + "_tar_source")
-      dest_dir = eval("params." + tarball_name + "_tar_dest_dir")
-
-      if source_file_pattern is None or dest_dir is None:
-        continue
-
-      source_files = glob.glob(source_file_pattern) if "*" in source_file_pattern else [source_file_pattern]
-      for source_file in source_files:
-        src_filename = os.path.basename(source_file)
-        dest_file = os.path.join(dest_dir, src_filename)
-
-        copy_to_hdfs(tarball_name,
-                     params.user_group,
-                     params.hdfs_user,
-                     file_mode=params.tarballs_mode,
-                     custom_source_file=source_file,
-                     custom_dest_file=dest_file,
-                     skip=params.sysprep_skip_copy_tarballs_hdfs)
-    # ******* End Copy Tarballs *******
-    # *********************************
     
-    # if warehouse directory is in DFS
-    if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
-      # Create Hive Metastore Warehouse Dir
-      params.HdfsResource(params.hive_apps_whs_dir,
-                           type="directory",
-                            action="create_on_execute",
-                            owner=params.hive_user,
-                            mode=0777
-      )
-    else:
-      Logger.info(format("Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."))
-
-    # Create Hive User Dir
-    params.HdfsResource(params.hive_hdfs_user_dir,
-                         type="directory",
-                          action="create_on_execute",
-                          owner=params.hive_user,
-                          mode=params.hive_hdfs_user_mode
-    )
-    
-    if not is_empty(params.hive_exec_scratchdir) and not urlparse(params.hive_exec_scratchdir).path.startswith("/tmp"):
-      params.HdfsResource(params.hive_exec_scratchdir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.hive_user,
-                           group=params.hdfs_user,
-                           mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
-      
-    params.HdfsResource(None, action="execute")
-
   Directory(params.hive_etc_dir_prefix,
             mode=0755
   )
@@ -233,24 +72,6 @@ def hive(name=None):
     atlas_hook_filepath = os.path.join(params.hive_config_dir, params.atlas_hook_filename)
     setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
   
-  if name == 'hiveserver2':
-    XmlConfig("hiveserver2-site.xml",
-              conf_dir=params.hive_server_conf_dir,
-              configurations=params.config['configurations']['hiveserver2-site'],
-              configuration_attributes=params.config['configuration_attributes']['hiveserver2-site'],
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0644)
-
-  if params.hive_metastore_site_supported and name == 'metastore':
-    XmlConfig("hivemetastore-site.xml",
-              conf_dir=params.hive_server_conf_dir,
-              configurations=params.config['configurations']['hivemetastore-site'],
-              configuration_attributes=params.config['configuration_attributes']['hivemetastore-site'],
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0644)
-  
   File(format("{hive_config_dir}/hive-env.sh"),
        owner=params.hive_user,
        group=params.user_group,
@@ -271,86 +92,217 @@ def hive(name=None):
        content=Template("hive.conf.j2")
        )
 
-  if name == 'metastore' or name == 'hiveserver2':
-    if params.hive_jdbc_target is not None and not os.path.exists(params.hive_jdbc_target):
-      jdbc_connector(params.hive_jdbc_target, params.hive_previous_jdbc_jar)
-    if params.hive2_jdbc_target is not None and not os.path.exists(params.hive2_jdbc_target):
-      jdbc_connector(params.hive2_jdbc_target, params.hive2_previous_jdbc_jar)
-
   File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
        content = DownloadSource(format("{jdk_location}{check_db_connection_jar_name}")),
        mode = 0644,
   )
 
+  if name != "client":
+    setup_non_client()
+  if name == 'hiveserver2':
+    setup_hiveserver2()
   if name == 'metastore':
-    File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hivemetastore.properties"),
-         owner=params.hive_user,
-         group=params.user_group,
-         content=Template("hadoop-metrics2-hivemetastore.properties.j2")
-    )
+    setup_metastore() # metastore configs and schematool schema init
 
-    File(params.start_metastore_path,
-         mode=0755,
-         content=StaticFile('startMetastore.sh')
-    )
-    if params.init_metastore_schema:
-      create_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                                 "{hive_schematool_bin}/schematool -initSchema "
-                                 "-dbType {hive_metastore_db_type} "
-                                 "-userName {hive_metastore_user_name} "
-                                 "-passWord {hive_metastore_user_passwd!p} -verbose")
-
-      check_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                                        "{hive_schematool_bin}/schematool -info "
-                                        "-dbType {hive_metastore_db_type} "
-                                        "-userName {hive_metastore_user_name} "
-                                        "-passWord {hive_metastore_user_passwd!p} -verbose"), params.hive_user)
-
-      # HACK: with quoted passwords and as_user() (which quotes the command as well), !p cannot mask the password.
-      # Fix it with the workaround below:
-      quoted_hive_metastore_user_passwd = quote_bash_args(quote_bash_args(params.hive_metastore_user_passwd))
-      if quoted_hive_metastore_user_passwd[0] == "'" and quoted_hive_metastore_user_passwd[-1] == "'" \
-          or quoted_hive_metastore_user_passwd[0] == '"' and quoted_hive_metastore_user_passwd[-1] == '"':
-        quoted_hive_metastore_user_passwd = quoted_hive_metastore_user_passwd[1:-1]
-      Logger.sensitive_strings[repr(check_schema_created_cmd)] = repr(check_schema_created_cmd.replace(
-          format("-passWord {quoted_hive_metastore_user_passwd}"), "-passWord " + utils.PASSWORDS_HIDE_STRING))
-
-      Execute(create_schema_cmd,
-              not_if = check_schema_created_cmd,
-              user = params.hive_user
-      )
-  elif name == 'hiveserver2':
-    File(params.start_hiveserver2_path,
-         mode=0755,
-         content=Template(format('{start_hiveserver2_script}'))
+def setup_hiveserver2():
+  import params
+  
+  File(params.start_hiveserver2_path,
+       mode=0755,
+       content=Template(format('{start_hiveserver2_script}'))
+  )
+
+  File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hiveserver2.properties"),
+       owner=params.hive_user,
+       group=params.user_group,
+       content=Template("hadoop-metrics2-hiveserver2.properties.j2")
+  )
+  XmlConfig("hiveserver2-site.xml",
+            conf_dir=params.hive_server_conf_dir,
+            configurations=params.config['configurations']['hiveserver2-site'],
+            configuration_attributes=params.config['configuration_attributes']['hiveserver2-site'],
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0644)
+  
+  # If the copy-tarball-to-HDFS feature is not supported, create the webhcat apps dir explicitly.
+  if not (params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major)):  
+    params.HdfsResource(params.webhcat_apps_dir,
+                          type="directory",
+                          action="create_on_execute",
+                          owner=params.webhcat_user,
+                          mode=0755
+                        )
+  
+  # Create webhcat dirs.
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsResource(params.hcat_hdfs_user_dir,
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.webhcat_user,
+                         mode=params.hcat_hdfs_user_mode
     )
 
-    File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hiveserver2.properties"),
-         owner=params.hive_user,
-         group=params.user_group,
-         content=Template("hadoop-metrics2-hiveserver2.properties.j2")
+  params.HdfsResource(params.webhcat_hdfs_user_dir,
+                       type="directory",
+                       action="create_on_execute",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+
+  # ****** Begin Copy Tarballs ******
+  # *********************************
+  # If the copy-tarball-to-HDFS feature is supported, copy mapreduce.tar.gz and tez.tar.gz to HDFS.
+  if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
+    copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
+    copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
+
+  # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
+  # This can use a different source and dest location to account for customized tarball paths.
+  copy_to_hdfs("pig",
+               params.user_group,
+               params.hdfs_user,
+               file_mode=params.tarballs_mode,
+               custom_source_file=params.pig_tar_source,
+               custom_dest_file=params.pig_tar_dest_file,
+               skip=params.sysprep_skip_copy_tarballs_hdfs)
+  copy_to_hdfs("hive",
+               params.user_group,
+               params.hdfs_user,
+               file_mode=params.tarballs_mode,
+               custom_source_file=params.hive_tar_source,
+               custom_dest_file=params.hive_tar_dest_file,
+               skip=params.sysprep_skip_copy_tarballs_hdfs)
+
+  wildcard_tarballs = ["sqoop", "hadoop_streaming"]
+  for tarball_name in wildcard_tarballs:
+    source_file_pattern = eval("params." + tarball_name + "_tar_source")
+    dest_dir = eval("params." + tarball_name + "_tar_dest_dir")
+
+    if source_file_pattern is None or dest_dir is None:
+      continue
+
+    source_files = glob.glob(source_file_pattern) if "*" in source_file_pattern else [source_file_pattern]
+    for source_file in source_files:
+      src_filename = os.path.basename(source_file)
+      dest_file = os.path.join(dest_dir, src_filename)
+
+      copy_to_hdfs(tarball_name,
+                   params.user_group,
+                   params.hdfs_user,
+                   file_mode=params.tarballs_mode,
+                   custom_source_file=source_file,
+                   custom_dest_file=dest_file,
+                   skip=params.sysprep_skip_copy_tarballs_hdfs)
+  # ******* End Copy Tarballs *******
+  # *********************************
+  
+  # if warehouse directory is in DFS
+  if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
+    # Create Hive Metastore Warehouse Dir
+    params.HdfsResource(params.hive_apps_whs_dir,
+                         type="directory",
+                          action="create_on_execute",
+                          owner=params.hive_user,
+                          mode=0777
     )
+  else:
+    Logger.info(format("Not creating warehouse directory '{hive_apps_whs_dir}', as the location is not in DFS."))
+
+  # Create Hive User Dir
+  params.HdfsResource(params.hive_hdfs_user_dir,
+                       type="directory",
+                        action="create_on_execute",
+                        owner=params.hive_user,
+                        mode=params.hive_hdfs_user_mode
+  )
+  
+  if not is_empty(params.hive_exec_scratchdir) and not urlparse(params.hive_exec_scratchdir).path.startswith("/tmp"):
+    params.HdfsResource(params.hive_exec_scratchdir,
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hive_user,
+                         group=params.hdfs_user,
+                         mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
+    
+  params.HdfsResource(None, action="execute")
+  
+def setup_non_client():
+  import params
+  
+  Directory(params.hive_pid_dir,
+            create_parents = True,
+            cd_access='a',
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)
+  Directory(params.hive_log_dir,
+            create_parents = True,
+            cd_access='a',
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)
+  Directory(params.hive_var_lib,
+            create_parents = True,
+            cd_access='a',
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)
 
-  if name != "client":
-    Directory(params.hive_pid_dir,
-              create_parents = True,
-              cd_access='a',
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0755)
-    Directory(params.hive_log_dir,
-              create_parents = True,
-              cd_access='a',
-              owner=params.hive_user,
-              group=params.user_group,
-              mode=0755)
-    Directory(params.hive_var_lib,
-              create_parents = True,
-              cd_access='a',
+  if params.hive_jdbc_target is not None and not os.path.exists(params.hive_jdbc_target):
+    jdbc_connector(params.hive_jdbc_target, params.hive_previous_jdbc_jar)
+  if params.hive2_jdbc_target is not None and not os.path.exists(params.hive2_jdbc_target):
+    jdbc_connector(params.hive2_jdbc_target, params.hive2_previous_jdbc_jar)
+    
+def setup_metastore():
+  import params
+  
+  if params.hive_metastore_site_supported:
+    XmlConfig("hivemetastore-site.xml",
+              conf_dir=params.hive_server_conf_dir,
+              configurations=params.config['configurations']['hivemetastore-site'],
+              configuration_attributes=params.config['configuration_attributes']['hivemetastore-site'],
               owner=params.hive_user,
               group=params.user_group,
-              mode=0755)
+              mode=0644)
+  
+  File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hivemetastore.properties"),
+       owner=params.hive_user,
+       group=params.user_group,
+       content=Template("hadoop-metrics2-hivemetastore.properties.j2")
+  )
 
+  File(params.start_metastore_path,
+       mode=0755,
+       content=StaticFile('startMetastore.sh')
+  )
+  if params.init_metastore_schema:
+    create_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
+                               "{hive_schematool_bin}/schematool -initSchema "
+                               "-dbType {hive_metastore_db_type} "
+                               "-userName {hive_metastore_user_name} "
+                               "-passWord {hive_metastore_user_passwd!p} -verbose")
+
+    check_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
+                                      "{hive_schematool_bin}/schematool -info "
+                                      "-dbType {hive_metastore_db_type} "
+                                      "-userName {hive_metastore_user_name} "
+                                      "-passWord {hive_metastore_user_passwd!p} -verbose"), params.hive_user)
+
+    # HACK: with quoted passwords and as_user() (which quotes the command as well), !p cannot mask the password.
+    # Fix it with the workaround below:
+    quoted_hive_metastore_user_passwd = quote_bash_args(quote_bash_args(params.hive_metastore_user_passwd))
+    if quoted_hive_metastore_user_passwd[0] == "'" and quoted_hive_metastore_user_passwd[-1] == "'" \
+        or quoted_hive_metastore_user_passwd[0] == '"' and quoted_hive_metastore_user_passwd[-1] == '"':
+      quoted_hive_metastore_user_passwd = quoted_hive_metastore_user_passwd[1:-1]
+    Logger.sensitive_strings[repr(check_schema_created_cmd)] = repr(check_schema_created_cmd.replace(
+        format("-passWord {quoted_hive_metastore_user_passwd}"), "-passWord " + utils.PASSWORDS_HIDE_STRING))
+
+    Execute(create_schema_cmd,
+            not_if = check_schema_created_cmd,
+            user = params.hive_user
+    )
+    
 """
 Writes configuration files required by Hive.
 """
@@ -479,3 +431,58 @@ def jdbc_connector(target, hive_previous_jdbc_jar):
   File(target,
        mode = 0644,
   )
+  
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hive(name=None):
+  import params
+
+  XmlConfig("hive-site.xml",
+            conf_dir = params.hive_conf_dir,
+            configurations = params.config['configurations']['hive-site'],
+            owner=params.hive_user,
+            configuration_attributes=params.config['configuration_attributes']['hive-site']
+  )
+
+  if name in ["hiveserver2","metastore"]:
+    # Manually overriding service logon user & password set by the installation package
+    service_name = params.service_map[name]
+    ServiceConfig(service_name,
+                  action="change_user",
+                  username = params.hive_user,
+                  password = Script.get_password(params.hive_user))
+    Execute(format("cmd /c hadoop fs -mkdir -p {hive_warehouse_dir}"), logoutput=True, user=params.hadoop_user)
+
+  if name == 'metastore':
+    if params.init_metastore_schema:
+      check_schema_created_cmd = format('cmd /c "{hive_bin}\\hive.cmd --service schematool -info '
+                                        '-dbType {hive_metastore_db_type} '
+                                        '-userName {hive_metastore_user_name} '
+                                        '-passWord {hive_metastore_user_passwd!p}'
+                                        '&set EXITCODE=%ERRORLEVEL%&exit /B %EXITCODE%"', # cmd.exe quirk: propagate the process exit code manually
+                                        hive_bin=params.hive_bin,
+                                        hive_metastore_db_type=params.hive_metastore_db_type,
+                                        hive_metastore_user_name=params.hive_metastore_user_name,
+                                        hive_metastore_user_passwd=params.hive_metastore_user_passwd)
+      try:
+        Execute(check_schema_created_cmd)
+      except Fail:
+        create_schema_cmd = format('cmd /c {hive_bin}\\hive.cmd --service schematool -initSchema '
+                                   '-dbType {hive_metastore_db_type} '
+                                   '-userName {hive_metastore_user_name} '
+                                   '-passWord {hive_metastore_user_passwd!p}',
+                                   hive_bin=params.hive_bin,
+                                   hive_metastore_db_type=params.hive_metastore_db_type,
+                                   hive_metastore_user_name=params.hive_metastore_user_name,
+                                   hive_metastore_user_passwd=params.hive_metastore_user_passwd)
+        Execute(create_schema_cmd,
+                user = params.hive_user,
+                logoutput=True
+        )
+
+  if name == "hiveserver2":
+    if params.hive_execution_engine == "tez":
+      # Init the tez app dir in hadoop
+      script_file = __file__.replace('/', os.sep)
+      cmd_file = os.path.normpath(os.path.join(os.path.dirname(script_file), "..", "files", "hiveTezSetup.cmd"))
+
+      Execute("cmd /c " + cmd_file, logoutput=True, user=params.hadoop_user)

http://git-wip-us.apache.org/repos/asf/ambari/blob/cfc16129/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index c79e773..799ee39 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -212,33 +212,12 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
-    self.assertResourceCalled('Execute', ('cp',
-                                          '--remove-destination',
-                                          '/usr/share/java/mysql-connector-java.jar',
-                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
-                              path = ['/bin', '/usr/bin/'],
-                              sudo = True,
-                              )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
-        mode = 0644,
-    )
+
     self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
-                              )
-    self.assertResourceCalled('File', '/tmp/start_metastore_script',
-                              content = StaticFile('startMetastore.sh'),
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord \'!`"\'"\'"\' 1\' -verbose',
-        not_if = 'ambari-sudo.sh su hive -l -s /bin/bash -c \'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord \'"\'"\'!`"\'"\'"\'"\'"\'"\'"\'"\'"\' 1\'"\'"\' -verbose\'',
-        user = 'hive',
-    )
+
     self.assertResourceCalled('Directory', '/var/run/hive',
                               owner = 'hive',
                               group = 'hadoop',
@@ -260,6 +239,30 @@ class TestHiveMetastore(RMFTestCase):
                               create_parents = True,
                               cd_access = 'a',
                               )
+    
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+                              path = ['/bin', '/usr/bin/'],
+                              sudo = True,
+                              )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
+                              )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord \'!`"\'"\'"\' 1\' -verbose',
+        not_if = 'ambari-sudo.sh su hive -l -s /bin/bash -c \'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord \'"\'"\'!`"\'"\'"\'"\'"\'"\'"\'"\'"\' 1\'"\'"\' -verbose\'',
+        user = 'hive',
+    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hive',
@@ -325,33 +328,10 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
-    self.assertResourceCalled('Execute', ('cp',
-                                          '--remove-destination',
-                                          '/usr/share/java/mysql-connector-java.jar',
-                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
-                              path = ['/bin', '/usr/bin/'],
-                              sudo = True,
-                              )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
-        mode = 0644,
-    )
     self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
-                              )
-    self.assertResourceCalled('File', '/tmp/start_metastore_script',
-                              content = StaticFile('startMetastore.sh'),
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord \'!`"\'"\'"\' 1\' -verbose',
-        not_if = 'ambari-sudo.sh su hive -l -s /bin/bash -c \'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord \'"\'"\'!`"\'"\'"\'"\'"\'"\'"\'"\'"\' 1\'"\'"\' -verbose\'',
-        user = 'hive',
-    )
     self.assertResourceCalled('Directory', '/var/run/hive',
                               owner = 'hive',
                               group = 'hadoop',
@@ -373,6 +353,29 @@ class TestHiveMetastore(RMFTestCase):
                               create_parents = True,
                               cd_access = 'a',
                               )
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+                              path = ['/bin', '/usr/bin/'],
+                              sudo = True,
+                              )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
+                              )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord \'!`"\'"\'"\' 1\' -verbose',
+        not_if = 'ambari-sudo.sh su hive -l -s /bin/bash -c \'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord \'"\'"\'!`"\'"\'"\'"\'"\'"\'"\'"\'"\' 1\'"\'"\' -verbose\'',
+        user = 'hive',
+    )
 
   @patch("resource_management.core.shell.call")
   @patch("resource_management.libraries.functions.get_stack_version")
@@ -464,36 +467,11 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644)
 
-    self.assertResourceCalled('Execute', ('cp',
-                                          '--remove-destination',
-                                          '/usr/share/java/mysql-connector-java.jar',
-                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
-                              path = ['/bin', '/usr/bin/'],
-                              sudo = True)
-
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
-        mode = 0644)
-
     self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
         mode = 0644,
     )
-
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
-                              )
-    self.assertResourceCalled('File', '/tmp/start_metastore_script',
-                              content = StaticFile('startMetastore.sh'),
-                              mode = 0755)
-
-    self.maxDiff = None
-
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord aaa -verbose',
-        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord aaa -verbose'",
-        user = 'hive')
-
+    
     self.assertResourceCalled('Directory', '/var/run/hive',
                               owner = 'hive',
                               group = 'hadoop',
@@ -514,6 +492,31 @@ class TestHiveMetastore(RMFTestCase):
                               mode = 0755,
                               create_parents = True,
                               cd_access = 'a')
+    
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+                              path = ['/bin', '/usr/bin/'],
+                              sudo = True)
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+        mode = 0644)
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
+                              )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755)
+
+    self.maxDiff = None
+
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord aaa -verbose',
+        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord aaa -verbose'",
+        user = 'hive')
 
     self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
         environment = {'HADOOP_HOME': '/usr/hdp/2.3.0.0-1234/hadoop', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': '/usr/hdp/current/hive-server2/bin/hive'},

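Why the test files churn so much: RMFTestCase verifies resources in the exact order
the script created them, so moving the JDBC-connector copy and the schematool steps
into setup_non_client()/setup_metastore() -- which now run after the pid/log/lib
directory handling -- forces the same assertions to move later in each test.
Conceptually (a hypothetical sketch of the order-sensitive check, not the real
RMFTestCase internals):

    # Each assertResourceCalled() pops the next recorded resource and compares:
    def assertResourceCalled(self, resource_type, name, **kwargs):
      actual_type, actual_name, actual_kwargs = self.resource_queue.pop(0)
      self.assertEquals((resource_type, name, kwargs),
                        (actual_type, actual_name, actual_kwargs))
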
http://git-wip-us.apache.org/repos/asf/ambari/blob/cfc16129/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 35d2144..8b866d6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -322,6 +322,128 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self, no_tmp = False, default_fs_default='hdfs://c6401.ambari.apache.org:8020'):
+
+    if self._testMethodName == "test_socket_timeout":
+      # This test will not call any more resources.
+      return
+    
+    self.assertResourceCalled('Directory', '/etc/hive',
+                              mode=0755,
+    )
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-server2/conf',
+                              owner='hive',
+                              group='hadoop',
+                              create_parents = True,
+    )
+
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              group='hadoop',
+                              conf_dir='/usr/hdp/current/hive-server2/conf',
+                              mode=0644,
+                              configuration_attributes={u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                   u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner='hive',
+                              configurations=self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-default.xml.template',
+                              owner='hive',
+                              group='hadoop',
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-env.sh.template',
+                              owner='hive',
+                              group='hadoop',
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
+                              content='log4jproperties\nline2',
+                              owner='hive',
+                              group='hadoop',
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-log4j.properties',
+                              content='log4jproperties\nline2',
+                              owner='hive',
+                              group='hadoop',
+                              mode=0644,
+    )
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+                              group='hadoop',
+                              conf_dir='/etc/hive/conf.server',
+                              mode=0644,
+                              configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                   u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                   u'javax.jdo.option.ConnectionPassword': u'true'}},
+                              owner='hive',
+                              configurations=self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+                              content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+                              owner='hive',
+                              group='hadoop',
+    )
+    self.assertResourceCalled('Directory', '/etc/security/limits.d',
+                              owner='root',
+                              group='root',
+                              create_parents = True,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
+                              content=Template('hive.conf.j2'),
+                              owner='root',
+                              group='root',
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
+                              content=DownloadSource('http://c6401.ambari.apache.org:8080/resources'
+                                                     '/DBConnectionVerification.jar'),
+                              mode=0644,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+                              owner='hive',
+                              mode=0755,
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+                              owner='hive',
+                              mode=0755,
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+                              owner='hive',
+                              mode=0755,
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+    )
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+                              path=['/bin', '/usr/bin/'],
+                              sudo=True,
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
+                              content=Template('startHiveserver2.sh.j2'),
+                              mode=0755,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hiveserver2.properties.j2')
+                              )
+    self.assertResourceCalled('XmlConfig', 'hiveserver2-site.xml',
+      group = 'hadoop',
+      conf_dir = '/etc/hive/conf.server',
+      mode = 0644,
+      owner = 'hive',
+      configuration_attributes = self.getConfig()['configuration_attributes']['hiveserver2-site'],
+      configurations = self.getConfig()['configurations']['hiveserver2-site'],
+    )
     # Verify creating of Hcat and Hive directories
     self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
@@ -352,10 +474,6 @@ class TestHiveServer(RMFTestCase):
         mode = 0755,
     )
 
-    if self._testMethodName == "test_socket_timeout":
-      # This test will not call any more resources.
-      return
-
     self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
@@ -411,6 +529,8 @@ class TestHiveServer(RMFTestCase):
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
+
+  def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hive',
                               mode=0755,
     )
@@ -419,7 +539,6 @@ class TestHiveServer(RMFTestCase):
                               group='hadoop',
                               create_parents = True,
     )
-
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                               group='hadoop',
                               conf_dir='/usr/hdp/current/hive-server2/conf',
@@ -459,14 +578,6 @@ class TestHiveServer(RMFTestCase):
                               owner='hive',
                               configurations=self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('XmlConfig', 'hiveserver2-site.xml',
-      group = 'hadoop',
-      conf_dir = '/etc/hive/conf.server',
-      mode = 0644,
-      owner = 'hive',
-      configuration_attributes = self.getConfig()['configuration_attributes']['hiveserver2-site'],
-      configurations = self.getConfig()['configurations']['hiveserver2-site'],
-    )
     self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                               content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner='hive',
@@ -483,53 +594,59 @@ class TestHiveServer(RMFTestCase):
                               group='root',
                               mode=0644,
     )
-    self.assertResourceCalled('Execute', ('cp',
-                                          '--remove-destination',
-                                          '/usr/share/java/mysql-connector-java.jar',
-                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
-                              path=['/bin', '/usr/bin/'],
-                              sudo=True,
-    )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
-                              mode=0644,
-    )
     self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
-                              content=DownloadSource('http://c6401.ambari.apache.org:8080/resources'
-                                                     '/DBConnectionVerification.jar'),
+                              content=DownloadSource(
+                                'http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
                               mode=0644,
     )
-    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
-                              content=Template('startHiveserver2.sh.j2'),
-                              mode=0755,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              content = Template('hadoop-metrics2-hiveserver2.properties.j2')
-                              )
     self.assertResourceCalled('Directory', '/var/run/hive',
                               owner='hive',
-                              mode=0755,
                               group='hadoop',
+                              mode=0755,
                               create_parents = True,
                               cd_access='a',
     )
     self.assertResourceCalled('Directory', '/var/log/hive',
                               owner='hive',
-                              mode=0755,
                               group='hadoop',
+                              mode=0755,
                               create_parents = True,
                               cd_access='a',
     )
     self.assertResourceCalled('Directory', '/var/lib/hive',
                               owner='hive',
-                              mode=0755,
                               group='hadoop',
+                              mode=0755,
                               create_parents = True,
                               cd_access='a',
     )
-
-  def assert_configure_secured(self):
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+                              path=['/bin', '/usr/bin/'],
+                              sudo=True,
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
+                              content=Template('startHiveserver2.sh.j2'),
+                              mode=0755,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hiveserver2.properties.j2')
+    )
+    self.assertResourceCalled('XmlConfig', 'hiveserver2-site.xml',
+      group = 'hadoop',
+      conf_dir = '/etc/hive/conf.server',
+      mode = 0644,
+      owner = 'hive',
+      configuration_attributes = self.getConfig()['configuration_attributes']['hiveserver2-site'],
+      configurations = self.getConfig()['configurations']['hiveserver2-site'],
+    )
     self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
@@ -613,122 +730,6 @@ class TestHiveServer(RMFTestCase):
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
-    self.assertResourceCalled('Directory', '/etc/hive',
-                              mode=0755,
-    )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-server2/conf',
-                              owner='hive',
-                              group='hadoop',
-                              create_parents = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              group='hadoop',
-                              conf_dir='/usr/hdp/current/hive-server2/conf',
-                              mode=0644,
-                              configuration_attributes={u'final': {u'mapred.healthChecker.script.path': u'true',
-                                                                   u'mapreduce.jobtracker.staging.root.dir': u'true'}},
-                              owner='hive',
-                              configurations=self.getConfig()['configurations']['mapred-site'],
-    )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-default.xml.template',
-                              owner='hive',
-                              group='hadoop',
-    )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-env.sh.template',
-                              owner='hive',
-                              group='hadoop',
-    )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
-                              content='log4jproperties\nline2',
-                              owner='hive',
-                              group='hadoop',
-                              mode=0644,
-    )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-log4j.properties',
-                              content='log4jproperties\nline2',
-                              owner='hive',
-                              group='hadoop',
-                              mode=0644,
-    )
-    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-                              group='hadoop',
-                              conf_dir='/etc/hive/conf.server',
-                              mode=0644,
-                              configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
-                                                                   u'javax.jdo.option.ConnectionDriverName': u'true',
-                                                                   u'javax.jdo.option.ConnectionPassword': u'true'}},
-                              owner='hive',
-                              configurations=self.getConfig()['configurations']['hive-site'],
-    )
-    self.assertResourceCalled('XmlConfig', 'hiveserver2-site.xml',
-      group = 'hadoop',
-      conf_dir = '/etc/hive/conf.server',
-      mode = 0644,
-      owner = 'hive',
-      configuration_attributes = self.getConfig()['configuration_attributes']['hiveserver2-site'],
-      configurations = self.getConfig()['configurations']['hiveserver2-site'],
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-                              content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-                              owner='hive',
-                              group='hadoop',
-    )
-    self.assertResourceCalled('Directory', '/etc/security/limits.d',
-                              owner='root',
-                              group='root',
-                              create_parents = True,
-    )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
-                              content=Template('hive.conf.j2'),
-                              owner='root',
-                              group='root',
-                              mode=0644,
-    )
-    self.assertResourceCalled('Execute', ('cp',
-                                          '--remove-destination',
-                                          '/usr/share/java/mysql-connector-java.jar',
-                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
-                              path=['/bin', '/usr/bin/'],
-                              sudo=True,
-    )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
-                              mode=0644,
-    )
-    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
-                              content=DownloadSource(
-                                'http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
-                              mode=0644,
-    )
-    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
-                              content=Template('startHiveserver2.sh.j2'),
-                              mode=0755,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              content = Template('hadoop-metrics2-hiveserver2.properties.j2')
-    )
-    self.assertResourceCalled('Directory', '/var/run/hive',
-                              owner='hive',
-                              group='hadoop',
-                              mode=0755,
-                              create_parents = True,
-                              cd_access='a',
-    )
-    self.assertResourceCalled('Directory', '/var/log/hive',
-                              owner='hive',
-                              group='hadoop',
-                              mode=0755,
-                              create_parents = True,
-                              cd_access='a',
-    )
-    self.assertResourceCalled('Directory', '/var/lib/hive',
-                              owner='hive',
-                              group='hadoop',
-                              mode=0755,
-                              create_parents = True,
-                              cd_access='a',
-    )
 
   @patch("time.time")
   @patch("socket.socket")

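In the HiveServer2 tests above, the MySQL JDBC driver copy, the start script, the hadoop-metrics2 properties file, and hiveserver2-site.xml are now asserted as one contiguous group, and the old duplicate of that block (together with the surrounding directory and config assertions) after the HDFS resource calls is removed. A minimal sketch of that grouped setup follows, under stated assumptions: setup_hiveserver2 is a hypothetical name introduced only for illustration, while Execute, File, and Template are the resource_management primitives these tests already exercise (real resources run inside an Environment, e.g. via RMFTestCase).

    from resource_management.core.resources.system import Execute, File
    from resource_management.core.source import Template

    HIVE_LIB = '/usr/hdp/current/hive-server2/lib'

    def setup_hiveserver2():
      # Refresh the MySQL JDBC driver, replacing any stale copy in Hive's lib dir.
      Execute(('cp', '--remove-destination',
               '/usr/share/java/mysql-connector-java.jar',
               HIVE_LIB + '/mysql-connector-java.jar'),
              path=['/bin', '/usr/bin/'],
              sudo=True)
      File(HIVE_LIB + '/mysql-connector-java.jar', mode=0644)
      # The start script and metrics config are written once the driver is in place.
      File('/tmp/start_hiveserver2_script',
           content=Template('startHiveserver2.sh.j2'),
           mode=0755)
      File('/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
           owner='hive',
           group='hadoop',
           content=Template('hadoop-metrics2-hiveserver2.properties.j2'))
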
http://git-wip-us.apache.org/repos/asf/ambari/blob/cfc16129/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index b398ac8..5ee09f6 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -238,33 +238,10 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
-    self.assertResourceCalled('Execute', ('cp',
-                                          '--remove-destination',
-                                          '/usr/share/java/mysql-connector-java.jar',
-                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
-                              path = ['/bin', '/usr/bin/'],
-                              sudo = True,
-                              )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
-        mode = 0644,
-    )
     self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
-                              )
-    self.assertResourceCalled('File', '/tmp/start_metastore_script',
-                              content = StaticFile('startMetastore.sh'),
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord aaa -verbose',
-        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord aaa -verbose'",
-        user = 'hive',
-    )
     self.assertResourceCalled('Directory', '/var/run/hive',
                               owner = 'hive',
                               group = 'hadoop',
@@ -286,6 +263,29 @@ class TestHiveMetastore(RMFTestCase):
                               create_parents = True,
                               cd_access = 'a',
                               )
+    self.assertResourceCalled('Execute', ('cp',
+     '--remove-destination',
+     '/usr/share/java/mysql-connector-java.jar',
+     '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+        path = ['/bin', '/usr/bin/'],
+        sudo = True,
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
+                              )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord aaa -verbose',
+        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord aaa -verbose'",
+        user = 'hive',
+    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hive',
@@ -339,35 +339,10 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
-    self.assertResourceCalled('Execute', ('cp',
-                                          '--remove-destination',
-                                          '/usr/share/java/mysql-connector-java.jar',
-                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
-                              path = ['/bin', '/usr/bin/'],
-                              sudo = True,
-                              )
-    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
-        mode = 0644,
-    )
     self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
         mode = 0644,
     )
-
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
-                              )
-
-    self.assertResourceCalled('File', '/tmp/start_metastore_script',
-                              content = StaticFile('startMetastore.sh'),
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord asd -verbose',
-        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord asd -verbose'",
-        user = 'hive',
-    )
     self.assertResourceCalled('Directory', '/var/run/hive',
                               owner = 'hive',
                               group = 'hadoop',
@@ -389,6 +364,30 @@ class TestHiveMetastore(RMFTestCase):
                               create_parents = True,
                               cd_access = 'a',
                               )
+    self.assertResourceCalled('Execute', ('cp',
+     '--remove-destination',
+     '/usr/share/java/mysql-connector-java.jar',
+     '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+        path = ['/bin', '/usr/bin/'],
+        sudo = True,
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hivemetastore.properties.j2')
+                              )
+
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord asd -verbose',
+        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord asd -verbose'",
+        user = 'hive',
+    )
 
   @patch("resource_management.libraries.functions.security_commons.build_expectations")
   @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
@@ -683,7 +682,32 @@ class TestHiveMetastore(RMFTestCase):
 
     self.assertResourceCalled('File', '/usr/hdp/2.3.0.0-1234/hive/lib/mysql-connector-java.jar',
         mode = 0644)
-
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
+        content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+        content = StaticFile('startMetastore.sh'),
+        mode = 0755,
+    )
+    self.assertResourceCalled('Execute', ('rm', '-f', '/usr/hdp/current/hive-server2/lib/ojdbc6.jar'),
+        path = ['/bin', '/usr/bin/'],
+        sudo = True,
+    )
+    self.assertResourceCalled('File', '/tmp/mysql-connector-java.jar',
+        content = DownloadSource('http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar'),
+    )
+    self.assertResourceCalled('Execute', ('cp',
+     '--remove-destination',
+     u'/tmp/mysql-connector-java.jar',
+     u'/usr/hdp/2.3.0.0-1234/hive/lib/mysql-connector-java.jar'),
+        path = ['/bin', '/usr/bin/'],
+        sudo = True,
+    )
+    self.assertResourceCalled('File', '/usr/hdp/2.3.0.0-1234/hive/lib/mysql-connector-java.jar',
+        mode = 0644,
+    )
     self.assertResourceCalled('Execute', ('cp',
      '/usr/hdp/2.2.7.0-1234/hive/lib/mysql-connector-java.jar',
      '/usr/hdp/2.3.0.0-1234/hive/lib'),
@@ -749,6 +773,25 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('File', '/usr/hdp/2.3.2.0-2950/hive/lib/mysql-connector-java.jar',
         mode = 0644,
     )
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-metastore/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
+        content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+        content = StaticFile('startMetastore.sh'),
+        mode = 0755,
+    )
+    self.assertResourceCalled('Execute', ('cp',
+     '--remove-destination',
+     '/usr/share/java/mysql-connector-java.jar',
+     u'/usr/hdp/2.3.2.0-2950/hive/lib/mysql-connector-java.jar'),
+        path = ['/bin', '/usr/bin/'],
+        sudo = True,
+    )
+    self.assertResourceCalled('File', '/usr/hdp/2.3.2.0-2950/hive/lib/mysql-connector-java.jar',
+        mode = 0644,
+    )
     self.assertResourceCalled('Execute', ('cp',
      '/usr/hdp/2.3.0.0-2557/hive/lib/mysql-connector-java.jar',
      '/usr/hdp/2.3.2.0-2950/hive/lib'),
@@ -849,7 +892,34 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('File', '/usr/hdp/2.3.0.0-1234/hive/lib/sqla-client-jdbc.tar.gz',
                               mode = 0644,
                               )
-
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
+        content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+        content = StaticFile('startMetastore.sh'),
+        mode = 0755,
+    )
+    self.assertResourceCalled('Execute', ('rm', '-f', '/usr/hdp/current/hive-server2/lib/ojdbc6.jar'),
+        path = ['/bin', '/usr/bin/'],
+        sudo = True,
+    )
+    self.assertResourceCalled('File', '/tmp/sqla-client-jdbc.tar.gz',
+        content = DownloadSource('http://c6401.ambari.apache.org:8080/resources//sqla-client-jdbc.tar.gz'),
+    )
+    self.assertResourceCalled('Execute', ('tar', '-xvf', u'/tmp/sqla-client-jdbc.tar.gz', '-C', '/tmp'),
+        sudo = True,
+    )
+    self.assertResourceCalled('Execute', 'yes | ambari-sudo.sh cp /tmp/sqla-client-jdbc/java/* /usr/hdp/current/hive-server2/lib',)
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-server2/lib/native/lib64',
+        create_parents = True,
+    )
+    self.assertResourceCalled('Execute', 'yes | ambari-sudo.sh cp /tmp/sqla-client-jdbc/native/lib64/* /usr/hdp/current/hive-server2/lib/native/lib64',)
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh chown -R hive:hadoop /usr/hdp/current/hive-server2/lib/*',)
+    self.assertResourceCalled('File', '/usr/hdp/2.3.0.0-1234/hive/lib/sqla-client-jdbc.tar.gz',
+        mode = 0644,
+    )
     self.assertResourceCalled('Execute',
                               ('yes | ambari-sudo.sh cp /usr/hdp/current/hive-server2/lib/*.jar /usr/hdp/2.3.0.0-1234/hive/lib'))
 
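In both the default and secured metastore scenarios, schema initialization now runs after the run/log/lib directories are created, and it remains guarded by a schematool -info probe so that -initSchema only executes when the schema does not exist yet. A standalone sketch of that guard, copied from the test expectations above (the -passWord values are test fixtures, and [RMF_EXPORT_PLACEHOLDER] is the literal marker carried by the not_if command in these assertions):

    from resource_management.core.resources.system import Execute

    SCHEMATOOL = '/usr/hdp/current/hive-server2/bin/schematool'

    # Initialize the metastore schema only when the info probe fails, i.e. the
    # schema is absent; the probe runs as the hive user through ambari-sudo.sh.
    Execute('export HIVE_CONF_DIR=/etc/hive/conf.server ; '
            + SCHEMATOOL + ' -initSchema -dbType mysql -userName hive -passWord aaa -verbose',
        not_if="ambari-sudo.sh su hive -l -s /bin/bash -c "
               "'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; "
               + SCHEMATOOL + " -info -dbType mysql -userName hive -passWord aaa -verbose'",
        user='hive',
    )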

