ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From fba...@apache.org
Subject [2/2] ambari git commit: AMBARI-11348 [WinTP2] Add HDPWIN 2.3 stack definition
Date Tue, 26 May 2015 21:58:25 GMT
AMBARI-11348 [WinTP2] Add HDPWIN 2.3 stack definition

+Added HDPWIN 2.3 stack definition
+Fixed startup issues for HBase, Hive, Oozie, Storm and YARN
+Fixed build break in ambari-agent


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/78430140
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/78430140
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/78430140

Branch: refs/heads/trunk
Commit: 78430140ff00294f19b4b245bbf941a9fe473d99
Parents: 3db2574
Author: Florian Barca <fbarca@hortonworks.com>
Authored: Tue May 26 14:58:05 2015 -0700
Committer: Florian Barca <fbarca@hortonworks.com>
Committed: Tue May 26 14:58:05 2015 -0700

----------------------------------------------------------------------
 ambari-agent/pom.xml                            |   2 +
 .../src/main/python/ambari_commons/os_utils.py  |   5 +
 .../core/providers/windows/system.py            |   2 +-
 .../libraries/functions/install_hdp_msi.py      |  29 ++-
 .../libraries/functions/reload_windows_env.py   |   4 +-
 .../libraries/script/script.py                  |  10 +-
 .../0.1.0/package/scripts/params_windows.py     |  29 ++-
 .../0.5.0.2.1/package/scripts/service_check.py  |   2 +-
 .../0.96.0.2.0/package/scripts/service_check.py |   2 +-
 .../package/scripts/webhcat_service.py          |   2 +-
 .../OOZIE/4.0.0.2.0/package/scripts/params.py   |   3 +
 .../4.0.0.2.0/package/scripts/params_linux.py   |   2 -
 .../0.12.0.2.0/package/scripts/service_check.py |  18 +-
 .../SLIDER/0.60.0.2.2/package/scripts/params.py |   5 -
 .../0.60.0.2.2/package/scripts/params_linux.py  |   7 +-
 .../package/scripts/params_windows.py           |   1 +
 .../0.9.1.2.1/package/scripts/params_windows.py |  10 +
 .../0.9.1.2.1/package/scripts/service_check.py  |   4 +-
 .../0.9.1.2.1/package/scripts/yaml_utils.py     |   4 +-
 .../2.1.0.2.0/package/scripts/status_params.py  |   2 +-
 .../resources/stacks/HDPWIN/2.1/metainfo.xml    |   2 +-
 .../AMBARI_METRICS/configuration/ams-env.xml    |  20 ++
 .../stacks/HDPWIN/2.1/services/stack_advisor.py |  21 --
 .../resources/stacks/HDPWIN/2.2/metainfo.xml    |   2 +-
 .../HIVE/configuration/webhcat-site.xml         |   2 +-
 .../stacks/HDPWIN/2.2/services/stack_advisor.py |  47 +++-
 .../resources/stacks/HDPWIN/2.3/metainfo.xml    |  23 ++
 .../stacks/HDPWIN/2.3/repos/repoinfo.xml        |  26 ++
 .../configuration/falcon-startup.properties.xml |  29 +++
 .../HDPWIN/2.3/services/FALCON/metainfo.xml     |  26 ++
 .../HDPWIN/2.3/services/FLUME/metainfo.xml      |  26 ++
 .../services/HBASE/configuration/hbase-site.xml |  43 ++++
 .../HDPWIN/2.3/services/HBASE/metainfo.xml      |  26 ++
 .../services/HDFS/configuration/hdfs-site.xml   |  41 +++
 .../HDPWIN/2.3/services/HDFS/metainfo.xml       |  26 ++
 .../services/HIVE/configuration/hive-site.xml   | 247 +++++++++++++++++++
 .../HIVE/configuration/webhcat-site.xml         |  32 +++
 .../HDPWIN/2.3/services/HIVE/metainfo.xml       |  26 ++
 .../HDPWIN/2.3/services/KNOX/metainfo.xml       |  26 ++
 .../services/OOZIE/configuration/oozie-env.xml  | 129 ++++++++++
 .../services/OOZIE/configuration/oozie-site.xml | 114 +++++++++
 .../HDPWIN/2.3/services/OOZIE/metainfo.xml      |  26 ++
 .../stacks/HDPWIN/2.3/services/PIG/metainfo.xml |  26 ++
 .../HDPWIN/2.3/services/SLIDER/metainfo.xml     |  26 ++
 .../HDPWIN/2.3/services/SQOOP/metainfo.xml      |  26 ++
 .../services/STORM/configuration/storm-site.xml |  43 ++++
 .../HDPWIN/2.3/services/STORM/metainfo.xml      |  27 ++
 .../2.3/services/TEZ/configuration/tez-site.xml |  98 ++++++++
 .../stacks/HDPWIN/2.3/services/TEZ/metainfo.xml |  26 ++
 .../HDPWIN/2.3/services/YARN/metainfo.xml       |  34 +++
 .../HDPWIN/2.3/services/ZOOKEEPER/metainfo.xml  |  26 ++
 .../stacks/HDPWIN/2.3/services/stack_advisor.py |  43 ++++
 52 files changed, 1396 insertions(+), 82 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index 1cf3b71..9a58537 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -732,6 +732,8 @@
         <artifactId>apache-rat-plugin</artifactId>
         <configuration>
           <excludes>
+            <exclude>conf/unix/logging.conf.sample</exclude>
+            <exclude>conf/windows/logging.conf.sample</exclude>
             <exclude>src/test/python/tmp_hostcheck.result</exclude>
             <exclude>src/examples/*</exclude>
             <exclude>src/test/python/dummy*.txt</exclude>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-common/src/main/python/ambari_commons/os_utils.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/os_utils.py b/ambari-common/src/main/python/ambari_commons/os_utils.py
index 9126a5b..5ee34a8 100644
--- a/ambari-common/src/main/python/ambari_commons/os_utils.py
+++ b/ambari-common/src/main/python/ambari_commons/os_utils.py
@@ -51,6 +51,11 @@ def quote_path(filepath):
     filepath_ret = filepath
   return filepath_ret
 
+def trim_uri(file_uri):
+  if file_uri.startswith("file:///"):
+    return file_uri[8:].replace("/", os.sep)
+  return file_uri
+
 def _search_file(filename, search_path, pathsep):
   for path in string.split(search_path, pathsep):
     candidate = os.path.join(path, filename)

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-common/src/main/python/resource_management/core/providers/windows/system.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/providers/windows/system.py b/ambari-common/src/main/python/resource_management/core/providers/windows/system.py
index a410e9c..70cf9bf 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/windows/system.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/windows/system.py
@@ -364,7 +364,7 @@ class DirectoryProvider(Provider):
   @staticmethod
   def _trim_uri(file_uri):
     if file_uri.startswith("file:///"):
-      return file_uri[8:]
+      return file_uri[8:].replace("/", os.sep)
     return file_uri
     # class res: pass
     # resource = res()

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py b/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
index 12ccc51..0e06eb0 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
@@ -32,6 +32,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 import socket
 import os
 import glob
+import urlparse
 
 
 __all__ = ['install_windows_msi']
@@ -48,7 +49,7 @@ HDFS_NAMENODE_DATA_DIR={hdp_data_dir}\\hdpdatann
 HDFS_DATANODE_DATA_DIR={hdp_data_dir}\\hdpdatadn
 
 IS_SLIDER=yes
-IS_PHOENIX=yes
+IS_PHOENIX=no
 """
 cluster_properties = """#Log directory
 HDP_LOG_DIR={hdp_log_dir}
@@ -94,7 +95,7 @@ OOZIE_DB_PASSWORD=oozie
 
 INSTALL_MSI_CMD = 'cmd /C start /wait msiexec /qn /i  {hdp_msi_path} /lv {hdp_log_path} MSIUSEREALADMINDETECTION=1 ' \
                   'HDP_LAYOUT={hdp_layout_path} DESTROY_DATA=yes HDP_USER={hadoop_user} HDP_USER_PASSWORD={hadoop_password_arg} HDP=yes ' \
-                  'KNOX=yes KNOX_MASTER_SECRET="AmbariHDP2Windows" FALCON=yes STORM=yes HBase=yes STORM=yes FLUME=yes SLIDER=yes PHOENIX=yes RANGER=no'
+                  'KNOX=yes KNOX_MASTER_SECRET="AmbariHDP2Windows" FALCON=yes STORM=yes HBase=yes STORM=yes FLUME=yes SLIDER=yes PHOENIX=no RANGER=no'
 CREATE_SERVICE_SCRIPT = os.path.abspath("sbin\createservice.ps1")
 CREATE_SERVICE_CMD = 'cmd /C powershell -File "{script}" -username {username} -password "{password}" -servicename ' \
                      '{servicename} -hdpresourcesdir "{resourcedir}" -servicecmdpath "{servicecmd}"'
@@ -155,7 +156,7 @@ def _write_marker():
     open(os.path.join(_working_dir, INSTALL_MARKER_FAILED), "w").close()
 
 
-def install_windows_msi(msi_url, save_dir, save_file, hadoop_user, hadoop_password, stack_version):
+def install_windows_msi(url_base, save_dir, save_files, hadoop_user, hadoop_password, stack_version):
   global _working_dir
   _working_dir = save_dir
   save_dir = os.path.abspath(save_dir)
@@ -177,18 +178,26 @@ def install_windows_msi(msi_url, save_dir, save_file, hadoop_user, hadoop_passwo
     if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
       hdp_22_specific_props = hdp_22.format(hdp_data_dir=hdp_data_dir)
 
-    # install msi
-    try:
-      download_file(msi_url, os.path.join(msi_save_dir, save_file))
-    except:
-      raise Fail("Failed to download {url}".format(url=msi_url))
+    # MSIs cannot be larger than 2GB. HDPWIN 2.3 needed split in order to accommodate this limitation
+    hdp_msi_file = ''
+    for save_file in save_files:
+      if save_file.lower().endswith(".msi"):
+        hdp_msi_file = save_file
+      file_url = urlparse.urljoin(url_base, save_file)
+      try:
+        download_file(file_url, os.path.join(msi_save_dir, save_file))
+      except:
+        raise Fail("Failed to download {url}".format(url=file_url))
+
     File(os.path.join(msi_save_dir, "properties.txt"), content=cluster_properties.format(hdp_log_dir=hdp_log_dir,
                                                                                          hdp_data_dir=hdp_data_dir,
                                                                                          local_host=local_host,
                                                                                          db_flavor=db_flavor,
                                                                                          hdp_22_specific_props=hdp_22_specific_props))
-    hdp_msi_path = os_utils.quote_path(os.path.join(save_dir, "hdp.msi"))
-    hdp_log_path = os_utils.quote_path(os.path.join(save_dir, "hdp.log"))
+
+    # install msi
+    hdp_msi_path = os_utils.quote_path(os.path.join(save_dir, hdp_msi_file))
+    hdp_log_path = os_utils.quote_path(os.path.join(save_dir, hdp_msi_file[:-3] + "log"))
     hdp_layout_path = os_utils.quote_path(os.path.join(save_dir, "properties.txt"))
     hadoop_password_arg = os_utils.quote_path(hadoop_password)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py b/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py
index eaa2582..f69b6a3 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/reload_windows_env.py
@@ -28,8 +28,8 @@ default_whitelist = ["FALCON_CONF_DIR", "FALCON_DATA_DIR", "FALCON_HOME", "FALCO
                      "HADOOP_SETUP_TOOLS", "HADOOP_YARN_HOME", "HBASE_CONF_DIR", "HBASE_HOME", "HCAT_HOME",
                      "HDFS_AUDIT_LOGGER", "HDFS_DATA_DIR", "HIVE_CONF_DIR", "HIVE_HOME", "HIVE_LIB_DIR", "HIVE_LOG_DIR",
                      "HIVE_OPTS", "KNOX_CONF_DIR", "KNOX_HOME", "KNOX_LOG_DIR", "MAHOUT_HOME", "OOZIE_DATA",
-                     "OOZIE_HOME", "OOZIE_LOG", "OOZIE_ROOT", "PIG_HOME", "SQOOP_HOME", "STORM_CONF_DIR", "STORM_HOME",
-                     "STORM_LOG_DIR", "TEZ_HOME", "WEBHCAT_CONF_DIR", "YARN_LOG_DIR", "ZOOKEEPER_CONF_DIR",
+                     "OOZIE_HOME", "OOZIE_LOG", "OOZIE_ROOT", "PIG_HOME", "SLIDER_HOME", "SQOOP_HOME", "STORM_CONF_DIR",
+                     "STORM_HOME", "STORM_LOG_DIR", "TEZ_HOME", "WEBHCAT_CONF_DIR", "YARN_LOG_DIR", "ZOOKEEPER_CONF_DIR",
                      "ZOOKEEPER_HOME", "ZOOKEEPER_LIB_DIR", "ZOO_LOG_DIR", "COLLECTOR_CONF_DIR", "COLLECTOR_HOME",
                      "MONITOR_CONF_DIR", "MONITOR_HOME", "SINK_HOME"]
 def reload_windows_env(keys_white_list=default_whitelist):

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 2b1c763..3ee1f87 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -26,7 +26,8 @@ import sys
 import json
 import logging
 import platform
-from ambari_commons.os_check import OSCheck
+from ambari_commons import OSCheck, OSConst
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from resource_management.libraries.resources import XmlConfig
 from resource_management.libraries.resources import PropertiesFile
 from resource_management.core.resources import File, Directory
@@ -280,7 +281,7 @@ class Script(object):
     :return: a normalized HDP stack version or None
     """
     stack_name = Script.get_stack_name()
-    if stack_name is None or stack_name.upper() != "HDP":
+    if stack_name is None or stack_name.upper() not in ["HDP", "HDPWIN"]:
       return None
 
     config = Script.get_config()
@@ -366,8 +367,9 @@ class Script(object):
     if OSCheck.is_windows_family():
       #TODO hacky install of windows msi, remove it or move to old(2.1) stack definition when component based install will be implemented
       hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
-      install_windows_msi(os.path.join(config['hostLevelParams']['jdk_location'], "hdp.msi"),
-                          config["hostLevelParams"]["agentCacheDir"], "hdp.msi", hadoop_user, self.get_password(hadoop_user),
+      install_windows_msi(config['hostLevelParams']['jdk_location'],
+                          config["hostLevelParams"]["agentCacheDir"], ["hdp-2.3.0.0.winpkg.msi", "hdp-2.3.0.0.cab", "hdp-2.3.0.0-01.cab"],
+                          hadoop_user, self.get_password(hadoop_user),
                           str(config['hostLevelParams']['stack_version']))
       reload_windows_env()
     self.set_version()

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_windows.py
index 139e2bb..cd3ed58 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_windows.py
@@ -18,22 +18,37 @@ limitations under the License.
 
 """
 
+import os
+
+from resource_management.libraries.script.script import Script
 
-from resource_management import *
 
 config = Script.get_config()
 
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
 ams_user = hadoop_user
-ams_collector_conf_dir = os.environ["COLLECTOR_CONF_DIR"]
-ams_collector_home_dir = os.environ["COLLECTOR_HOME"]
-ams_monitor_conf_dir = os.environ["MONITOR_CONF_DIR"]
-ams_monitor_home_dir = os.environ["MONITOR_HOME"]
+
+try:
+  ams_collector_conf_dir = os.environ["COLLECTOR_CONF_DIR"]
+  ams_collector_home_dir = os.environ["COLLECTOR_HOME"]
+  hbase_cmd = os.path.join(os.environ["COLLECTOR_HOME"], "hbase", "bin", "hbase.cmd")
+  hbase_conf_dir = os.path.join(os.environ["COLLECTOR_HOME"], "hbase", "conf")
+except:
+  ams_collector_conf_dir = None
+  ams_collector_home_dir = None
+  hbase_cmd = None
+  hbase_conf_dir = None
+
+try:
+  ams_monitor_conf_dir = os.environ["MONITOR_CONF_DIR"]
+  ams_monitor_home_dir = os.environ["MONITOR_HOME"]
+except:
+  ams_monitor_conf_dir = None
+  ams_monitor_home_dir = None
+
 hadoop_native_lib = os.path.join(os.environ["HADOOP_HOME"], "bin")
 hadoop_bin_dir = os.path.join(os.environ["HADOOP_HOME"], "bin")
-hbase_cmd = os.path.join(os.environ["COLLECTOR_HOME"], "hbase", "bin", "hbase.cmd")
 hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf")
-hbase_conf_dir = os.path.join(os.environ["COLLECTOR_HOME"], "hbase", "conf")
 
 ams_collector_win_service_name = "AmbariMetricsCollector"
 ams_monitor_win_service_name = "AmbariMetricsHostMonitoring"

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/service_check.py
index 678cb02..1e48d6a 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/service_check.py
@@ -47,7 +47,7 @@ class FalconServiceCheckWindows(FalconServiceCheck):
     env.set_params(params)
     smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
     service = "FALCON"
-    Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True)
+    Execute(format("cmd /C {smoke_cmd} {service}"), user=params.falcon_user, logoutput=True)
 
 if __name__ == "__main__":
   FalconServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
index d679314..97cdd32 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
@@ -36,7 +36,7 @@ class HbaseServiceCheckWindows(HbaseServiceCheck):
     env.set_params(params)
     smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
     service = "HBASE"
-    Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True)
+    Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hbase_user, logoutput=True)
 
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
index 3e6ec9e..01d0a9f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
@@ -23,7 +23,7 @@ from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
-def webhcat_service(action='start'):
+def webhcat_service(action='start', rolling_restart=False):
   import params
   if action == 'start' or action == 'stop':
     Service(params.webhcat_server_win_service_name, action=action)

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
index 36dd07f..06d4e22 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
@@ -25,4 +25,7 @@ if OSCheck.is_windows_family():
 else:
   from params_linux import *
 
+java_home = config['hostLevelParams']['java_home']
+java_version = int(config['hostLevelParams']['java_version'])
+
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index aef90ee..6963410 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -130,8 +130,6 @@ oozie_env_sh_template = config['configurations']['oozie-env']['content']
 
 oracle_driver_jar_name = "ojdbc6.jar"
 
-java_home = config['hostLevelParams']['java_home']
-java_version = int(config['hostLevelParams']['java_version'])
 oozie_metastore_user_name = config['configurations']['oozie-site']['oozie.service.JPAService.jdbc.username']
 oozie_metastore_user_passwd = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.password","")
 oozie_jdbc_connection_url = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.url", "")

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index d52b787..2f8da76 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -18,19 +18,19 @@ limitations under the License.
 Ambari Agent
 
 """
+
 import os
 
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
-from resource_management.libraries.functions.version import compare_versions
+from resource_management.core.resources.system import Execute, File
+from resource_management.core.source import InlineTemplate, StaticFile
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
-from resource_management.libraries.functions import format
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.script.script import Script
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
-from resource_management.core.resources.system import File, Execute
-from resource_management.core.source import StaticFile
-
 
 class PigServiceCheck(Script):
   pass
@@ -126,7 +126,7 @@ class PigServiceCheckWindows(PigServiceCheck):
     env.set_params(params)
     smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
     service = "PIG"
-    Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True, user=params.hdfs_user)
+    Execute(format("cmd /C {smoke_cmd} {service}", smoke_cmd=smoke_cmd, service=service), logoutput=True, user=params.pig_user, timeout=300)
 
 if __name__ == "__main__":
   PigServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
index c127115..61033c0 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
@@ -44,12 +44,7 @@ stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 #hadoop params
-slider_bin_dir = "/usr/lib/slider/bin"
-if Script.is_hdp_stack_greater_or_equal("2.2"):
-  slider_bin_dir = '/usr/hdp/current/slider-client/bin'
-
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-slider_conf_dir = "/usr/hdp/current/slider-client/conf"
 
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 98a408c..5d60e21 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -21,6 +21,11 @@ from resource_management.libraries.script.script import Script
 # server configurations
 config = Script.get_config()
 
+#hadoop params
+slider_bin_dir = "/usr/lib/slider/bin"
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+    slider_bin_dir = '/usr/hdp/current/slider-client/bin'
+
 slider_conf_dir = "/usr/hdp/current/slider-client/conf"
 storm_slider_conf_dir = '/usr/hdp/current/storm-slider-client/conf'
-slider_home_dir = '/usr/hdp/current/slider-client'
\ No newline at end of file
+slider_home_dir = '/usr/hdp/current/slider-client'

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
index 932c5a4..52106a7 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
@@ -26,6 +26,7 @@ config = Script.get_config()
 
 hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
 slider_home = os.environ['SLIDER_HOME']
+slider_bin_dir = os.path.join(slider_home, 'bin')
 slider_conf_dir = os.path.join(slider_home, 'conf')
 storm_slider_conf_dir = os.path.join(os.environ['STORM_HOME'], 'conf')
 slider_home_dir = slider_home

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py
index d98246c..2c7f041 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py
@@ -24,11 +24,21 @@ from status_params import *
 # server configurations
 config = Script.get_config()
 
+stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")
+
 hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
 conf_dir = os.environ["STORM_CONF_DIR"]
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
 storm_user = hadoop_user
 
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+if stack_is_hdp23_or_further:
+  if security_enabled:
+    storm_thrift_transport = config['configurations']['storm-site']['_storm.thrift.secure.transport']
+  else:
+    storm_thrift_transport = config['configurations']['storm-site']['_storm.thrift.nonsecure.transport']
+
 service_map = {
   "nimbus" : nimbus_win_service_name,
   "supervisor" : supervisor_win_service_name,

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
index 4484501..d78484e 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
@@ -18,6 +18,8 @@ limitations under the License.
 
 """
 
+import os
+
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import get_unique_id_and_date
 from resource_management.core.resources import File
@@ -38,7 +40,7 @@ class ServiceCheckWindows(ServiceCheck):
     env.set_params(params)
     smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
     service = "STORM"
-    Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True)
+    Execute(format("cmd /C {smoke_cmd} {service}", smoke_cmd=smoke_cmd, service=service), user=params.storm_user, logoutput=True)
 
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/yaml_utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/yaml_utils.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/yaml_utils.py
index 6e99e8b..7b71553 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/yaml_utils.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/yaml_utils.py
@@ -71,8 +71,8 @@ def yaml_config_template(configurations):
 
 def yaml_config(filename, configurations = None, conf_dir = None, owner = None, group = None):
   import params
-  config_content = InlineTemplate('''{% for key, value in configurations_dict.items() %}{{ key }}: {{ escape_yaml_property(value) }}
-{% endfor %}''', configurations_dict=configurations, extra_imports=[escape_yaml_property])
+  config_content = InlineTemplate('''{% for key, value in configurations_dict|dictsort %}{{ key }}: {{ escape_yaml_property(resource_management.core.source.InlineTemplate(value).get_content()) }}
+{% endfor %}''', configurations_dict=configurations, extra_imports=[escape_yaml_property, resource_management, resource_management.core, resource_management.core.source])
 
   File (os.path.join(params.conf_dir, filename),
         content = config_content,

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
index b4c5b6e..e17a945 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
@@ -29,7 +29,7 @@ tmp_dir = Script.get_tmp_dir()
 if OSCheck.is_windows_family():
   resourcemanager_win_service_name = 'resourcemanager'
   nodemanager_win_service_name = 'nodemanager'
-  historyserver_win_service_name = 'jobhistoryserver'
+  historyserver_win_service_name = 'historyserver'
   timelineserver_win_service_name = 'timelineserver'
 
   service_map = {

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.1/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/metainfo.xml
index ca45822..45a63e5 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/metainfo.xml
@@ -17,6 +17,6 @@
 -->
 <metainfo>
     <versions>
-	  <active>true</active>
+	  <active>false</active>
     </versions>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml
index 3b421b9..ff779d1 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml
@@ -23,6 +23,26 @@
     <name>ams_user</name>
     <deleted>true</deleted>
   </property>
+  <property>
+    <name>metrics_collector_log_dir</name>
+    <value>C:\var\log\ambari-metrics-collector</value>
+    <description>Collector log directory.</description>
+  </property>
+  <property>
+    <name>metrics_collector_pid_dir</name>
+    <value>C:\var\run\ambari-metrics-collector</value>
+    <description>Collector pid directory.</description>
+  </property>
+  <property>
+    <name>metrics_monitor_pid_dir</name>
+    <value>C:\var\run\ambari-metrics-monitor</value>
+    <description>Monitor pid directory.</description>
+  </property>
+  <property>
+    <name>metrics_monitor_log_dir</name>
+    <value>C:\var\log\ambari-metrics-monitor</value>
+    <description>Monitor log directory.</description>
+  </property>
 
   <property>
     <name>content</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/stack_advisor.py
index c212563..308ff96 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/stack_advisor.py
@@ -767,27 +767,6 @@ def formatXmxSizeToBytes(value):
     }[1]
   return to_number(value) * m
 
-def getPort(address):
-  """
-  Extracts port from the address like 0.0.0.0:1019
-  """
-  if address is None:
-    return None
-  m = re.search(r'(?:http(?:s)?://)?([\w\d.]*):(\d{1,5})', address)
-  if m is not None:
-    return int(m.group(2))
-  else:
-    return None
-
-def isSecurePort(port):
-  """
-  Returns True if port is root-owned at *nix systems
-  """
-  if port is not None:
-    return port < 1024
-  else:
-    return False
-
 def getMountPointForDir(dir, mountPoints):
   """
   :param dir: Directory to check, even if it doesn't exist.

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/metainfo.xml
index 36f01e0..c3a226c 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/metainfo.xml
@@ -17,7 +17,7 @@
 -->
 <metainfo>
   <versions>
-    <active>true</active>
+    <active>false</active>
   </versions>
   <extends>2.1</extends>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/webhcat-site.xml
index 0454c1c..dee0a97 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/webhcat-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/webhcat-site.xml
@@ -31,7 +31,7 @@ limitations under the License.
 
   <property>
     <name>templeton.libjars</name>
-    <value>/usr/hdp/current/zookeeper-client/zookeeper.jar</value>
+    <value>file:///c:/hdp/hive/lib/zookeeper.jar</value>
    <description>Jars to add to the classpath.</description>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
index 3f246fb..9ab1470 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
@@ -18,9 +18,36 @@ limitations under the License.
 """
 
 import math
-from math import floor
+import re
 from urlparse import urlparse
 
+def getSiteProperties(configurations, siteName):
+  siteConfig = configurations.get(siteName)
+  if siteConfig is None:
+    return None
+  return siteConfig.get("properties")
+
+def getPort(address):
+  """
+  Extracts port from the address like 0.0.0.0:1019
+  """
+  if address is None:
+    return None
+  m = re.search(r'(?:http(?:s)?://)?([\w\d.]*):(\d{1,5})', address)
+  if m is not None:
+    return int(m.group(2))
+  else:
+    return None
+
+def isSecurePort(port):
+  """
+  Returns True if port is root-owned at *nix systems
+  """
+  if port is not None:
+    return port < 1024
+  else:
+    return False
+
 class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor):
 
   def getServiceConfigurationRecommenderDict(self):
@@ -160,7 +187,7 @@ class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor):
       cpuPercentageLimit = 0.8
       if "yarn.nodemanager.resource.percentage-physical-cpu-limit" in configurations["yarn-site"]["properties"]:
         cpuPercentageLimit = float(configurations["yarn-site"]["properties"]["yarn.nodemanager.resource.percentage-physical-cpu-limit"])
-      cpuLimit = max(1, int(floor(nodeManagerHost["Hosts"]["cpu_count"] * cpuPercentageLimit)))
+      cpuLimit = max(1, int(math.floor(nodeManagerHost["Hosts"]["cpu_count"] * cpuPercentageLimit)))
       putYarnProperty('yarn.nodemanager.resource.cpu-vcores', str(cpuLimit))
       putYarnProperty('yarn.scheduler.maximum-allocation-vcores', configurations["yarn-site"]["properties"]["yarn.nodemanager.resource.cpu-vcores"])
       putYarnPropertyAttribute('yarn.nodemanager.resource.memory-mb', 'maximum', int(nodeManagerHost["Hosts"]["total_mem"] / 1024)) # total_mem in kb
@@ -647,9 +674,9 @@ class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor):
 
     #Adding Ranger Plugin logic here 
     ranger_plugin_properties = getSiteProperties(configurations, "ranger-hdfs-plugin-properties")
-    ranger_plugin_enabled = ranger_plugin_properties['ranger-hdfs-plugin-enabled'] if ranger_plugin_properties else 'No'
+    ranger_plugin_enabled = ranger_plugin_properties['ranger-hdfs-plugin-enabled'] if ranger_plugin_properties else 'no'
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
-    if ("RANGER" in servicesList) and (ranger_plugin_enabled.lower() == 'Yes'.lower()):
+    if ("RANGER" in servicesList) and (ranger_plugin_enabled.lower() == 'yes'):
       if hdfs_site['dfs.permissions.enabled'] != 'true':
         validationItems.append({"config-name": 'dfs.permissions.enabled',
                                     "item": self.getWarnItem(
@@ -747,12 +774,12 @@ class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor):
     validationItems = [] 
     #Adding Ranger Plugin logic here 
     ranger_plugin_properties = getSiteProperties(configurations, "ranger-hive-plugin-properties")
-    ranger_plugin_enabled = ranger_plugin_properties['ranger-hive-plugin-enabled']
+    ranger_plugin_enabled = ranger_plugin_properties['ranger-hive-plugin-enabled'] if ranger_plugin_properties else 'no'
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
     ##Add stack validations only if Ranger is enabled.
     if ("RANGER" in servicesList):
       ##Add stack validations for  Ranger plugin enabled.
-      if (ranger_plugin_enabled.lower() == 'Yes'.lower()):
+      if (ranger_plugin_enabled.lower() == 'yes'):
         prop_name = 'hive.security.authorization.manager'
         prop_val = "com.xasecure.authorization.hive.authorizer.XaSecureHiveAuthorizerFactory"
         if hive_server2[prop_name] != prop_val:
@@ -803,12 +830,12 @@ class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor):
     validationItems = []
     #Adding Ranger Plugin logic here
     ranger_plugin_properties = getSiteProperties(configurations, "ranger-hive-plugin-properties")
-    ranger_plugin_enabled = ranger_plugin_properties['ranger-hive-plugin-enabled']
+    ranger_plugin_enabled = ranger_plugin_properties['ranger-hive-plugin-enabled'] if ranger_plugin_properties else 'no'
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
     ##Add stack validations only if Ranger is enabled.
     if ("RANGER" in servicesList):
       ##Add stack validations for  Ranger plugin enabled.
-      if (ranger_plugin_enabled.lower() == 'Yes'.lower()):
+      if (ranger_plugin_enabled.lower() == 'yes'):
         prop_name = 'hive.security.authorization.enabled'
         prop_val = 'true'
         if hive_site[prop_name] != prop_val:
@@ -861,11 +888,11 @@ class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor):
 
     #Adding Ranger Plugin logic here 
     ranger_plugin_properties = getSiteProperties(configurations, "ranger-hbase-plugin-properties")
-    ranger_plugin_enabled = ranger_plugin_properties['ranger-hbase-plugin-enabled']
+    ranger_plugin_enabled = ranger_plugin_properties['ranger-hbase-plugin-enabled'] if ranger_plugin_properties else 'no'
     prop_name = 'hbase.security.authorization'
     prop_val = "true"
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
-    if ("RANGER" in servicesList) and (ranger_plugin_enabled.lower() == 'Yes'.lower()):
+    if ("RANGER" in servicesList) and (ranger_plugin_enabled.lower() == 'yes'):
       if hbase_site[prop_name] != prop_val:
         validationItems.append({"config-name": prop_name,
                                 "item": self.getWarnItem(

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/metainfo.xml
new file mode 100644
index 0000000..6fc0ae6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <versions>
+    <active>true</active>
+  </versions>
+  <extends>2.2</extends>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/repos/repoinfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/repos/repoinfo.xml
new file mode 100644
index 0000000..54a0bf0
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/repos/repoinfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<reposinfo>
+  <os family="winsrv6">
+    <repo>
+      <baseurl>http://dummy_repo</baseurl>
+      <repoid>HDPWIN-2.3</repoid>
+      <reponame>HDPWIN</reponame>
+    </repo>
+  </os>
+</reposinfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FALCON/configuration/falcon-startup.properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FALCON/configuration/falcon-startup.properties.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FALCON/configuration/falcon-startup.properties.xml
new file mode 100644
index 0000000..409cd5a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FALCON/configuration/falcon-startup.properties.xml
@@ -0,0 +1,29 @@
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false">
+
+  <property>
+    <name>*.shared.libs</name>
+    <value>activemq-core,ant,geronimo-j2ee-management,jms,json-simple,oozie-client,spring-jms,commons-lang3,commons-el</value>
+    <description></description>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FALCON/metainfo.xml
new file mode 100644
index 0000000..f34aede
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FALCON/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>FALCON</name>
+      <version>0.7.0.2.3</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FLUME/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FLUME/metainfo.xml
new file mode 100644
index 0000000..e377396
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/FLUME/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>FLUME</name>
+      <version>1.5.2.2.3</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HBASE/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HBASE/configuration/hbase-site.xml
new file mode 100644
index 0000000..43f872a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HBASE/configuration/hbase-site.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+  <property>
+    <name>hbase.master.port</name>
+    <value>16000</value>
+    <description>The port the HBase Master should bind to.</description>
+  </property>
+  <property>
+    <name>hbase.master.info.port</name>
+    <value>60010</value>
+    <description>The port for the HBase Master web UI.</description>
+  </property>
+  <property>
+    <name>hbase.regionserver.port</name>
+    <value>16020</value>
+    <description>The port the HBase RegionServer binds to.</description>
+  </property>
+  <property>
+    <name>hbase.regionserver.info.port</name>
+    <value>16030</value>
+    <description>The port for the HBase RegionServer web UI.</description>
+  </property> 
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HBASE/metainfo.xml
new file mode 100644
index 0000000..aa169df
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HBASE/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HBASE</name>
+      <version>1.1.0.2.3</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HDFS/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HDFS/configuration/hdfs-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HDFS/configuration/hdfs-site.xml
new file mode 100644
index 0000000..5f2db0c
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HDFS/configuration/hdfs-site.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>nfs.file.dump.dir</name>
+    <value>/tmp/.hdfs-nfs</value>
+    <description>
+      This directory is used to temporarily save out-of-order writes before
+      writing to HDFS. For each file, the out-of-order writes are dumped after
+      they are accumulated to exceed certain threshold (e.g., 1MB) in memory.
+      One needs to make sure the directory has enough space.
+    </description>
+  </property>
+
+  <property>
+    <name>nfs.exports.allowed.hosts</name>
+    <value>* rw</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..5660d02
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HDFS/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HDFS</name>
+      <version>2.7.0.2.3</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/configuration/hive-site.xml
new file mode 100644
index 0000000..b83c706
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/configuration/hive-site.xml
@@ -0,0 +1,247 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>hive.cbo.enable</name>
+    <value>true</value>
+    <description>Flag to control enabling Cost Based Optimizations using Calcite framework.</description>
+  </property>
+
+  <property>
+    <name>hive.exec.reducers.bytes.per.reducer</name>
+    <value>67108864</value>
+    <description>Size per reducer. The default is 256Mb, i.e. if the input size is 1G, it will use 4 reducers.</description>
+  </property>
+
+  <property>
+    <name>hive.exec.dynamic.partition.mode</name>
+    <value>nonstrict</value>
+    <description>
+      In strict mode, the user must specify at least one static partition
+      in case the user accidentally overwrites all partitions.
+      NonStrict allows all partitions of a table to be dynamic.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.exec.orc.default.stripe.size</name>
+    <value>67108864</value>
+    <description>Define the default ORC stripe size</description>
+  </property>
+
+  <property>
+    <name>hive.exec.orc.default.compress</name>
+    <value>ZLIB</value>
+    <description>Define the default compression codec for ORC file</description>
+  </property>
+
+  <property>
+    <name>hive.tez.log.level</name>
+    <value>INFO</value>
+    <description>
+      The log level to use for tasks executing as part of the DAG.
+      Used only if hive.tez.java.opts is used to configure Java options.
+    </description>
+  </property>
+  <property>
+    <name>hive.enforce.bucketing</name>
+    <value>true</value>
+    <description>Whether bucketing is enforced. If true, while inserting into the table, bucketing is enforced.</description>
+  </property>
+
+  <property>
+    <name>hive.optimize.sort.dynamic.partition</name>
+    <value>false</value>
+    <description>
+      When enabled dynamic partitioning column will be globally sorted.
+      This way we can keep only one record writer open for each partition value
+      in the reducer thereby reducing the memory pressure on reducers.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.stats.fetch.partition.stats</name>
+    <value>true</value>
+    <description>
+      Annotation of operator tree with statistics information requires partition level basic
+      statistics like number of rows, data size and file size. Partition statistics are fetched from
+      metastore. Fetching partition statistics for each needed partition can be expensive when the
+      number of partitions is high. This flag can be used to disable fetching of partition statistics
+      from metastore. When this flag is disabled, Hive will make calls to filesystem to get file sizes
+      and will estimate the number of rows from row schema.
+    </description>
+  </property>
+  <property>
+    <name>hive.stats.fetch.column.stats</name>
+    <value>false</value>
+    <description>
+      Annotation of operator tree with statistics information requires column statistics.
+      Column statistics are fetched from metastore. Fetching column statistics for each needed column
+      can be expensive when the number of columns is high. This flag can be used to disable fetching
+      of column statistics from metastore.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.txn.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager</value>
+    <description/>
+  </property>
+
+  <property>
+    <name>hive.support.concurrency</name>
+    <value>false</value>
+    <description>
+      Support concurrency and use locks, needed for Transactions. Requires Zookeeper.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.security.authorization.enabled</name>
+    <value>false</value>
+    <description>enable or disable the Hive client authorization</description>
+  </property>
+
+  <property>
+    <name>hive.security.metastore.authorization.manager</name>
+    <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider,org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly</value>
+    <description>
+      authorization manager class name to be used in the metastore for authorization.
+      The user defined authorization class should implement interface
+      org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.server2.authentication</name>
+    <description>Authentication mode, default NONE. Options are NONE, NOSASL, KERBEROS, LDAP, PAM and CUSTOM</description>
+    <value>NONE</value>
+  </property>
+
+  <property>
+    <name>hive.server2.enable.doAs</name>
+    <value>true</value>
+    <description>
+      Setting this property to true will have HiveServer2 execute
+      Hive operations as the user making the calls to it.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.server2.use.SSL</name>
+    <value>false</value>
+    <description/>
+  </property>
+
+  <property>
+    <name>hive.prewarm.enabled</name>
+    <value>false</value>
+    <description>Enables container prewarm for Tez (Hadoop 2 only)</description>
+  </property>
+  <property>
+    <name>hive.prewarm.numcontainers</name>
+    <value>10</value>
+    <description>Controls the number of containers to prewarm for Tez (Hadoop 2 only)</description>
+  </property>
+
+  <property>
+    <name>hive.tez.auto.reducer.parallelism</name>
+    <value>false</value>
+    <description>
+      Turn on Tez' auto reducer parallelism feature. When enabled, Hive will still estimate data sizes
+      and set parallelism estimates. Tez will sample source vertices' output sizes and adjust the estimates at runtime as
+      necessary.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.tez.dynamic.partition.pruning</name>
+    <value>true</value>
+    <description>When dynamic pruning is enabled, joins on partition keys will be processed by sending events from the processing vertices to the tez application master. These events will be used to prune unnecessary partitions.</description>
+  </property>
+
+  <!-- performance -->
+
+  <property>
+    <name>hive.vectorized.execution.enabled</name>
+    <value>true</value>
+    <description>
+      This flag should be set to true to enable vectorized mode of query execution.
+      The default value is false.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.vectorized.execution.reduce.enabled</name>
+    <value>false</value>
+    <description>
+      This flag should be set to true to enable vectorized mode of the reduce-side of query execution.
+      The default value is true.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.optimize.index.filter</name>
+    <value>true</value>
+    <description>Whether to enable automatic use of indexes</description>
+  </property>
+
+  <property>
+    <name>hive.execution.engine</name>
+    <value>mr</value>
+    <description>
+      Expects one of [mr, tez].
+      Chooses execution engine. Options are: mr (Map reduce, default) or tez (hadoop 2 only)
+    </description>
+  </property>
+
+  <property>
+    <name>hive.compute.query.using.stats</name>
+    <value>true</value>
+    <description>
+      When set to true Hive will answer a few queries like count(1) purely using stats
+      stored in metastore. For basic stats collection turn on the config hive.stats.autogather to true.
+      For more advanced stats collection need to run analyze table queries.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.server2.tez.sessions.per.default.queue</name>
+    <value>1</value>
+    <description>
+      A positive integer that determines the number of Tez sessions that should be
+      launched on each of the queues specified by "hive.server2.tez.default.queues".
+      Determines the parallelism on each queue.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.server2.tez.initialize.default.sessions</name>
+    <value>false</value>
+    <description>
+      This flag is used in HiveServer2 to enable a user to use HiveServer2 without
+      turning on Tez for HiveServer2. The user could potentially want to run queries
+      over Tez without the pool of sessions.
+    </description>
+  </property>
+
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/configuration/webhcat-site.xml
new file mode 100644
index 0000000..2be3547
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/configuration/webhcat-site.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<!-- The default settings for Templeton. -->
+<!-- Edit templeton-site.xml to change settings for your local -->
+<!-- install. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>templeton.libjars</name>
+    <value>file:///c:/hdp/hive/lib/zookeeper.jar,file:///c:/hdp/hive/lib/hive-common.jar</value>
+    <description>Jars to add to the classpath.</description>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/metainfo.xml
new file mode 100644
index 0000000..64f266e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/HIVE/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HIVE</name>
+      <version>0.15.0.2.3</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/KNOX/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/KNOX/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/KNOX/metainfo.xml
new file mode 100644
index 0000000..48ba394
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/KNOX/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>KNOX</name>
+      <version>0.6.0.2.3</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/configuration/oozie-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/configuration/oozie-env.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/configuration/oozie-env.xml
new file mode 100644
index 0000000..1db1b6e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/configuration/oozie-env.xml
@@ -0,0 +1,129 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <property>
+    <name>oozie_user</name>
+    <deleted>true</deleted>
+  </property>
+  <property>
+    <name>oozie_hostname</name>
+    <value></value>
+    <description>
+      Specify the host on which the OOZIE database is hosted.
+    </description>
+  </property>
+  <property>
+    <name>oozie_database</name>
+    <value>Existing MSSQL Server database with SQL authentication</value>
+    <description>Oozie Server Database.</description>
+  </property>
+  <property>
+    <name>oozie_data_dir</name>
+    <value>c:\hadoop\oozie\data</value>
+    <description>Data directory in which the Oozie DB exists</description>
+  </property>
+  <property>
+    <name>oozie_log_dir</name>
+    <value>c:\hadoop\logs\oozie</value>
+    <description>Directory for oozie logs</description>
+  </property>
+  <property>
+    <name>oozie_pid_dir</name>
+    <value>c:\hadoop\run\oozie</value>
+    <description>Directory in which the pid files for oozie reside.</description>
+  </property>
+
+  <!-- oozie-env.cmd -->
+  <property>
+    <name>content</name>
+    <description>This is the jinja template for the oozie-env.cmd content</description>
+    <value>
+@rem Licensed to the Apache Software Foundation (ASF) under one
+@rem or more contributor license agreements.  See the NOTICE file
+@rem distributed with this work for additional information
+@rem regarding copyright ownership.  The ASF licenses this file
+@rem to you under the Apache License, Version 2.0 (the
+@rem "License"); you may not use this file except in compliance
+@rem with the License.  You may obtain a copy of the License at
+@rem
+@rem      http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@rem Set Oozie specific environment variables here.
+
+@rem Settings for the Embedded Tomcat that runs Oozie
+@rem Java System properties for Oozie should be specified in this variable
+@rem
+set CATALINA_OPTS=%CATALINA_OPTS% -Xmx1024m
+
+@rem Oozie configuration file to load from Oozie configuration directory
+@rem
+@rem set OOZIE_CONFIG_FILE=oozie-site.xml
+
+@rem Oozie logs directory
+@rem
+@rem set OOZIE_LOG={{oozie_log_dir}}
+
+@rem Oozie Log4J configuration file to load from Oozie configuration directory
+@rem
+@rem set OOZIE_LOG4J_FILE=oozie-log4j.properties
+
+@rem Reload interval of the Log4J configuration file, in seconds
+@rem
+@rem set OOZIE_LOG4J_RELOAD=10
+
+@rem The port Oozie server runs
+@rem
+@rem set OOZIE_HTTP_PORT=11000
+
+@rem The port Oozie server runs if using SSL (HTTPS)
+@rem
+@rem set OOZIE_HTTPS_PORT=11443
+
+@rem The host name Oozie server runs on
+@rem
+@rem set OOZIE_HTTP_HOSTNAME=%COMPUTERNAME%
+
+@rem The base URL for callback URLs to Oozie
+@rem
+@rem set OOZIE_BASE_URL="http://%OOZIE_HTTP_HOSTNAME%:%OOZIE_HTTP_PORT%/oozie"
+
+@rem The location of the keystore for the Oozie server if using SSL (HTTPS)
+@rem
+@rem set OOZIE_HTTPS_KEYSTORE_FILE=%HOME%/.keystore
+
+@rem The password of the keystore for the Oozie server if using SSL (HTTPS)
+@rem
+@rem set OOZIE_HTTPS_KEYSTORE_PASS=password
+
+set JAVA_LIBRARY_PATH=%HADOOP_COMMON_HOME%\bin
+    </value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/configuration/oozie-site.xml
new file mode 100644
index 0000000..4e5bb61
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/configuration/oozie-site.xml
@@ -0,0 +1,114 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+        
+       http://www.apache.org/licenses/LICENSE-2.0
+  
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<configuration supports_final="true">
+  <property>
+    <name>oozie.service.JPAService.jdbc.url</name>
+    <value>jdbc:sqlserver://localhost;databaseName=oozie</value>
+    <description>
+      JDBC URL.
+    </description>
+  </property>
+  <property>
+    <name>oozie.service.JPAService.create.db.schema</name>
+    <value>true</value>
+    <description>
+      Creates Oozie DB.
+
+      If set to true, it creates the DB schema if it does not exist. If the DB schema exists, it is a NOP.
+      If set to false, it does not create the DB schema. If the DB schema does not exist it fails start up.
+    </description>
+  </property>
+
+  <property>
+    <name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
+    <value>*=c:\hdp\hadoop\etc\hadoop</value>
+    <description>
+      Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the HOST:PORT of
+      the Hadoop service (JobTracker, HDFS). The wildcard '*' configuration is
+      used when there is no exact match for an authority. The HADOOP_CONF_DIR contains
+      the relevant Hadoop *-site.xml files. If the path is relative is looked within
+      the Oozie configuration directory; though the path can be absolute (i.e. to point
+      to Hadoop client conf/ directories in the local filesystem).
+    </description>
+  </property>
+
+  <property>
+    <name>oozie.service.coord.check.maximum.frequency</name>
+    <value>false</value>
+    <description>
+      When true, Oozie will reject any coordinators with a frequency faster than 5 minutes.  It is not recommended to disable
+      this check or submit coordinators with frequencies faster than 5 minutes: doing so can cause unintended behavior and
+      additional system stress.
+    </description>
+  </property>
+  <property>
+    <name>oozie.services</name>
+    <value>
+      org.apache.oozie.service.SchedulerService,
+      org.apache.oozie.service.InstrumentationService,
+      org.apache.oozie.service.MemoryLocksService,
+      org.apache.oozie.service.UUIDService,
+      org.apache.oozie.service.ELService,
+      org.apache.oozie.service.AuthorizationService,
+      org.apache.oozie.service.UserGroupInformationService,
+      org.apache.oozie.service.HadoopAccessorService,
+      org.apache.oozie.service.JobsConcurrencyService,
+      org.apache.oozie.service.URIHandlerService,
+      org.apache.oozie.service.DagXLogInfoService,
+      org.apache.oozie.service.SchemaService,
+      org.apache.oozie.service.LiteWorkflowAppService,
+      org.apache.oozie.service.JPAService,
+      org.apache.oozie.service.StoreService,
+      org.apache.oozie.service.SLAStoreService,
+      org.apache.oozie.service.DBLiteWorkflowStoreService,
+      org.apache.oozie.service.CallbackService,
+      org.apache.oozie.service.ShareLibService,
+      org.apache.oozie.service.CallableQueueService,
+      org.apache.oozie.service.ActionService,
+      org.apache.oozie.service.ActionCheckerService,
+      org.apache.oozie.service.RecoveryService,
+      org.apache.oozie.service.PurgeService,
+      org.apache.oozie.service.CoordinatorEngineService,
+      org.apache.oozie.service.BundleEngineService,
+      org.apache.oozie.service.DagEngineService,
+      org.apache.oozie.service.CoordMaterializeTriggerService,
+      org.apache.oozie.service.StatusTransitService,
+      org.apache.oozie.service.PauseTransitService,
+      org.apache.oozie.service.GroupsService,
+      org.apache.oozie.service.ProxyUserService,
+      org.apache.oozie.service.XLogStreamingService,
+      org.apache.oozie.service.JvmPauseMonitorService
+    </value>
+    <description>
+      All services to be created and managed by Oozie Services singleton.
+      Class names must be separated by commas.
+    </description>
+  </property>
+
+  <property>
+    <name>oozie.service.SchemaService.wf.ext.schemas</name>
+    <value>shell-action-0.1.xsd,shell-action-0.2.xsd,shell-action-0.3.xsd,email-action-0.1.xsd,email-action-0.2.xsd,hive-action-0.2.xsd,hive-action-0.3.xsd,hive-action-0.4.xsd,hive-action-0.5.xsd,sqoop-action-0.2.xsd,sqoop-action-0.3.xsd,sqoop-action-0.4.xsd,ssh-action-0.1.xsd,ssh-action-0.2.xsd,distcp-action-0.1.xsd,distcp-action-0.2.xsd,oozie-sla-0.1.xsd,oozie-sla-0.2.xsd</value>
+  </property>
+
+  <property>
+    <name>oozie.service.AuthorizationService.security.enabled</name>
+    <deleted>true</deleted>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/metainfo.xml
new file mode 100644
index 0000000..fdb65a4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/OOZIE/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>OOZIE</name>
+      <extends>common-services/OOZIE/5.0.0.2.3</extends>
+    </service>
+  </services>
+</metainfo>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/78430140/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/PIG/metainfo.xml
new file mode 100644
index 0000000..758c3af
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.3/services/PIG/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>PIG</name>
+      <version>0.15.0.2.3</version>
+    </service>
+  </services>
+</metainfo>


Mime
View raw message