ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jonathanhur...@apache.org
Subject [1/4] ambari git commit: AMBARI-21722 - Begin Using Service Versions In Python stack_feature Code (jonathanhurley)
Date Wed, 16 Aug 2017 04:05:35 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-21450 086e368a9 -> e7f413d32


http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index 805a7bc..67c6969 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -217,7 +217,6 @@ class TestHookAfterInstall(RMFTestCase):
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
     json_content['hostLevelParams']['stack_version'] = "2.3"
-    json_content['hostLevelParams']['current_version'] = "2.3.0.0-1234"
 
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
       classname="AfterInstallHook",
@@ -314,7 +313,6 @@ class TestHookAfterInstall(RMFTestCase):
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
     json_content['hostLevelParams']['stack_version'] = "2.3"
-    json_content['hostLevelParams']['current_version'] = "2.3.0.0-1234"
     json_content['hostLevelParams']['host_sys_prepped'] = "true"
 
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
index 9de2156..fcbbbee 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
@@ -34,6 +34,7 @@ class TestFalconClient(RMFTestCase):
                        classname="FalconClient",
                        command="configure",
                        config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -49,11 +50,11 @@ class TestFalconClient(RMFTestCase):
                               mode = 0755,
                               cd_access = "a",
                               )
-    self.assertResourceCalled('Directory', '/var/lib/falcon/webapp',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/falcon-client/webapp',
                               owner = 'falcon',
                               create_parents = True
                               )
-    self.assertResourceCalled('Directory', '/usr/lib/falcon',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/falcon-client',
                               owner = 'falcon',
                               create_parents = True
                               )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 2c877c8..239e2b5 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -40,24 +40,25 @@ class TestFalconServer(RMFTestCase):
       classname="FalconServer",
       command="start",
       config_file="default.json",
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
     self.assert_configure_default()
 
-    self.assertResourceCalled('Execute', '/usr/lib/falcon/bin/falcon-config.sh server falcon',
+    self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-config.sh server falcon',
       path = ['/usr/bin'],
       user = 'falcon',
       environment = {'HADOOP_HOME': '/usr/lib/hadoop'},
       not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
     )
 
-    self.assertResourceCalled('File', '/usr/lib/falcon/server/webapp/falcon/WEB-INF/lib/je-5.0.73.jar',
+    self.assertResourceCalled('File', '/usr/hdp/current/falcon-server/server/webapp/falcon/WEB-INF/lib/je-5.0.73.jar',
       content=DownloadSource('http://c6401.ambari.apache.org:8080/resources//je-5.0.73.jar'),
       mode=0755
     )
 
-    self.assertResourceCalled('Execute', '/usr/lib/falcon/bin/falcon-start -port 15000',
+    self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-start -port 15000',
       path = ['/usr/bin'],
       user = 'falcon',
       environment = {'HADOOP_HOME': '/usr/lib/hadoop'},
@@ -71,10 +72,11 @@ class TestFalconServer(RMFTestCase):
       classname="FalconServer",
       command="stop",
       config_file="default.json",
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
-    self.assertResourceCalled('Execute', '/usr/lib/falcon/bin/falcon-stop',
+    self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-stop',
       path = ['/usr/bin'],
       user = 'falcon',
       environment = {'HADOOP_HOME': '/usr/lib/hadoop'})
@@ -89,6 +91,7 @@ class TestFalconServer(RMFTestCase):
                        classname="FalconServer",
                        command="configure",
                        config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -108,11 +111,11 @@ class TestFalconServer(RMFTestCase):
                               cd_access = "a",
                               mode = 0755,
                               )
-    self.assertResourceCalled('Directory', '/var/lib/falcon/webapp',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/falcon-server/webapp',
                               owner = 'falcon',
                               create_parents = True
                               )
-    self.assertResourceCalled('Directory', '/usr/lib/falcon',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/falcon-server',
                               owner = 'falcon',
                               create_parents = True
                               )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
index 6519e3c..ebd6c8a 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_service_check.py
@@ -25,15 +25,18 @@ class TestFalconServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "FALCON/0.5.0.2.1/package"
   STACK_VERSION = "2.1"
 
+  CONFIG_OVERRIDES = {"serviceName":"FALCON", "role":"FALCON_CLIENT"}
+
   def test_service_check(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                        classname="FalconServiceCheck",
                        command="service_check",
                        config_file="default.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', '/usr/lib/falcon/bin/falcon admin -version',
+    self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-client/bin/falcon admin -version',
                               logoutput = True,
                               tries = 3,
                               user = 'ambari-qa',

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index 24fb0d9..ac5d6ae 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -534,8 +534,22 @@ class TestHiveMetastore(RMFTestCase):
     json_content['commandParams']['version'] = version
     json_content['commandParams']['upgrade_direction'] = Direction.UPGRADE
     json_content['hostLevelParams']['stack_version'] = "2.3"
-    json_content['hostLevelParams']['current_version'] = "2.2.7.0-1234"
-
+    json_content["upgradeSummary"] = {
+      "services":{
+        "HIVE":{
+          "sourceRepositoryId":1,
+          "sourceStackId":"HDP-2.2",
+          "sourceVersion":"2.2.7.0-1234",
+          "targetRepositoryId":2,
+          "targetStackId":"HDP-2.3",
+          "targetVersion":version
+        }
+      },
+      "direction":"UPGRADE",
+      "type":"nonrolling_upgrade",
+      "isRevert":False,
+      "orchestration":"STANDARD"
+    }
 
     # trigger the code to think it needs to copy the JAR
    json_content['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName'] = "com.mysql.jdbc.Driver"
@@ -591,7 +605,7 @@ class TestHiveMetastore(RMFTestCase):
 
    self.assertResourceCalled('Execute', '/usr/hdp/2.3.0.0-1234/hive/bin/schematool -dbType mysql -upgradeSchema',
          logoutput = True,
-         environment = {'HIVE_CONF_DIR': '/etc/hive/conf.server'},
+         environment = {'HIVE_CONF_DIR': '/usr/hdp/current/hive-metastore/conf/conf.server'},
          tries = 1,
          user = 'hive')
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
b/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
index 021695b..3bf8b6d 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
@@ -29,8 +29,7 @@
         "stack_name": "HDP", 
         "group_list": "[\"hadoop\",\"users\"]", 
         "host_sys_prepped": "false", 
-        "ambari_db_rca_username": "mapred", 
-        "current_version": "2.3.0.0-2557", 
+        "ambari_db_rca_username": "mapred",
         "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
         "mysql_jdbc_url": "http://10.0.0.13:8080/resources//mysql-connector-java.jar", 
         "repo_info": "[{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-2.3.0.0-2557\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.3\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.3.4.0-3360\",\"baseSaved\":true},{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-UTILS-1.1.0.20\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"baseSaved\":true}]",

@@ -57,7 +56,23 @@
     "role": "HIVE_METASTORE", 
     "requestId": 9, 
     "taskId": 176, 
-    "public_hostname": "c6402.ambari.apache.org", 
+    "public_hostname": "c6402.ambari.apache.org",
+    "upgradeSummary": {
+      "services": {
+        "HIVE": {
+          "sourceRepositoryId": 1,
+          "sourceStackId": "HDP-2.3",
+          "sourceVersion": "2.3.0.0-2557",
+          "targetRepositoryId": 2,
+          "targetStackId": "HDP-2.3",
+          "targetVersion": "2.3.2.0-2950"
+        }
+      },
+      "direction": "UPGRADE",
+      "type": "nonrolling_upgrade",
+      "isRevert": false,
+      "orchestration": "STANDARD"
+    },
     "configurations": {
         "webhcat-env": {
             "content": "\n# The file containing the running pid\nPID_FILE={{webhcat_pid_file}}\n\nTEMPLETON_LOG_DIR={{templeton_log_dir}}/\n\n\nWEBHCAT_LOG_DIR={{templeton_log_dir}}/\n\n#
The console error log\nERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log\n\n# The
console log\nCONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log\n\n#TEMPLETON_JAR=templeton_jar_name\n\n#HADOOP_PREFIX=hadoop_prefix\n\n#HCAT_PREFIX=hive_prefix\n\n#
Set HADOOP_HOME to point to a specific hadoop install directory\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py b/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
index 8cdbb2e..3114fa8 100644
--- a/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
+++ b/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
@@ -208,7 +208,23 @@ class TestKnoxGateway(RMFTestCase):
     version = "2.3.2.0-5678"
     # This is an RU from 2.3.0.0 to 2.3.2.0
     json_content['commandParams']['version'] = version
-    json_content['hostLevelParams']['current_version'] = source_version
+
+    json_content["upgradeSummary"] = {
+      "services":{
+        "KNOX":{
+          "sourceRepositoryId":1,
+          "sourceStackId":"HDP-2.2",
+          "sourceVersion":source_version,
+          "targetRepositoryId":2,
+          "targetStackId":"HDP-2.3",
+          "targetVersion":version
+        }
+      },
+      "direction":"UPGRADE",
+      "type":"nonrolling_upgrade",
+      "isRevert":False,
+      "orchestration":"STANDARD"
+    }
 
     path_exists_mock.return_value = True
     mocks_dict = {}
@@ -251,7 +267,23 @@ class TestKnoxGateway(RMFTestCase):
     version = "2.3.2.0-1001"
     # This is an RU from 2.3.2.0 to 2.3.2.1
     json_content['commandParams']['version'] = version
-    json_content['hostLevelParams']['current_version'] = source_version
+
+    json_content["upgradeSummary"] = {
+      "services":{
+        "KNOX":{
+          "sourceRepositoryId":1,
+          "sourceStackId":"HDP-2.2",
+          "sourceVersion":source_version,
+          "targetRepositoryId":2,
+          "targetStackId":"HDP-2.3",
+          "targetVersion":version
+        }
+      },
+      "direction":"UPGRADE",
+      "type":"rolling_upgrade",
+      "isRevert":False,
+      "orchestration":"STANDARD"
+    }
 
     path_exists_mock.return_value = True
     mocks_dict = {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json
index 1805c3b..c3e3f94 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json
@@ -37,8 +37,7 @@
         "stack_name": "HDP",
         "group_list": "[\"hadoop\",\"users\",\"knox\"]", 
         "host_sys_prepped": "false", 
-        "ambari_db_rca_username": "mapred", 
-        "current_version": "2.3.0.0-2075", 
+        "ambari_db_rca_username": "mapred",
         "jdk_name": "jdk-8u60-linux-x64.tar.gz", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",

         "repo_info": "[]", 
@@ -65,7 +64,23 @@
     "role": "KNOX_GATEWAY", 
     "forceRefreshConfigTags": [], 
     "taskId": 100, 
-    "public_hostname": "c6401.ambari.apache.org", 
+    "public_hostname": "c6401.ambari.apache.org",
+    "upgradeSummary": {
+      "services": {
+        "KNOX": {
+          "sourceRepositoryId": 1,
+          "sourceStackId": "HDP-2.3",
+          "sourceVersion": "2.3.0.0-2075",
+          "targetRepositoryId": 2,
+          "targetStackId": "HDP-2.3",
+          "targetVersion": "2.3.0.0-2096"
+        }
+      },
+      "direction": "UPGRADE",
+      "type": "rolling_upgrade",
+      "isRevert": false,
+      "orchestration": "STANDARD"
+    },
     "configurations": {
         "gateway-log4j": {
             "content": "\n\n      # Licensed to the Apache Software Foundation (ASF) under
one\n      # or more contributor license agreements. See the NOTICE file\n      # distributed
with this work for additional information\n      # regarding copyright ownership. The ASF
licenses this file\n      # to you under the Apache License, Version 2.0 (the\n      # \"License\");
you may not use this file except in compliance\n      # with the License. You may obtain a
copy of the License at\n      #\n      # http://www.apache.org/licenses/LICENSE-2.0\n    
 #\n      # Unless required by applicable law or agreed to in writing, software\n      # distributed
under the License is distributed on an \"AS IS\" BASIS,\n      # WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied.\n      # See the License for the specific language
governing permissions and\n      # limitations under the License.\n\n      app.log.dir=${launcher.dir}/../logs\n
     app.log.file=${launcher.name}.lo
 g\n      app.audit.file=${launcher.name}-audit.log\n\n      log4j.rootLogger=ERROR, drfa\n\n
     log4j.logger.org.apache.hadoop.gateway=INFO\n      #log4j.logger.org.apache.hadoop.gateway=DEBUG\n\n
     #log4j.logger.org.eclipse.jetty=DEBUG\n      #log4j.logger.org.apache.shiro=DEBUG\n 
    #log4j.logger.org.apache.http=DEBUG\n      #log4j.logger.org.apache.http.client=DEBUG\n
     #log4j.logger.org.apache.http.headers=DEBUG\n      #log4j.logger.org.apache.http.wire=DEBUG\n\n
     log4j.appender.stdout=org.apache.log4j.ConsoleAppender\n      log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\n
     log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n
     log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender\n      log4j.appender.drfa.File=${app.log.dir}/${app.log.file}\n
     log4j.appender.drfa.DatePattern=.yyyy-MM-dd\n      log4j.appender.drfa.layout=org.apache.log4j.PatternLayout\n
     log4j.appender.drfa.layout.ConversionPat
 tern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\n\n      log4j.logger.audit=INFO, auditfile\n
     log4j.appender.auditfile=org.apache.log4j.DailyRollingFileAppender\n      log4j.appender.auditfile.File=${app.log.dir}/${app.audit.file}\n
     log4j.appender.auditfile.Append = true\n      log4j.appender.auditfile.DatePattern =
'.'yyyy-MM-dd\n      log4j.appender.auditfile.layout = org.apache.hadoop.gateway.audit.log4j.layout.AuditLayout"

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.3/configs/storm_default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/storm_default.json b/ambari-server/src/test/python/stacks/2.3/configs/storm_default.json
index 351025d..b69a311 100644
--- a/ambari-server/src/test/python/stacks/2.3/configs/storm_default.json
+++ b/ambari-server/src/test/python/stacks/2.3/configs/storm_default.json
@@ -14,7 +14,8 @@
         "command_retry_max_attempt_count": "3", 
         "command_retry_enabled": "false", 
         "command_timeout": "300", 
-        "script_type": "PYTHON"
+        "script_type": "PYTHON",
+        "version": "2.3.0.0-1889"
     }, 
     "roleCommand": "SERVICE_CHECK", 
     "kerberosCommandParams": [], 
@@ -34,8 +35,7 @@
         "host_sys_prepped": "false", 
         "db_name": "ambari", 
         "jdk_name": "jdk-8u60-linux-x64.tar.gz", 
-        "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "current_version": "2.3.0.0-1889", 
+        "ambari_db_rca_driver": "org.postgresql.Driver",
         "ambari_db_rca_username": "mapred", 
         "db_driver_filename": "mysql-connector-java.jar", 
         "agentCacheDir": "/var/lib/ambari-agent/cache", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.3/configs/storm_default_secure.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/storm_default_secure.json b/ambari-server/src/test/python/stacks/2.3/configs/storm_default_secure.json
index daa9030..8abff9f 100644
--- a/ambari-server/src/test/python/stacks/2.3/configs/storm_default_secure.json
+++ b/ambari-server/src/test/python/stacks/2.3/configs/storm_default_secure.json
@@ -14,7 +14,8 @@
         "command_retry_max_attempt_count": "3", 
         "command_retry_enabled": "false", 
         "command_timeout": "300", 
-        "script_type": "PYTHON"
+        "script_type": "PYTHON",
+        "version": "2.3.0.0-1889"
     }, 
     "roleCommand": "SERVICE_CHECK", 
     "kerberosCommandParams": [], 
@@ -34,8 +35,7 @@
         "host_sys_prepped": "false", 
         "db_name": "ambari", 
         "jdk_name": "jdk-8u60-linux-x64.tar.gz", 
-        "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "current_version": "2.3.0.0-1889", 
+        "ambari_db_rca_driver": "org.postgresql.Driver",
         "ambari_db_rca_username": "mapred", 
         "db_driver_filename": "mysql-connector-java.jar", 
         "agentCacheDir": "/var/lib/ambari-agent/cache", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json
index fb77531..4e431bb 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-default.json
@@ -167,7 +167,6 @@
         "previous_custom_mysql_jdbc_name": "mysql-connector-java-old.jar",
         "host_sys_prepped": "false",
         "ambari_db_rca_username": "mapred",
-        "current_version": "2.5.0.0-801",
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
         "agent_stack_retry_count": "5",
         "stack_version": "2.5",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
index 7f1e549..288d155 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
@@ -176,7 +176,6 @@
         "previous_custom_mysql_jdbc_name": "mysql-connector-java-old.jar",
         "host_sys_prepped": "false",
         "ambari_db_rca_username": "mapred",
-        "current_version": "2.5.0.0-801",
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
         "agent_stack_retry_count": "5",
         "stack_version": "2.5",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json
index cafbede..7f060eb 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-default.json
@@ -193,7 +193,6 @@
         "previous_custom_mysql_jdbc_name": "mysql-connector-java-old.jar",
         "host_sys_prepped": "false",
         "ambari_db_rca_username": "mapred",
-        "current_version": "2.5.0.0-777",
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
         "agent_stack_retry_count": "5",
         "stack_version": "2.5",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
index bcadd03..103c86b 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
@@ -203,7 +203,6 @@
         "previous_custom_mysql_jdbc_name": "mysql-connector-java-old.jar",
         "host_sys_prepped": "false",
         "ambari_db_rca_username": "mapred",
-        "current_version": "2.5.0.0-801",
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
         "agent_stack_retry_count": "5",
         "stack_version": "2.5",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json b/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json
index e5abe32..e3044cb 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json
@@ -175,7 +175,6 @@
         "previous_custom_mysql_jdbc_name": "mysql-connector-java-old.jar",
         "host_sys_prepped": "false",
         "ambari_db_rca_username": "mapred",
-        "current_version": "2.6.0.0-801",
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
         "agent_stack_retry_count": "5",
         "stack_version": "2.6",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json b/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json
index 64e7d52..38b5906 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json
@@ -184,7 +184,6 @@
         "previous_custom_mysql_jdbc_name": "mysql-connector-java-old.jar",
         "host_sys_prepped": "false",
         "ambari_db_rca_username": "mapred",
-        "current_version": "2.6.0.0-801",
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
         "agent_stack_retry_count": "5",
         "stack_version": "2.6",

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/utils/RMFTestCase.py b/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
index 8e95319..0341092 100644
--- a/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
+++ b/ambari-server/src/test/python/stacks/utils/RMFTestCase.py
@@ -77,28 +77,17 @@ class RMFTestCase(TestCase):
                     try_install=False,
                     command_args=[],
                     log_out_files=False):
+
     norm_path = os.path.normpath(path)
-    src_dir = RMFTestCase.get_src_folder()
+
     if target == self.TARGET_STACKS:
       stack_version = norm_path.split(os.sep)[0]
-      base_path = os.path.join(src_dir, PATH_TO_STACKS)
-      configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
-    elif target == self.TARGET_CUSTOM_ACTIONS:
-      base_path = os.path.join(src_dir, PATH_TO_CUSTOM_ACTIONS)
-      configs_path = os.path.join(src_dir, PATH_TO_CUSTOM_ACTION_TESTS, "configs")
-    elif target == self.TARGET_COMMON_SERVICES:
-      base_path = os.path.join(src_dir, PATH_TO_COMMON_SERVICES)
-      configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
-    else:
-      raise RuntimeError("Wrong target value %s", target)
+
+    base_path, configs_path = self._get_test_paths(target, stack_version)
     script_path = os.path.join(base_path, norm_path)
+
     if config_file is not None and config_dict is None:
-      config_file_path = os.path.join(configs_path, config_file)
-      try:
-        with open(config_file_path, "r") as f:
-          self.config_dict = json.load(f)
-      except IOError:
-        raise RuntimeError("Can not read config file: "+ config_file_path)
+      self.config_dict = self.get_config_file(configs_path, config_file)
     elif config_dict is not None and config_file is None:
       self.config_dict = config_dict
     else:
@@ -165,7 +154,48 @@ class RMFTestCase(TestCase):
                       method(RMFTestCase.env, *command_args)
 
     sys.path.remove(scriptsdir)
-  
+
+  def get_config_file(self, configs_path, config_file):
+    """
+    Loads the specified JSON config file
+    :param configs_path:
+    :param config_file:
+    :return:
+    """
+    config_file_path = os.path.join(configs_path, config_file)
+
+    try:
+      with open(config_file_path, "r") as f:
+        return json.load(f)
+    except IOError:
+      raise RuntimeError("Can not read config file: " + config_file_path)
+
+
+  def _get_test_paths(self, target, stack_version):
+    """
+    Gets the base and configs path variables.
+    :param target:
+    :param stack_version:
+    :return:
+    """
+    src_dir = RMFTestCase.get_src_folder()
+
+    if target == self.TARGET_STACKS:
+      base_path = os.path.join(src_dir, PATH_TO_STACKS)
+      configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
+      return base_path, configs_path
+    elif target == self.TARGET_CUSTOM_ACTIONS:
+      base_path = os.path.join(src_dir, PATH_TO_CUSTOM_ACTIONS)
+      configs_path = os.path.join(src_dir, PATH_TO_CUSTOM_ACTION_TESTS, "configs")
+      return base_path, configs_path
+    elif target == self.TARGET_COMMON_SERVICES:
+      base_path = os.path.join(src_dir, PATH_TO_COMMON_SERVICES)
+      configs_path = os.path.join(src_dir, PATH_TO_STACK_TESTS, stack_version, "configs")
+      return base_path, configs_path
+    else:
+      raise RuntimeError("Wrong target value %s", target)
+
+
   def getConfig(self):
     return self.config_dict
           

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/hook.py
b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/hook.py
index 8a583b3..8bae9e6 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/hook.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/hook.py
@@ -28,7 +28,7 @@ class AfterInstallHook(Hook):
     import params
 
     env.set_params(params)
-    setup_stack_symlinks()
+    setup_stack_symlinks(self.stroutfile)
     setup_config()
 
     link_configs(self.stroutfile)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/params.py
b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/params.py
index 819d8f7..bb04178 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/params.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/params.py
@@ -39,9 +39,6 @@ sudo = AMBARI_SUDO_BINARY
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
index 9982dc6..7e4d8ae 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -29,7 +29,7 @@ from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.libraries.script import Script
 
 
-def setup_stack_symlinks():
+def setup_stack_symlinks(struct_out_file):
   """
   Invokes <stack-selector-tool> set all against a calculated fully-qualified, "normalized"
version based on a
   stack version, such as "2.3". This should always be called after a component has been
@@ -38,15 +38,21 @@ def setup_stack_symlinks():
   :return:
   """
   import params
-  if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted,
'2.2') >= 0:
-    # try using the exact version first, falling back in just the stack if it's not defined
-    # which would only be during an intial cluster installation
-    version = params.current_version if params.current_version is not None else params.stack_version_unformatted
-
-    if not params.upgrade_suspended:
-      # On parallel command execution this should be executed by a single process at a time.
-      with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled,
skip_fcntl_failures = True):
-        stack_select.select_all(version)
+  # get the packages which the stack-select tool should be used on
+  stack_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+  if stack_packages is None:
+    return
+
+  json_version = load_version(struct_out_file)
+
+  if not json_version:
+    Logger.info("There is no advertised version for this component stored in {0}".format(struct_out_file))
+    return
+
+  # On parallel command execution this should be executed by a single process at a time.
+  with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled,
skip_fcntl_failures = True):
+    for package in stack_packages:
+      stack_select.select(package, json_version)
 
 def setup_config():
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
index 17bf581..a49bbd9 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_metastore.py
@@ -26,7 +26,6 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations
@@ -234,10 +233,8 @@ class HiveMetastoreDefault(HiveMetastore):
     # since the configurations have not been written out yet during an upgrade
     # we need to choose the original legacy location
     schematool_hive_server_conf_dir = params.hive_server_conf_dir
-    if params.current_version is not None:
-      current_version = format_stack_version(params.current_version)
-      if not(check_stack_feature(StackFeature.CONFIG_VERSIONING, current_version)):
-        schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
+    if not(check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version_for_stack_feature_checks)):
+      schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
 
     env_dict = {
       'HIVE_CONF_DIR': schematool_hive_server_conf_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_upgrade.py
b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_upgrade.py
index 478c240..9aa3d96 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_upgrade.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_server_upgrade.py
@@ -60,17 +60,8 @@ def post_upgrade_deregister():
     hive_server_conf_dir = "/etc/hive/conf.server"
 
   # deregister
-  hive_execute_path = params.execute_path
-  # If upgrading, the upgrade-target hive binary should be used to call the --deregister
command.
-  # If downgrading, the downgrade-source hive binary should be used to call the --deregister
command.
-  # By now <stack-selector-tool> has been called to set 'current' to target-stack
-  if "downgrade" == params.upgrade_direction:
-    # hive_bin
-    downgrade_version = params.current_version
-    if params.downgrade_from_version:
-      downgrade_version = params.downgrade_from_version
-    hive_execute_path = _get_hive_execute_path(downgrade_version)
-
+  source_version = params.version_for_stack_feature_checks
+  hive_execute_path = _get_hive_execute_path(source_version)
   command = format('hive --config {hive_server_conf_dir} --service hiveserver2 --deregister
' + current_hiveserver_version)
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
 
@@ -109,13 +100,7 @@ def _get_current_hiveserver_version():
   import params
 
   try:
-    # When downgrading the source version should be the version we are downgrading from
-    if "downgrade" == params.upgrade_direction:
-      if not params.downgrade_from_version:
-        raise Fail('The version from which we are downgrading from should be provided in
\'downgrade_from_version\'')
-      source_version = params.downgrade_from_version
-    else:
-      source_version = params.current_version
+    source_version = params.version_for_stack_feature_checks
     hive_execute_path = _get_hive_execute_path(source_version)
     version_hive_bin = params.hive_bin
     formatted_source_version = format_stack_version(source_version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_service.py
b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_service.py
index 22b4061..05b395d 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_service.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hive_service.py
@@ -64,7 +64,7 @@ def hive_service(name, action='start', upgrade_type=None):
     cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.err
{pid_file} {hive_server_conf_dir} {hive_log_dir}")
 
 
-    if params.security_enabled and params.current_version and check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV,
params.current_version):
+    if params.security_enabled and check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV,
params.version_for_stack_feature_checks):
       hive_kinit_cmd = format("{kinit_path_local} -kt {hive_server2_keytab} {hive_principal};
")
       Execute(hive_kinit_cmd, user=params.hive_user)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/params_linux.py
b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/params_linux.py
index 9d79e12..1e4487d 100755
--- a/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/params_linux.py
+++ b/contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/params_linux.py
@@ -36,6 +36,7 @@ from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
@@ -74,13 +75,6 @@ stack_version_formatted = functions.get_stack_version('hive-server2')
 # It cannot be used during the initial Cluser Install because the version is not yet known.
 version = default("/commandParams/version", None)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target
version
-# downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
-
 # get the correct version to use for checking stack features
 version_for_stack_feature_checks = get_stack_feature_version(config)
 
@@ -280,7 +274,8 @@ driver_curl_source = format("{jdk_location}/{jdbc_jar_name}")
 # normally, the JDBC driver would be referenced by <stack-root>/current/.../foo.jar
 # but in RU if <stack-selector-tool> is called and the restart fails, then this means
that current pointer
 # is now pointing to the upgraded version location; that's bad for the cp command
-source_jdbc_file = format("{stack_root}/{current_version}/hive/lib/{jdbc_jar_name}")
+version_for_source_jdbc_file = upgrade_summary.get_source_version(default_version = version_for_stack_feature_checks)
+source_jdbc_file = format("{stack_root}/{version_for_source_jdbc_file}/hive/lib/{jdbc_jar_name}")
 
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")


Mime
View raw message