ambari-commits mailing list archives

From smoha...@apache.org
Subject [4/4] ambari git commit: AMBARI-13980. Make sure Hive is selected/deployed when installing Spark Thrift server (Saisai Shao via smohanty)
Date Thu, 26 Nov 2015 09:04:57 GMT
AMBARI-13980. Make sure Hive is selected/deployed when installing Spark Thrift server (Saisai
Shao via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/babb3164
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/babb3164
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/babb3164

Branch: refs/heads/trunk
Commit: babb3164a55ed39b4c47a7f746c91e0c8a526fa5
Parents: c51227f
Author: Sumit Mohanty <smohanty@hortonworks.com>
Authored: Thu Nov 26 01:04:17 2015 -0800
Committer: Sumit Mohanty <smohanty@hortonworks.com>
Committed: Thu Nov 26 01:04:45 2015 -0800

----------------------------------------------------------------------
 .../SPARK/1.4.1.2.3/metainfo.xml                |    11 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |    22 +-
 .../2.3/common/services-sparkts-hive.json       | 10043 +++++++++++++++++
 .../stacks/2.3/common/services-sparkts.json     |  5860 ++++++++++
 .../python/stacks/2.3/common/sparkts-host.json  |   220 +
 .../stacks/2.3/common/test_stack_advisor.py     |   101 +-
 6 files changed, 16220 insertions(+), 37 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/babb3164/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml b/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml
index 0923dda..8dcb39d 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml
@@ -52,8 +52,15 @@
               <scope>host</scope>
               <auto-deploy>
                 <enabled>true</enabled>
-             </auto-deploy>
-           </dependency>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HIVE/HIVE_METASTORE</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
           </dependencies>
           <commandScript>
             <script>scripts/spark_thrift_server.py</script>

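For context (not part of the commit): the new <dependency> block above means the SPARK_THRIFTSERVER component now declares a cluster-scoped, auto-deployed dependency on HIVE/HIVE_METASTORE. A quick, hedged way to inspect such declarations is to parse the metainfo.xml with the Python standard library; the sketch below is illustrative only and assumes it is run from a checkout of the repository root, where the path modified by this commit exists.

# Sketch only: list each SPARK component's declared dependencies from the
# metainfo.xml touched by this commit (path is relative to the repo root).
import xml.etree.ElementTree as ET

path = "ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml"
root = ET.parse(path).getroot()

for component in root.iter("component"):
    comp_name = component.findtext("name")
    for dep in component.iter("dependency"):
        # Expected to show, among others:
        # SPARK_THRIFTSERVER -> HIVE/HIVE_METASTORE (scope=cluster, auto-deploy=true)
        print("%s -> %s (scope=%s, auto-deploy=%s)" % (
            comp_name,
            dep.findtext("name"),
            dep.findtext("scope"),
            dep.findtext("auto-deploy/enabled")))
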
http://git-wip-us.apache.org/repos/asf/ambari/blob/babb3164/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index d96aa57..9fb9e24 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -39,7 +39,10 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
   def getComponentLayoutValidations(self, services, hosts):
    parentItems = super(HDP23StackAdvisor, self).getComponentLayoutValidations(services, hosts)
 
-    if not "HAWQ" in [service["StackServices"]["service_name"] for service in services["services"]]:
+    hiveExists = "HIVE" in [service["StackServices"]["service_name"] for service in services["services"]]
+    sparkExists = "SPARK" in [service["StackServices"]["service_name"] for service in services["services"]]
+
+    if not "HAWQ" in [service["StackServices"]["service_name"] for service in services["services"]] and not sparkExists:
       return parentItems
 
     childItems = []
@@ -73,7 +76,20 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
                   "If you leave them collocated, make sure to set HAWQ Master Port property
" \
                   "to a value different from the port number used by Ambari Server database."
        childItems.append( { "type": 'host-component', "level": 'WARN', "message": message, "component-name": 'HAWQSTANDBY', "host": host } )
-    
+
+    if "SPARK_THRIFTSERVER" in [service["StackServices"]["service_name"] for service in services["services"]]:
+      if not "HIVE_SERVER" in [service["StackServices"]["service_name"] for service in services["services"]]:
+        message = "SPARK_THRIFTSERVER requires HIVE services to be selected."
+        childItems.append( {"type": 'host-component', "level": 'ERROR', "message": message, "component-name": 'SPARK_THRIFTSERVER'} )
+
+    hmsHosts = [component["StackServiceComponents"]["hostnames"] for component in componentsList if component["StackServiceComponents"]["component_name"] == "HIVE_METASTORE"][0] if hiveExists else []
+    sparkTsHosts = [component["StackServiceComponents"]["hostnames"] for component in componentsList if component["StackServiceComponents"]["component_name"] == "SPARK_THRIFTSERVER"][0] if sparkExists else []
+
+    # if Spark Thrift Server is deployed but no Hive Metastore is deployed
+    if len(sparkTsHosts) > 0 and len(hmsHosts) == 0:
+      message = "SPARK_THRIFTSERVER requires HIVE_METASTORE to be selected/deployed."
+      childItems.append( { "type": 'host-component', "level": 'ERROR', "message": message, "component-name": 'SPARK_THRIFTSERVER' } )
+
     parentItems.extend(childItems)
     return parentItems
 
@@ -567,7 +583,7 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
         "HIVE": {"hiveserver2-site": self.validateHiveServer2Configurations,
                  "hive-site": self.validateHiveConfigurations},
         "HBASE": {"hbase-site": self.validateHBASEConfigurations},
-        "KAKFA": {"kafka-broker": self.validateKAFKAConfigurations}        
+        "KAKFA": {"kafka-broker": self.validateKAFKAConfigurations}
       }
       self.mergeValidators(parentValidators, childValidators)
       return parentValidators

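The file summary above shows the commit also adds services-sparkts*.json fixtures and roughly 100 lines to test_stack_advisor.py, which are not reproduced in this message. As an illustration only, a trimmed-down exercise of the new layout validation could look like the sketch below; the services and hosts dictionaries are hypothetical minimal inputs rather than the real JSON fixtures, and it assumes HDP23StackAdvisor has already been loaded the way the Ambari unit tests load stack_advisor.py.

# Sketch only: a cluster with SPARK_THRIFTSERVER but no HIVE should surface an
# ERROR item from getComponentLayoutValidations. The dictionaries below are
# hypothetical and trimmed to the fields the advisor reads.
services = {
  "services": [
    {"StackServices": {"service_name": "SPARK"},
     "components": [
       {"StackServiceComponents": {"component_name": "SPARK_THRIFTSERVER",
                                   "hostnames": ["c6401.ambari.apache.org"]}}]}
  ]
}
hosts = {"items": [{"Hosts": {"host_name": "c6401.ambari.apache.org"}}]}

advisor = HDP23StackAdvisor()  # assumes stack_advisor.py was loaded beforehand
items = advisor.getComponentLayoutValidations(services, hosts)
assert any(item["level"] == "ERROR" and
           item["component-name"] == "SPARK_THRIFTSERVER" for item in items)
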
