ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nc...@apache.org
Subject [24/50] [abbrv] ambari git commit: AMBARI-19919: spark/livy (1.x) should not be configured in Zeppelin's interpreter if they are not installed (Prabhjyot Singh via r-kamath)
Date Mon, 13 Feb 2017 21:03:58 GMT
AMBARI-19919: spark/livy (1.x) should not be configured in Zeppelin's interpreter if they are
not installed (Prabhjyot Singh via r-kamath)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a9060614
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a9060614
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a9060614

Branch: refs/heads/branch-dev-patch-upgrade
Commit: a9060614ffe9cd6812eb8ff9cbb6fd81572f66e2
Parents: b695bf2
Author: Renjith Kamath <renjith.kamath@gmail.com>
Authored: Mon Feb 13 13:27:06 2017 +0530
Committer: Renjith Kamath <renjith.kamath@gmail.com>
Committed: Mon Feb 13 13:29:00 2017 +0530

----------------------------------------------------------------------
 .../package/scripts/livy2_config_template.py    | 107 +++++++++++++++++++
 .../0.6.0.2.5/package/scripts/master.py         |  42 ++++++--
 .../0.6.0.2.5/package/scripts/params.py         |   8 ++
 3 files changed, 147 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a9060614/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
new file mode 100644
index 0000000..71d3817
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
@@ -0,0 +1,107 @@
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

"""

# Default interpreter configuration for Zeppelin's "livy2" interpreter.
# master.py parses this string with json.loads() (get_livy2_interpreter_config)
# and merges the resulting dict into Zeppelin's interpreter settings, so the
# text below must remain valid JSON. Empty property values are placeholders
# that Zeppelin/operators fill in later; zeppelin.livy.url is rewritten by
# master.py to point at the actual Livy2 server host/port when one exists.
template = '''
{
  "id": "2C8A4SZ9T_livy2",
  "status": "READY",
  "group": "livy",
  "name": "livy2",
  "properties": {
    "zeppelin.livy.keytab": "",
    "zeppelin.livy.spark.sql.maxResult": "1000",
    "livy.spark.executor.instances": "",
    "livy.spark.executor.memory": "",
    "livy.spark.dynamicAllocation.enabled": "",
    "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",
    "livy.spark.dynamicAllocation.initialExecutors": "",
    "zeppelin.livy.session.create_timeout": "120",
    "livy.spark.driver.memory": "",
    "zeppelin.livy.displayAppInfo": "false",
    "livy.spark.jars.packages": "",
    "livy.spark.dynamicAllocation.maxExecutors": "",
    "zeppelin.livy.concurrentSQL": "false",
    "zeppelin.livy.principal": "",
    "livy.spark.executor.cores": "",
    "zeppelin.livy.url": "http://localhost:8998",
    "zeppelin.livy.pull_status.interval.millis": "1000",
    "livy.spark.driver.cores": "",
    "livy.spark.dynamicAllocation.minExecutors": ""
  },
  "interpreterGroup": [
    {
      "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
      "editor": {
        "editOnDblClick": false,
        "language": "scala"
      },
      "name": "spark",
      "defaultInterpreter": false
    },
    {
      "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
      "editor": {
        "editOnDblClick": false,
        "language": "sql"
      },
      "name": "sql",
      "defaultInterpreter": false
    },
    {
      "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
      "editor": {
        "editOnDblClick": false,
        "language": "python"
      },
      "name": "pyspark",
      "defaultInterpreter": false
    },
    {
      "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
      "editor": {
        "editOnDblClick": false,
        "language": "python"
      },
      "name": "pyspark3",
      "defaultInterpreter": false
    },
    {
      "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
      "editor": {
        "editOnDblClick": false,
        "language": "r"
      },
      "name": "sparkr",
      "defaultInterpreter": false
    }
  ],
  "dependencies": [],
  "option": {
    "setPermission": false,
    "remote": true,
    "users": [],
    "isExistingProcess": false,
    "perUser": "scoped",
    "isUserImpersonate": false,
    "perNote": "shared",
    "port": -1
  }
}
'''

http://git-wip-us.apache.org/repos/asf/ambari/blob/a9060614/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
index 9af48e5..8a1fad6 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
@@ -289,13 +289,18 @@ class Master(Script):
     config_data = self.get_interpreter_settings()
     interpreter_settings = config_data['interpreterSettings']
 
-    if params.spark2_home:
+    if 'spark2-env' in params.config['configurations']:
       spark2_config = self.get_spark2_interpreter_config()
       config_id = spark2_config["id"]
       interpreter_settings[config_id] = spark2_config
 
-    for interpreter_setting in interpreter_settings:
-      interpreter = interpreter_settings[interpreter_setting]
+    if 'livy2-env' in params.config['configurations']:
+      livy2_config = self.get_livy2_interpreter_config()
+      config_id = livy2_config["id"]
+      interpreter_settings[config_id] = livy2_config
+
+    for setting_key in interpreter_settings.keys():
+      interpreter = interpreter_settings[setting_key]
       if interpreter['group'] == 'jdbc':
         interpreter['dependencies'] = []
         if params.hive_server_host:
@@ -328,23 +333,34 @@ class Master(Script):
                                                     params.zookeeper_znode_parent
             interpreter['dependencies'].append(
                {"groupArtifactVersion": "org.apache.phoenix:phoenix-core:4.7.0-HBase-1.1", "local": "false"})
-      elif interpreter['group'] == 'livy' and params.livy_livyserver_host:
-        interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy_livyserver_host +\
-                                                      ":" + params.livy_livyserver_port
+      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy':
+        if params.livy_livyserver_host:
+          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy_livyserver_host + \
+                                                           ":" + params.livy_livyserver_port
+        else:
+          del interpreter_settings[setting_key]
+
+      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2':
+        if params.livy2_livyserver_host:
+          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy2_livyserver_host + \
+                                                           ":" + params.livy2_livyserver_port
+        else:
+          del interpreter_settings[setting_key]
+
 
       elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark':
-        if params.spark_home:
+        if 'spark-env' in params.config['configurations']:
           interpreter['properties']['master'] = "yarn-client"
           interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark-client/"
         else:
-          interpreter['properties']['master'] = "local[*]"
+          del interpreter_settings[setting_key]
 
       elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark2':
-        if params.spark2_home:
+        if 'spark2-env' in params.config['configurations']:
           interpreter['properties']['master'] = "yarn-client"
           interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark2-client/"
         else:
-          interpreter['properties']['master'] = "local[*]"
+          del interpreter_settings[setting_key]
 
     self.set_interpreter_settings(config_data)
 
@@ -367,5 +383,11 @@ class Master(Script):
 
     return json.loads(spark2_config_template.template)
 
+  def get_livy2_interpreter_config(self):
+    import livy2_config_template
+    import json
+
+    return json.loads(livy2_config_template.template)
+
 if __name__ == "__main__":
   Master().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/a9060614/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
index 97e93fe..16a2782 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
@@ -161,14 +161,22 @@ spark_client_version = get_stack_version('spark-client')
 
 hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
 livy_hosts = default("/clusterHostInfo/livy_server_hosts", [])
+livy2_hosts = default("/clusterHostInfo/livy2_server_hosts", [])
 
 livy_livyserver_host = None
 livy_livyserver_port = None
+livy2_livyserver_host = None
+livy2_livyserver_port = None
 if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted) and \
     len(livy_hosts) > 0:
   livy_livyserver_host = str(livy_hosts[0])
   livy_livyserver_port = config['configurations']['livy-conf']['livy.server.port']
 
+if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, stack_version_formatted) and \
+    len(livy2_hosts) > 0:
+  livy2_livyserver_host = str(livy2_hosts[0])
+  livy2_livyserver_port = config['configurations']['livy2-conf']['livy.server.port']
+
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']


Mime
View raw message