ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From smoha...@apache.org
Subject ambari git commit: AMBARI-13983. Change Spark TS definition to make it work under secure and non-secure environment (Saisai Shao via smohanty)
Date Mon, 23 Nov 2015 01:59:55 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 15bdb771c -> 00c733570


AMBARI-13983. Change Spark TS definition to make it work under secure and non-secure environment
(Saisai Shao via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/00c73357
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/00c73357
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/00c73357

Branch: refs/heads/branch-2.1
Commit: 00c733570b7fd9ba21d076e35d73fbc6fdb02ae2
Parents: 15bdb77
Author: Sumit Mohanty <smohanty@hortonworks.com>
Authored: Sun Nov 22 17:58:10 2015 -0800
Committer: Sumit Mohanty <smohanty@hortonworks.com>
Committed: Sun Nov 22 17:59:29 2015 -0800

----------------------------------------------------------------------
 .../SPARK/1.2.0.2.2/package/scripts/params.py             |  4 ++++
 .../SPARK/1.2.0.2.2/package/scripts/setup_spark.py        |  1 +
 .../SPARK/1.2.0.2.2/package/scripts/spark_service.py      | 10 ++++++++--
 .../SPARK/1.2.0.2.2/package/scripts/status_params.py      |  5 +++++
 .../common-services/SPARK/1.4.1.2.3/kerberos.json         |  3 +++
 .../SPARK/configuration/spark-hive-site-override.xml      |  4 ++--
 .../python/stacks/2.2/SPARK/test_job_history_server.py    | 10 +++++++---
 .../src/test/python/stacks/2.2/SPARK/test_spark_client.py |  8 ++++++--
 .../python/stacks/2.3/SPARK/test_spark_thrift_server.py   |  6 ++++--
 9 files changed, 40 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index e9a2dc0..35f6e08 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -80,6 +80,7 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 
 spark_user = status_params.spark_user
+hive_user = status_params.hive_user
 spark_group = status_params.spark_group
 user_group = status_params.user_group
 spark_hdfs_user_dir = format("/user/{spark_user}")
@@ -142,6 +143,9 @@ if security_enabled:
       'hive.server2.authentication': config['configurations']['hive-site']['hive.server2.authentication'],
     })
 
+    hive_kerberos_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
+    hive_kerberos_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
+
 # thrift server support - available on HDP 2.3 or higher
 spark_thrift_sparkconf = None
 spark_thrift_cmd_opts_properties = ''

http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index debed40..945bd63 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -34,6 +34,7 @@ def setup_spark(env, type, action = None):
   Directory([params.spark_pid_dir, params.spark_log_dir],
             owner=params.spark_user,
             group=params.user_group,
+            mode=0775,
             recursive=True
   )
   if type == 'server' and action == 'config':

http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
index b274c61..d4c6732 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
@@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import socket
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
@@ -49,10 +50,15 @@ def spark_service(name, action):
               not_if=historyserver_no_op_test)
 
     elif name == 'sparkthriftserver':
+      if params.security_enabled:
+        hive_principal = params.hive_kerberos_principal.replace('_HOST', socket.getfqdn().lower())
+        hive_kinit_cmd = format("{kinit_path_local} -kt {hive_kerberos_keytab} {hive_principal}; ")
+        Execute(hive_kinit_cmd, user=params.hive_user)
+
       thriftserver_no_op_test = format(
      'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
      Execute(format('{spark_thrift_server_start} --properties-file {spark_thrift_server_conf_file} {spark_thrift_cmd_opts_properties}'),
-              user=params.spark_user,
+              user=params.hive_user,
               environment={'JAVA_HOME': params.java_home},
               not_if=thriftserver_no_op_test
       )
@@ -68,7 +74,7 @@ def spark_service(name, action):
 
     elif name == 'sparkthriftserver':
       Execute(format('{spark_thrift_server_stop}'),
-              user=params.spark_user,
+              user=params.hive_user,
               environment={'JAVA_HOME': params.java_home}
       )
       File(params.spark_thrift_server_pid_file,

http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
index bb0d35f..2272c72 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
@@ -27,6 +27,11 @@ spark_user = config['configurations']['spark-env']['spark_user']
 spark_group = config['configurations']['spark-env']['spark_group']
 user_group = config['configurations']['cluster-env']['user_group']
 
+if 'hive-env' in config['configurations']:
+  hive_user = config['configurations']['hive-env']['hive_user']
+else:
+  hive_user = "hive"
+
 spark_pid_dir = config['configurations']['spark-env']['spark_pid_dir']
 spark_history_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.deploy.history.HistoryServer-1.pid")
 spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")

http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/kerberos.json
b/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/kerberos.json
index 3868a62..90d9090 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/kerberos.json
@@ -52,6 +52,9 @@
           "identities": [
             {
               "name": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "/HIVE/HIVE_SERVER/hive_server_hive"
             }
           ]
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
index 2de64c5..54df516 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
@@ -20,9 +20,9 @@ limitations under the License.
 <configuration supports_final="true">
   <property>
     <name>hive.server2.thrift.port</name>
-    <value>10000</value>
+    <value>10015</value>
     <description>
-      TCP port number to listen on, default 10000.
+      TCP port number to listen on, default 10015.
     </description>
   </property>
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index 4fdbd12..67bba91 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -68,7 +68,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = 'spark',
     )
     self.assertNoMoreResources()
-    
+
   def test_stop_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/job_history_server.py",
                    classname = "JobHistoryServer",
@@ -85,7 +85,7 @@ class TestJobHistoryServer(RMFTestCase):
         action = ['delete'],
     )
     self.assertNoMoreResources()
-    
+
   def test_configure_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/job_history_server.py",
                    classname = "JobHistoryServer",
@@ -131,7 +131,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = 'spark',
     )
     self.assertNoMoreResources()
-    
+
   def test_stop_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/job_history_server.py",
                    classname = "JobHistoryServer",
@@ -154,11 +154,13 @@ class TestJobHistoryServer(RMFTestCase):
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('Directory', '/var/log/spark',
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('HdfsResource', '/user/spark',
         security_enabled = False,
@@ -214,11 +216,13 @@ class TestJobHistoryServer(RMFTestCase):
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('Directory', '/var/log/spark',
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('HdfsResource', '/user/spark',
         security_enabled = True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
index 23f94d0..b942571 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
@@ -36,7 +36,7 @@ class TestSparkClient(RMFTestCase):
     )
     self.assert_configure_default()
     self.assertNoMoreResources()
-    
+
   def test_configure_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/spark_client.py",
                    classname = "SparkClient",
@@ -53,11 +53,13 @@ class TestSparkClient(RMFTestCase):
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('Directory', '/var/log/spark',
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         owner = 'spark',
@@ -81,17 +83,19 @@ class TestSparkClient(RMFTestCase):
         group = 'spark',
     )
 
-      
+
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/var/run/spark',
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('Directory', '/var/log/spark',
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         owner = 'spark',

http://git-wip-us.apache.org/repos/asf/ambari/blob/00c73357/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index a8359cf..20054bc 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -56,7 +56,7 @@ class TestSparkThriftServer(RMFTestCase):
    self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-thriftserver.sh --properties-file /usr/hdp/current/spark-client/conf/spark-thrift-sparkconf.conf --driver-memory 1g',
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
        not_if = 'ls /var/run/spark/spark-spark-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid >/dev/null 2>&1 && ps -p `cat /var/run/spark/spark-spark-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid` >/dev/null 2>&1',
-        user = 'spark',
+        user = 'hive',
     )
     self.assertNoMoreResources()
 
@@ -70,7 +70,7 @@ class TestSparkThriftServer(RMFTestCase):
     )
     self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/stop-thriftserver.sh',
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
-        user = 'spark',
+        user = 'hive',
     )
     self.assertResourceCalled('File', '/var/run/spark/spark-spark-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid',
         action = ['delete'],
@@ -82,11 +82,13 @@ class TestSparkThriftServer(RMFTestCase):
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('Directory', '/var/log/spark',
         owner = 'spark',
         group = 'hadoop',
         recursive = True,
+        mode = 0775
     )
     self.assertResourceCalled('HdfsResource', '/user/spark',
         security_enabled = False,


Mime
View raw message