ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From smoha...@apache.org
Subject ambari git commit: Revert "AMBARI-17814 Spark Livy should wait for ATS start for BP deployments (dsen)"
Date Thu, 21 Jul 2016 23:38:07 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 0be0f7564 -> 05d2d0548


Revert "AMBARI-17814 Spark Livy should wait for ATS start for BP deployments (dsen)"

This reverts commit fbf7ada9a0729abf0ada83457a3b185e3ec8c2cb.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/05d2d054
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/05d2d054
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/05d2d054

Branch: refs/heads/branch-2.4
Commit: 05d2d05486c566b615a1124cbde6141c9503f2d7
Parents: 0be0f75
Author: Sumit Mohanty <smohanty@hortonworks.com>
Authored: Thu Jul 21 16:35:19 2016 -0700
Committer: Sumit Mohanty <smohanty@hortonworks.com>
Committed: Thu Jul 21 16:35:19 2016 -0700

----------------------------------------------------------------------
 .../SPARK/1.2.1/package/scripts/livy_server.py  | 59 --------------------
 .../SPARK/1.2.1/package/scripts/params.py       | 13 +----
 .../stacks/2.2/SPARK/test_spark_client.py       |  1 -
 3 files changed, 1 insertion(+), 72 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/05d2d054/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
index 4762be8..1e859a8 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
@@ -22,13 +22,6 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
-from resource_management.core.exceptions import Fail
-from resource_management.libraries.providers.hdfs_resource import WebHDFSUtil
-from resource_management.libraries.providers.hdfs_resource import HdfsResourceProvider
-from resource_management import is_empty
-from resource_management import shell
-from resource_management.libraries.functions.decorator import retry
-from resource_management.core.logger import Logger
 
 from livy_service import livy_service
 from setup_livy import setup_livy
@@ -51,10 +44,6 @@ class LivyServer(Script):
     import params
     env.set_params(params)
 
-    if params.has_ats and params.has_livyserver:
-      Logger.info("Verifying DFS directories where ATS stores time line data for active and completed applications.")
-      self.wait_for_dfs_directories_created([params.entity_groupfs_store_dir, params.entity_groupfs_active_dir])
-
     self.configure(env)
     livy_service('server', upgrade_type=upgrade_type, action='start')
 
@@ -70,54 +59,6 @@ class LivyServer(Script):
 
     check_process_status(status_params.livy_server_pid_file)
 
-  #  TODO move out and compose with similar method in resourcemanager.py
-  def wait_for_dfs_directories_created(self, dirs):
-    import params
-
-    ignored_dfs_dirs = HdfsResourceProvider.get_ignored_resources_list(params.hdfs_resource_ignore_file)
-
-    if params.security_enabled:
-      Execute("{kinit_path_local} -kt {livy_kerberos_keytab} {livy_kerberos_principal};",
-              user=params.livy_user
-              )
-      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
-              user=params.hdfs_user
-              )
-
-    for dir_path in dirs:
-      self.wait_for_dfs_directory_created(dir_path, ignored_dfs_dirs)
-
-
-  @retry(times=8, sleep_time=20, backoff_factor=1, err_class=Fail)
-  def wait_for_dfs_directory_created(self, dir_path, ignored_dfs_dirs):
-    import params
-
-
-    if not is_empty(dir_path):
-      dir_path = HdfsResourceProvider.parse_path(dir_path)
-
-      if dir_path in ignored_dfs_dirs:
-        Logger.info("Skipping DFS directory '" + dir_path + "' as it's marked to be ignored.")
-        return
-
-      Logger.info("Verifying if DFS directory '" + dir_path + "' exists.")
-
-      dir_exists = None
-
-      if WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
-        # check with webhdfs is much faster than executing hdfs dfs -test
-        util = WebHDFSUtil(params.hdfs_site, params.hdfs_user, params.security_enabled)
-        list_status = util.run_command(dir_path, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
-        dir_exists = ('FileStatus' in list_status)
-      else:
-        # have to do time expensive hdfs dfs -d check.
-        dfs_ret_code = shell.call(format("hdfs --config {hadoop_conf_dir} dfs -test -d " + dir_path), user=params.livy_user)[0]
-        dir_exists = not dfs_ret_code #dfs -test -d returns 0 in case the dir exists
-
-      if not dir_exists:
-        raise Fail("DFS directory '" + dir_path + "' does not exist !")
-      else:
-        Logger.info("DFS directory '" + dir_path + "' exists.")
 
   def get_component_name(self):
     return "livy-server"

http://git-wip-us.apache.org/repos/asf/ambari/blob/05d2d054/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index bceda0a..8d4331b 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -185,10 +185,6 @@ if has_spark_thriftserver and 'spark-thrift-sparkconf' in config['configurations
 
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 hdfs_site = config['configurations']['hdfs-site']
-hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore"
-
-ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
-has_ats = len(ats_host) > 0
 
 dfs_type = default("/commandParams/dfs_type", "")
 
@@ -222,13 +218,6 @@ if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stac
 
   livy_livyserver_hosts = default("/clusterHostInfo/livy_server_hosts", [])
 
-  # ats 1.5 properties
-  entity_groupfs_active_dir = config['configurations']['yarn-site']['yarn.timeline-service.entity-group-fs-store.active-dir']
-  entity_groupfs_active_dir_mode = 01777
-  entity_groupfs_store_dir = config['configurations']['yarn-site']['yarn.timeline-service.entity-group-fs-store.done-dir']
-  entity_groupfs_store_dir_mode = 0700
-  is_webhdfs_enabled = hdfs_site['dfs.webhdfs.enabled']
-
   if len(livy_livyserver_hosts) > 0:
     has_livyserver = True
 
@@ -242,7 +231,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
-  hdfs_resource_ignore_file = hdfs_resource_ignore_file,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/05d2d054/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
index c32d66d..ed331ea 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
@@ -51,7 +51,6 @@ class TestSparkClient(RMFTestCase):
     self.assert_configure_secured()
     self.assertNoMoreResources()
 
-
   def assert_configure_default(self):
     self.assertResourceCalled('Directory', '/var/run/spark',
         owner = 'spark',


Mime
View raw message