ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jlun...@apache.org
Subject [1/2] ambari git commit: AMBARI-10409: [WinTP2] Merge HDPWIN HDFS package scripts into common services (jluniya)
Date Fri, 10 Apr 2015 06:05:56 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 880098c75 -> e989ec0e5


http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/datanode.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/datanode.py
deleted file mode 100644
index 3d8ed5c..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/datanode.py
+++ /dev/null
@@ -1,49 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs import hdfs
-import service_mapping
-
-class DataNode(Script):
-  def install(self, env):
-
-    if not check_windows_service_exists(service_mapping.datanode_win_service_name):
-      self.install_packages(env)
-
-  def start(self, env):
-    import params
-    self.configure(env)
-    Service(service_mapping.datanode_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-    Service(service_mapping.datanode_win_service_name, action="stop")
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hdfs("datanode")
-
-  def status(self, env):
-    check_windows_service_status(service_mapping.datanode_win_service_name)
-
-if __name__ == "__main__":
-  DataNode().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs.py
deleted file mode 100644
index 92f6616..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs.py
+++ /dev/null
@@ -1,54 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-import os
-
-def hdfs(component=None):
-  import params
-  if component == "namenode":
-    directories = params.dfs_name_dir.split(",")
-    Directory(directories,
-              owner=params.hdfs_user,
-              mode="(OI)(CI)F",
-              recursive=True
-    )
-    File(params.exclude_file_path,
-         content=Template("exclude_hosts_list.j2"),
-         owner=params.hdfs_user,
-         mode="f",
-    )
-  if "hadoop-policy" in params.config['configurations']:
-    XmlConfig("hadoop-policy.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['hadoop-policy'],
-              owner=params.hdfs_user,
-              mode="f",
-              configuration_attributes=params.config['configuration_attributes']['hadoop-policy']
-    )
-
-  XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            mode="f",
-            configuration_attributes=params.config['configuration_attributes']['hdfs-site']
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs_client.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs_client.py
deleted file mode 100644
index 0227c4b..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs_client.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs import hdfs
-
-
-class HdfsClient(Script):
-  def install(self, env):
-    # client checks env var to determine if it is installed
-    if not os.environ.has_key("HADOOP_CONF_DIR"):
-      self.install_packages(env)
-    self.configure(env)
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hdfs()
-
-
-if __name__ == "__main__":
-  HdfsClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs_rebalance.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs_rebalance.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs_rebalance.py
deleted file mode 100644
index aea6fce..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/hdfs_rebalance.py
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-import re
-
-class HdfsParser():
-  def __init__(self):
-    self.initialLine = None
-    self.state = None
-
-  def parseLine(self, line):
-    hdfsLine = HdfsLine()
-    type, matcher = hdfsLine.recognizeType(line)
-    if(type == HdfsLine.LineType.HeaderStart):
-      self.state = 'PROCESS_STARTED'
-    elif (type == HdfsLine.LineType.Progress):
-      self.state = 'PROGRESS'
-      hdfsLine.parseProgressLog(line, matcher)
-      if(self.initialLine == None): self.initialLine = hdfsLine
-
-      return hdfsLine
-    elif (type == HdfsLine.LineType.ProgressEnd):
-      self.state = 'PROCESS_FINISED'
-    return None
-
-class HdfsLine():
-
-  class LineType:
-    HeaderStart, Progress, ProgressEnd, Unknown = range(4)
-
-
-  MEMORY_SUFFIX = ['B','KB','MB','GB','TB','PB','EB']
-  MEMORY_PATTERN = '(?P<memmult_%d>(?P<memory_%d>(\d+)(.|,)?(\d+)?) (?P<mult_%d>'+"|".join(MEMORY_SUFFIX)+'))'
-
-  HEADER_BEGIN_PATTERN = re.compile('Time Stamp\w+Iteration#\w+Bytes Already Moved\w+Bytes Left To Move\w+Bytes Being Moved')
-  PROGRESS_PATTERN = re.compile(
-                            "(?P<date>.*?)\s+" +
-                            "(?P<iteration>\d+)\s+" +
-                            MEMORY_PATTERN % (1,1,1) + "\s+" +
-                            MEMORY_PATTERN % (2,2,2) + "\s+" +
-                            MEMORY_PATTERN % (3,3,3)
-                            )
-  PROGRESS_END_PATTERN = re.compile('(The cluster is balanced. Exiting...|The cluster is balanced. Exiting...)')
-
-  def __init__(self):
-    self.date = None
-    self.iteration = None
-    self.bytesAlreadyMoved = None
-    self.bytesLeftToMove = None
-    self.bytesBeingMoved = None
-    self.bytesAlreadyMovedStr = None
-    self.bytesLeftToMoveStr = None
-    self.bytesBeingMovedStr = None
-
-  def recognizeType(self, line):
-    for (type, pattern) in (
-                            (HdfsLine.LineType.HeaderStart, self.HEADER_BEGIN_PATTERN),
-                            (HdfsLine.LineType.Progress, self.PROGRESS_PATTERN),
-                            (HdfsLine.LineType.ProgressEnd, self.PROGRESS_END_PATTERN)
-                            ):
-      m = re.match(pattern, line)
-      if m:
-        return type, m
-    return HdfsLine.LineType.Unknown, None
-
-  def parseProgressLog(self, line, m):
-    '''
-    Parse the line of 'hdfs rebalancer' output. The example output being parsed:
-
-    Time Stamp               Iteration#  Bytes Already Moved  Bytes Left To Move  Bytes Being Moved
-    Jul 28, 2014 5:01:49 PM           0                  0 B             5.74 GB            9.79 GB
-    Jul 28, 2014 5:03:00 PM           1                  0 B             5.58 GB            9.79 GB
-
-    Throws AmbariException in case of parsing errors
-
-    '''
-    m = re.match(self.PROGRESS_PATTERN, line)
-    if m:
-      self.date = m.group('date')
-      self.iteration = int(m.group('iteration'))
-
-      self.bytesAlreadyMoved = self.parseMemory(m.group('memory_1'), m.group('mult_1'))
-      self.bytesLeftToMove = self.parseMemory(m.group('memory_2'), m.group('mult_2'))
-      self.bytesBeingMoved = self.parseMemory(m.group('memory_3'), m.group('mult_3'))
-
-      self.bytesAlreadyMovedStr = m.group('memmult_1')
-      self.bytesLeftToMoveStr = m.group('memmult_2')
-      self.bytesBeingMovedStr = m.group('memmult_3')
-    else:
-      raise AmbariException("Failed to parse line [%s]")
-
-  def parseMemory(self, memorySize, multiplier_type):
-    try:
-      factor = self.MEMORY_SUFFIX.index(multiplier_type)
-    except ValueError:
-      raise AmbariException("Failed to memory value [%s %s]" % (memorySize, multiplier_type))
-
-    return float(memorySize) * (1024 ** factor)
-  def toJson(self):
-    return {
-            'timeStamp' : self.date,
-            'iteration' : self.iteration,
-
-            'dataMoved': self.bytesAlreadyMovedStr,
-            'dataLeft' : self.bytesLeftToMoveStr,
-            'dataBeingMoved': self.bytesBeingMovedStr,
-
-            'bytesMoved': self.bytesAlreadyMoved,
-            'bytesLeft' : self.bytesLeftToMove,
-            'bytesBeingMoved': self.bytesBeingMoved,
-          }
-  def __str__(self):
-    return "[ date=%s,iteration=%d, bytesAlreadyMoved=%d, bytesLeftToMove=%d, bytesBeingMoved=%d]"%(self.date, self.iteration, self.bytesAlreadyMoved, self.bytesLeftToMove, self.bytesBeingMoved)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/journalnode.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/journalnode.py
deleted file mode 100644
index 9b56ae7..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/journalnode.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs import hdfs
-import service_mapping
-
-class JournalNode(Script):
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.journalnode_win_service_name):
-      self.install_packages(env)
-
-  def start(self, env):
-    import params
-    self.configure(env)
-    Service(service_mapping.journalnode_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    Service(service_mapping.journalnode_win_service_name, action="stop")
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hdfs()
-    pass
-
-  def status(self, env):
-    check_windows_service_status(service_mapping.journalnode_win_service_name)
-
-if __name__ == "__main__":
-  JournalNode().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/namenode.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/namenode.py
deleted file mode 100644
index 32fc681..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/namenode.py
+++ /dev/null
@@ -1,128 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs import hdfs
-import service_mapping
-import hdfs_rebalance
-import time
-import json
-import subprocess
-import sys
-import os
-from datetime import datetime
-from ambari_commons.os_windows import *
-
-class NameNode(Script):
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.namenode_win_service_name):
-      self.install_packages(env)
-
-    import params
-    self.configure(env)
-    namenode_format_marker = os.path.join(params.hadoop_conf_dir,"NN_FORMATTED")
-    if not os.path.exists(namenode_format_marker):
-      hadoop_cmd = "cmd /C %s" % (os.path.join(params.hadoop_home, "bin", "hadoop.cmd"))
-      Execute("%s namenode -format" % (hadoop_cmd))
-      open(namenode_format_marker, 'a').close()
-
-  def start(self, env):
-    self.configure(env)
-    Service(service_mapping.namenode_win_service_name, action="start")
-
-  def stop(self, env):
-    Service(service_mapping.namenode_win_service_name, action="stop")
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hdfs("namenode")
-
-  def status(self, env):
-    check_windows_service_status(service_mapping.namenode_win_service_name)
-    pass
-
-  def decommission(self, env):
-    import params
-
-    env.set_params(params)
-    hdfs_user = params.hdfs_user
-    conf_dir = params.hadoop_conf_dir
-
-    File(params.exclude_file_path,
-         content=Template("exclude_hosts_list.j2"),
-         owner=hdfs_user
-    )
-
-    if params.dfs_ha_enabled:
-      # due to a bug in hdfs, refreshNodes will not run on both namenodes so we
-      # need to execute each command scoped to a particular namenode
-      nn_refresh_cmd = format('cmd /c hadoop dfsadmin -fs hdfs://{namenode_rpc} -refreshNodes')
-    else:
-      nn_refresh_cmd = format('cmd /c hadoop dfsadmin -refreshNodes')
-    Execute(nn_refresh_cmd, user=hdfs_user)
-
-
-  def rebalancehdfs(self, env):
-    import params
-    env.set_params(params)
-
-    hdfs_user = params.hdfs_user
-
-    name_node_parameters = json.loads( params.name_node_params )
-    threshold = name_node_parameters['threshold']
-    _print("Starting balancer with threshold = %s\n" % threshold)
-
-    def calculateCompletePercent(first, current):
-      return 1.0 - current.bytesLeftToMove/first.bytesLeftToMove
-
-    def startRebalancingProcess(threshold):
-      rebalanceCommand = 'hdfs balancer -threshold %s' % threshold
-      return ['cmd', '/C', rebalanceCommand]
-
-    command = startRebalancingProcess(threshold)
-    basedir = os.path.join(env.config.basedir, 'scripts')
-
-    _print("Executing command %s\n" % command)
-
-    parser = hdfs_rebalance.HdfsParser()
-    returncode, stdout, err = run_os_command_impersonated(' '.join(command), hdfs_user, Script.get_password(hdfs_user))
-
-    for line in stdout.split('\n'):
-      _print('[balancer] %s %s' % (str(datetime.now()), line ))
-      pl = parser.parseLine(line)
-      if pl:
-        res = pl.toJson()
-        res['completePercent'] = calculateCompletePercent(parser.initialLine, pl)
-
-        self.put_structured_out(res)
-      elif parser.state == 'PROCESS_FINISED' :
-        _print('[balancer] %s %s' % (str(datetime.now()), 'Process is finished' ))
-        self.put_structured_out({'completePercent' : 1})
-        break
-
-    if returncode != None and returncode != 0:
-      raise Fail('Hdfs rebalance process exited with error. See the log output')
-
-def _print(line):
-  sys.stdout.write(line)
-  sys.stdout.flush()
-
-if __name__ == "__main__":
-  NameNode().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/params.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/params.py
deleted file mode 100644
index 6246ef0..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/params.py
+++ /dev/null
@@ -1,58 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import os
-
-config = Script.get_config()
-hadoop_conf_dir = os.environ["HADOOP_CONF_DIR"]
-hbase_conf_dir = os.environ["HBASE_CONF_DIR"]
-hadoop_home = os.environ["HADOOP_HOME"]
-#directories & files
-dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
-fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
-dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
-#decomission
-hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
-exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
-# HDFS High Availability properties
-dfs_ha_enabled = False
-dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None)
-dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
-
-namenode_id = None
-namenode_rpc = None
-hostname = config["hostname"]
-if dfs_ha_namenode_ids:
-  dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
-  dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
-  if dfs_ha_namenode_ids_array_len > 1:
-    dfs_ha_enabled = True
-if dfs_ha_enabled:
-  for nn_id in dfs_ha_namemodes_ids_list:
-    nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
-    if hostname in nn_host:
-      namenode_id = nn_id
-      namenode_rpc = nn_host
-
-hdfs_user = "hadoop"
-
-grep_exe = "findstr"
-
-name_node_params = default("/commandParams/namenode", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/service_check.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/service_check.py
deleted file mode 100644
index e5cbaab..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/service_check.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from resource_management.libraries import functions
-
-class HdfsServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    unique = functions.get_unique_id_and_date()
-
-    #Hadoop uses POSIX-style paths, separator is always /
-    dir = '/tmp'
-    tmp_file = dir + '/' + unique
-
-    #commands for execution
-    hadoop_cmd = "cmd /C %s" % (os.path.join(params.hadoop_home, "bin", "hadoop.cmd"))
-    create_dir_cmd = "%s fs -mkdir %s" % (hadoop_cmd, dir)
-    own_dir = "%s fs -chmod 777 %s" % (hadoop_cmd, dir)
-    test_dir_exists = "%s fs -test -e %s" % (hadoop_cmd, dir)
-    cleanup_cmd = "%s fs -rm %s" % (hadoop_cmd, tmp_file)
-    create_file_cmd = "%s fs -put %s %s" % (hadoop_cmd, os.path.join(params.hadoop_conf_dir, "core-site.xml"), tmp_file)
-    test_cmd = "%s fs -test -e %s" % (hadoop_cmd, tmp_file)
-
-    hdfs_cmd = "cmd /C %s" % (os.path.join(params.hadoop_home, "bin", "hdfs.cmd"))
-    safemode_command = "%s dfsadmin -safemode get | %s OFF" % (hdfs_cmd, params.grep_exe)
-
-    Execute(safemode_command, logoutput=True, try_sleep=3, tries=20)
-    Execute(create_dir_cmd, user=params.hdfs_user,logoutput=True, ignore_failures=True)
-    Execute(own_dir, user=params.hdfs_user,logoutput=True)
-    Execute(test_dir_exists, user=params.hdfs_user,logoutput=True)
-    Execute(create_file_cmd, user=params.hdfs_user,logoutput=True)
-    Execute(test_cmd, user=params.hdfs_user,logoutput=True)
-    Execute(cleanup_cmd, user=params.hdfs_user,logoutput=True)
-
-if __name__ == "__main__":
-  HdfsServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/service_mapping.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/service_mapping.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/service_mapping.py
deleted file mode 100644
index d76ce07..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/service_mapping.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-namenode_win_service_name = "namenode"
-datanode_win_service_name = "datanode"
-snamenode_win_service_name = "secondarynamenode"
-journalnode_win_service_name = "journalnode"
-zkfc_win_service_name = "zkfc"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/snamenode.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/snamenode.py
deleted file mode 100644
index a3f880a..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/snamenode.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs import hdfs
-import service_mapping
-
-class SNameNode(Script):
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.snamenode_win_service_name):
-      self.install_packages(env)
-
-  def start(self, env):
-    import params
-    self.configure(env)
-    Service(service_mapping.snamenode_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    Service(service_mapping.snamenode_win_service_name, action="stop")
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hdfs("secondarynamenode")
-
-  def status(self, env):
-    import params
-    check_windows_service_status(service_mapping.snamenode_win_service_name)
-
-if __name__ == "__main__":
-  SNameNode().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/zkfc_slave.py
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/zkfc_slave.py
deleted file mode 100644
index 5fadce0..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/scripts/zkfc_slave.py
+++ /dev/null
@@ -1,51 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs import hdfs
-import service_mapping
-
-class ZkfcSlave(Script):
-  def install(self, env):
-    if not check_windows_service_exists(service_mapping.zkfc_win_service_name):
-      import params
-      env.set_params(params)
-      self.install_packages(env)
-
-  def start(self, env):
-    import params
-    self.configure(env)
-    Service(service_mapping.zkfc_win_service_name, action="start")
-
-  def stop(self, env):
-    import params
-    Service(service_mapping.zkfc_win_service_name, action="stop")
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hdfs()
-    pass
-
-  def status(self, env):
-    check_windows_service_status(service_mapping.zkfc_win_service_name)
-
-
-if __name__ == "__main__":
-  ZkfcSlave().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/templates/exclude_hosts_list.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/templates/exclude_hosts_list.j2
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/templates/exclude_hosts_list.j2
deleted file mode 100644
index a92cdc1..0000000
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/package/templates/exclude_hosts_list.j2
+++ /dev/null
@@ -1,21 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-{% for host in hdfs_exclude_file %}
-{{host}}
-{% endfor %}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e989ec0e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
index e2a5597..5da0f04 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
@@ -25,6 +25,7 @@ from resource_management import *
 from stacks.utils.RMFTestCase import *
 
 
+@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
 @patch.object(tarfile,"open", new = MagicMock())
 @patch.object(tempfile,"mkdtemp", new = MagicMock(return_value='/tmp/123'))
 @patch.object(contextlib,"closing", new = MagicMock())


Mime
View raw message