hadoop-common-commits mailing list archives

From ni...@apache.org
Subject svn commit: r608950 [4/7] - in /lucene/hadoop/trunk/src: contrib/hod/ contrib/hod/bin/ contrib/hod/conf/ contrib/hod/hodlib/ contrib/hod/hodlib/AllocationManagers/ contrib/hod/hodlib/Common/ contrib/hod/hodlib/GridServices/ contrib/hod/hodlib/Hod/ cont...
Date Fri, 04 Jan 2008 18:20:22 GMT
Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/types.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/types.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/types.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/types.py Fri Jan  4 10:20:17 2008
@@ -0,0 +1,1206 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+# $Id:types.py 6172 2007-05-22 20:26:54Z zim $
+#
+# Christopher Zimmerman - zim@yahoo-inc.com - 04/07/2007
+#------------------------------------------------------------------------------
+
+""" Higher level data types and type related classes.
+
+    Supported Types (Verification and Display):
+
+      address        - validates ip:port and host:port tcp addresses
+      ip_address     - validates an IP address
+      net_address    - validates an IP-like address, e.g. a netmask
+      hostname       - validates a hostname with DNS
+      eaddress       - validates a single email address or a comma
+                       separated list of email addresses
+      http_version   - validates a value is an HTTP version (1.0/1.1)
+      tcp_port       - validates a value to be a valid tcp port (2-65535)
+      bool           - validates value is (0, 1, true, false) / converts
+                       true -> 1 and false -> 0
+      directory      - validates a value is a directory / resolves path to
+                       absolute path
+      file           - validates a value is a file / resolves path to absolute
+                       path
+      float          - validates a value is a float, converts string to float
+      pos_float      - validates a value is a float and >= 0, converts string
+                       to float
+      pos_num        - same as pos_float
+      neg_float      - validates a value is a float and < 0, converts string to
+                       float
+      int            - validates a value is an integer, converts string to
+                       integer
+      pos_int        - validates a value is an integer and >= 0, converts
+                       string to integer
+      neg_int        - validates a value is an integer and < 0, converts
+                       string to integer
+      freq           - frequency, positive integer
+      size           - validates a size in bytes, kb, mb, gb, tb, or pb
+                       (int > 0 post-fixed with K, M, G, T, or P); also converts
+                       value to integer bytes
+      range          - numeric range, x-y normalized to a tuple; if a single
+                       number is supplied, a single-element tuple is returned
+      timestamp      - UTC timestamp of the form YYYY-MM-DD HH:MM:SS
+      user_account   - UNIX user account name
+      user_group     - UNIX group name
+      string         - arbitrarily long string
+      list           - comma separated list of strings of arbitrary length
+      keyval         - comma separated list of key=value pairs; a key does not
+                       need to be unique."""
+
+import sys, os, socket, pwd, grp, stat, re, string, pprint
+
+from tcp import tcpSocket, check_net_address, check_ip_address
+from util import check_timestamp
+
+types = { 'directory'      : { 'db'    : 'string',
+                               'units' : None     },
+
+          'address'        : { 'db'    : 'string',
+                               'units' : None     },
+
+          'ip_address'     : { 'db'    : 'string',
+                               'units' : None     },
+
+          'net_address'    : { 'db'    : 'string',
+                               'units' : None     },
+
+          'bool'           : { 'db'    : 'bool',
+                               'units' : None     },
+
+          'int'            : { 'db'    : 'integer',
+                               'units' : None     },
+
+          'float'          : { 'db'    : 'float',
+                               'units' : None     },
+
+          'pos_int'        : { 'db'    : 'integer',
+                               'units' : None     },
+
+          'neg_int'        : { 'db'    : 'integer',
+                               'units' : None     },
+
+          'pos_num'        : { 'db'    : 'float',
+                               'units' : None     },
+
+          'pos_float'      : { 'db'    : 'float',
+                               'units' : None     },
+
+          'neg_float'      : { 'db'    : 'float',
+                               'units' : None     },
+
+          'string'         : { 'db'    : 'string',
+                               'units' : None     },
+
+          'list'           : { 'db'    : 'string',
+                               'units' : None     },
+
+          'file'           : { 'db'    : 'string',
+                               'units' : None     },
+
+          'size'           : { 'db'    : 'integer',
+                               'units' : 'bytes'  },
+
+          'freq'           : { 'db'    : 'integer',
+                               'units' : 'hz'     },
+
+          'eaddress'       : { 'db'    : 'string',
+                               'units' : None     },
+
+          'tcp_port'       : { 'db'    : 'integer',
+                               'units' : None     },
+
+          'http_version'   : { 'db'    : 'float',
+                               'units' : None     },
+
+          'range'          : { 'db'    : 'string',
+                               'units' : None     },
+
+          'hostname'       : { 'db'    : 'string',
+                               'units' : None     },
+
+          'user_account'   : { 'db'    : 'string',
+                               'units' : None     },
+
+          'user_group'     : { 'db'    : 'string',
+                               'units' : None     },
+
+          'timestamp'      : { 'db'    : 'timestamp',
+                               'units' : None     },
+
+          'keyval'         : { 'db'    : 'string',
+                               'units' : None     },
+
+          ''               : { 'db'    : 'string',
+                               'units' : None     }}
+
+dbTypes = { 'string'  :   { 'type'  : 'varchar',
+                            'store' : 'type_strings_0',
+                            'table' : True              },
+
+            'integer' :   { 'type'  : 'bigint',
+                            'store' : 'integers',
+                            'table' : False             },
+
+            'float' :     { 'type'  : 'real',
+                            'store' : 'floats',
+                            'table' : False             },
+
+            'bool' :      { 'type'  : 'boolean',
+                            'store' : 'bools',
+                            'table' : False             },
+
+            'timestamp' : { 'type'  : 'timestamp(0)',
+                            'store' : 'timestamps',
+                            'table' : False             }}
+
+reSizeFormat = re.compile("^(\d+)(k|m|g|t|p|kb|mb|gb|tb|pb)$", flags=2)
+reDash = re.compile("\s*-\s*")
+
+sizeFactors = { 'b'     : 1,
+                'bytes' : 1,
+                'k'     : 1024,
+                'kb'    : 1024,
+                'm'     : 1048576,
+                'mb'    : 1048576,
+                'g'     : 1073741824,
+                'gb'    : 1073741824,
+                't'     : 1099511627776,
+                'tb'    : 1099511627776,
+                'p'     : 1125899906842624,
+                'pb'    : 1125899906842624 }
+
+freqFactors = { 'hz'  : 1,
+                'khz' : 1000,
+                'mhz' : 1000000,
+                'ghz' : 1000000000,
+                'thz' : 1000000000000,
+                'phz' : 1000000000000000 }
+
+sizeMap = [ { 'factor' : sizeFactors['b'],
+              'long'   : 'byte',
+              'short'  : 'byte'           },
+
+            { 'factor' : sizeFactors['k'],
+              'long'   : 'Kilobyte',
+              'short'  : 'KB'             },
+
+            { 'factor' : sizeFactors['m'],
+              'long'   : 'Megabyte',
+              'short'  : 'MB'             },
+
+            { 'factor' : sizeFactors['g'],
+              'long'   : 'Gigabyte',
+              'short'  : 'GB'             },
+
+            { 'factor' : sizeFactors['t'],
+              'long'   : 'Terabyte',
+              'short'  : 'TB'             },
+
+            { 'factor' : sizeFactors['p'],
+              'long'   : 'Petabyte',
+              'short'  : 'PB'             } ]
+
+freqMap = [ { 'factor' : freqFactors['hz'],
+              'long'   : 'Hertz',
+              'short'  : 'Hz'               },
+
+            { 'factor' : freqFactors['khz'],
+              'long'   : 'Kilohertz',
+              'short'  : 'KHz'              },
+
+            { 'factor' : freqFactors['mhz'],
+              'long'   : 'Megahertz',
+              'short'  : 'MHz'              },
+
+            { 'factor' : freqFactors['ghz'],
+              'long'   : 'Gigahertz',
+              'short'  : 'GHz'              },
+
+            { 'factor' : freqFactors['thz'],
+              'long'   : 'Terahertz',
+              'short'  : 'THz'              },
+
+            { 'factor' : freqFactors['phz'],
+              'long'   : 'Petahertz',
+              'short'  : 'PHz'              } ]
+
+reListString = r"(?<!\\),"
+reList = re.compile(reListString)
+
+reKeyVal = r"(?<!\\)="
+reKeyVal = re.compile(reKeyVal)
+
+# referenced by __verify_eaddress below; the email pattern is deliberately
+# loose, the real validation is the DNS lookup on the domain part
+reComma = re.compile(",")
+reEmailAddress = re.compile("^[^@]+@[^@]+$")
+reEmailDelimit = re.compile("@")
+
+class typeToString:
+    """Provides method for converting normalized types to strings."""
+    def __init__(self):
+        self.toStringFunctions = {}
+        self.__build_to_string_functions()
+ 
+    def __call__(self, type, value):
+        return self.toStringFunctions[type](value) 
+ 
+    def __build_to_string_functions(self):
+        functions = {}
+        for function in dir(self):
+            functions[function] = 1
+
+        for type in types.keys():
+            # kinda bad, need to find out how to know the name of the class
+            #  I'm in.  But it works.
+            functionName = "_typeToString__tostring_%s" % type
+            if functions.has_key(functionName):
+                self.toStringFunctions[type] = getattr(self, functionName)
+            else:
+                if type == '':
+                    self.toStringFunctions[type] = self.__tostring_nothing
+                else:
+                    error = "To string function %s for type %s does not exist." \
+                        % (functionName, type)
+                    raise Exception(error)
+                    sys.exit(1)        
+
+    def __tostring(self, value):
+        return str(value)
+
+    def __tostring_directory(self, value):
+        return self.__tostring(value)
+
+    def __tostring_address(self, value):
+        return "%s:%s" % (value[0], value[1])
+
+    def __tostring_ip_address(self, value):
+        return self.__tostring(value)
+
+    def __tostring_net_address(self, value):
+        return self.__tostring(value)
+
+    def __tostring_bool(self, value):
+        if value == False:
+            return 'false'
+        elif value == True:
+            return 'true'
+        else:
+            return str(value)
+
+    def __tostring_int(self, value):
+        return self.__tostring(value)
+
+    def __tostring_float(self, value):
+        return self.__tostring(value)
+
+    def __tostring_pos_int(self, value):
+        return self.__tostring(value)
+
+    def __tostring_neg_int(self, value):
+        return self.__tostring(value)     
+
+    def __tostring_freq(self, value):
+        return self.__tostring(value)
+
+    def __tostring_pos_float(self, value):
+        return self.__tostring(value)
+
+    def __tostring_pos_num(self, value):
+        return self.__tostring(value)
+
+    def __tostring_neg_float(self, value):
+        return self.__tostring(value)
+
+    def __tostring_string(self, value):
+        return value
+
+    def __tostring_keyval(self, value):
+        string = ''
+        for key in value:
+            for item in value[key]:
+                string = "%s%s=%s," % (string, key, item)
+                
+        return string[:-1]  
+
+    def __tostring_list(self, value):
+        string = ''
+        for item in value:
+            string = "%s%s," % (string, item)
+            
+        return string[:-1]
+
+    def __tostring_file(self, value):
+        return self.__tostring(value)
+      
+    def __tostring_size(self, value):
+        return self.__tostring(value)
+        
+    def __tostring_eaddress(self, value):
+        return self.__tostring(value)
+
+    def __tostring_tcp_port(self, value):
+        return self.__tostring(value)
+
+    def __tostring_http_version(self, value):
+        return self.__tostring(value)
+
+    def __tostring_range(self, value):
+        if len(value) < 2:
+          return value[0]
+        else:
+          return "%s-%s" % (value[0], value[1])
+
+    def __tostring_timestamp(self, value):
+        return self.__tostring(value)
+
+    def __tostring_hostname(self, value):
+        return self.__tostring(value)
+
+    def __tostring_user_account(self, value):
+        return self.__tostring(value)
+
+    def __tostring_user_group(self, value):
+        return self.__tostring(value)
+
+    def __tostring_nothing(self, value):
+        return value
+
+class typeValidator:
+    """Type validation class used to normalize values or validated 
+       single/large sets of values by type."""
+
+    def __init__(self):
+        self.verifyFunctions = {}
+        self.__build_verify_functions()
+
+        self.validateList = []
+        self.validatedInfo = []
+
+    def __getattr__(self, attrname):
+        """validateList  = [ { 'func' : <bound method configValidator>,
+                               'name' : 'SA_COMMON.old_xml_dir',
+                               'value': 'var/data/old'                 },
+
+                             { 'func' : <bound method configValidator>,
+                               'name' : 'SA_COMMON.log_level',
+                               'value': '4'                            } ]
+
+           validatedInfo = [ { # name supplied to add()
+                               'name'       : 'SA_COMMON.tmp_xml_dir',
+
+                               # is valid or not
+                               'isValid'    : 1,
+
+                               # normalized value
+                               'normalized' : '/var/data/tmp',
+
+                               # error string ?
+                               'errorData'  : 0                        },
+
+                             { 'name'       : 'SA_COMMON.new_xml_dir',
+                               'isValid'    : 1,
+                               'normalized' : '/var/data/new',
+                               'errorData'  : 0                        } ]"""
+
+        if attrname   == "validateList":
+            return self.validateList   # list of items to be validated
+        elif attrname == "validatedInfo":
+            return self.validatedInfo  # list of validation results
+        else: raise AttributeError, attrname
+
+    def __build_verify_functions(self):
+        functions = {}
+        for function in dir(self):
+            functions[function] = 1
+
+        for type in types.keys():
+            # kinda bad, need to find out how to know the name of the class
+            #  I'm in.  But it works.
+            functionName = "_typeValidator__verify_%s" % type
+            if functions.has_key(functionName):
+                self.verifyFunctions[type] = getattr(self, functionName)
+            else:
+                if type == '':
+                    self.verifyFunctions[type] = self.__verify_nothing
+                else:
+                    error = "Verify function %s for type %s does not exist." \
+                        % (functionName, type)
+                    raise Exception(error)
+                    sys.exit(1)
+
+    def __get_value_info(self):
+        valueInfo = { 'isValid' : 0, 'normalized' : 0, 'errorData' : 0 }
+
+        return valueInfo
+
+    def __set_value_info(self, valueInfo, **valueData):
+        try:
+            valueInfo['normalized'] = valueData['normalized']
+            valueInfo['isValid'] = 1
+        except KeyError:
+            valueInfo['isValid'] = 0
+            try:
+                valueInfo['errorData'] = valueData['errorData']
+            except:
+                pass
+
+    # start of 'private' verification methods, each one should correspond to a
+    #   type string (see self.verify_config())
+    def __verify_directory(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        if os.path.isdir(value):
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+        else:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+      
+    def __norm_directory(self, value):
+        return os.path.realpath(value)
+
+    def __verify_address(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        socket = tcpSocket(value)
+        if socket.verify():
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+        else:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+      
+    def __norm_address(self, value):
+        return value.split(':')
+
+    def __verify_ip_address(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        if check_ip_address(value):
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+        else:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+
+    def __verify_net_address(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        if check_net_address(value):
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+        else:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+
+    def __verify_bool(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        value = str(value)
+        if re.match("^(false|0|f|no)$", value, 2):
+            self.__set_value_info(valueInfo, normalized=False)
+        elif re.match("^(true|1|t|yes)$", value, 2):
+            self.__set_value_info(valueInfo, normalized=True)
+        else:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+      
+    def __norm_bool(self, value):
+        value = str(value)
+        norm = ""
+        if re.match("^(false|0|f|no)$", value, 2):
+            norm = False
+        elif re.match("^(true|1|t|yes)$", value, 2):
+            norm = True
+        else:
+            raise Exception("invalid bool specified: %s" % value)
+            
+        return norm
+
+    def __verify_int(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+        except:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+      
+    def __norm_int(self, value):
+        return int(value)
+
+    def __verify_float(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+        except:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+      
+    def __norm_float(self, value):
+        return float(value)
+
+    def __verify_pos_int(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            value = self.normalize(type, value)
+        except:
+            self.__set_value_info(valueInfo)
+        else:
+            self.__set_value_info(valueInfo, normalized=value)
+
+        return valueInfo
+      
+    def __norm_pos_int(self, value):
+        value = int(value)
+        if value < 0:
+            raise Exception("value is not positive: %s" % value)
+        
+        return value
+
+    def __verify_neg_int(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            value = self.normalize(type, value)
+        except:
+            self.__set_value_info(valueInfo)
+        else:
+            self.__set_value_info(valueInfo, normalized=value)
+
+        return valueInfo
+      
+    def __norm_neg_int(self, value):
+        value = int(value)
+        if value >= 0:
+            raise Exception("value is not negative: %s" % value)
+        
+        return value        
+
+    def __verify_freq(self, type, value):
+        return self.__verify_pos_int(type, value)
+
+    def __norm_freq(self, value):
+        return self.__norm_pos_int(value)
+
+    def __verify_pos_float(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            value = self.normalize(type, value)
+        except:
+            self.__set_value_info(valueInfo)
+        else:
+            self.__set_value_info(valueInfo, normalized=value)
+
+        return valueInfo
+
+    def __norm_pos_float(self, value):
+        value = float(value)
+        if value < 0:
+            raise Exception("value is not positive: %s" % value)
+        
+        return value
+
+    def __verify_pos_num(self, type, value):
+        return self.__verify_pos_float(type, value)
+      
+    def __norm_pos_num(self, value):
+        return self.__norm_pos_float(value)
+
+    def __verify_neg_float(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            value = self.normalize(type, value)
+        except:
+            self.__set_value_info(valueInfo)
+        else:
+            self.__set_value_info(valueInfo, normalized=value)
+
+        return valueInfo
+
+    def __norm_neg_float(self, value):
+        value = float(value)
+        if value >= 0:
+            raise Exception("value is not negative: %s" % value)
+        
+        return value
+
+    def __verify_string(self, type, value):
+        valueInfo = self.__get_value_info()
+        self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                   value))
+        
+        return valueInfo
+      
+    def __norm_string(self, value):
+        return str(value)
+
+    def __verify_keyval(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        if reKeyVal.search(value):
+          try:
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                value))
+          except:
+            self.__set_value_info(valueInfo, errorData = \
+              "invalid list of key-value pairs : [ %s ]" % value)
+        else:
+            msg = "No key value pairs found?"
+            self.__set_value_info(valueInfo, errorData=msg)
+
+        return valueInfo
+      
+    def __norm_keyval(self, value):
+        list = self.__norm_list(value)
+        keyValue = {}
+        for item in list:
+            # we only consider the first '=' for splitting
+            # we do this to support passing params like 
+            # mapred.child.java.opts=-Djava.library.path=some_dir
+            (key, value) = reKeyVal.split(item,1)
+            if not keyValue.has_key(key):
+                keyValue[key] = []
+            keyValue[key].append(value)
+        return keyValue     
+
+    def __verify_list(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        self.__set_value_info(valueInfo, normalized=self.normalize(type,value))
+
+        return valueInfo
+      
+    def __norm_list(self, value):
+        norm = []
+        if reList.search(value):
+            norm = reList.split(value)
+        else:
+            norm = [value,]
+            
+        return norm
+
+    def __verify_file(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        if os.path.isfile(value):
+            self.__set_value_info(valueInfo, normalized=self.normalize(type,
+                                                                       value))
+        else:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+      
+    def __norm_file(self, value):
+        return os.path.realpath(value)
+
+    def __verify_size(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        value = str(value)
+        if reSizeFormat.match(value):
+            numberPart = int(reSizeFormat.sub("\g<1>", value))
+            factorPart = reSizeFormat.sub("\g<2>", value)
+            try:
+                normalized = normalize_size(numberPart, factorPart)
+                self.__set_value_info(valueInfo,
+                    normalized=normalized)
+            except:
+                self.__set_value_info(valueInfo)
+        else:
+            try:
+                value = int(value)
+            except:
+                self.__set_value_info(valueInfo)
+            else:
+                if value >= 0:
+                    self.__set_value_info(valueInfo, normalized=value)
+                else:
+                    self.__set_value_info(valueInfo)
+
+        return valueInfo
+
+    def __norm_size(self, value):
+        norm = None
+        if reSizeFormat.match(value):
+            numberPart = int(reSizeFormat.sub("\g<1>", value))
+            factorPart = reSizeFormat.sub("\g<2>", value)
+            norm = normalize_size(numberPart, factorPart)            
+        else:
+            norm = int(value)
+            
+        return norm
+        
+        
+    def __verify_eaddress(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        emailList = reComma.split(value)
+
+        for emailAddress in emailList:
+            if reEmailAddress.match(emailAddress):
+                emailParts = reEmailDelimit.split(emailAddress)
+                try:
+                    socket.gethostbyname(emailParts[1])
+                    self.__set_value_info(valueInfo, normalized=self.normalize(
+                                          type, value))
+                except:
+                    errorString = "%s is invalid (domain lookup failed)" % \
+                        emailAddress
+                    self.__set_value_info(valueInfo, errorData=errorString)
+            else:
+                errorString = "%s is invalid" % emailAddress
+                self.__set_value_info(valueInfo, errorData=errorString)
+
+        return valueInfo
+
+    def __verify_tcp_port(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            value = self.__norm_tcp_port(value)
+        except:
+            self.__set_value_info(valueInfo)
+        else:
+            if value in range(2, 65536):
+                self.__set_value_info(valueInfo, normalized=value)
+            else:
+                self.__set_value_info(valueInfo)
+
+        return valueInfo
+      
+    def __norm_tcp_port(self, value):
+        return int(value)
+
+    def __verify_http_version(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        if value in ('1.0', '1.1'):
+            self.__set_value_info(valueInfo, normalized=float(value))
+        else:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+
+    def __verify_range(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        range = reDash.split(value)
+
+        try:
+            if len(range) > 1:
+                start = int(range[0])
+                end = int(range[1])
+            else:
+                start = int(range[0])
+                end = None
+        except:
+            self.__set_value_info(valueInfo)
+        else:
+            if end:
+                if end - start != 0:
+                    self.__set_value_info(valueInfo, normalized=(start, end))
+                else:
+                    self.__set_value_info(valueInfo)
+            else:
+                self.__set_value_info(valueInfo, normalized=(start,))
+
+        return valueInfo
+      
+    def __norm_range(self, value):
+        range = reDash.split(value)
+        if len(range) > 1:
+            start = int(range[0])
+            end = int(range[1])
+        else:
+            start = int(range[0])
+            end = None   
+            
+        return (start, end)     
+
+    def __verify_timestamp(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        if check_timestamp(value):
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+        else:
+            self.__set_value_info(valueInfo)
+
+        return valueInfo
+
+    def __verify_hostname(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            socket.gethostbyname(value)
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+        except:
+            errorString = "%s is invalid (domain lookup failed)" % value
+            self.__set_value_info(valueInfo, errorData=errorString)
+
+        return valueInfo
+
+    def __verify_user_account(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            pwd.getpwnam(value)
+        except:
+            errorString = "'%s' user account does not exist" % value
+            self.__set_value_info(valueInfo, errorData=errorString)
+        else:
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+
+        return valueInfo
+
+    def __verify_user_group(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        try:
+            grp.getgrnam(value)
+        except:
+            errorString = "'%s' group does not exist" % value
+            self.__set_value_info(valueInfo, errorData=errorString)
+        else:
+            self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                       value))
+
+        return valueInfo
+
+    def __verify_nothing(self, type, value):
+        valueInfo = self.__get_value_info()
+
+        self.__set_value_info(valueInfo, normalized=self.normalize(type, 
+                                                                   value))
+
+        return valueInfo
+
+    #--------------------------------------------------------------------------
+
+    def normalize(self, type, value):
+        try:
+          normFunc = getattr(self, "_typeValidator__norm_%s" % type)
+          return normFunc(value)
+        except AttributeError, A:
+          # this exception should occur only when we don't have corresponding normalize function
+          return value
+
+    def verify(self, type, value, allowNone=False):
+        """Verifies a value based on its type.
+
+           type      - supported configValidator type
+           value     - data to be validated
+           allowNone - don't freak out if None or '' is supplied
+
+           returns a valueInfo dictionary:
+
+            valueInfo = { 'isValid' : 1, 'normalized' : 5, 'errorData' : 0 }
+
+           where:
+
+            isValid    - true or false (0/1)
+            normalized - the normalized value
+            errorData  - if invalid an error string
+
+           supported types:
+
+            see top level"""
+
+        result = None
+        if allowNone:
+            if value == '' or value == None:
+                result = self.__verify_nothing(None, None)
+                result['normalized'] = None
+            else:
+                result = self.verifyFunctions[type](type, value)
+        else:
+            result = self.verifyFunctions[type](type, value)
+
+        return result
+
+    def is_valid_type(self, type):
+        """Returns true if type is valid."""
+
+        return types.has_key(type)
+
+    def type_info(self, type):
+        """Returns type info dictionary."""
+
+        dbInfo = dbTypes[types[type]['db']]
+        typeInfo = types[type].copy()
+        typeInfo['db'] = dbInfo
+
+        return typeInfo
+
+    def add(self, name, type, value):
+        """Adds a value and type by name to the configValidate object to be
+           verified using validate().
+
+           name  - name used to key values and access the results of the
+                   validation
+           type  - configValidator type
+           value - data to be verified"""
+
+        self.validateList.append({ 'name' : name,
+                                   'type' : type,
+                                   'value': value })
+
+    def validate(self, allowNone=False):
+        """Validates configValidate object populating validatedInfo with
+           valueInfo dictionaries for each value added to the object."""
+
+        for valItem in self.validateList:
+            valueInfo = self.verify(valItem['type'], valItem['value'],
+                allowNone)
+            if valueInfo:
+                valueInfo['name'] = valItem['name']
+                self.validatedInfo.append(valueInfo)
+            else:
+                raise Exception("\nMissing a return value: valueInfo\n%s" % \
+                    self.verifyFunctions[valItem['type']](valItem['value']))
+
+class display:
+    def __init__(self):
+        self.displayFunctions = {}
+        self.__build_display_functions()
+
+    def __build_display_functions(self):
+        functions = {}
+        for function in dir(self):
+            functions[function] = 1
+
+        for type in types.keys():
+            # kinda bad, need to find out how to know the name of the class
+            #  I'm in.  But it works.
+            functionName = "_display__display_%s" % type
+            if functions.has_key(functionName):
+                self.displayFunctions[type] = getattr(self, functionName)
+            else:
+                if type == '':
+                    self.displayFunctions[type] = self.__display_default
+                else:
+                    error = "Display function %s for type %s does not exist." \
+                        % (functionName, type)
+                    raise Exception(error)
+                    sys.exit(1)
+
+    def __display_default(self, value, style):
+        return value
+
+    def __display_generic_number(self, value):
+        displayNumber = ''
+        splitNum = string.split(str(value), sep='.')
+        numList = list(str(splitNum[0]))
+        numList.reverse()
+        length = len(numList)
+        counter = 0
+        for char in numList:
+            counter = counter + 1
+            if counter % 3 or counter == length:
+                displayNumber = "%s%s" % (char, displayNumber)
+            else:
+                displayNumber = ",%s%s" % (char, displayNumber)
+
+        if len(splitNum) > 1:
+            displayNumber = "%s.%s" % (displayNumber, splitNum[1])
+
+        return displayNumber
+
+    def __display_generic_mappable(self, map, value, style, plural=True):
+        displayValue = ''
+        length = len(str(value))
+        if length > 3:
+            for factorSet in map:
+                displayValue = float(value) / factorSet['factor']
+                if len(str(int(displayValue))) <= 3 or \
+                    factorSet['factor'] == map[-1]['factor']:
+                    displayValue = "%10.2f" % displayValue    
+                    if displayValue[-1] == '0':
+                        if displayValue > 1 and style != 'short' and plural:
+                            displayValue = "%s %ss" % (displayValue[:-1], 
+                                                      factorSet[style])
+                        else:
+                            displayValue = "%s %s" % (displayValue[:-1], 
+                                                      factorSet[style])
+                    else:
+                        if displayValue > 1 and style != 'short' and plural:
+                            displayValue = "%s %ss" % (displayValue, 
+                                                      factorSet[style])
+                        else:
+                            displayValue = "%s %s" % (displayValue, 
+                                                      factorSet[style])
+                    break
+
+        return displayValue
+
+    def __display_directory(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_address(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_ip_address(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_net_address(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_bool(self, value, style):
+        displayValue = value
+        
+        if not isinstance(displayValue, bool):
+            if re.match("^(false|0|f|no)$", value, 2):
+                displayValue=False
+            elif re.match("^(true|1|t|yes)$", value, 2):
+                displayValue=True
+
+        return displayValue
+
+    def __display_int(self, value, style):
+        return self.__display_generic_number(value)
+
+    def __display_float(self, value, style):
+        return self.__display_generic_number(value)
+
+    def __display_pos_int(self, value, style):
+        return self.__display_generic_number(value)
+
+    def __display_neg_int(self, value, style):
+        return self.__display_generic_number(value)
+
+    def __display_pos_num(self, value, style):
+        return self.__display_generic_number(value)
+
+    def __display_pos_float(self, value, style):
+        return self.__display_generic_number(value)
+
+    def __display_neg_float(self, value, style):
+        return self.__display_generic_number(value)
+
+    def __display_string(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_list(self, value, style):
+        value = value.rstrip()
+        return value.rstrip(',')
+
+    def __display_keyval(self, value, style):
+        value = value.rstrip()
+        return value.rstrip(',')
+
+    def __display_file(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_size(self, value, style):
+        return self.__display_generic_mappable(sizeMap, value, style)
+
+    def __display_freq(self, value, style):
+        return self.__display_generic_mappable(freqMap, value, style, False)
+
+    def __display_eaddress(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_tcp_port(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_http_version(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_range(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_hostname(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_user_account(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_user_group(self, value, style):
+        return self.__display_default(value, style)
+
+    def __display_timestamp(self, value, style):
+        return self.__display_default(value, style)
+
+    def display(self, type, value, style='short'):
+        displayValue = value
+        if value != None:
+            displayValue = self.displayFunctions[type](value, style)
+
+        return displayValue
+
+typeValidatorInstance = typeValidator()
+
+def is_valid_type(type):
+    """Returns true if type is valid."""
+
+    return typeValidatorInstance.is_valid_type(type)
+
+def type_info(type):
+    """Returns type info dictionary."""
+
+    return typeValidatorInstance.type_info(type)
+
+def verify(type, value, allowNone=False):
+    """Returns a normalized valueInfo dictionary."""
+
+    return typeValidatorInstance.verify(type, value, allowNone)
+
+def __normalize(map, val, factor):
+    normFactor = string.lower(factor)
+    normVal = float(val)
+    return int(normVal * map[normFactor])
+
+def normalize_size(size, factor):
+    """ Normalize a size to bytes.
+
+        size   - number of B, KB, MB, GB, TB, or PB
+        factor - size factor (case insensitive):
+                 b | bytes - bytes
+                 k | kb    - kilobytes
+                 m | mb    - megabytes
+                 g | gb    - gigabytes
+                 t | tb    - terabytes
+                 p | pb    - petabytes
+    """
+
+    return __normalize(sizeFactors, size, factor)
+
+def normalize_freq(freq, factor):
+    """ Normalize a frequency to hertz.
+
+        freq   - number of Hz, kHz, MHz, GHz, THz, or PHz
+        factor - frequency factor (case insensitive):
+                 hz  - Hertz
+                 khz - Kilohertz
+                 mhz - Megahertz
+                 ghz - Gigahertz
+                 thz - Terahertz
+                 phz - Petahertz
+    """
+
+    return __normalize(freqFactors, freq, factor)

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/types.py
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/types.py
------------------------------------------------------------------------------
    svn:executable = *

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/types.py
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL
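
For readers skimming this commit, the following sketch (not part of the committed file) shows how the typeValidator and size helpers in types.py above might be used; the module path follows this commit's directory layout, and the option names passed to add() are purely illustrative:

    from hodlib.Common.types import typeValidator, normalize_size

    validator = typeValidator()

    # single value: verify() returns a valueInfo dictionary
    info = validator.verify('pos_int', '42')
    print info['isValid'], info['normalized']          # 1 42

    # batch validation: queue values with add(), then run validate()
    validator.add('hod.log-dir', 'directory', '/tmp')
    validator.add('hod.http-port', 'tcp_port', '8080')
    validator.validate()
    for result in validator.validatedInfo:
        print result['name'], result['isValid'], result['normalized']

    # size strings are normalized to integer bytes
    print normalize_size(4, 'GB')                      # 4294967296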

Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/util.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/util.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/util.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/util.py Fri Jan  4 10:20:17 2008
@@ -0,0 +1,172 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+import sys, os, traceback, stat, socket, re, warnings, time
+
+from hodlib.Common.tcp import tcpSocket, tcpError 
+from hodlib.Common.threads import simpleCommand
+
+setUGV   = { 'S_ISUID' : 2, 'S_ISGID' : 1, 'S_ISVTX' : 0 }
+
+class AlarmException(Exception):
+    def __init__(self, msg=''):
+        self.message = msg
+        Exception.__init__(self, msg)
+
+    def __repr__(self):
+        return self.message
+
+def untar(file, targetDir):
+    status = False
+    command = 'tar -C %s -zxf %s' % (targetDir, file)
+    commandObj = simpleCommand('untar', command)
+    commandObj.start()
+    commandObj.wait()
+    commandObj.join()
+    if commandObj.exit_code() == 0:
+        status = True
+        
+    return status
+
+def tar(tarFile, tarDirectory, tarList):
+    currentDir = os.getcwd()
+    os.chdir(tarDirectory)
+    status = False
+    command = 'tar -czf %s ' % (tarFile)
+
+    for file in tarList:
+        command = "%s%s " % (command, file)
+    
+    commandObj = simpleCommand('tar', command)
+    commandObj.start()
+    commandObj.wait()
+    commandObj.join()
+    if commandObj.exit_code() == 0:
+        status = True
+    else:
+        status = commandObj.exit_status_string()
+    
+    os.chdir(currentDir)
+        
+    return status
+  
+def to_http_url(list):
+    """convert [hostname, port]  to a http url""" 
+    str = ''
+    str = "http://%s:%s" % (list[0], list[1])
+    
+    return str
+
+def get_exception_string():
+    (type, value, tb) = sys.exc_info()
+    exceptList = traceback.format_exception(type, value, tb)
+    exceptString = ''
+    for line in exceptList:
+        exceptString = "%s%s" % (exceptString, line)
+    
+    return exceptString
+  
+def get_exception_error_string():
+  (type, value, tb) = sys.exc_info()
+  if value:
+    exceptString = "%s %s" % (type, value)
+  else:
+    exceptString = type
+    
+  return exceptString
+
+def check_timestamp(timeStamp):
+    """ Checks the validity of a timeStamp.
+
+        timeStamp - (YYYY-MM-DD HH:MM:SS in UTC)
+
+        returns True or False
+    """
+    isValid = True
+
+    try:
+        timeStruct = time.strptime(timeStamp, "%Y-%m-%d %H:%M:%S")
+    except:
+        isValid = False
+
+    return isValid
+
+def sig_wrapper(sigNum, handler, *args):
+  if args:
+      handler(args)
+  else:
+      handler()
+      
+def get_perms(filename):
+    mode = stat.S_IMODE(os.stat(filename)[stat.ST_MODE])
+    permsString = ''
+    permSet = 0
+    place = 2
+    for who in "USR", "GRP", "OTH":
+        for what in "R", "W", "X":
+            if mode & getattr(stat,"S_I"+what+who):
+                permSet = permSet + 2**place
+            place = place - 1
+
+        permsString = "%s%s" % (permsString, permSet)
+        permSet = 0
+        place = 2
+
+    permSet = 0
+    for permFlag in setUGV.keys():
+        if mode & getattr(stat, permFlag):
+            permSet = permSet + 2**setUGV[permFlag]
+
+    permsString = "%s%s" % (permSet, permsString)
+
+    return permsString
+
+def local_fqdn():
+    """Return a system's true FQDN rather than any aliases, which are
+       occasionally returned by socket.gethostname."""
+
+    fqdn = None
+    me = os.uname()[1]
+    nameInfo=socket.gethostbyname_ex(me)
+    nameInfo[1].append(nameInfo[0])
+    for name in nameInfo[1]:
+        if name.count(".") and name.startswith(me):
+            fqdn = name
+
+    return(fqdn)
+  
+def need_to_allocate(allocated, config, command):
+    status = True
+    
+    if allocated.isSet():
+        status = False
+    elif re.search("\s*dfs.*$", command) and \
+        config['gridservice-hdfs']['external']:    
+        status = False
+    elif config['gridservice-mapred']['external']:    
+        status = False
+        
+    return status
+  
+def filter_warnings():
+    warnings.filterwarnings('ignore',
+        message=".*?'with' will become a reserved keyword.*")
+    
+def args_to_string(list):
+  """return a string argument space seperated"""
+  arg = ''
+  for item in list:
+    arg = "%s%s " % (arg, item)
+  return arg[:-1]

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/util.py
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/util.py
------------------------------------------------------------------------------
    svn:executable = *

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/util.py
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL
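
A short sketch (not part of the committed file) of the helpers defined in util.py above; the file path passed to get_perms is only an example:

    from hodlib.Common.util import get_perms, check_timestamp, to_http_url

    print get_perms('/etc/hosts')                 # e.g. '0644' (setuid/setgid/sticky digit first)
    print check_timestamp('2008-01-04 10:20:17')  # True
    print check_timestamp('20080104102017')       # False: format must be YYYY-MM-DD HH:MM:SS
    print to_http_url(['localhost', 8080])        # http://localhost:8080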

Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/xmlrpc.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/xmlrpc.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/xmlrpc.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/xmlrpc.py Fri Jan  4 10:20:17 2008
@@ -0,0 +1,54 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+import xmlrpclib, time, random, signal
+
+class hodXRClient(xmlrpclib.ServerProxy):
+    def __init__(self, uri, transport=None, encoding=None, verbose=0,
+                 allow_none=0, installSignalHandlers=1, retryRequests=True, timeOut=15):
+        xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, verbose, 
+                                       allow_none)
+        self.__retryRequests = retryRequests
+        self.__timeOut = timeOut
+        if (installSignalHandlers!=0):
+          self.__set_alarm()
+    
+    def __set_alarm(self):
+        def alarm_handler(sigNum, sigHandler):
+            raise Exception("XML-RPC socket timeout.")
+          
+        signal.signal(signal.SIGALRM, alarm_handler)
+      
+    def __request(self, methodname, params):
+        response = None
+        retryWaitTime = 5 + random.randint(0, 5)
+        for i in range(0, 30):
+            signal.alarm(self.__timeOut)
+            try:
+                response = self._ServerProxy__request(methodname, params)
+                signal.alarm(0)
+                break
+            except Exception:
+                if self.__retryRequests:
+                  time.sleep(retryWaitTime)
+                else:
+                  raise Exception("hodXRClientTimeout")
+
+        return response
+                
+    def __getattr__(self, name):
+        # magic method dispatcher
+        return xmlrpclib._Method(self.__request, name)
+                           

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/xmlrpc.py
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/xmlrpc.py
------------------------------------------------------------------------------
    svn:executable = *

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/xmlrpc.py
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL
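
hodXRClient above wraps xmlrpclib.ServerProxy with a SIGALRM-based per-request timeout and automatic retries. A hypothetical call might look like the sketch below; the URL and the remote method name are placeholders, not anything defined by this commit:

    from hodlib.Common.xmlrpc import hodXRClient

    # each request gets a 10-second alarm; failed requests are retried,
    # waiting 5-10 seconds between tries, for up to 30 attempts
    client = hodXRClient('http://localhost:8000', retryRequests=True, timeOut=10)
    status = client.getServiceStatus()   # dispatched through __getattr__ to the XML-RPC server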

Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/__init__.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/__init__.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/__init__.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/__init__.py Fri Jan  4 10:20:17 2008
@@ -0,0 +1,18 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+
+from mapred import MapReduce, MapReduceExternal
+from hdfs import Hdfs, HdfsExternal

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/__init__.py
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/__init__.py
------------------------------------------------------------------------------
    svn:executable = *

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/__init__.py
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL

Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/hdfs.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/hdfs.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/hdfs.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/hdfs.py Fri Jan  4 10:20:17 2008
@@ -0,0 +1,298 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+"""define Hdfs as subclass of Service"""
+
+# -*- python -*-
+
+import os
+
+from service import *
+from hodlib.Hod.nodePool import *
+from hodlib.Common.desc import CommandDesc
+from hodlib.Common.util import get_exception_string
+
+class HdfsExternal(MasterSlave):
+  """dummy proxy to external HDFS instance"""
+
+  def __init__(self, serviceDesc, workDirs):
+    MasterSlave.__init__(self, serviceDesc, workDirs,None)
+    self.launchedMaster = True
+    self.masterInitialized = True
+    
+  def getMasterRequest(self):
+    return None
+
+  def getMasterCommands(self, serviceDict):
+    return []
+
+  def getAdminCommands(self, serviceDict):
+    return []
+
+  def getWorkerCommands(self, serviceDict):
+    return []
+
+  def getMasterAddrs(self):
+    attrs = self.serviceDesc.getfinalAttrs()
+    addr = attrs['fs.default.name']
+    return [addr]
+  
+  def setMasterParams(self, list):
+    raise NotImplementedError
+
+  def getInfoAddrs(self):
+    attrs = self.serviceDesc.getfinalAttrs()
+    addr = attrs['fs.default.name']
+    k,v = addr.split( ":")
+    # infoaddr = k + ':' + attrs['dfs.info.port']
+    # After Hadoop-2185
+    infoaddr = attrs['dfs.http.bindAddress']
+    return [infoaddr]
+
+class Hdfs(MasterSlave):
+
+  def __init__(self, serviceDesc, nodePool, required_node, format=True, upgrade=False):
+    MasterSlave.__init__(self, serviceDesc, nodePool, required_node)
+    self.masterNode = None
+    self.masterAddr = None
+    self.runAdminCommands = True
+    self.infoAddr = None
+    self._isLost = False
+    self.format = format
+    self.upgrade = upgrade
+    self.workers = []
+
+  def getMasterRequest(self):
+    req = NodeRequest(1, [], False)
+    return req
+
+  def getMasterCommands(self, serviceDict):
+
+    masterCommands = []
+    if self.format:
+      masterCommands.append(self._getNameNodeCommand(True))
+
+    if self.upgrade:
+      masterCommands.append(self._getNameNodeCommand(False, True))
+    else:
+      masterCommands.append(self._getNameNodeCommand(False))
+
+    return masterCommands
+
+  def getAdminCommands(self, serviceDict):
+
+    adminCommands = []
+    if self.upgrade and self.runAdminCommands:
+      adminCommands.append(self._getNameNodeAdminCommand('-safemode wait'))
+      adminCommands.append(self._getNameNodeAdminCommand('-finalizeUpgrade',
+                                                          True, True))
+
+    self.runAdminCommands = False
+    return adminCommands
+
+  def getWorkerCommands(self, serviceDict):
+    cmdDesc = self._getDataNodeCommand()
+    return [cmdDesc]
+
+  def setMasterNodes(self, list):
+    node = list[0]
+    self.masterNode = node
+    
+  def getMasterAddrs(self):
+    return [self.masterAddr]
+
+  def getInfoAddrs(self):
+    return [self.infoAddr]
+
+  def getWorkers(self):
+    return self.workers
+
+  def setMasterParams(self, list):
+    dict = self._parseEquals(list)
+    self.masterAddr = dict['fs.default.name']
+    k, v = self.masterAddr.split(":")
+    self.masterNode = k
+    # self.infoAddr = self.masterNode + ':' + dict['dfs.info.port']
+    # After Hadoop-2185
+    self.infoAddr = dict['dfs.http.bindAddress']
+   
+  def _parseEquals(self, list):
+    dict = {}
+    for elems in list:
+      splits = elems.split('=')
+      dict[splits[0]] = splits[1]
+    return dict
+  
+  def _getNameNodePort(self):
+    sd = self.serviceDesc
+    attrs = sd.getfinalAttrs()
+    if not 'fs.default.name' in attrs:
+      return ServiceUtil.getUniqPort()
+
+    v = attrs['fs.default.name']
+    try:
+      [n, p] = v.split(':', 1)
+      return int(p)
+    except:
+      print get_exception_string()
+      raise ValueError, "Can't find port from attr fs.default.name: %s" % (v)
+
+  def _getNameNodeInfoPort(self):
+    sd = self.serviceDesc
+    attrs = sd.getfinalAttrs()
+    if 'dfs.http.bindAddress' not in attrs:
+      return ServiceUtil.getUniqPort()
+
+    # p = attrs['dfs.info.port'] 
+    p = attrs['dfs.http.bindAddress'].split(':')[1]
+    try:
+      return int(p)
+    except:
+      print get_exception_string()
+      raise ValueError, "Can't find port from attr dfs.info.port: %s" % (p)
+
+  def _setWorkDirs(self, workDirs, envs, attrs, parentDirs, subDir):
+    namedir = None
+    datadir = []
+
+    for p in parentDirs:
+      workDirs.append(p)
+      workDirs.append(os.path.join(p, subDir))
+      dir = os.path.join(p, subDir, 'dfs-data')
+      datadir.append(dir)
+
+      if not namedir:
+        namedir = os.path.join(p, subDir, 'dfs-name')
+
+    workDirs.append(namedir)
+    workDirs.extend(datadir)
+
+    # FIXME!! use csv
+    attrs['dfs.name.dir'] = namedir
+    attrs['dfs.data.dir'] = ','.join(datadir)
+    # FIXME -- change dfs.client.buffer.dir
+    envs['HADOOP_ROOT_LOGGER'] = ["INFO,DRFA",]
+
+
+  def _getNameNodeCommand(self, format=False, upgrade=False):
+    sd = self.serviceDesc
+
+    parentDirs = self.workDirs
+    workDirs = []
+    attrs = sd.getfinalAttrs().copy()
+    envs = sd.getEnvs().copy()
+    #self.masterPort = port = self._getNameNodePort()
+    
+    if 'fs.default.name' not in attrs:
+      attrs['fs.default.name'] = 'fillinhostport'
+    #self.infoPort = port = self._getNameNodeInfoPort()
+ 
+    # if 'dfs.info.port' not in attrs:
+    #  attrs['dfs.info.port'] = 'fillinport'
+   
+    # Addressing Hadoop-2815, added the following. Earlier versions don't
+    # care about this.
+    if 'dfs.http.bindAddress' not in attrs:
+      attrs['dfs.http.bindAddress'] = 'fillinhostport'
+
+    self._setWorkDirs(workDirs, envs, attrs, parentDirs, 'hdfs-nn')
+
+    dict = { 'name' : 'namenode' }
+    dict['program'] = os.path.join('bin', 'hadoop')
+    argv = ['namenode']
+    if format:
+      argv.append('-format')
+    elif upgrade:
+      argv.append('-upgrade')
+    dict['argv'] = argv
+    dict['envs'] = envs
+    dict['pkgdirs'] = sd.getPkgDirs()
+    dict['workdirs'] = workDirs
+    dict['final-attrs'] = attrs
+    dict['attrs'] = sd.getAttrs()
+    if format:
+      dict['fg'] = 'true'
+      dict['stdin'] = 'Y'
+    cmd = CommandDesc(dict)
+    return cmd
+
+  def _getNameNodeAdminCommand(self, adminCommand, wait=True, ignoreFailures=False):
+    sd = self.serviceDesc
+
+    parentDirs = self.workDirs
+    workDirs = []
+    attrs = sd.getfinalAttrs().copy()
+    envs = sd.getEnvs().copy()
+    nn = self.masterAddr
+
+    if nn == None:
+      raise ValueError, "Can't get namenode address"
+
+    attrs['fs.default.name'] = nn
+
+    self._setWorkDirs(workDirs, envs, attrs, parentDirs, 'hdfs-nn')
+
+    dict = { 'name' : 'dfsadmin' }
+    dict['program'] = os.path.join('bin', 'hadoop')
+    argv = ['dfsadmin']
+    argv.append(adminCommand)
+    dict['argv'] = argv
+    dict['envs'] = envs
+    dict['pkgdirs'] = sd.getPkgDirs()
+    dict['workdirs'] = workDirs
+    dict['final-attrs'] = attrs
+    dict['attrs'] = sd.getAttrs()
+    if wait:
+      dict['fg'] = 'true'
+      dict['stdin'] = 'Y'
+    if ignoreFailures:
+      dict['ignorefailures'] = 'Y'
+    cmd = CommandDesc(dict)
+    return cmd
+ 
+  def _getDataNodeCommand(self):
+
+    sd = self.serviceDesc
+
+    parentDirs = self.workDirs
+    workDirs = []
+    attrs = sd.getfinalAttrs().copy()
+    envs = sd.getEnvs().copy()
+    nn = self.masterAddr
+
+    if nn == None:
+      raise ValueError, "Can't get namenode address"
+
+    attrs['fs.default.name'] = nn
+
+    # Adding the following. Hadoop-2815
+    if 'dfs.datanode.bindAddress' not in attrs:
+      attrs['dfs.datanode.bindAddress'] = 'fillinhostport'
+    if 'dfs.datanode.http.bindAddress' not in attrs:
+      attrs['dfs.datanode.http.bindAddress'] = 'fillinhostport'
+    self._setWorkDirs(workDirs, envs, attrs, parentDirs, 'hdfs-dn')
+
+    dict = { 'name' : 'datanode' }
+    dict['program'] = os.path.join('bin', 'hadoop')
+    dict['argv'] = ['datanode']
+    dict['envs'] = envs
+    dict['pkgdirs'] = sd.getPkgDirs()
+    dict['workdirs'] = workDirs
+    dict['final-attrs'] = attrs
+    dict['attrs'] = sd.getAttrs()
+
+    cmd = CommandDesc(dict)
+    return cmd
+
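
For illustration, here is a minimal standalone sketch of how the key=value strings reported back by a hodring are consumed by Hdfs.setMasterParams above. It is not part of the commit; the sample values are hypothetical, and only the parsing logic mirrors _parseEquals.

    # Illustrative sketch (not part of the commit). Mirrors
    # Hdfs._parseEquals/setMasterParams: split "key=value" strings and
    # derive the master node, master address and web UI address.
    def parse_equals(params):
      result = {}
      for elem in params:
        splits = elem.split('=')
        result[splits[0]] = splits[1]
      return result

    # hypothetical values a hodring might report after starting the namenode
    reported = ['fs.default.name=node001.example.com:9000',
                'dfs.http.bindAddress=node001.example.com:50070']

    params = parse_equals(reported)
    masterAddr = params['fs.default.name']        # host:port datanodes connect to
    masterNode = masterAddr.split(':')[0]         # host running the namenode
    infoAddr = params['dfs.http.bindAddress']     # web UI address (after Hadoop-2185)

    print masterNode, masterAddr, infoAddr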

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/hdfs.py
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/hdfs.py
------------------------------------------------------------------------------
    svn:executable = *

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/hdfs.py
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL

Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/mapred.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/mapred.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/mapred.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/mapred.py Fri Jan  4 10:20:17 2008
@@ -0,0 +1,267 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+"""define MapReduce as subclass of Service"""
+
+# -*- python -*-
+
+import os, copy, time
+
+from service import *
+from hodlib.Hod.nodePool import *
+from hodlib.Common.desc import CommandDesc
+from hodlib.Common.util import get_exception_string
+
+class MapReduceExternal(MasterSlave):
+  """dummy proxy to external MapReduce instance"""
+
+  def __init__(self, serviceDesc, workDirs):
+    MasterSlave.__init__(self, serviceDesc, workDirs, None)
+    self.launchedMaster = True
+    self.masterInitialized = True
+    
+  def getMasterRequest(self):
+    return None
+
+  def getMasterCommands(self, serviceDict):
+    return []
+
+  def getAdminCommands(self, serviceDict):
+    return []
+
+  def getWorkerCommands(self, serviceDict):
+    return []
+
+  def getMasterAddrs(self):
+    attrs = self.serviceDesc.getfinalAttrs()
+    addr = attrs['mapred.job.tracker']
+    return [addr]
+
+  def needsMore(self):
+    return 0
+
+  def needsLess(self):
+    return 0
+
+  def setMasterParams(self, list):
+    raise NotImplementedError
+  
+  def getInfoAddrs(self):
+    attrs = self.serviceDesc.getfinalAttrs()
+    addr = attrs['mapred.job.tracker']
+    k, v = addr.split(":")
+    # infoaddr = k + ':' + attrs['mapred.job.tracker.info.port']
+    # After Hadoop-2185
+    # Note: earlier, we never respected mapred.job.tracker.http.bindAddress
+    infoaddr = attrs['mapred.job.tracker.http.bindAddress']
+    return [infoaddr]
+  
+class MapReduce(MasterSlave):
+
+  def __init__(self, serviceDesc, workDirs, required_node):
+    MasterSlave.__init__(self, serviceDesc, workDirs, required_node)
+
+    self.masterNode = None
+    self.masterAddr = None
+    self.infoAddr = None
+    self.workers = []
+    self.required_node = required_node
+
+  def isLaunchable(self, serviceDict):
+    hdfs = serviceDict['hdfs']
+    if (hdfs.isMasterInitialized()):
+      return True
+    return False
+  
+  def getMasterRequest(self):
+    req = NodeRequest(1, [], False)
+    return req
+
+  def getMasterCommands(self, serviceDict):
+
+    hdfs = serviceDict['hdfs']
+
+    cmdDesc = self._getJobTrackerCommand(hdfs)
+    return [cmdDesc]
+
+  def getAdminCommands(self, serviceDict):
+    return []
+
+  def getWorkerCommands(self, serviceDict):
+
+    hdfs = serviceDict['hdfs']
+
+    cmdDesc = self._getTaskTrackerCommand(hdfs)
+    return [cmdDesc]
+
+  def setMasterNodes(self, list):
+    node = list[0]
+    self.masterNode = node
+
+  def getMasterAddrs(self):
+    return [self.masterAddr]
+
+  def getInfoAddrs(self):
+    return [self.infoAddr]
+
+  def getWorkers(self):
+    return self.workers
+
+  def requiredNode(self):
+    return self.required_node
+
+  def setMasterParams(self, list):
+    dict = self._parseEquals(list)
+    self.masterAddr = dict['mapred.job.tracker']
+    k, v = self.masterAddr.split(":")
+    self.masterNode = k
+    # self.infoAddr = self.masterNode + ':' + dict['mapred.job.tracker.info.port']
+    # After Hadoop-2185
+    self.infoAddr = dict['mapred.job.tracker.http.bindAddress']
+  
+  def _parseEquals(self, list):
+    dict = {}
+    for elems in list:
+      splits = elems.split('=')
+      dict[splits[0]] = splits[1]
+    return dict
+
+  def _getJobTrackerPort(self):
+    sd = self.serviceDesc
+    attrs = sd.getfinalAttrs()
+    if not 'mapred.job.tracker' in attrs:
+      return ServiceUtil.getUniqPort()
+    
+    v = attrs['mapred.job.tracker']
+    try:
+      [n, p] = v.split(':', 1)
+      return int(p)
+    except:
+      print get_exception_string()
+      raise ValueError, "Can't find port from attr mapred.job.tracker: %s" % (v)
+
+  def _getJobTrackerInfoPort(self):
+    sd = self.serviceDesc
+    attrs = sd.getfinalAttrs()
+    # if not 'mapred.job.tracker.info.port' in attrs:
+    if 'mapred.job.tracker.http.bindAddress' not in attrs:
+      return ServiceUtil.getUniqPort()
+
+    # p = attrs['mapred.job.tracker.info.port']
+    p = attrs['mapred.job.tracker.http.bindAddress'].split(':')[1]
+    try:
+      return int(p)
+    except:
+      print get_exception_string()
+      # raise ValueError, "Can't find port from attr mapred.job.tracker.info.port: %s" % (p)
+      raise ValueError, "Can't find port from attr mapred.job.tracker.http.bindAddress: %s" % (p)
+
+  def _setWorkDirs(self, workDirs, envs, attrs, parentDirs, subDir):
+    local = []
+    system = None
+    temp = None
+    dfsclient = []
+    
+    for p in parentDirs:
+      workDirs.append(p)
+      workDirs.append(os.path.join(p, subDir))
+      dir = os.path.join(p, subDir, 'mapred-local')
+      local.append(dir)
+      if not system:
+        system = os.path.join(p, subDir, 'mapred-system')
+      if not temp:
+        temp = os.path.join(p, subDir, 'mapred-temp')
+      dfsclientdir = os.path.join(p, subDir, 'dfs-client')
+      dfsclient.append(dfsclientdir)
+      workDirs.append(dfsclientdir)
+    # FIXME!! use csv
+    attrs['mapred.local.dir'] = ','.join(local)
+    attrs['mapred.system.dir'] = 'fillindir'
+    attrs['mapred.temp.dir'] = temp
+    attrs['dfs.client.buffer.dir'] = ','.join(dfsclient)
+
+
+    envs['HADOOP_ROOT_LOGGER'] = ["INFO,DRFA",]
+
+
+  def _getJobTrackerCommand(self, hdfs):
+    sd = self.serviceDesc
+
+    parentDirs = self.workDirs
+    workDirs = []
+    attrs = sd.getfinalAttrs().copy()
+    envs = sd.getEnvs().copy()
+
+    #self.masterPort = port = self._getJobTrackerPort()
+    if 'mapred.job.tracker' not in attrs:
+      attrs['mapred.job.tracker'] = 'fillinhostport'
+
+    #self.infoPort = port = self._getJobTrackerInfoPort()
+    # if 'mapred.job.tracker.info.port' not in attrs:
+    #   attrs['mapred.job.tracker.info.port'] = 'fillinport'
+
+    attrs['fs.default.name'] = hdfs.getMasterAddrs()[0]
+    # Addressing Hadoop-2815,
+    if 'mapred.job.tracker.http.bindAddress' not in attrs:
+      attrs['mapred.job.tracker.http.bindAddress'] = 'fillinhostport'
+
+    self._setWorkDirs(workDirs, envs, attrs, parentDirs, 'mapred-jt')
+
+    dict = { 'name' : 'jobtracker' }
+    dict['program'] = os.path.join('bin', 'hadoop')
+    dict['argv'] = ['jobtracker']
+    dict['envs'] = envs
+    dict['pkgdirs'] = sd.getPkgDirs()
+    dict['workdirs'] = workDirs
+    dict['final-attrs'] = attrs
+    dict['attrs'] = sd.getAttrs()
+    cmd = CommandDesc(dict)
+    return cmd
+
+  def _getTaskTrackerCommand(self, hdfs):
+
+    sd = self.serviceDesc
+
+    parentDirs = self.workDirs
+    workDirs = []
+    attrs = sd.getfinalAttrs().copy()
+    envs = sd.getEnvs().copy()
+    jt = self.masterAddr
+
+    if jt == None:
+      raise ValueError, "Can't get job tracker address"
+
+    attrs['mapred.job.tracker'] = jt
+    attrs['fs.default.name'] = hdfs.getMasterAddrs()[0]
+
+    # Adding the following. Hadoop-2815
+    if 'mapred.task.tracker.report.bindAddress' not in attrs:
+      attrs['mapred.task.tracker.report.bindAddress'] = 'fillinhostport'
+    if 'mapred.task.tracker.http.bindAddress' not in attrs:
+      attrs['mapred.task.tracker.http.bindAddress'] = 'fillinhostport'
+
+    self._setWorkDirs(workDirs, envs, attrs, parentDirs, 'mapred-tt')
+
+    dict = { 'name' : 'tasktracker' }
+    dict['program'] = os.path.join('bin', 'hadoop')
+    dict['argv'] = ['tasktracker']
+    dict['envs'] = envs
+    dict['pkgdirs'] = sd.getPkgDirs()
+    dict['workdirs'] = workDirs
+    dict['final-attrs'] = attrs
+    dict['attrs'] = sd.getAttrs()
+    cmd = CommandDesc(dict)
+    return cmd
+
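
The MapReduce service only becomes launchable once the HDFS master has initialized (isLaunchable checks serviceDict['hdfs'].isMasterInitialized()). A minimal standalone sketch of that ordering contract follows; the Fake* classes and driver steps are hypothetical stand-ins, and only the isLaunchable/isMasterInitialized relationship mirrors the services above.

    # Illustrative sketch (not part of the commit) of the launch-ordering
    # contract: the jobtracker is held back until the namenode reports
    # itself initialized.
    class FakeHdfs:
      def __init__(self):
        self.masterInitialized = False
      def isMasterInitialized(self):
        return self.masterInitialized

    class FakeMapReduce:
      def isLaunchable(self, serviceDict):
        # same dependency check as MapReduce.isLaunchable
        return serviceDict['hdfs'].isMasterInitialized()

    services = {'hdfs': FakeHdfs(), 'mapred': FakeMapReduce()}
    assert not services['mapred'].isLaunchable(services)

    services['hdfs'].masterInitialized = True          # namenode came up
    assert services['mapred'].isLaunchable(services)   # jobtracker may start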

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/mapred.py
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/mapred.py
------------------------------------------------------------------------------
    svn:executable = *

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/mapred.py
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL

Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/service.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/service.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/service.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/service.py Fri Jan  4 10:20:17 2008
@@ -0,0 +1,226 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+"""defines Service as abstract interface"""
+
+# -*- python -*-
+import random, socket
+
+class Service:
+  """ the service base class that all the 
+  other services inherit from. """
+  def __init__(self, serviceDesc, workDirs):
+    self.serviceDesc = serviceDesc
+    self.workDirs = workDirs
+
+  def getName(self):
+    return self.serviceDesc.getName()
+
+  def getInfoAddrs(self):
+    """Return a list of addresses that provide 
+    information about the service"""
+    return []
+
+  def isLost(self):
+    """True if the service is down"""
+    raise NotImplementedError
+
+  def addNodes(self, nodeList):
+    """add nodeSet"""
+    raise NotImplementedError
+
+  def removeNodes(self, nodeList):
+    """remove a nodeset"""
+    raise NotImplementedError
+
+  def getWorkers(self):
+    raise NotImplementedError
+
+  def needsMore(self):
+    """return number of nodes the service wants to add"""
+    raise NotImplementedError
+
+  def needsLess(self):
+    """return number of nodes the service wants to remove"""
+    raise NotImplementedError
+
+class MasterSlave(Service):
+  """ the base class for a master slave 
+  service architecture. """
+  def __init__(self, serviceDesc, workDirs,requiredNode):
+    Service.__init__(self, serviceDesc, workDirs)
+    self.launchedMaster = False
+    self.masterInitialized = False
+    self.masterAddress = 'none'
+    self.requiredNode = requiredNode
+
+  def getRequiredNode(self):
+    return self.requiredNode
+ 
+  def getMasterRequest(self):
+    """ the number of master you need
+    to run for this service. """
+    raise NotImplementedError
+  
+  def isLaunchable(self, serviceDict):
+    """ if your service does not depend on
+    other services. is set to true by default. """
+    return True
+  
+  def getMasterCommands(self, serviceDict):
+    """ a list of master commands you 
+    want to run for this service. """
+    raise NotImplementedError
+
+  def getAdminCommands(self, serviceDict):
+    """ a list of admin commands you 
+    want to run for this service. """
+    raise NotImplementedError
+
+  def getWorkerCommands(self, serviceDict):
+    """ a list of worker commands you want to 
+    run for this service. """
+    raise NotImplementedError
+
+  def setMasterNodes(self, list):
+    """ set the status of master nodes 
+    after they start running on a node cluster. """
+    raise NotImplementedError
+
+  def addNodes(self, list):
+    """ add nodes to a service. Not implemented
+    currently. """
+    raise NotImplementedError
+
+  def getMasterAddrs(self):
+    """ return the addresses of master. the 
+    hostname:port to which worker nodes should
+    connect. """
+    raise NotImplementedError
+  
+  def setMasterParams(self, list):
+    """ set the various master params 
+    depending on what each hodring set 
+    the master params to. """
+    raise NotImplementedError
+
+  def setlaunchedMaster(self):
+    """ set the status of master launched
+    to true. """
+    self.launchedMaster = True
+
+  def isMasterLaunched(self):
+    """ return if a master has been launched
+    for the service or not. """
+    return self.launchedMaster
+
+  def isMasterInitialized(self):
+    """ return if a master if launched 
+    has been initialized or not. """
+    return self.masterInitialized
+
+  def setMasterInitialized(self):
+    """ set the master initialized to
+    true. """
+    self.masterInitialized = True
+
+  def getMasterAddress(self):
+    """ it needs to change to reflect 
+    more that one masters. Currently it 
+    keeps a knowledge of where the master 
+    was launched and to keep track if it was actually
+    up or not. """
+    return self.masterAddress
+
+  def setMasterAddress(self, addr):
+    self.masterAddress = addr
+
+  def isExternal(self):
+    return self.serviceDesc.isExternal()
+  
+class NodeRequest:
+  """ A class to define 
+  a node request. """
+  def __init__(self, n, required = [], preferred = [], isPreemptee = True):
+    self.numNodes = n
+    self.preferred = preferred
+    self.isPreemptee = isPreemptee
+    self.required = required
+
+  def setNumNodes(self, n):
+    self.numNodes = n
+
+  def setPreferredList(self, list):
+    self.preferred = list
+
+  def setIsPreemptee(self, flag):
+    self.isPreemptee = flag
+
+
+class ServiceUtil:
+  """ this class should be moved out of 
+  service.py to a util file"""
+  localPortUsed = {}
+    
+  def getUniqRandomPort(h=None, low=50000, high=60000, retry = 30):
+    """This allocates a randome free port between low and high"""
+    while retry > 0:
+      n = random.randint(low, high)
+      if n in ServiceUtil.localPortUsed:
+        retry -= 1
+        continue
+      s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+      if not h:
+        h = socket.gethostname()
+      avail = False
+      try:
+        s.connect((h, n))
+      except:
+        avail = True
+
+      if avail:
+        ServiceUtil.localPortUsed[n] = True
+        return n
+    raise ValueError, "Can't find unique local port between %d and %d" % (low, high)
+  
+  getUniqRandomPort = staticmethod(getUniqRandomPort)
+  
+  def getUniqPort(h=None, low=40000, high=60000, retry = 30):
+    """get unique port on a host that can be used by service
+    This and its consumer code should disappear when master
+    nodes get allocatet by nodepool"""
+
+    n  = low
+    while retry > 0:
+      n = n + 1
+      if n in ServiceUtil.localPortUsed:
+        retry -= 1
+        continue
+      s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+      if not h:
+        h = socket.gethostname()
+      avail = False
+      try:
+        s.connect((h, n))
+      except:
+        avail = True
+
+      if avail:
+        ServiceUtil.localPortUsed[n] = True
+        return n
+
+    raise ValueError, "Can't find unique local port between %d and %d" % (low, high)
+
+  getUniqPort = staticmethod(getUniqPort)
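
ServiceUtil treats a port as free when a TCP connect to it on the local host fails, and remembers handed-out ports only in the in-process localPortUsed map. Below is a standalone sketch of the same connect-probe; it is illustrative only and, like the original, inherently racy between the probe and the eventual bind.

    # Illustrative sketch (not part of the commit) of the connect-probe
    # used by ServiceUtil.getUniqRandomPort: if nothing accepts the
    # connection, the port is assumed free.
    import random, socket

    _local_ports_used = {}

    def probe_free_port(host=None, low=50000, high=60000, retries=30):
      host = host or socket.gethostname()
      while retries > 0:
        port = random.randint(low, high)
        if port in _local_ports_used:
          retries -= 1
          continue
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
          s.connect((host, port))      # succeeded: something is listening
          s.close()
          retries -= 1
        except socket.error:
          _local_ports_used[port] = True
          return port
      raise ValueError("no free port found between %d and %d" % (low, high))

    print probe_free_port()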

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/service.py
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/service.py
------------------------------------------------------------------------------
    svn:executable = *

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/GridServices/service.py
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL

Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Hod/__init__.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Hod/__init__.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/Hod/__init__.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Hod/__init__.py Fri Jan  4 10:20:17 2008
@@ -0,0 +1,15 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Hod/__init__.py
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Hod/__init__.py
------------------------------------------------------------------------------
    svn:executable = *

Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Hod/__init__.py
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL


