Mailing-List: contact hadoop-commits-help@lucene.apache.org; run by ezmlm
Reply-To: hadoop-dev@lucene.apache.org
Subject: svn commit: r608950 [2/7] - in /lucene/hadoop/trunk/src: contrib/hod/ contrib/hod/bin/ contrib/hod/conf/ contrib/hod/hodlib/ contrib/hod/hodlib/AllocationManagers/ contrib/hod/hodlib/Common/ contrib/hod/hodlib/GridServices/ contrib/hod/hodlib/Hod/ cont...
Date: Fri, 04 Jan 2008 18:20:22 -0000
To: hadoop-commits@lucene.apache.org
From: nigel@apache.org
Message-Id: <20080104182030.C86BA1A983A@eris.apache.org>

Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/desc.py
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/desc.py?rev=608950&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/desc.py (added)
+++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/desc.py Fri Jan 4 10:20:17 2008
@@ -0,0 +1,335 @@
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements. See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership. The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License. You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+"""manage component descriptors""" +# -*- python -*- + +import random + +from sets import Set +from pprint import pformat +from hodlib.Common.util import local_fqdn +from hodlib.Common.tcp import tcpSocket, tcpError + +class Schema: + """the primary class for describing + schema's """ + STRING, LIST, MAP = range(3) + + def __init__(self, name, type = STRING, delim=','): + self.name = name + self.type = type + self.delim = delim + + def getName(self): + return self.name + + def getType(self): + return self.type + + def getDelim(self): + return self.delim + +class _Merger: + """A class to merge lists and add key/value + pairs to a dictionary""" + def mergeList(x, y, uniq=True): + l = [] + l.extend(x) + l.extend(y) + if not uniq: + return l + + s = Set(l) + l = list(s) + return l + + mergeList = staticmethod(mergeList) + + def mergeMap(to, add): + + for k in add: + to.setdefault(k, add[k]) + + return to + + mergeMap = staticmethod(mergeMap) + +class NodePoolDesc: + """a schema for describing + Nodepools""" + def __init__(self, dict): + self.dict = dict.copy() + + self.dict.setdefault('attrs', {}) + + self._checkRequired() + + def _checkRequired(self): + + if not 'id' in self.dict: + raise ValueError, "nodepool needs 'id'" + + if self.getPkgDir() == None: + raise ValueError, "nodepool %s needs 'pkgs'" % (self.getName()) + + def getName(self): + return self.dict['id'] + + def getPkgDir(self): + return self.dict['batch-home'] + + def getAttrs(self): + return self.dict['attrs'] + + def getSchema(): + schema = {} + + s = Schema('id') + schema[s.getName()] = s + + s = Schema('batch-home', Schema.LIST, ':') + schema[s.getName()] = s + + s = Schema('attrs', Schema.MAP) + schema[s.getName()] = s + + return schema + + getSchema = staticmethod(getSchema) + +class ServiceDesc: + """A schema for describing services""" + def __init__(self, dict): + self.dict = dict.copy() + + self.dict.setdefault('external', False) + self.dict.setdefault('attrs', {}) + self.dict.setdefault('java-opts', []) + self.dict.setdefault('envs', {}) + self.dict.setdefault('host',None) + self.dict.setdefault('port',None) + self.dict.setdefault('tar', None) + self.dict.setdefault('pkgs', '') + self.dict.setdefault('final-attrs', {}) + self._checkRequired() + self.__dict_update() + + def __dict_update(self): + getattr(self, "_%s" % self.dict['id'])() + + def _mapred(self): + if self.isExternal(): + self.dict['final-attrs']['mapred.job.tracker'] = "%s:%s" % (self.dict['host'], + self.dict['tracker_port']) + + # self.dict['final-attrs']['mapred.job.tracker.info.port'] = \ + # str(self.dict['info_port']) + # After Hadoop-2185 + self.dict['final-attrs']['mapred.job.tracker.http.bindAddress'] = \ + "%s:%s" %(self.dict['host'], self.dict['info_port']) + + if self.dict.has_key('hadoop-tar-ball'): + self.dict['tar'] = self.dict['hadoop-tar-ball'] + + def _hdfs(self): + if self.isExternal(): + self.dict['final-attrs']['fs.default.name'] = "%s:%s" % (self.dict['host'], + self.dict['fs_port']) + + # self.dict['final-attrs']['dfs.info.port'] = str(self.dict['info_port']) + # After Hadoop-2185 + self.dict['final-attrs']['dfs.http.bindAddress'] = "%s:%s" % \ + (self.dict['host'], self.dict['info_port']) + + if self.dict.has_key('hadoop-tar-ball'): + self.dict['tar'] = self.dict['hadoop-tar-ball'] + + def _checkRequired(self): + + if not 'id' in self.dict: + raise ValueError, "service description needs 'id'" + +# if len(self.getPkgDirs()) <= 0: +# raise ValueError, "service description %s needs 'pkgs'" % (self.getName()) + + def getName(self): + 
return self.dict['id'] + + def isExternal(self): + """True if the service is outside hod. + e.g. connect to existing HDFS""" + + return self.dict['external'] + + def getPkgDirs(self): + return self.dict['pkgs'] + + def getTar(self): + return self.dict['tar'] + + def getAttrs(self): + return self.dict['attrs'] + + def getfinalAttrs(self): + return self.dict['final-attrs'] + + def getJavaOpts(self): + return self.dict['java-opts'] + + def getEnvs(self): + return self.dict['envs'] + + def getSchema(): + schema = {} + + s = Schema('id') + schema[s.getName()] = s + + s = Schema('external') + schema[s.getName()] = s + + s = Schema('pkgs', Schema.LIST, ':') + schema[s.getName()] = s + + s = Schema('tar', Schema.LIST, ":") + schema[s.getName()] = s + + s = Schema('attrs', Schema.MAP) + schema[s.getName()] = s + + s = Schema('final-attrs', Schema.MAP) + schema[s.getName()] = s + + s = Schema('envs', Schema.MAP) + schema[s.getName()] = s + + s = Schema('java-opts', Schema.MAP) + schema[s.getName()] = s + + return schema + + getSchema = staticmethod(getSchema) + +class CommandDesc: + + def __init__(self, dict): + """a class for how a command is described""" + self.dict = dict + + def __repr__(self): + return pformat(self.dict) + + def _getName(self): + """return the name of the command to be run""" + return self.dict['name'] + + def _getProgram(self): + """return where the program is """ + return self.dict['program'] + + def _getArgv(self): + """return the arguments for the command to be run""" + return self.dict['argv'] + + def _getEnvs(self): + """return the environment in which the command is to be run""" + return self.dict['envs'] + + def _getJavaOpts(self): + return self.dict['java-opts'] + + def _getPkgDirs(self): + """return the packages for this command""" + return self.dict['pkgdirs'] + + def _getWorkDirs(self): + """return the working directories for this command""" + return self.dict['workdirs'] + + def _getAttrs(self): + """return the list of attributes for this command""" + return self.dict['attrs'] + + def _getfinalAttrs(self): + """return the final xml params list for this command""" + return self.dict['final-attrs'] + + def _getForeground(self): + """return if the command is to be run in foreground or not""" + return self.dict['fg'] + + def _getStdin(self): + return self.dict['stdin'] + + def toString(cmdDesc): + """return a string representation of this command""" + row = [] + row.append('name=%s' % (cmdDesc._getName())) + row.append('program=%s' % (cmdDesc._getProgram())) + row.append('pkgdirs=%s' % CommandDesc._csv(cmdDesc._getPkgDirs(), ':')) + + if 'argv' in cmdDesc.dict: + row.append('argv=%s' % CommandDesc._csv(cmdDesc._getArgv())) + + if 'envs' in cmdDesc.dict: + envs = cmdDesc._getEnvs() + list = [] + for k in envs: + v = envs[k] + list.append('%s=%s' % (k, v)) + row.append('envs=%s' % CommandDesc._csv(list)) + + if 'workdirs' in cmdDesc.dict: + row.append('workdirs=%s' % CommandDesc._csv(cmdDesc._getWorkDirs(), ':')) + + if 'attrs' in cmdDesc.dict: + attrs = cmdDesc._getAttrs() + list = [] + for k in attrs: + v = attrs[k] + list.append('%s=%s' % (k, v)) + row.append('attrs=%s' % CommandDesc._csv(list)) + + if 'final-attrs' in cmdDesc.dict: + fattrs = cmdDesc._getAttrs() + list = [] + for k in fattrs: + v = fattrs[k] + list.append('%s=%s' % (k, v)) + row.append('final-attrs=%s' % CommandDesc._cvs(list)) + + if 'fg' in cmdDesc.dict: + row.append('fg=%s' % (cmdDesc._getForeground())) + + if 'stdin' in cmdDesc.dict: + row.append('stdin=%s' % (cmdDesc._getStdin())) + + return 
CommandDesc._csv(row) + + toString = staticmethod(toString) + + def _csv(row, delim=','): + """return a string in csv format""" + import cStringIO + import csv + + queue = cStringIO.StringIO() + writer = csv.writer(queue, delimiter=delim, escapechar='\\', quoting=csv.QUOTE_NONE, + doublequote=False, lineterminator='\n') + writer.writerow(row) + return queue.getvalue().rstrip('\n') + + _csv = staticmethod(_csv) Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/desc.py ------------------------------------------------------------------------------ svn:eol-style = native Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/desc.py ------------------------------------------------------------------------------ svn:executable = * Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/desc.py ------------------------------------------------------------------------------ svn:keywords = Id Revision HeadURL Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/descGenerator.py URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/descGenerator.py?rev=608950&view=auto ============================================================================== --- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/descGenerator.py (added) +++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/descGenerator.py Fri Jan 4 10:20:17 2008 @@ -0,0 +1,72 @@ +#Licensed to the Apache Software Foundation (ASF) under one +#or more contributor license agreements. See the NOTICE file +#distributed with this work for additional information +#regarding copyright ownership. The ASF licenses this file +#to you under the Apache License, Version 2.0 (the +#"License"); you may not use this file except in compliance +#with the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. 
+"""manage hod configuration""" +# -*- python -*- + +import sys, csv, os +from optparse import Option, OptionParser +from xml.dom import minidom +from sets import Set +from select import select, poll, POLLIN + +from hodlib.Common.desc import * + +class DescGenerator: + """Contains the conversion to descriptors and other method calls + to config""" + def __init__(self, hodConfig): + """parse all the descriptors""" + + self.hodConfig = hodConfig + + def initializeDesc(self): + self.hodConfig['nodepooldesc'] = self.createNodePoolDesc() + self.hodConfig['servicedesc'] = self.createServiceDescDict() + + return self.hodConfig + + def getServices(self): + """get all the services from the config""" + + sdd = {} + for keys in self.hodConfig: + if keys.startswith('gridservice-'): + str = keys.split('-') + dict = self.hodConfig[keys] + if 'server-params' in dict: dict['attrs'] = dict['server-params'] + if 'final-server-params' in dict: dict['final-attrs'] = dict['final-server-params'] + dict['id'] = str[1] + desc = ServiceDesc(dict) + sdd[desc.getName()] = desc + + return sdd + + def createNodePoolDesc(self): + """ create a node pool descriptor and store + it in hodconfig""" + + desc = NodePoolDesc(self.hodConfig['resource_manager']) + return desc + + def createServiceDescDict(self): + """create a service descriptor for + all the services and store it in the + hodconfig""" + + sdd = self.getServices() + return sdd + + Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/descGenerator.py ------------------------------------------------------------------------------ svn:eol-style = native Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/descGenerator.py ------------------------------------------------------------------------------ svn:executable = * Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/descGenerator.py ------------------------------------------------------------------------------ svn:keywords = Id Revision HeadURL Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/hodsvc.py URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/hodsvc.py?rev=608950&view=auto ============================================================================== --- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/hodsvc.py (added) +++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/hodsvc.py Fri Jan 4 10:20:17 2008 @@ -0,0 +1,229 @@ +#Licensed to the Apache Software Foundation (ASF) under one +#or more contributor license agreements. See the NOTICE file +#distributed with this work for additional information +#regarding copyright ownership. The ASF licenses this file +#to you under the Apache License, Version 2.0 (the +#"License"); you may not use this file except in compliance +#with the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. 
+# $Id:setup.py 5158 2007-04-09 00:14:35Z zim $ +# +# Christopher Zimmerman - zim@yahoo-inc.com - 04/13/2007 +#------------------------------------------------------------------------------ +import os, time, shutil, xmlrpclib, socket, pprint + +from signal import * + +from hodlib.Common.logger import hodLog, hodDummyLogger +from hodlib.Common.socketServers import hodXMLRPCServer +from hodlib.Common.util import local_fqdn +from hodlib.Common.xmlrpc import hodXRClient + +class hodBaseService: + """hodBaseService class - This class provides service registration, logging, + and configuration access methods. It also provides an XML-RPC server. + This class should be extended to create hod services. Methods beginning + with _xr_method will automatically be added to instances of this class. + """ + def __init__(self, name, config, xrtype='threaded'): + """ Initialization requires a name string and a config object of type + hodlib.Common.setup.options or hodlib.Common.setup.config.""" + + self.name = name + self.hostname = local_fqdn() + self._cfg = config + self._xrc = None + self.logs = {} + self._baseLogger = None + self._serviceID = os.getenv('PBS_JOBID') + + self.__logDir = None + self.__svcrgy = None + self.__stop = False + self.__xrtype = xrtype + + self._init_logging() + + self._init_signals() + self._init_xrc_server() + + def __set_logging_level(self, level): + self.logs['main'].info("Setting log level to %s." % level) + for loggerName in self.loggers.keys(): + self.logs['main'].set_logger_level(loggerName, level) + + def __get_logging_level(self): + if self._cfg.has_key('stream'): + return self.loggers['main'].get_level('stream', 'main') + elif self._cfg.has_key('log-dir'): + return self.loggers['main'].get_level('file', 'main') + else: + return 0 + + def _xr_method_stop(self, *args): + """XML-RPC method, calls stop() on ourselves.""" + + return self.stop() + + def _xr_method_status(self, *args): + """XML-RPC method, calls status() on ourselves.""" + + return self.status() + + def _init_logging(self): + if self._cfg.has_key('debug'): + if self._cfg['debug'] > 0: + self._baseLogger = hodLog(self.name) + self.logs['main'] = self._baseLogger.add_logger('main') + + if self._cfg.has_key('stream'): + if self._cfg['stream']: + self._baseLogger.add_stream(level=self._cfg['debug'], + addToLoggerNames=('main',)) + + if self._cfg.has_key('log-dir'): + if self._serviceID: + self.__logDir = os.path.join(self._cfg['log-dir'], "%s.%s" % ( + self._cfg['userid'], self._serviceID)) + else: + self.__logDir = os.path.join(self._cfg['log-dir'], + self._cfg['userid']) + if not os.path.exists(self.__logDir): + os.mkdir(self.__logDir) + + self._baseLogger.add_file(logDirectory=self.__logDir, + level=self._cfg['debug'], addToLoggerNames=('main',)) + + if self._cfg.has_key('syslog-address'): + self._baseLogger.add_syslog(self._cfg['syslog-address'], + level=self._cfg['debug'], addToLoggerNames=('main',)) + + if not self.logs.has_key('main'): + self.logs['main'] = hodDummyLogger() + else: + self.logs['main'] = hodDummyLogger() + else: + self.logs['main'] = hodDummyLogger() + + def _init_signals(self): + def sigStop(sigNum, handler): + self.sig_wrapper(sigNum, self.stop) + + def toggleLevel(): + currentLevel = self.__get_logging_level() + if currentLevel == 4: + self.__set_logging_level(1) + else: + self.__set_logging_level(currentLevel + 1) + + def sigStop(sigNum, handler): + self._sig_wrapper(sigNum, self.stop) + + def sigDebug(sigNum, handler): + self.sig_wrapper(sigNum, toggleLevel) + + signal(SIGTERM, sigStop) 
+ signal(SIGQUIT, sigStop) + signal(SIGINT, sigStop) + signal(SIGUSR2, sigDebug) + + def _sig_wrapper(self, sigNum, handler, *args): + self.logs['main'].info("Caught signal %s." % sigNum) + + if args: + handler(args) + else: + handler() + + def _init_xrc_server(self): + host = None + ports = None + if self._cfg.has_key('xrs-address'): + (host, port) = (self._cfg['xrs-address'][0], self._cfg['xrs-address'][1]) + ports = (port,) + elif self._cfg.has_key('xrs-port-range'): + host = '' + ports = self._cfg['xrs-port-range'] + + if host != None: + if self.__xrtype == 'threaded': + self._xrc = hodXMLRPCServer(host, ports) + elif self.__xrtype == 'twisted': + try: + from socketServers import twistedXMLRPCServer + self._xrc = twistedXMLRPCServer(host, ports, self.logs['main']) + except ImportError: + self.logs['main'].error("Twisted XML-RPC server not available, " + + "falling back on threaded server.") + self._xrc = hodXMLRPCServer(host, ports) + for attr in dir(self): + if attr.startswith('_xr_method_'): + self._xrc.register_function(getattr(self, attr), + attr[11:]) + + self._xrc.register_introspection_functions() + + def _register_service(self, port=None, installSignalHandlers=1): + if self.__svcrgy: + self.logs['main'].info( + "Registering service with service registery %s... " % self.__svcrgy) + svcrgy = hodXRClient(self.__svcrgy, None, None, 0, 0, installSignalHandlers) + + if self._xrc and self._http: + svcrgy.registerService(self._cfg['userid'], self._serviceID, + self.hostname, self.name, 'hod', { + 'xrs' : "http://%s:%s" % ( + self._xrc.server_address[0], + self._xrc.server_address[1]),'http' : + "http://%s:%s" % (self._http.server_address[0], + self._http.server_address[1])}) + elif self._xrc: + svcrgy.registerService(self._cfg['userid'], self._serviceID, + self.hostname, self.name, 'hod', { + 'xrs' : "http://%s:%s" % ( + self._xrc.server_address[0], + self._xrc.server_address[1]),}) + elif self._http: + svcrgy.registerService(self._cfg['userid'], self._serviceID, + self.hostname, self.name, 'hod', {'http' : + "http://%s:%s" % (self._http.server_address[0], + self._http.server_address[1]),}) + else: + svcrgy.registerService(self._cfg['userid'], self._serviceID, + self.hostname, name, 'hod', {} ) + + def start(self): + """ Start XML-RPC server and register service.""" + + self.logs['main'].info("Starting HOD service: %s ..." % self.name) + + if self._xrc: self._xrc.serve_forever() + if self._cfg.has_key('register') and self._cfg['register']: + self._register_service() + + def stop(self): + """ Stop XML-RPC server, unregister service and set stop flag. 
""" + + self.logs['main'].info("Stopping service...") + if self._xrc: self._xrc.stop() + self.__stop = True + + return True + + def status(self): + """Returns true, should be overriden.""" + + return True + + def wait(self): + """Wait until stop method is called.""" + + while not self.__stop: + time.sleep(.1) Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/hodsvc.py ------------------------------------------------------------------------------ svn:eol-style = native Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/hodsvc.py ------------------------------------------------------------------------------ svn:executable = * Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/hodsvc.py ------------------------------------------------------------------------------ svn:keywords = Id Revision HeadURL Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/logger.py URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/logger.py?rev=608950&view=auto ============================================================================== --- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/logger.py (added) +++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/logger.py Fri Jan 4 10:20:17 2008 @@ -0,0 +1,740 @@ +#Licensed to the Apache Software Foundation (ASF) under one +#or more contributor license agreements. See the NOTICE file +#distributed with this work for additional information +#regarding copyright ownership. The ASF licenses this file +#to you under the Apache License, Version 2.0 (the +#"License"); you may not use this file except in compliance +#with the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. +"""hodLogger provides a customized interface to Python's core logging package. 
+""" + +import sys, os, re, logging, logging.handlers, inspect, pprint, types +from tcp import get_address_tuple + +fileFormatString = '[%(asctime)s] %(levelname)s/%(levelno)s \ +%(module)s:%(lineno)s - %(message)s' + +streamFormatString = '%(levelname)s - %(message)s' + +debugStreamFormatString = '[%(asctime)s] %(levelname)s/%(levelno)s \ +%(module)s:%(lineno)s - %(message)s' + +syslogFormatString = '(%(process)d) %(levelname)s/%(levelno)s \ +%(module)s:%(lineno)s - %(message)s' + +smtpFormatString = '[%(asctime)s] %(levelname)s/%(levelno)s \ +%(module)s:%(lineno)s\n\n%(message)s' + +fileFormater = logging.Formatter(fileFormatString) +streamFormater = logging.Formatter(streamFormatString) +debugStreamFormater = logging.Formatter(debugStreamFormatString) +syslogFormater = logging.Formatter(syslogFormatString) +smtpFormater = logging.Formatter(smtpFormatString) + +defaultFileLevel = 3 +defaultStreamLevel = 4 +defaultSyslogLevel = 3 +defaultSmtpLevel = 0 + +hodLogLevelMap = { 0 : logging.CRITICAL, + 1 : logging.ERROR, + 2 : logging.WARNING, + 3 : logging.INFO, + 4 : logging.DEBUG } + +hodStreamFormatMap = { 0 : streamFormater, + 1 : streamFormater, + 2 : streamFormater, + 3 : streamFormater, + 4 : debugStreamFormater } + +rehodLogLevelMap = {} +for key in hodLogLevelMap.keys(): + rehodLogLevelMap[hodLogLevelMap[key]] = key + + +reModule = re.compile("^(.*)\..*$") + +hodLogs = {} + +class hodRotatingFileHandler(logging.handlers.RotatingFileHandler): + """ This class needs to be used in place of RotatingFileHandler when + the 2.4.0 Python interpreter is used.""" + + def emit(self, record): + """ + Emit a record. + + If a formatter is specified, it is used to format the record. + The record is then written to the stream with a trailing newline + [N.B. this may be removed depending on feedback]. If exception + information is present, it is formatted using + traceback.print_exception and appended to the stream. + + ***** + + THIS IS A HACK, when instances of hodLogger get passed to the child of + a child thread for some reason self.stream gets closed. This version + of emit re-opens self.stream if it is closed. After testing it appears + that self.stream is only closed once after the second thread is + initialized so there is not performance penalty to this hack. This + problem only exists in python 2.4. + + ***** + """ + try: + if self.shouldRollover(record): + self.doRollover() + try: + msg = self.format(record) + fs = "%s\n" + if not hasattr(types, "UnicodeType"): #if no unicode support... + self.stream.write(fs % msg) + else: + try: + self.stream.write(fs % msg) + except UnicodeError: + self.stream.write(fs % msg.encode("UTF-8")) + except ValueError: + self.stream = open(self.baseFilename, self.mode) + self.stream.write(fs % msg) + + self.flush() + except: + self.handleError(record) + except: + self.handleError(record) + + def shouldRollover(self, record): + """ + Determine if rollover should occur. + + Basically, see if the supplied record would cause the file to exceed + the size limit we have. + + ***** + + THIS IS A HACK, when instances of hodLogger get passed to the child of + a child thread for some reason self.stream gets closed. This version + of emit re-opens self.stream if it is closed. After testing it appears + that self.stream is only closed once after the second thread is + initialized so there is not performance penalty to this hack. This + problem only exists in python 2.4. + + ***** + """ + if self.maxBytes > 0: # are we rolling over? 
+ msg = "%s\n" % self.format(record) + + try: + #due to non-posix-compliant Windows feature + self.stream.seek(0, 2) + except ValueError: + self.stream = open(self.baseFilename, self.mode) + self.stream.seek(0, 2) + + if self.stream.tell() + len(msg) >= self.maxBytes: + return 1 + return 0 + +class hodCustomLoggingLogger(logging.Logger): + """ Slight extension of the logging.Logger class used by the hodLog class. + """ + def findCaller(self): + """ findCaller() is supposed to return the callers file name and line + number of the caller. This was broken when the logging package was + wrapped by hodLog. We should return much more relevant info now. + """ + + callerModule = '' + callerLine = 0 + + currentModule = os.path.basename(__file__) + currentModule = reModule.sub("\g<1>", currentModule) + + frames = inspect.stack() + for i in range(len(frames)): + frameModule = os.path.basename(frames[i][1]) + frameModule = reModule.sub("\g<1>", frameModule) + if frameModule == currentModule: + previousFrameModule = os.path.basename(frames[i+1][1]) + previousFrameModule = reModule.sub("\g<1>", + previousFrameModule) + callerFile = frames[i+1][1] + callerLine = frames[i+1][2] + continue + + returnValues = (callerFile, callerLine) + if sys.version.startswith('2.4.4') or sys.version.startswith('2.5'): + returnValues = (callerFile, callerLine, None) + + return returnValues + +class hodLog: + """ Cluster management logging class. + + logging levels: 0 - log only critical messages + 1 - log critical and error messages + 2 - log critical, error, and warning messages + 3 - log critical, error, warning, and info messages + 4 - log critical, error, warning, info, and debug + messages""" + + def __init__(self, appName): + """Constructs a hodLogger object. + + appName - name of logging application, log filenames will be + prepended with this name""" + + self.__appName = appName + + # initialize a dictionary to hold loggerNames + self.__loggerNames = {} + + # initialize a dictionary to track log handlers and handler classes + self.__logObjs = { 'file' : {}, 'smtp' : {}, + 'syslog' : {}, 'strm' : {} } + + # use a custom logging.Logger class + logging.setLoggerClass(hodCustomLoggingLogger) + + # get the root app logger + self.__logger = logging.getLogger(appName) + self.__logger.setLevel(logging.DEBUG) + + hodLogs[self.__appName] = self + + def __attr__(self, attrname): + """loggerNames - list of defined logger names""" + + if attrname == "loggerNames": return self.__loggerNames.keys() + else: raise AttributeError, attrname + + def __repr__(self): + """Returns a string representation of a hodLog object of the form: + + LOG_NAME + file: FILENAME (level LEVEL) + smtp: SMTP_SERVER from FROM_ADDRESS (level LEVEL) + strm: STRM_OBJECT (level LEVEL) + ... 
""" + + hodLogString = "hodLog: %s\n\n" % self.__appName + for loggerName in self.__loggerNames.keys(): + hodLogString = "%s logger: %s\n" % (hodLogString, loggerName) + handlerClasses = self.__logObjs.keys() + handlerClasses.sort() + for handlerClass in handlerClasses: + try: + loggerLevelName = logging.getLevelName( + self.__logObjs[handlerClass][loggerName]['level']) + hodLogString = "%s %s: %s (level %s)\n" % ( + hodLogString, handlerClass, + self.__logObjs[handlerClass][loggerName]['data'], + loggerLevelName) + except: + hodLogString = "%s %s: none\n" % ( + hodLogString, handlerClass) + hodLogString = "%s\n" % hodLogString + + return hodLogString + + # 'private' method which adds handlers to self.__logObjs + def __add_to_handlers(self, handlerClass, loggerName, handler, data, + level): + self.__logObjs[handlerClass][loggerName] = {} + self.__logObjs[handlerClass][loggerName]['handler'] = handler + self.__logObjs[handlerClass][loggerName]['data'] = data + self.__logObjs[handlerClass][loggerName]['level'] = level + + # 'private' method which determines whether a hod log level is valid and + # returns a valid logging.Logger level + def __get_logging_level(self, level, defaultLevel): + loggingLevel = '' + try: + loggingLevel = hodLogLevelMap[int(level)] + except: + loggingLevel = hodLogLevelMap[defaultLevel] + + return loggingLevel + + # make a logging.logger name rootLogger.childLogger in our case the + # appName.componentName + def __get_logging_logger_name(self, loggerName): + return "%s.%s" % (self.__appName, loggerName) + + def add_logger(self, loggerName): + """Adds a logger of name loggerName. + + loggerName - name of component of a given application doing the + logging + + Returns a hodLogger object for the just added logger.""" + + try: + self.__loggerNames[loggerName] + except: + loggingLoggerName = self.__get_logging_logger_name(loggerName) + logging.getLogger(loggingLoggerName) + + self.__loggerNames[loggerName] = 1 + + return hodLogger(self.__appName, loggingLoggerName) + + def add_file(self, logDirectory, maxBytes=0, backupCount=0, + level=defaultFileLevel, addToLoggerNames=None): + """Adds a file handler to all defined loggers or a specified set of + loggers. Each log file will be located in logDirectory and have a + name of the form appName-loggerName.log. 
+ + logDirectory - logging directory + maxBytes - maximum log size to write in bytes before rotate + backupCount - number of rotated logs to keep + level - cluster management log level + addToLoggerNames - list of logger names to which stream handling + will be added""" + + def add_file_handler(loggerName): + if not self.__logObjs['file'].has_key(loggerName): + loggingLevel = self.__get_logging_level(level, + defaultFileLevel) + + logFile = os.path.join(logDirectory, "%s-%s.log" % ( + self.__appName, loggerName)) + + if sys.version.startswith('2.4'): + fileHandler = hodRotatingFileHandler(logFile, + maxBytes=maxBytes, backupCount=backupCount) + else: + fileHandler = logging.handlers.RotatingFileHandler(logFile, + maxBytes=maxBytes, backupCount=backupCount) + + fileHandler.setLevel(loggingLevel) + fileHandler.setFormatter(fileFormater) + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + aLogger = logging.getLogger(loggingLoggerName) + aLogger.addHandler(fileHandler) + + fileData = "%s" % logFile + self.__add_to_handlers('file', loggerName, fileHandler, + fileData, loggingLevel) + + if addToLoggerNames: + for loggerName in addToLoggerNames: + add_file_handler(loggerName) + else: + for loggerName in self.__loggerNames: + add_file_handler(loggerName) + + def add_stream(self, stream=sys.stderr, level=defaultStreamLevel, + addToLoggerNames=None): + """Adds a stream handler to all defined loggers or a specified set of + loggers. + + stream - a stream such as sys.stderr or sys.stdout + level - cluster management log level + addToLoggerNames - tupple of logger names to which stream handling + will be added""" + + def add_stream_handler(loggerName): + if not self.__logObjs['strm'].has_key(loggerName): + loggingLevel = self.__get_logging_level(level, + defaultStreamLevel) + + streamHandler = logging.StreamHandler(stream) + + streamHandler.setLevel(loggingLevel) + + streamHandler.setFormatter(hodStreamFormatMap[int(level)]) + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + aLogger = logging.getLogger(loggingLoggerName) + aLogger.addHandler(streamHandler) + + streamData = "%s" % stream + self.__add_to_handlers('strm', loggerName, streamHandler, + streamData, loggingLevel) + + if addToLoggerNames: + for loggerName in addToLoggerNames: + add_stream_handler(loggerName) + else: + for loggerName in self.__loggerNames: + add_stream_handler(loggerName) + + def add_syslog(self, address, level=defaultSyslogLevel, + addToLoggerNames=None): + def add_syslog_handler(loggerName): + if not self.__logObjs['syslog'].has_key(loggerName): + loggingLevel = self.__get_logging_level(level, + defaultStreamLevel) + + address[1] = int(address[1]) + syslogHandler = logging.handlers.SysLogHandler(tuple(address), + 9) + + syslogHandler.setLevel(loggingLevel) + + syslogHandler.setFormatter(syslogFormater) + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + aLogger = logging.getLogger(loggingLoggerName) + aLogger.addHandler(syslogHandler) + + syslogData = "%s:%s" % (address[0], address[1]) + self.__add_to_handlers('syslog', loggerName, syslogHandler, + syslogData, loggingLevel) + + if addToLoggerNames: + for loggerName in addToLoggerNames: + add_syslog_handler(loggerName) + else: + for loggerName in self.__loggerNames: + add_syslog_handler(loggerName) + + + def add_smtp(self, mailHost, fromAddress, toAddresses, + level=defaultSmtpLevel, addToLoggerNames=None): + """Adds an SMTP handler to all defined loggers or a specified set of + loggers. 
+ + mailHost - SMTP server to used when sending mail + fromAddress - email address to use as the from address when + sending mail + toAdresses - comma seperated list of email address to which + mail will be sent + level - cluster management log level + addToLoggerNames - tupple of logger names to which smtp handling + will be added""" + + def add_email_handler(loggerName): + if not self.__logObjs['smtp'].has_key(loggerName): + loggingLevel = self.__get_logging_level(level, + defaultSmtpLevel) + + subject = loggerName + if loggingLevel == 50: + subject = "%s - a critical error has occured." % subject + elif loggingLevel == 40: + subject = "%s - an error has occured." % subject + elif loggingLevel == 30: + subject = "%s - warning message." % subject + elif loggingLevel == 20: + subject = "%s - information message." % subject + elif loggingLevel == 10: + subject = "%s - debugging message." % subject + + mailHostTuple = get_address_tuple(mailHost) + emailHandler = logging.handlers.SMTPHandler(mailHostTuple, + fromAddress, toAddresses, subject) + + emailHandler.setFormatter(smtpFormater) + emailHandler.setLevel(loggingLevel) + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + aLogger = logging.getLogger(loggingLoggerName) + aLogger.addHandler(emailHandler) + + emailData = "%s from %s" % (mailHost, fromAddress) + self.__add_to_handlers('smtp', loggerName, emailHandler, + emailData, loggingLevel) + + if addToLoggerNames: + for loggerName in addToLoggerNames: + add_email_handler(loggerName) + else: + for loggerName in self.__loggerNames: + add_email_handler(loggerName) + + def status(self): + statusStruct = {} + for loggerName in self.__loggerNames.keys(): + statusStruct[loggerName] = [] + for handlerClass in self.__logObjs.keys(): + loggerDict = {} + try: + level = self.__logObjs[handlerClass][loggerName]['level'] + level = rehodLogLevelMap[level] + + loggerDict['handler'] = handlerClass + loggerDict['level'] = level + loggerDict['data'] = \ + self.__logObjs[handlerClass][loggerName]['data'] + except: + pass + else: + statusStruct[loggerName].append(loggerDict) + + return statusStruct + + def lock_handlers(self): + for handlerClass in self.__logObjs.keys(): + for loggerName in self.__logObjs[handlerClass].keys(): + self.__logObjs[handlerClass][loggerName]['handler'].acquire() + + def release_handlers(self): + for handlerClass in self.__logObjs.keys(): + for loggerName in self.__logObjs[handlerClass].keys(): + self.__logObjs[handlerClass][loggerName]['handler'].release() + + def get_level(self, handler, loggerName): + return rehodLogLevelMap[self.__logObjs[handler][loggerName]['level']] + + def set_level(self, handler, loggerName, level): + """Sets the logging level of a particular logger and logger handler. 
+ + handler - handler (smtp, file, or stream) + loggerName - logger to set level on + level - level to set logger + """ + + level = self.__get_logging_level(level, defaultFileLevel) + self.__logObjs[handler][loggerName]['handler'].setLevel(level) + self.__logObjs[handler][loggerName]['level'] = level + + if handler == 'stream': + self.__logObjs[handler][loggerName]['handler'].setFormatter( + hodStreamFormatMap[int(level)]) + + def set_logger_level(self, loggerName, level): + status = 0 + for handlerClass in self.__logObjs.keys(): + if self.__logObjs[handlerClass].has_key(loggerName): + self.set_level(handlerClass, loggerName, level) + else: + status = 1 + + return status + + def rollover(self, loggerName): + status = 0 + if self.__logObjs['file'].has_key(loggerName): + if self.__logObjs['file'][loggerName]['handler'].shouldRollover(): + self.__logObjs['file'][loggerName]['handler'].doRollover() + else: + status = 1 + + return status + + def set_max_bytes(self, maxBytes): + status = 0 + if self.__logObjs.has_key('file'): + for loggerName in self.__logObjs['file'].keys(): + self.__logObjs['file'][loggerName]['handler'].maxBytes = 0 + else: + status = 1 + + return status + + def get_logger(self, loggerName): + """ Returns a hodLogger object for a logger by name. """ + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + return hodLogger(self.__appName, loggingLoggerName) + + def critical(self, loggerName, msg): + """Logs a critical message and flushes log buffers. This method really + should only be called upon a catastrophic failure. + + loggerName - logger to use + msg - message to be logged""" + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + logger = logging.getLogger(loggingLoggerName) + logger.critical(msg) + self.flush() + + def error(self, loggerName, msg): + """Logs an error message and flushes log buffers. + + loggerName - logger to use + msg - message to be logged""" + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + logger = logging.getLogger(loggingLoggerName) + logger.error(msg) + self.flush() + + def warn(self, loggerName, msg): + """Logs a warning message. + + loggerName - logger to use + msg - message to be logged""" + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + logger = logging.getLogger(loggingLoggerName) + logger.warn(msg) + + def info(self, loggerName, msg): + """Logs an information message. + + loggerName - logger to use + msg - message to be logged""" + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + logger = logging.getLogger(loggingLoggerName) + logger.info(msg) + + def debug(self, loggerName, msg): + """Logs a debugging message. 
+ + loggerName - logger to use + msg - message to be logged""" + + loggingLoggerName = self.__get_logging_logger_name(loggerName) + logger = logging.getLogger(loggingLoggerName) + logger.debug(msg) + + def flush(self): + """Flush all log handlers.""" + + for handlerClass in self.__logObjs.keys(): + for loggerName in self.__logObjs[handlerClass].keys(): + self.__logObjs[handlerClass][loggerName]['handler'].flush() + + def shutdown(self): + """Shutdown all logging, flushing all buffers.""" + + for handlerClass in self.__logObjs.keys(): + for loggerName in self.__logObjs[handlerClass].keys(): + self.__logObjs[handlerClass][loggerName]['handler'].flush() + # Causes famous 'ValueError: I/O operation on closed file' + # self.__logObjs[handlerClass][loggerName]['handler'].close() + +class hodLogger: + """ Encapsulates a particular logger from a hodLog object. """ + def __init__(self, appName, loggingLoggerName): + """Constructs a hodLogger object (a particular logger in a hodLog + object). + + loggingLoggerName - name of a logger in hodLog object""" + + self.__appName = appName + self.__loggerName = loggingLoggerName + self.__logger = logging.getLogger(self.__loggerName) + + def __repr__(self): + """Returns a string representation of a hodComponentLog object.""" + + return "%s hodLog" % self.__loggerName + + def __call__(self): + pass + + def set_logger_level(self, loggerName, level): + + return hodLogs[self.__appName].set_logger_level(loggerName, level) + + def set_max_bytes(self, maxBytes): + + return hodLogs[self.__appName].set_max_bytes(maxBytes) + + def rollover(self): + return hodLogs[self.__appName].rollover(self.__loggerName) + + def get_level(self, handler, loggerName): + + return hodLogs[self.__appName].get_level(handler, loggerName) + + def critical(self, msg): + """Logs a critical message and calls sys.exit(1). + + msg - message to be logged""" + + self.__logger.critical(msg) + + def error(self, msg): + """Logs an error message. + + msg - message to be logged""" + + self.__logger.error(msg) + + def warn(self, msg): + """Logs a warning message. + + msg - message to be logged""" + + self.__logger.warn(msg) + + def info(self, msg): + """Logs an information message. + + msg - message to be logged""" + + self.__logger.info(msg) + + def debug(self, msg): + """Logs a debugging message. + + msg - message to be logged""" + + self.__logger.debug(msg) + +class hodDummyLogger: + """ Dummy hodLogger class. 
Other hod classes requiring a hodLogger default + to this hodLogger if no logger is passed.""" + + def __init__(self): + """pass""" + + pass + + def __repr__(self): + return "dummy hodLogger" + + def __call__(self): + """pass""" + + pass + + def set_logger_level(self, loggerName, level): + + return 0 + + def set_max_bytes(self, loggerName, maxBytes): + + return 0 + + def get_level(self, handler, loggerName): + + return 4 + + def rollover(self): + + return 0 + + def critical(self, msg): + """pass""" + + pass + + def error(self, msg): + """pass""" + + pass + + def warn(self, msg): + """pass""" + + pass + + def info(self, msg): + """pass""" + + pass + + def debug(self, msg): + """pass""" + + pass + Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/logger.py ------------------------------------------------------------------------------ svn:eol-style = native Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/logger.py ------------------------------------------------------------------------------ svn:executable = * Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/logger.py ------------------------------------------------------------------------------ svn:keywords = Id Revision HeadURL Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/miniHTMLParser.py URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/miniHTMLParser.py?rev=608950&view=auto ============================================================================== --- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/miniHTMLParser.py (added) +++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/miniHTMLParser.py Fri Jan 4 10:20:17 2008 @@ -0,0 +1,45 @@ +#Licensed to the Apache Software Foundation (ASF) under one +#or more contributor license agreements. See the NOTICE file +#distributed with this work for additional information +#regarding copyright ownership. The ASF licenses this file +#to you under the Apache License, Version 2.0 (the +#"License"); you may not use this file except in compliance +#with the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. 
+import urllib, urlparse, re + +from HTMLParser import HTMLParser + +class miniHTMLParser( HTMLParser ): + + viewedQueue = [] + instQueue = [] + + def setBaseUrl(self, url): + self.baseUrl = url + + def getNextLink( self ): + if self.instQueue == []: + return None + else: + return self.instQueue.pop(0) + + def handle_starttag( self, tag, attrs ): + if tag == 'a': + newstr = urlparse.urljoin(self.baseUrl, str(attrs[0][1])) + if re.search('mailto', newstr) != None: + return + + if (newstr in self.viewedQueue) == False: + self.instQueue.append( newstr ) + self.viewedQueue.append( newstr ) + + + Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/miniHTMLParser.py ------------------------------------------------------------------------------ svn:eol-style = native Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/miniHTMLParser.py ------------------------------------------------------------------------------ svn:executable = * Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/miniHTMLParser.py ------------------------------------------------------------------------------ svn:keywords = Id Revision HeadURL Added: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/nodepoolutil.py URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/nodepoolutil.py?rev=608950&view=auto ============================================================================== --- lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/nodepoolutil.py (added) +++ lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/nodepoolutil.py Fri Jan 4 10:20:17 2008 @@ -0,0 +1,26 @@ +#Licensed to the Apache Software Foundation (ASF) under one +#or more contributor license agreements. See the NOTICE file +#distributed with this work for additional information +#regarding copyright ownership. The ASF licenses this file +#to you under the Apache License, Version 2.0 (the +#"License"); you may not use this file except in compliance +#with the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. +from hodlib.NodePools.torque import TorquePool + +class NodePoolUtil: + def getNodePool(nodePoolDesc, cfg, log): + """returns a concrete instance of NodePool as configured by 'cfg'""" + npd = nodePoolDesc + name = npd.getName() + if name == 'torque': + return TorquePool(npd, cfg, log) + + getNodePool = staticmethod(getNodePool) Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/nodepoolutil.py ------------------------------------------------------------------------------ svn:eol-style = native Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/nodepoolutil.py ------------------------------------------------------------------------------ svn:executable = * Propchange: lucene/hadoop/trunk/src/contrib/hod/hodlib/Common/nodepoolutil.py ------------------------------------------------------------------------------ svn:keywords = Id Revision HeadURL
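
Editor's note: the following usage sketch is not part of r608950. It only illustrates how the descriptor classes added above in hodlib/Common/desc.py are meant to be driven; the host name, ports and directory paths are made-up placeholders, and it assumes a Python 2 interpreter with this revision's hodlib package on PYTHONPATH.

    # Usage sketch (not part of the commit): exercising NodePoolDesc,
    # ServiceDesc and CommandDesc from hodlib.Common.desc.
    from hodlib.Common.desc import NodePoolDesc, ServiceDesc, CommandDesc

    # A node pool descriptor requires 'id' and 'batch-home'.
    npd = NodePoolDesc({'id': 'torque', 'batch-home': '/usr/local/torque'})
    print npd.getName()      # prints: torque
    print npd.getPkgDir()    # prints: /usr/local/torque

    # A service descriptor for an external (pre-existing) JobTracker.
    # ServiceDesc dispatches on 'id', so it must be 'mapred' or 'hdfs'.
    mapred = ServiceDesc({'id': 'mapred',
                          'external': True,
                          'host': 'jt.example.com',   # placeholder host
                          'tracker_port': 54311,      # placeholder port
                          'info_port': 50030})        # placeholder port
    print mapred.isExternal()      # prints: True
    print mapred.getfinalAttrs()   # contains 'mapred.job.tracker' and
                                   # 'mapred.job.tracker.http.bindAddress'

    # A command descriptor rendered to the csv form used by the HOD services.
    cmd = CommandDesc({'name': 'jobtracker',
                       'program': 'bin/hadoop',
                       'pkgdirs': ['/grid/hadoop'],   # placeholder dir
                       'argv': ['jobtracker']})
    print CommandDesc.toString(cmd)
    # prints: name=jobtracker,program=bin/hadoop,pkgdirs=/grid/hadoop,argv=jobtracker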