incubator-allura-commits mailing list archives

From: tvansteenbu...@apache.org
Subject: [01/32] PEP8 cleanup
Date: Fri, 10 Jan 2014 18:19:25 GMT
Updated Branches:
  refs/heads/master 37ecc5e23 -> c93733acb
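
The diffs below are mechanical pycodestyle fixes: continuation lines aligned
with their opening delimiter (E127/E128), two blank lines between top-level
definitions (E302), spaces around assignment operators (E225), properly
spaced inline comments (E261/E262), and long lines wrapped under 80
characters (E501). A condensed, self-contained sketch of the recurring
patterns as they look after the cleanup (report() is a made-up name, not
Allura code; before the fix, the continuation line was under-indented and
the spacing rules were violated):

    import logging

    log = logging.getLogger(__name__)  # E261/E262: two spaces, then '# '
    s = None  # security token (E225: spaces around '=')


    def report(project_shortname):  # E302: two blank lines above
        log.info('Missing role for project "%s"' %
                 project_shortname)  # E127/E128: aligned with open paren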


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/scrub-allura-data.py
----------------------------------------------------------------------
diff --git a/scripts/scrub-allura-data.py b/scripts/scrub-allura-data.py
index 3b0918b..3864c42 100644
--- a/scripts/scrub-allura-data.py
+++ b/scripts/scrub-allura-data.py
@@ -37,7 +37,7 @@ def public(obj, project=None):
     role_anon = M.ProjectRole.by_name(name='*anonymous', project=project)
     if not role_anon:
         log.info('Missing *anonymous role for project "%s"' %
-                project.shortname)
+                 project.shortname)
         return False
     read = M.ACE.allow(role_anon._id, 'read')
     return read in obj.acl
@@ -72,7 +72,7 @@ def scrub_project(p, options):
             if ac.options.get('TicketMonitoringEmail'):
                 log.info('%s options.TicketMonitoringEmail from the %s/%s '
                          'tool on project "%s"' % (preamble, tool_name,
-                             mount_point, p.shortname))
+                                                   mount_point, p.shortname))
                 if not options.dry_run:
                     ac.options['TicketMonitoringEmail'] = None
             for tickets in utils.chunked_find(TM.Ticket, q):
@@ -86,7 +86,7 @@ def scrub_project(p, options):
             if counter > 0:
                 log.info('%s %s tickets from the %s/%s tool on '
                          'project "%s"' % (preamble, counter, tool_name,
-                             mount_point, p.shortname))
+                                           mount_point, p.shortname))
         elif tool_name == 'discussion':
             for forums in utils.chunked_find(DM.Forum, q):
                 for f in forums:
@@ -96,7 +96,7 @@ def scrub_project(p, options):
             if counter > 0:
                 log.info('%s %s forums from the %s/%s tool on '
                          'project "%s"' % (preamble, counter, tool_name,
-                             mount_point, p.shortname))
+                                           mount_point, p.shortname))
 
 
 def main(options):
@@ -121,25 +121,25 @@ def main(options):
             (preamble, M.User.query.find().count()))
     log.info('%s monitoring_email addresses from %s Forum documents' %
             (preamble, DM.Forum.query.find({"monitoring_email":
-                    {"$nin": [None, ""]}}).count()))
+                                            {"$nin": [None, ""]}}).count()))
 
     if not options.dry_run:
         M.EmailAddress.query.remove()
         M.User.query.update({}, {"$set": {"email_addresses": []}}, multi=True)
         DM.Forum.query.update({"monitoring_email": {"$nin": [None, ""]}},
-                {"$set": {"monitoring_email": None}}, multi=True)
+                              {"$set": {"monitoring_email": None}}, multi=True)
     return 0
 
 
 def parse_options():
     import argparse
     parser = argparse.ArgumentParser(
-            description='Removes private data from the Allura MongoDB.')
+        description='Removes private data from the Allura MongoDB.')
     parser.add_argument('--dry-run', dest='dry_run', default=False,
-            action='store_true',
-            help='Run in test mode (no updates will be applied).')
+                        action='store_true',
+                        help='Run in test mode (no updates will be applied).')
     parser.add_argument('--log', dest='log_level', default='INFO',
-            help='Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).')
+                        help='Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).')
     return parser.parse_args()
 
 if __name__ == '__main__':
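
The argparse re-indentation above is cosmetic; the script's options are
unchanged. A hypothetical invocation, assuming it is run through the same
paster wrapper that the teamforge-import script below mentions:

    paster script production.ini ../scripts/scrub-allura-data.py -- --dry-run --log DEBUG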

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/setup-scm-server.py
----------------------------------------------------------------------
diff --git a/scripts/setup-scm-server.py b/scripts/setup-scm-server.py
index 1daa2a0..050122c 100644
--- a/scripts/setup-scm-server.py
+++ b/scripts/setup-scm-server.py
@@ -22,6 +22,7 @@ from ConfigParser import ConfigParser, NoOptionError
 
 config = ConfigParser()
 
+
 def main():
     config.read('.setup-scm-cache')
     if not config.has_section('scm'):
@@ -45,15 +46,18 @@ def get_value(key, default):
         value = config.get('scm', key)
     except NoOptionError:
         value = raw_input('%s? [%s]' % key, default)
-        if not value: value = default
+        if not value:
+            value = default
         config.set('scm', key, value)
     return value
 
+
 def run(command):
     rc = os.system(command)
     assert rc == 0
     return rc
 
+
 def add_ldif(template, **values):
     fd, name = mkstemp()
     os.write(fd, template.substitute(values))
@@ -61,7 +65,7 @@ def add_ldif(template, **values):
     run('ldapadd -Y EXTERNAL -H ldapi:/// -f %s' % name)
     os.remove(name)
 
-backend_ldif=string.Template('''
+backend_ldif = string.Template('''
 # Load dynamic backend modules
 dn: cn=module,cn=config
 objectClass: olcModuleList
@@ -92,7 +96,7 @@ olcAccess: to * by dn="cn=admin,$domain" write by * read
 
 ''')
 
-frontend_ldif=string.Template('''
+frontend_ldif = string.Template('''
 # Create top-level object in domain
 dn: $domain
 objectClass: top
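
For context: backend_ldif and frontend_ldif are string.Template objects, and
add_ldif() fills in their $-placeholders before handing the result to
ldapadd. A minimal sketch of that substitution step, with a made-up domain
value:

    from string import Template

    frontend = Template('dn: $domain\nobjectClass: top\n')
    # substitute() raises KeyError if any placeholder is left unfilled
    print(frontend.substitute(domain='dc=example,dc=com'))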

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/teamforge-import.py
----------------------------------------------------------------------
diff --git a/scripts/teamforge-import.py b/scripts/teamforge-import.py
index f86b063..9f601a0 100644
--- a/scripts/teamforge-import.py
+++ b/scripts/teamforge-import.py
@@ -53,20 +53,21 @@ http://www.open.collab.net/nonav/community/cif/csfe/50/javadoc/index.html?com/co
 '''
 
 options = None
-s = None # security token
-client = None # main api client
+s = None  # security token
+client = None  # main api client
 users = {}
 
 cj = CookieJar()
 loggedInOpener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
 
+
 def make_client(api_url, app):
     return Client(api_url + app + '?wsdl', location=api_url + app)
 
 
 def main():
     global options, s, client, users
-    defaults=dict(
+    defaults = dict(
         api_url=None,
         attachment_url='/sf/%s/do/%s/',
         default_wiki_text='PRODUCT NAME HERE',
@@ -86,7 +87,7 @@ def main():
         config = ConfigParser()
         config.read(options.config_file)
         defaults.update(
-            (k, eval(v)) for k,v in config.items('teamforge-import'))
+            (k, eval(v)) for k, v in config.items('teamforge-import'))
         optparser = get_parser(defaults)
         options, project_ids = optparser.parse_args()
 
@@ -99,12 +100,16 @@ def main():
         client = make_client(options.api_url, 'CollabNet')
         api_v = client.service.getApiVersion()
         if not api_v.startswith('5.4.'):
-            log.warning('Unexpected API Version %s.  May not work correctly.' % api_v)
+            log.warning('Unexpected API Version %s.  May not work correctly.' %
+                        api_v)
 
-        s = client.service.login(options.username, options.password or getpass('Password: '))
+        s = client.service.login(
+            options.username, options.password or getpass('Password: '))
         teamforge_v = client.service.getVersion(s)
         if not teamforge_v.startswith('5.4.'):
-            log.warning('Unexpected TeamForge Version %s.  May not work correctly.' % teamforge_v)
+            log.warning(
+                'Unexpected TeamForge Version %s.  May not work correctly.' %
+                teamforge_v)
 
     if options.load:
         if not options.neighborhood:
@@ -114,7 +119,8 @@ def main():
             nbhd = M.Neighborhood.query.get(name=options.neighborhood)
         except:
             log.exception('error querying mongo')
-            log.error('This should be run as "paster script production.ini ../scripts/teamforge-import.py -- ...options.."')
+            log.error(
+                'This should be run as "paster script production.ini ../scripts/teamforge-import.py -- ...options.."')
             return
         assert nbhd
 
@@ -135,7 +141,8 @@ def main():
         if options.extract:
             try:
                 project = client.service.getProjectData(s, pid)
-                log.info('Project: %s %s %s' % (project.id, project.title, project.path))
+                log.info('Project: %s %s %s' %
+                         (project.id, project.title, project.path))
                 out_dir = os.path.join(options.output_dir, project.id)
                 if not os.path.exists(out_dir):
                     os.mkdir(out_dir)
@@ -159,13 +166,15 @@ def main():
             except:
                 log.exception('Error creating %s' % pid)
 
+
 def load_users():
     ''' load the users data from file, if it hasn't been already '''
     global users
     user_filename = os.path.join(options.output_dir, 'users.json')
     if not users and os.path.exists(user_filename):
         with open(user_filename) as user_file:
-            users = json.load(user_file, object_hook=Object) # Object for attribute access
+            # Object for attribute access
+            users = json.load(user_file, object_hook=Object)
 
 
 def save_user(usernames):
@@ -179,7 +188,8 @@ def save_user(usernames):
             user_data = client.service.getUserData(s, username)
             users[username] = Object(user_data)
             if users[username].status != 'Active':
-                log.warn('user: %s status: %s' % (username, users[username].status))
+                log.warn('user: %s status: %s' %
+                         (username, users[username].status))
 
 
 def get_project(project):
@@ -187,7 +197,7 @@ def get_project(project):
     cats = make_client(options.api_url, 'CategorizationApp')
 
     data = client.service.getProjectData(s, project.id)
-    access_level = { 1: 'public', 4: 'private', 3: 'gated community'}[
+    access_level = {1: 'public', 4: 'private', 3: 'gated community'}[
         client.service.getProjectAccessLevel(s, project.id)
     ]
     admins = client.service.listProjectAdmins(s, project.id).dataRows
@@ -195,14 +205,14 @@ def get_project(project):
     groups = client.service.getProjectGroupList(s, project.id).dataRows
     categories = cats.service.getProjectCategories(s, project.id).dataRows
     save(json.dumps(dict(
-            data = dict(data),
-            access_level = access_level,
-            admins = map(dict, admins),
-            members = map(dict, members),
-            groups = map(dict, groups),
-            categories = map(dict, categories),
-        ), default=str),
-        project, project.id+'.json')
+        data=dict(data),
+        access_level=access_level,
+        admins=map(dict, admins),
+        members=map(dict, members),
+        groups=map(dict, groups),
+        categories=map(dict, categories),
+    ), default=str),
+        project, project.id + '.json')
 
     if len(groups):
         log.warn('Project has groups %s' % groups)
@@ -216,6 +226,7 @@ def get_project(project):
     save_user(u.userName for u in admins)
     save_user(u.userName for u in members)
 
+
 def get_user(orig_username):
     'returns an allura User object'
     sf_username = make_valid_sf_username(orig_username)
@@ -239,7 +250,7 @@ def get_user(orig_username):
                    email=user.email.lower().encode('utf-8'),
                    realname=user.fullName.encode('utf-8'),
                    status='A' if user.status == 'Active' else 'D',
-                   language=275, # english trove id
+                   language=275,  # english trove id
                    timezone=user.timeZone,
                    user_pw=''.join(random.sample(string.printable, 32)),
                    unix_pw=''.join(random.sample(string.printable, 32)),
@@ -247,11 +258,15 @@ def get_user(orig_username):
                    mail_siteupdates=0,
                    add_date=int(time.time()),
                    )
-        user_id = sqlalchemy.select([T.users.c.user_id], T.users.c.user_name==sf_username).execute().fetchone().user_id
+        user_id = sqlalchemy.select(
+            [T.users.c.user_id], T.users.c.user_name == sf_username).execute().fetchone().user_id
         npref = T.user_preferences.insert()
-        npref.execute(user_id=user_id, preference_name='country', preference_value='US')
-        npref.execute(user_id=user_id, preference_name='opt_research', preference_value=0)
-        npref.execute(user_id=user_id, preference_name='opt_thirdparty', preference_value=0)
+        npref.execute(user_id=user_id, preference_name='country',
+                      preference_value='US')
+        npref.execute(user_id=user_id,
+                      preference_name='opt_research', preference_value=0)
+        npref.execute(user_id=user_id,
+                      preference_name='opt_thirdparty', preference_value=0)
 
         new_audit = T.audit_trail_user.insert()
         new_audit.execute(
@@ -267,10 +282,11 @@ def get_user(orig_username):
     assert u
     return u
 
+
 def convert_project_shortname(teamforge_path):
     'convert from TeamForge to SF, and validate early'
     tf_shortname = teamforge_path.split('.')[-1]
-    sf_shortname = tf_shortname.replace('_','-')
+    sf_shortname = tf_shortname.replace('_', '-')
 
     # FIXME hardcoded translations
     sf_shortname = {
@@ -281,46 +297,51 @@ def convert_project_shortname(teamforge_path):
     }.get(sf_shortname, sf_shortname)
 
     if not 3 <= len(sf_shortname) <= 15:
-        raise ValueError('Project name length must be between 3 & 15, inclusive: %s (%s)' %
-                         (sf_shortname, len(sf_shortname)))
+        raise ValueError(
+            'Project name length must be between 3 & 15, inclusive: %s (%s)' %
+            (sf_shortname, len(sf_shortname)))
     return sf_shortname
 
 
 # FIXME hardcoded
 skip_perms_usernames = set([
-    'faisal_saeed','dsarkisian','debonairamit','nishanthiremath','Bhuvnesh','bluetooth','cnkurzke','makow2','jannes1','Joel_Hegberg','Farroc','brian_chen','eirikur',
-    'dmitry_flyorov','bipingm','MornayJo','ibv','b_weisshaar','k9srb','johnmmills','a_gomolitsky','filim','kapoor','ljzegers','jrukes','dwilson9','jlin','quickie',
-    'johnbell','nnikolenko','Gaetan','Giannetta','Katia','jackhan','jacobwangus','adwankar','dinobrusco','qbarnes','ilmojung','clifford_chan','nbaig','fhutchi1',
-    'rinofarina','baiyanbin','muralidhar','duanyiruo','bredding','mkolkey','manvith','nanduk','engyihan','deepsie','dabon','dino_jiang','mattrose','peter_j_wilhelm',
-    'emx2500','jmcguire','lfilimowski','guruppandit','abhilashisme','edwinhm','rabbi','ferrans','guna','kevin_robinson','adathiruthi','kochen','onehap','kalanithi',
-    'jamesn','obu001','chetanv','Avinash','HugoBoss','Han_Wei','mhooper','g16872','mfcarignano','jim_burke','kevin','arunkarra','adam_feng','pavan_scm','kostya_katz',
-    'ppazderka','eileenzhuang','pyammine','judyho','ashoykh','rdemento','ibrahim','min_wang','arvind_setlur','moorthy_karthik','daniel_nelson','dms','esnmurthy',
-    'rasa_bonyadlou','prashantjoshi','edkeating','billsaez','cambalindo','jims','bozkoyun','andry_deltsov','bpowers','manuel_milli','maryparsons','spriporov','yutianli',
-    'xiebin','tnemeth1','udayaps','zzzzuser','timberger','sbarve1','zarman','rwallace67','thangavelu_arum','yuhuaixie','tingup','sekchai','sasanplus','rupal','sebastien_hertz',
-    'sab8123','rony_lim','slava_kirillin','smwest','wendydu_yq','sco002','RonFred','spatnala','vd','Sunny','tthompson','sunijams','slaw','rodovich','zhangqingqi82','venki',
-    'yuntaom','xiaojin','walterciocosta','straus','Thomas','stupka','wangyu','yaowang','wisekb','tyler_louie','smartgarfield','shekar_mahalingam',
-    'venkata_akella','v_yellapragada','vavasthi','rpatel','zhengfang','sweetybala','vap','sergey','ymhuang','spatel78745'
+    'faisal_saeed', 'dsarkisian', 'debonairamit', 'nishanthiremath', 'Bhuvnesh', 'bluetooth', 'cnkurzke', 'makow2', 'jannes1', 'Joel_Hegberg', 'Farroc', 'brian_chen', 'eirikur',
+    'dmitry_flyorov', 'bipingm', 'MornayJo', 'ibv', 'b_weisshaar', 'k9srb', 'johnmmills', 'a_gomolitsky', 'filim', 'kapoor', 'ljzegers', 'jrukes', 'dwilson9', 'jlin', 'quickie',
+    'johnbell', 'nnikolenko', 'Gaetan', 'Giannetta', 'Katia', 'jackhan', 'jacobwangus', 'adwankar', 'dinobrusco', 'qbarnes', 'ilmojung', 'clifford_chan', 'nbaig', 'fhutchi1',
+    'rinofarina', 'baiyanbin', 'muralidhar', 'duanyiruo', 'bredding', 'mkolkey', 'manvith', 'nanduk', 'engyihan', 'deepsie', 'dabon', 'dino_jiang', 'mattrose', 'peter_j_wilhelm',
+    'emx2500', 'jmcguire', 'lfilimowski', 'guruppandit', 'abhilashisme', 'edwinhm', 'rabbi', 'ferrans', 'guna', 'kevin_robinson', 'adathiruthi', 'kochen', 'onehap', 'kalanithi',
+    'jamesn', 'obu001', 'chetanv', 'Avinash', 'HugoBoss', 'Han_Wei', 'mhooper', 'g16872', 'mfcarignano', 'jim_burke', 'kevin', 'arunkarra', 'adam_feng', 'pavan_scm', 'kostya_katz',
+    'ppazderka', 'eileenzhuang', 'pyammine', 'judyho', 'ashoykh', 'rdemento', 'ibrahim', 'min_wang', 'arvind_setlur', 'moorthy_karthik', 'daniel_nelson', 'dms', 'esnmurthy',
+    'rasa_bonyadlou', 'prashantjoshi', 'edkeating', 'billsaez', 'cambalindo', 'jims', 'bozkoyun', 'andry_deltsov', 'bpowers', 'manuel_milli', 'maryparsons', 'spriporov', 'yutianli',
+    'xiebin', 'tnemeth1', 'udayaps', 'zzzzuser', 'timberger', 'sbarve1', 'zarman', 'rwallace67', 'thangavelu_arum', 'yuhuaixie', 'tingup', 'sekchai', 'sasanplus', 'rupal', 'sebastien_hertz',
+    'sab8123', 'rony_lim', 'slava_kirillin', 'smwest', 'wendydu_yq', 'sco002', 'RonFred', 'spatnala', 'vd', 'Sunny', 'tthompson', 'sunijams', 'slaw', 'rodovich', 'zhangqingqi82', 'venki',
+    'yuntaom', 'xiaojin', 'walterciocosta', 'straus', 'Thomas', 'stupka', 'wangyu', 'yaowang', 'wisekb', 'tyler_louie', 'smartgarfield', 'shekar_mahalingam',
+    'venkata_akella', 'v_yellapragada', 'vavasthi', 'rpatel', 'zhengfang', 'sweetybala', 'vap', 'sergey', 'ymhuang', 'spatel78745'
 ])
 
+
 def create_project(pid, nbhd):
     M.session.artifact_orm_session._get().skip_mod_date = True
-    data = loadjson(pid, pid+'.json')
-    #pprint(data)
+    data = loadjson(pid, pid + '.json')
+    # pprint(data)
     log.info('Loading: %s %s %s' % (pid, data.data.title, data.data.path))
     shortname = convert_project_shortname(data.data.path)
 
-    project = M.Project.query.get(shortname=shortname, neighborhood_id=nbhd._id)
+    project = M.Project.query.get(
+        shortname=shortname, neighborhood_id=nbhd._id)
     if not project:
         private = (data.access_level == 'private')
         log.debug('Creating %s private=%s' % (shortname, private))
-        one_admin = [u.userName for u in data.admins if u.status == 'Active'][0]
+        one_admin = [
+            u.userName for u in data.admins if u.status == 'Active'][0]
         project = nbhd.register_project(shortname,
                                         get_user(one_admin),
                                         project_name=data.data.title,
                                         private_project=private)
     project.notifications_disabled = True
     project.short_description = data.data.description
-    project.last_updated = datetime.strptime(data.data.lastModifiedDate, '%Y-%m-%d %H:%M:%S')
+    project.last_updated = datetime.strptime(
+        data.data.lastModifiedDate, '%Y-%m-%d %H:%M:%S')
     M.main_orm_session.flush(project)
     # TODO: push last_updated to gutenberg?
     # TODO: try to set createdDate?
@@ -335,7 +356,7 @@ def create_project(pid, nbhd):
         user = get_user(admin.userName)
         c.user = user
         pr = M.ProjectRole.by_user(user, project=project, upsert=True)
-        pr.roles = [ role_admin._id ]
+        pr.roles = [role_admin._id]
         ThreadLocalORMSession.flush_all()
     role_developer = M.ProjectRole.by_name('Developer', project)
     for member in data.members:
@@ -346,19 +367,20 @@ def create_project(pid, nbhd):
             continue
         user = get_user(member.userName)
         pr = M.ProjectRole.by_user(user, project=project, upsert=True)
-        pr.roles = [ role_developer._id ]
+        pr.roles = [role_developer._id]
         ThreadLocalORMSession.flush_all()
-    project.labels = [cat.path.split('projects/categorization.root.')[1] for cat in data.categories]
+    project.labels = [cat.path.split('projects/categorization.root.')[1]
+                      for cat in data.categories]
     icon_file = 'emsignia-MOBILITY-red.png'
     if 'nsn' in project.labels or 'msi' in project.labels:
         icon_file = 'emsignia-SOLUTIONS-blue.gif'
     if project.icon:
         M.ProjectFile.remove(dict(project_id=project._id, category='icon'))
-    with open(os.path.join('..','scripts',icon_file)) as fp:
+    with open(os.path.join('..', 'scripts', icon_file)) as fp:
         M.ProjectFile.save_image(
             icon_file, fp, content_type=utils.guess_mime_type(icon_file),
-            square=True, thumbnail_size=(48,48),
-            thumbnail_meta=dict(project_id=project._id,category='icon'))
+            square=True, thumbnail_size=(48, 48),
+            thumbnail_meta=dict(project_id=project._id, category='icon'))
     ThreadLocalORMSession.flush_all()
 
     dirs = os.listdir(os.path.join(options.output_dir, pid))
@@ -378,11 +400,14 @@ def create_project(pid, nbhd):
     ThreadLocalORMSession.flush_all()
     return project
 
+
 def import_wiki(project, pid, nbhd):
     from forgewiki import model as WM
+
     def upload_attachments(page, pid, beginning):
         dirpath = os.path.join(options.output_dir, pid, 'wiki', beginning)
-        if not os.path.exists(dirpath): return
+        if not os.path.exists(dirpath):
+            return
         files = os.listdir(dirpath)
         for f in files:
             with open(os.path.join(options.output_dir, pid, 'wiki', beginning, f)) as fp:
@@ -437,11 +462,13 @@ def import_wiki(project, pid, nbhd):
                 page_data = loadjson(pid, 'wiki', page)
                 content = load(pid, 'wiki', markdown_file)
                 if page == 'HomePage.json':
-                    globals = WM.Globals.query.get(app_config_id=wiki_app.config._id)
+                    globals = WM.Globals.query.get(
+                        app_config_id=wiki_app.config._id)
                     if globals is not None:
                         globals.root = page_data.title
                     else:
-                        globals = WM.Globals(app_config_id=wiki_app.config._id, root=page_data.title)
+                        globals = WM.Globals(
+                            app_config_id=wiki_app.config._id, root=page_data.title)
                 p = WM.Page.upsert(page_data.title)
                 p.viewable_by = ['all']
                 p.text = wiki2markdown(content)
@@ -451,6 +478,7 @@ def import_wiki(project, pid, nbhd):
                     p.commit()
     ThreadLocalORMSession.flush_all()
 
+
 def import_discussion(project, pid, frs_mapping, sf_project_shortname, nbhd):
     from forgediscussion import model as DM
     discuss_app = project.app_instance('discussion')
@@ -471,21 +499,25 @@ def import_discussion(project, pid, frs_mapping, sf_project_shortname, nbhd):
         M.ACE.allow(role_admin, 'configure'),
         M.ACE.allow(role_admin, 'admin')]
     ThreadLocalORMSession.flush_all()
-    DM.Forum.query.remove(dict(app_config_id=discuss_app.config._id,shortname='general'))
+    DM.Forum.query.remove(
+        dict(app_config_id=discuss_app.config._id, shortname='general'))
     forums = os.listdir(os.path.join(options.output_dir, pid, 'forum'))
     for forum in forums:
         ending = forum[-5:]
         forum_name = forum[:-5]
         if '.json' == ending and forum_name in forums:
             forum_data = loadjson(pid, 'forum', forum)
-            fo = DM.Forum.query.get(shortname=forum_name, app_config_id=discuss_app.config._id)
+            fo = DM.Forum.query.get(
+                shortname=forum_name, app_config_id=discuss_app.config._id)
             if not fo:
-                fo = DM.Forum(app_config_id=discuss_app.config._id, shortname=forum_name)
+                fo = DM.Forum(app_config_id=discuss_app.config._id,
+                              shortname=forum_name)
             fo.name = forum_data.title
             fo.description = forum_data.description
             fo_num_topics = 0
             fo_num_posts = 0
-            topics = os.listdir(os.path.join(options.output_dir, pid, 'forum', forum_name))
+            topics = os.listdir(os.path.join(options.output_dir, pid, 'forum',
+                                forum_name))
             for topic in topics:
                 ending = topic[-5:]
                 topic_name = topic[:-5]
@@ -508,37 +540,47 @@ def import_discussion(project, pid, frs_mapping, sf_project_shortname, nbhd):
                             discussion_id=fo._id,
                             import_id=topic_data.id,
                             app_config_id=discuss_app.config._id)
-                    to.import_id=topic_data.id
+                    to.import_id = topic_data.id
                     to_num_replies = 0
                     oldest_post = None
                     newest_post = None
-                    posts = sorted(os.listdir(os.path.join(options.output_dir, pid, 'forum', forum_name, topic_name)))
+                    posts = sorted(
+                        os.listdir(os.path.join(options.output_dir, pid, 'forum', forum_name, topic_name)))
                     for post in posts:
                         ending = post[-5:]
                         post_name = post[:-5]
                         if '.json' == ending:
                             to_num_replies += 1
-                            post_data = loadjson(pid, 'forum', forum_name, topic_name, post)
+                            post_data = loadjson(pid, 'forum',
+                                                 forum_name, topic_name, post)
                             p = DM.ForumPost.query.get(
-                                _id='%s%s@import' % (post_name,str(discuss_app.config._id)),
+                                _id='%s%s@import' % (
+                                    post_name, str(discuss_app.config._id)),
                                 thread_id=to._id,
                                 discussion_id=fo._id,
                                 app_config_id=discuss_app.config._id)
 
                             if not p:
                                 p = DM.ForumPost(
-                                    _id='%s%s@import' % (post_name,str(discuss_app.config._id)),
+                                    _id='%s%s@import' % (
+                                        post_name, str(
+                                            discuss_app.config._id)),
                                     thread_id=to._id,
                                     discussion_id=fo._id,
                                     app_config_id=discuss_app.config._id)
-                            create_date = datetime.strptime(post_data.createdDate, '%Y-%m-%d %H:%M:%S')
+                            create_date = datetime.strptime(
+                                post_data.createdDate, '%Y-%m-%d %H:%M:%S')
                             p.timestamp = create_date
-                            p.author_id = str(get_user(post_data.createdByUserName)._id)
-                            p.text = convert_post_content(frs_mapping, sf_project_shortname, post_data.content, nbhd)
+                            p.author_id = str(
+                                get_user(post_data.createdByUserName)._id)
+                            p.text = convert_post_content(
+                                frs_mapping, sf_project_shortname, post_data.content, nbhd)
                             p.status = 'ok'
                             if post_data.replyToId:
-                                p.parent_id = '%s%s@import' % (post_data.replyToId,str(discuss_app.config._id))
-                            slug, full_slug = p.make_slugs(parent = p.parent, timestamp = create_date)
+                                p.parent_id = '%s%s@import' % (
+                                    post_data.replyToId, str(discuss_app.config._id))
+                            slug, full_slug = p.make_slugs(
+                                parent=p.parent, timestamp=create_date)
                             p.slug = slug
                             p.full_slug = full_slug
                             if oldest_post == None or oldest_post.timestamp > create_date:
@@ -555,6 +597,7 @@ def import_discussion(project, pid, frs_mapping, sf_project_shortname, nbhd):
             fo.num_posts = fo_num_posts
             ThreadLocalORMSession.flush_all()
 
+
 def import_news(project, pid, frs_mapping, sf_project_shortname, nbhd):
     from forgeblog import model as BM
     posts = os.listdir(os.path.join(options.output_dir, pid, 'news'))
@@ -567,7 +610,8 @@ def import_news(project, pid, frs_mapping, sf_project_shortname, nbhd):
         for post in posts:
             if '.json' == post[-5:]:
                 post_data = loadjson(pid, 'news', post)
-                create_date = datetime.strptime(post_data.createdOn, '%Y-%m-%d %H:%M:%S')
+                create_date = datetime.strptime(
+                    post_data.createdOn, '%Y-%m-%d %H:%M:%S')
                 p = BM.BlogPost.query.get(title=post_data.title,
                                           timestamp=create_date,
                                           app_config_id=news_app.config._id)
@@ -575,7 +619,8 @@ def import_news(project, pid, frs_mapping, sf_project_shortname, nbhd):
                     p = BM.BlogPost(title=post_data.title,
                                     timestamp=create_date,
                                     app_config_id=news_app.config._id)
-                p.text = convert_post_content(frs_mapping, sf_project_shortname, post_data.body, nbhd)
+                p.text = convert_post_content(
+                    frs_mapping, sf_project_shortname, post_data.body, nbhd)
                 p.mod_date = create_date
                 p.state = 'published'
                 if not p.slug:
@@ -584,15 +629,16 @@ def import_news(project, pid, frs_mapping, sf_project_shortname, nbhd):
                     p.commit()
                     ThreadLocalORMSession.flush_all()
                     M.Thread.new(discussion_id=p.app_config.discussion_id,
-                           ref_id=p.index_id(),
-                           subject='%s discussion' % p.title)
+                                 ref_id=p.index_id(),
+                                 subject='%s discussion' % p.title)
                 user = get_user(post_data.createdByUsername)
-                p.history().first().author=dict(
+                p.history().first().author = dict(
                     id=user._id,
                     username=user.username,
                     display_name=user.get_pref('display_name'))
                 ThreadLocalORMSession.flush_all()
 
+
 def check_unsupported_tools(project):
     docs = make_client(options.api_url, 'DocumentApp')
     doc_count = 0
@@ -601,21 +647,28 @@ def check_unsupported_tools(project):
             continue
         doc_count += 1
     if doc_count:
-        log.warn('Migrating documents is not supported, but found %s docs' % doc_count)
+        log.warn('Migrating documents is not supported, but found %s docs' %
+                 doc_count)
 
     scm = make_client(options.api_url, 'ScmApp')
     for repo in scm.service.getRepositoryList(s, project.id).dataRows:
-        log.warn('Migrating SCM repos is not supported, but found %s' % repo.repositoryPath)
+        log.warn('Migrating SCM repos is not supported, but found %s' %
+                 repo.repositoryPath)
 
     tasks = make_client(options.api_url, 'TaskApp')
-    task_count = len(tasks.service.getTaskList(s, project.id, filters=None).dataRows)
+    task_count = len(
+        tasks.service.getTaskList(s, project.id, filters=None).dataRows)
     if task_count:
-        log.warn('Migrating tasks is not supported, but found %s tasks' % task_count)
+        log.warn('Migrating tasks is not supported, but found %s tasks' %
+                 task_count)
 
     tracker = make_client(options.api_url, 'TrackerApp')
-    tracker_count = len(tracker.service.getArtifactList(s, project.id, filters=None).dataRows)
+    tracker_count = len(
+        tracker.service.getArtifactList(s, project.id, filters=None).dataRows)
     if tracker_count:
-        log.warn('Migrating trackers is not supported, but found %s tracker artifacts' % task_count)
+        log.warn(
+            'Migrating trackers is not supported, but found %s tracker artifacts' %
+            task_count)
 
 
 def load(project_id, *paths):
@@ -624,10 +677,12 @@ def load(project_id, *paths):
         content = input.read()
     return unicode(content, 'utf-8')
 
+
 def loadjson(*args):
     # Object for attribute access
     return json.loads(load(*args), object_hook=Object)
 
+
 def save(content, project, *paths):
     out_file = os.path.join(options.output_dir, project.id, *paths)
     if not os.path.exists(os.path.dirname(out_file)):
@@ -635,6 +690,7 @@ def save(content, project, *paths):
     with open(out_file, 'w') as out:
         out.write(content.encode('utf-8'))
 
+
 def download_file(tool, url_path, *filepaths):
     if tool == 'wiki':
         action = 'viewAttachment'
@@ -676,6 +732,8 @@ h1 = re.compile(r'^!!!', re.MULTILINE)
 h2 = re.compile(r'^!!', re.MULTILINE)
 h3 = re.compile(r'^!', re.MULTILINE)
 re_stats = re.compile(r'#+ .* [Ss]tatistics\n+(.*\[sf:.*?Statistics\].*)+')
+
+
 def wiki2markdown(markup):
     '''
     Partial implementation of http://help.collab.net/index.jsp?topic=/teamforge520/reference/wiki-wikisyntax.html
@@ -707,6 +765,8 @@ def wiki2markdown(markup):
     return markup
 
 re_rel = re.compile(r'\b(rel\d+)\b')
+
+
 def convert_post_content(frs_mapping, sf_project_shortname, text, nbhd):
     def rel_handler(matchobj):
         relno = matchobj.group(1)
@@ -728,6 +788,7 @@ def find_image_references(markup):
         if ext in ('jpg', 'gif', 'png'):
             yield snippet
 
+
 def get_news(project):
     '''
     Extracts news posts
@@ -737,9 +798,11 @@ def get_news(project):
     # find the forums
     posts = app.service.getNewsPostList(s, project.id)
     for post in posts.dataRows:
-        save(json.dumps(dict(post), default=str), project, 'news', post.id+'.json')
+        save(json.dumps(dict(post), default=str),
+             project, 'news', post.id + '.json')
         save_user(post.createdByUsername)
 
+
 def get_discussion(project):
     '''
     Extracts discussion forums and posts
@@ -751,15 +814,18 @@ def get_discussion(project):
     for forum in forums.dataRows:
         forumname = forum.path.split('.')[-1]
         log.info('Retrieving data for forum: %s' % forumname)
-        save(json.dumps(dict(forum), default=str), project, 'forum', forumname+'.json')
+        save(json.dumps(dict(forum), default=str), project, 'forum',
+             forumname + '.json')
         # topic in this forum
         topics = app.service.getTopicList(s, forum.id)
         for topic in topics.dataRows:
-            save(json.dumps(dict(topic), default=str), project, 'forum', forumname, topic.id+'.json')
+            save(json.dumps(dict(topic), default=str), project, 'forum',
+                 forumname, topic.id + '.json')
             # posts in this topic
             posts = app.service.getPostList(s, topic.id)
             for post in posts.dataRows:
-                save(json.dumps(dict(post), default=str), project, 'forum', forumname, topic.id, post.id+'.json')
+                save(json.dumps(dict(post), default=str), project, 'forum',
+                     forumname, topic.id, post.id + '.json')
                 save_user(post.createdByUserName)
 
 
@@ -774,7 +840,8 @@ def get_homepage_wiki(project):
     for wiki_page in wiki_pages.dataRows:
         wiki_page = wiki.service.getWikiPageData(s, wiki_page.id)
         pagename = wiki_page.path.split('/')[-1]
-        save(json.dumps(dict(wiki_page), default=str), project, 'wiki', pagename+'.json')
+        save(json.dumps(dict(wiki_page), default=str),
+             project, 'wiki', pagename + '.json')
         if not wiki_page.wikiText:
             log.debug('skip blank wiki page %s' % wiki_page.path)
             continue
@@ -802,20 +869,23 @@ def get_homepage_wiki(project):
                 img_url = img_ref
             else:
                 img_url = project.path + '/wiki/' + img_ref
-            download_file('wiki', img_url, project.id, 'wiki', 'homepage', filename)
+            download_file('wiki', img_url, project.id,
+                          'wiki', 'homepage', filename)
 
     for path, text in pages.iteritems():
         if options.default_wiki_text in text:
             log.debug('skipping default wiki page %s' % path)
         else:
-            save(text, project, 'wiki', path+'.markdown')
+            save(text, project, 'wiki', path + '.markdown')
             for img_ref in find_image_references(text):
                 filename = img_ref.split('/')[-1]
                 if '://' in img_ref:
                     img_url = img_ref
                 else:
                     img_url = project.path + '/wiki/' + img_ref
-                download_file('wiki', img_url, project.id, 'wiki', path, filename)
+                download_file('wiki', img_url, project.id,
+                              'wiki', path, filename)
+
 
 def _dir_sql(created_on, project, dir_name, rel_path):
     assert options.neighborhood_shortname
@@ -834,35 +904,40 @@ def _dir_sql(created_on, project, dir_name, rel_path):
     """ % (created_on, convert_project_shortname(project.path), options.neighborhood_shortname, dir_name, parent_directory)
     return sql
 
+
 def get_files(project):
     frs = make_client(options.api_url, 'FrsApp')
-    valid_pfs_filename = re.compile(r'(?![. ])[-_ +.,=#~@!()\[\]a-zA-Z0-9]+(?<! )$')
-    pfs_output_dir = os.path.join(os.path.abspath(options.output_dir), 'PFS', convert_project_shortname(project.path))
+    valid_pfs_filename = re.compile(
+        r'(?![. ])[-_ +.,=#~@!()\[\]a-zA-Z0-9]+(?<! )$')
+    pfs_output_dir = os.path.join(
+        os.path.abspath(options.output_dir), 'PFS', convert_project_shortname(project.path))
     sql_updates = ''
 
     def handle_path(obj, prev_path):
-        path_component = obj.title.strip().replace('/', ' ').replace('&','').replace(':','')
+        path_component = obj.title.strip().replace(
+            '/', ' ').replace('&', '').replace(':', '')
         path = os.path.join(prev_path, path_component)
         if not valid_pfs_filename.match(path_component):
             log.error('Invalid filename: "%s"' % path)
         save(json.dumps(dict(obj), default=str),
-            project, 'frs', path+'.json')
+             project, 'frs', path + '.json')
         return path
 
     frs_mapping = {}
 
     for pkg in frs.service.getPackageList(s, project.id).dataRows:
         pkg_path = handle_path(pkg, '')
-        pkg_details = frs.service.getPackageData(s, pkg.id) # download count
+        pkg_details = frs.service.getPackageData(s, pkg.id)  # download count
         save(json.dumps(dict(pkg_details), default=str),
-             project, 'frs', pkg_path+'_details.json')
+             project, 'frs', pkg_path + '_details.json')
 
         for rel in frs.service.getReleaseList(s, pkg.id).dataRows:
             rel_path = handle_path(rel, pkg_path)
             frs_mapping[rel['id']] = rel_path
-            rel_details = frs.service.getReleaseData(s, rel.id) # download count
+            # download count
+            rel_details = frs.service.getReleaseData(s, rel.id)
             save(json.dumps(dict(rel_details), default=str),
-                 project, 'frs', rel_path+'_details.json')
+                 project, 'frs', rel_path + '_details.json')
 
             for file in frs.service.getFrsFileList(s, rel.id).dataRows:
                 details = frs.service.getFrsFileData(s, file.id)
@@ -875,19 +950,23 @@ def get_files(project):
                                 default=str),
                      project,
                      'frs',
-                     file_path+'.json'
+                     file_path + '.json'
                      )
                 if not options.skip_frs_download:
-                    download_file('frs', rel.path + '/' + file.id, pfs_output_dir, file_path)
+                    download_file('frs', rel.path + '/' + file.id,
+                                  pfs_output_dir, file_path)
                     mtime = int(mktime(details.lastModifiedDate.timetuple()))
-                    os.utime(os.path.join(pfs_output_dir, file_path), (mtime, mtime))
+                    os.utime(os.path.join(pfs_output_dir, file_path),
+                             (mtime, mtime))
 
             # releases
             created_on = int(mktime(rel.createdOn.timetuple()))
             mtime = int(mktime(rel.lastModifiedOn.timetuple()))
             if os.path.exists(os.path.join(pfs_output_dir, rel_path)):
-                os.utime(os.path.join(pfs_output_dir, rel_path), (mtime, mtime))
-            sql_updates += _dir_sql(created_on, project, rel.title.strip(), pkg_path)
+                os.utime(os.path.join(pfs_output_dir, rel_path),
+                         (mtime, mtime))
+            sql_updates += _dir_sql(created_on, project,
+                                    rel.title.strip(), pkg_path)
         # packages
         created_on = int(mktime(pkg.createdOn.timetuple()))
         mtime = int(mktime(pkg.lastModifiedOn.timetuple()))
@@ -922,10 +1001,10 @@ def get_parser(defaults):
     optparser.add_option(
         '--api-url', dest='api_url', help='e.g. https://hostname/ce-soap50/services/')
     optparser.add_option(
-            '--attachment-url', dest='attachment_url')
+        '--attachment-url', dest='attachment_url')
     optparser.add_option(
-            '--default-wiki-text', dest='default_wiki_text',
-            help='used in determining if a wiki page text is default or changed')
+        '--default-wiki-text', dest='default_wiki_text',
+        help='used in determining if a wiki page text is default or changed')
     optparser.add_option(
         '-u', '--username', dest='username')
     optparser.add_option(
@@ -954,24 +1033,26 @@ def get_parser(defaults):
     return optparser
 
 re_username = re.compile(r"^[a-z\-0-9]+$")
+
+
 def make_valid_sf_username(orig_username):
-    sf_username = orig_username.replace('_','-').lower()
+    sf_username = orig_username.replace('_', '-').lower()
 
     # FIXME username translation is hardcoded here:
     sf_username = dict(
-        rlevy = 'ramilevy',
-        mkeisler = 'mkeisler',
-        bthale = 'bthale',
-        mmuller = 'mattjustmull',
-        MalcolmDwyer = 'slagheap',
-        tjyang = 'tjyang',
-        manaic = 'maniac76',
-        srinid = 'cnudav',
-        es = 'est016',
-        david_peyer = 'david-mmi',
-        okruse = 'ottokruse',
-        jvp = 'jvpmoto',
-        dmorelli = 'dmorelli',
+        rlevy='ramilevy',
+        mkeisler='mkeisler',
+        bthale='bthale',
+        mmuller='mattjustmull',
+        MalcolmDwyer='slagheap',
+        tjyang='tjyang',
+        manaic='maniac76',
+        srinid='cnudav',
+        es='est016',
+        david_peyer='david-mmi',
+        okruse='ottokruse',
+        jvp='jvpmoto',
+        dmorelli='dmorelli',
     ).get(sf_username, sf_username + '-mmi')
 
     if not re_username.match(sf_username):
@@ -982,7 +1063,7 @@ def make_valid_sf_username(orig_username):
                   sf_username, adjusted_username)
         sf_username = adjusted_username
     if len(sf_username) > 15:
-        adjusted_username = sf_username[0:15-4] + '-mmi'
+        adjusted_username = sf_username[0:15 - 4] + '-mmi'
         log.error('invalid sf_username length: %s   Changing it to %s',
                   sf_username, adjusted_username)
         sf_username = adjusted_username
@@ -993,19 +1074,21 @@ if __name__ == '__main__':
     log.setLevel(logging.DEBUG)
     main()
 
+
 def test_make_valid_sf_username():
     tests = {
         # basic
-        'foo':'foo-mmi',
+        'foo': 'foo-mmi',
         # lookup
-        'rlevy':'ramilevy',
+        'rlevy': 'ramilevy',
         # too long
         'u012345678901234567890': 'u0123456789-mmi',
         'foo^213': 'foo213-mmi'
-        }
-    for k,v in tests.iteritems():
+    }
+    for k, v in tests.iteritems():
         assert make_valid_sf_username(k) == v
 
+
 def test_convert_post_content():
     nbhd = Object()
     nbhd.url_prefix = '/motorola/'
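
The test table above pins down make_valid_sf_username(): underscores become
hyphens, names in the hardcoded lookup map directly, everything else gets an
'-mmi' suffix, invalid characters are stripped, and over-long results are
truncated so the suffix still fits within 15 characters. The same cases,
restated as plain assertions:

    assert make_valid_sf_username('foo') == 'foo-mmi'             # basic
    assert make_valid_sf_username('rlevy') == 'ramilevy'          # lookup
    assert make_valid_sf_username('foo^213') == 'foo213-mmi'      # bad chars
    assert make_valid_sf_username('u012345678901234567890') == \
        'u0123456789-mmi'                                         # too long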

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/test-branches-against-tickets.py
----------------------------------------------------------------------
diff --git a/scripts/test-branches-against-tickets.py b/scripts/test-branches-against-tickets.py
index 1905880..a742f03 100755
--- a/scripts/test-branches-against-tickets.py
+++ b/scripts/test-branches-against-tickets.py
@@ -44,12 +44,18 @@ def match_ticket_branches(target_dir=None):
 
     git('remote prune origin')
 
-    branches_for_tickets = dict() # maps ticket numbers to the actual branch e.g., int(42) -> 'origin/rc/42'
-    ticket_nums = dict() # maps ticket numbers to 'merged' or 'unmerged' according to the matching branch
-    commit_diffs = dict() # maps ticket numbers to differences in (number of) commit messages
-
-    merged_branches = [ branch[2:] for branch in git('branch -r --merged dev') if re_ticket_branch.match(branch) ]
-    unmerged_branches = [ branch[2:] for branch in git('branch -r --no-merged dev') if re_ticket_branch.match(branch) ]
+    # maps ticket numbers to the actual branch e.g., int(42) -> 'origin/rc/42'
+    branches_for_tickets = dict()
+    # maps ticket numbers to 'merged' or 'unmerged' according to the matching
+    # branch
+    ticket_nums = dict()
+    # maps ticket numbers to differences in (number of) commit messages
+    commit_diffs = dict()
+
+    merged_branches = [branch[2:]
+                       for branch in git('branch -r --merged dev') if re_ticket_branch.match(branch)]
+    unmerged_branches = [branch[2:]
+                         for branch in git('branch -r --no-merged dev') if re_ticket_branch.match(branch)]
 
     for branch in merged_branches:
         tn = int(re_ticket_branch.match(branch).group(1))
@@ -65,16 +71,18 @@ def match_ticket_branches(target_dir=None):
             ticket_nums[tn] = 'merged'
         else:
             branch_commits = git('log --oneline dev..%s' % branch)
-            # count the number of commits on dev since this branch that contain the ticket #
+            # count the number of commits on dev since this branch that contain
+            # the ticket #
             merge_base = git('merge-base', 'dev', branch)[0]
-            matching_dev_commits = git('log --oneline --grep="\[#%s\]" %s..dev' % (tn, merge_base))
+            matching_dev_commits = git(
+                'log --oneline --grep="\[#%s\]" %s..dev' % (tn, merge_base))
 
             if len(matching_dev_commits) >= len(branch_commits):
                 ticket_nums[tn] = 'merged'
             else:
                 ticket_nums[tn] = 'unmerged'
                 commit_diffs[tn] = '\t' + '\n\t'.join(['Branch has:'] + branch_commits +
-                                                 ['Dev has:'] + matching_dev_commits)
+                                                      ['Dev has:'] + matching_dev_commits)
 
     failure = False
 
@@ -82,18 +90,22 @@ def match_ticket_branches(target_dir=None):
     oauth_client = make_oauth_client()
 
     for tn in ticket_nums:
-        resp = oauth_client.request('http://sourceforge.net/rest/p/allura/tickets/%s/' % tn)
+        resp = oauth_client.request(
+            'http://sourceforge.net/rest/p/allura/tickets/%s/' % tn)
         #assert resp[0]['status'] == '200', (resp, tn)
         if resp[0]['status'] != '200':
             continue
         ticket = json.loads(resp[1])['ticket']
         if ticket is None:
             continue
-        is_closed = ticket['status'] in ('closed', 'validation', 'wont-fix', 'invalid')
+        is_closed = ticket['status'] in (
+            'closed', 'validation', 'wont-fix', 'invalid')
         is_merged = ticket_nums[tn] == 'merged'
 
         if is_closed != is_merged:
-            print('<http://sourceforge.net/p/allura/tickets/%s/> is status:"%s", but the branch "%s" is %s' % (tn, ticket['status'], branches_for_tickets[tn], ticket_nums[tn]))
+            print(
+                '<http://sourceforge.net/p/allura/tickets/%s/> is status:"%s", but the branch "%s" is %s' %
+                (tn, ticket['status'], branches_for_tickets[tn], ticket_nums[tn]))
             if tn in commit_diffs:
                 print(commit_diffs[tn])
             failure = True
@@ -117,7 +129,8 @@ def make_oauth_client():
     REQUEST_TOKEN_URL = 'http://sourceforge.net/rest/oauth/request_token'
     AUTHORIZE_URL = 'https://sourceforge.net/rest/oauth/authorize'
     ACCESS_TOKEN_URL = 'http://sourceforge.net/rest/oauth/access_token'
-    oauth_key = option('re', 'oauth_key', 'Forge API OAuth Key (https://sourceforge.net/auth/oauth/): ')
+    oauth_key = option('re', 'oauth_key',
+                       'Forge API OAuth Key (https://sourceforge.net/auth/oauth/): ')
     oauth_secret = option('re', 'oauth_secret', 'Forge API Oauth Secret: ')
     consumer = oauth.Consumer(oauth_key, oauth_secret)
 
@@ -130,7 +143,8 @@ def make_oauth_client():
         assert resp['status'] == '200', resp
 
         request_token = dict(urlparse.parse_qsl(content))
-        pin_url = "%s?oauth_token=%s" % (AUTHORIZE_URL, request_token['oauth_token'])
+        pin_url = "%s?oauth_token=%s" % (
+            AUTHORIZE_URL, request_token['oauth_token'])
         if getattr(webbrowser.get(), 'name', '') == 'links':
             # sandboxes
             print("Go to %s" % pin_url)
@@ -138,7 +152,8 @@ def make_oauth_client():
             webbrowser.open(pin_url)
         oauth_verifier = raw_input('What is the PIN? ')
 
-        token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
+        token = oauth.Token(
+            request_token['oauth_token'], request_token['oauth_token_secret'])
         token.set_verifier(oauth_verifier)
         client = oauth.Client(consumer, token)
         resp, content = client.request(ACCESS_TOKEN_URL, "GET")
@@ -154,17 +169,18 @@ def make_oauth_client():
 
 
 def git(*args, **kw):
-    if len(args)==1 and isinstance(args[0], basestring):
+    if len(args) == 1 and isinstance(args[0], basestring):
         argv = shlex.split(args[0])
     else:
         argv = list(args)
     if argv[0] != 'git':
         argv.insert(0, 'git')
-    p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    p = subprocess.Popen(argv, stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
     p.wait()
     output = p.stdout.readlines()
     if kw.get('strip_eol', True):
-        output = [ line.rstrip('\n') for line in output ]
+        output = [line.rstrip('\n') for line in output]
     return output
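
The git() helper above accepts either a single shell-style string (split
with shlex) or pre-split arguments, prepends 'git' when missing, and returns
stdout as a list of (by default EOL-stripped) lines. Both call styles appear
earlier in this script:

    merged = git('branch -r --merged dev')      # one string, shlex-split
    base = git('merge-base', 'dev', branch)[0]  # already-split arguments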
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/tracker-rip.py
----------------------------------------------------------------------
diff --git a/scripts/tracker-rip.py b/scripts/tracker-rip.py
index 719aaa6..aeef1eb 100755
--- a/scripts/tracker-rip.py
+++ b/scripts/tracker-rip.py
@@ -23,13 +23,13 @@ from urlparse import urljoin
 
 from allura.lib import rest_api
 
-SRC_CRED=dict(
-        api_key='c03efc6cca1cf78be9e9',
-        secret_key='575eda2f25f6490d8cfe5d02f2506c010112894d0ea10660e43157a87a7e620c61ac06397b028af1',
-        http_username=raw_input('LDAP username: '),
-        http_password=getpass.getpass('LDAP password: '))
-SRC_SERVER='https://newforge.sf.geek.net/'
-SRC_TOOL='/rest/p/forge/tickets/'
+SRC_CRED = dict(
+    api_key='c03efc6cca1cf78be9e9',
+    secret_key='575eda2f25f6490d8cfe5d02f2506c010112894d0ea10660e43157a87a7e620c61ac06397b028af1',
+    http_username=raw_input('LDAP username: '),
+    http_password=getpass.getpass('LDAP password: '))
+SRC_SERVER = 'https://newforge.sf.geek.net/'
+SRC_TOOL = '/rest/p/forge/tickets/'
 
 # Credentials for sf-overlords
 # DST_CRED=dict(
@@ -37,17 +37,17 @@ SRC_TOOL='/rest/p/forge/tickets/'
 #     secret_key='fcc48a0c31459e99a88cc42cdd7f908fad78b283ca30a86caac1ab65036ff71fc195a18e56534dc5')
 # DST_SERVER='http://sourceforge.net/'
 # DST_TOOL='/rest/p/allura/tickets/'
-DST_CRED=dict(
+DST_CRED = dict(
     api_key='aa7244645424513d9636',
     secret_key='cd1d97be98497f7b615b297aa2061177ddf6d42b95a8484193f84690486694234dbf817efc3b2d6e')
-DST_SERVER='http://localhost:8080/'
-DST_TOOL='/rest/p/test/bugs/'
+DST_SERVER = 'http://localhost:8080/'
+DST_TOOL = '/rest/p/test/bugs/'
 
-FAKE_TICKET={
+FAKE_TICKET = {
     u'created_date': u'2010-03-08 17:29:42.802000',
     u'assigned_to_id': u'',
     u'assigned_to': u'',
-    u'custom_fields': {'_component':'', '_size':0, '_priority':'', '_type':''},
+    u'custom_fields': {'_component': '', '_size': 0, '_priority': '', '_type': ''},
     u'description': u'Ticket was not present in source',
     u'milestone': u'',
     u'reported_by': u'',
@@ -57,6 +57,7 @@ FAKE_TICKET={
     u'summary': u'Placeholder ticket',
     u'super_id': u'None'}
 
+
 def main():
     src_cli = rest_api.RestClient(
         base_uri=SRC_SERVER,
@@ -81,6 +82,7 @@ def main():
             print '... migrate post %s:\n%r' % (post['slug'], post['text'])
             dst.create_post(dst_thread, post, slug_map)
 
+
 class TicketAPI(object):
 
     def __init__(self, client, path):
@@ -95,29 +97,34 @@ class TicketAPI(object):
         cur_ticket = min_ticket
         while True:
             if check and cur_ticket not in valid_tickets:
-                if cur_ticket > max_valid_ticket: break
+                if cur_ticket > max_valid_ticket:
+                    break
                 yield dict(FAKE_TICKET, ticket_num=cur_ticket)
                 cur_ticket += 1
                 continue
-            ticket = self.client.request('GET', self.ticket_path(cur_ticket))['ticket']
-            if ticket is None: break
+            ticket = self.client.request(
+                'GET', self.ticket_path(cur_ticket))['ticket']
+            if ticket is None:
+                break
             yield ticket
             cur_ticket += 1
-            if max_ticket and cur_ticket > max_ticket: break
+            if max_ticket and cur_ticket > max_ticket:
+                break
 
     def load_thread(self, ticket):
-        discussion = self.client.request('GET', self.discussion_path())['discussion']
+        discussion = self.client.request(
+            'GET', self.discussion_path())['discussion']
         for thd in discussion['threads']:
             if thd['subject'].startswith('#%d ' % ticket['ticket_num']):
                 break
         else:
             return None
         thread = self.client.request(
-            'GET',self.thread_path(thd['_id']))['thread']
+            'GET', self.thread_path(thd['_id']))['thread']
         return thread
 
     def iter_posts(self, thread):
-        for p in sorted(thread['posts'], key=lambda p:p['slug']):
+        for p in sorted(thread['posts'], key=lambda p: p['slug']):
             post = self.client.request(
                 'GET', self.post_path(thread['_id'], p['slug']))['post']
             yield post
@@ -140,7 +147,8 @@ class TicketAPI(object):
             ticket['milestone'] = ''
         if ticket['status'] not in 'open in-progress code-review validation closed'.split():
             ticket['status'] = 'open'
-        r = self.client.request('POST', self.new_ticket_path(), ticket_form=ticket)
+        r = self.client.request(
+            'POST', self.new_ticket_path(), ticket_form=ticket)
         self.client.request(
             'POST', self.ticket_path(r['ticket']['ticket_num'], 'save'),
             ticket_form=ticket)
@@ -175,17 +183,20 @@ class TicketAPI(object):
     def post_path(self, thread_id, post_slug, suffix=''):
         return '%s_discuss/thread/%s/%s/%s' % (self.path, thread_id, post_slug, suffix)
 
-def pm(etype, value, tb): # pragma no cover
-    import pdb, traceback
+
+def pm(etype, value, tb):  # pragma no cover
+    import pdb
+    import traceback
     try:
-        from IPython.ipapi import make_session; make_session()
+        from IPython.ipapi import make_session
+        make_session()
         from IPython.Debugger import Pdb
         sys.stderr.write('Entering post-mortem IPDB shell\n')
         p = Pdb(color_scheme='Linux')
         p.reset()
         p.setup(None, tb)
         p.print_stack_trace()
-        sys.stderr.write('%s: %s\n' % ( etype, value))
+        sys.stderr.write('%s: %s\n' % (etype, value))
         p.cmdloop()
         p.forget()
         # p.interaction(None, tb)

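The pm() hook at the end of the hunk above is the usual sys.excepthook
post-mortem pattern: install it once and any unhandled exception drops into
an interactive debugger instead of just printing a traceback. The
installation line is not part of this hunk, so the wiring below is an
assumed sketch (with a plain-pdb fallback), not the script's verbatim code:

    import sys

    def post_mortem_hook(etype, value, tb):  # hypothetical name
        # print the traceback, then drop into pdb at the failing frame
        import traceback
        import pdb
        traceback.print_exception(etype, value, tb)
        pdb.post_mortem(tb)

    # assumed wiring, typically guarded by a --pdb style option
    sys.excepthook = post_mortem_hook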
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/wiki-copy.py
----------------------------------------------------------------------
diff --git a/scripts/wiki-copy.py b/scripts/wiki-copy.py
index 35e0077..0692852 100644
--- a/scripts/wiki-copy.py
+++ b/scripts/wiki-copy.py
@@ -35,7 +35,8 @@ def main():
                   help='URL of wiki API to copy from like http://fromserver.com/rest/p/test/wiki/')
     op.add_option('-t', '--to-wiki', action='store', dest='to_wiki',
                   help='URL of wiki API to copy to like http://toserver.com/rest/p/test/wiki/')
-    op.add_option('-D', '--debug', action='store_true', dest='debug', default=False)
+    op.add_option('-D', '--debug', action='store_true',
+                  dest='debug', default=False)
     (options, args) = op.parse_args(sys.argv[1:])
 
     base_url = options.to_wiki.split('/rest/')[0]
@@ -44,15 +45,16 @@ def main():
     wiki_data = urllib.urlopen(options.from_wiki).read()
     wiki_json = json.loads(wiki_data)['pages']
     for p in wiki_json:
-        from_url = options.from_wiki+urllib.quote(p)
-        to_url = options.to_wiki+urllib.quote(p)
+        from_url = options.from_wiki + urllib.quote(p)
+        to_url = options.to_wiki + urllib.quote(p)
         try:
             page_data = urllib.urlopen(from_url).read()
             page_json = json.loads(page_data)
             if options.debug:
                 print page_json['text']
                 break
-            resp = oauth_client.request(to_url, 'POST', body=urllib.urlencode(dict(text=page_json['text'].encode('utf-8'))))
+            resp = oauth_client.request(
+                to_url, 'POST', body=urllib.urlencode(dict(text=page_json['text'].encode('utf-8'))))
             if resp[0]['status'] == '200':
                 print "Posted {0} to {1}".format(page_json['title'], to_url)
             else:
@@ -71,11 +73,13 @@ def make_oauth_client(base_url):
     cp = ConfigParser()
     cp.read(config_file)
 
-    REQUEST_TOKEN_URL = base_url+'/rest/oauth/request_token'
-    AUTHORIZE_URL = base_url+'/rest/oauth/authorize'
-    ACCESS_TOKEN_URL = base_url+'/rest/oauth/access_token'
-    oauth_key = option(cp, base_url, 'oauth_key', 'Forge API OAuth Key (%s/auth/oauth/): ' % base_url)
-    oauth_secret = option(cp, base_url, 'oauth_secret', 'Forge API Oauth Secret: ')
+    REQUEST_TOKEN_URL = base_url + '/rest/oauth/request_token'
+    AUTHORIZE_URL = base_url + '/rest/oauth/authorize'
+    ACCESS_TOKEN_URL = base_url + '/rest/oauth/access_token'
+    oauth_key = option(cp, base_url, 'oauth_key',
+                       'Forge API OAuth Key (%s/auth/oauth/): ' % base_url)
+    oauth_secret = option(cp, base_url, 'oauth_secret',
+                          'Forge API Oauth Secret: ')
     consumer = oauth.Consumer(oauth_key, oauth_secret)
 
     try:
@@ -87,7 +91,8 @@ def make_oauth_client(base_url):
         assert resp['status'] == '200', resp
 
         request_token = dict(urlparse.parse_qsl(content))
-        pin_url = "%s?oauth_token=%s" % (AUTHORIZE_URL, request_token['oauth_token'])
+        pin_url = "%s?oauth_token=%s" % (
+            AUTHORIZE_URL, request_token['oauth_token'])
         if getattr(webbrowser.get(), 'name', '') == 'links':
             # sandboxes
             print("Go to %s" % pin_url)
@@ -95,7 +100,8 @@ def make_oauth_client(base_url):
             webbrowser.open(pin_url)
         oauth_verifier = raw_input('What is the PIN? ')
 
-        token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
+        token = oauth.Token(
+            request_token['oauth_token'], request_token['oauth_token_secret'])
         token.set_verifier(oauth_verifier)
         client = oauth.Client(consumer, token)
         resp, content = client.request(ACCESS_TOKEN_URL, "GET")

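make_oauth_client() above walks the standard three-legged OAuth 1.0 flow
with the oauth2 library: fetch a request token, send the user to the
authorize URL for a PIN, then trade the verified token for an access token.
Once that access token has been saved alongside the consumer key in the
script's config file, a later run could rebuild a signed client without
repeating the PIN step; a minimal sketch under that assumption (the helper
name is hypothetical):

    import oauth2 as oauth

    def client_from_saved_token(oauth_key, oauth_secret,
                                access_key, access_secret):
        # hypothetical helper: rebuild a signed client from stored
        # credentials, skipping the request-token/PIN exchange
        consumer = oauth.Consumer(oauth_key, oauth_secret)
        token = oauth.Token(access_key, access_secret)
        return oauth.Client(consumer, token)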
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/wiki-export.py
----------------------------------------------------------------------
diff --git a/scripts/wiki-export.py b/scripts/wiki-export.py
index 55baa04..e096949 100755
--- a/scripts/wiki-export.py
+++ b/scripts/wiki-export.py
@@ -55,4 +55,4 @@ if __name__ == '__main__':
     if options.out_filename:
         out = open(options.out_filename, 'w')
 
-    exporter.export(out)
\ No newline at end of file
+    exporter.export(out)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/wiki-post.py
----------------------------------------------------------------------
diff --git a/scripts/wiki-post.py b/scripts/wiki-post.py
index c51af33..07369f5 100755
--- a/scripts/wiki-post.py
+++ b/scripts/wiki-post.py
@@ -19,7 +19,8 @@
 
 
 from sys import stdin, stdout
-import hmac, hashlib
+import hmac
+import hashlib
 from datetime import datetime
 import os
 import urllib
@@ -29,6 +30,7 @@ import urllib
 from optparse import OptionParser
 from ConfigParser import ConfigParser
 
+
 def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
     """
     Returns a bytestring version of 's', encoded as specified in 'encoding'.
@@ -48,7 +50,7 @@ def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
                 # know how to print itself properly. We shouldn't raise a
                 # further exception.
                 return ' '.join([smart_str(arg, encoding, strings_only,
-                        errors) for arg in s])
+                                           errors) for arg in s])
             return unicode(s).encode(encoding, errors)
     elif isinstance(s, unicode):
         r = s.encode(encoding, errors)
@@ -58,10 +60,12 @@ def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
     else:
         return s
 
+
 def generate_smart_str(params):
     for (key, value) in params:
         yield smart_str(key), smart_str(value)
 
+
 def urlencode(params):
     """
     A version of Python's urllib.urlencode() function that can operate on
@@ -87,6 +91,7 @@ class Signer(object):
         params.append(('api_signature', digest))
         return params
 
+
 def main():
     usage = 'usage: %prog [options] [PageName [file]]'
     op = OptionParser(usage=usage)
@@ -112,7 +117,8 @@ def main():
         markdown = f.read()
 
     config = ConfigParser()
-    config.read([str(os.path.expanduser('~/.forge-api.ini')), str(options.config)])
+    config.read(
+        [str(os.path.expanduser('~/.forge-api.ini')), str(options.config)])
 
     api_key = None
     secret_key = None
@@ -126,13 +132,13 @@ def main():
     print url
 
     sign = Signer(secret_key, api_key)
-    params = [('text', markdown)] if method=='PUT' else []
+    params = [('text', markdown)] if method == 'PUT' else []
     params = sign(urlparse(url).path, params)
     try:
-        if method=='PUT':
+        if method == 'PUT':
             result = urlopen(url, urlencode(params))
         else:
-            result = urlopen(url+'?'+urlencode(params))
+            result = urlopen(url + '?' + urlencode(params))
         stdout.write(result.read())
     except HTTPError, e:
         stdout.write(e.read())

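The smart_str()/urlencode() pair reflowed above exists because Python 2's
stdlib urllib.urlencode() calls str() on each value, so it raises
UnicodeEncodeError on non-ASCII unicode text; encoding values to UTF-8
bytestrings first sidesteps that. A quick illustration with a hypothetical
value:

    # Python 2 failure mode the script's custom urlencode() avoids
    import urllib

    params = [('text', u'caf\xe9 notes')]
    # urllib.urlencode(params)  # would raise UnicodeEncodeError
    safe = [(k, v.encode('utf-8')) for k, v in params]
    print urllib.urlencode(safe)  # text=caf%C3%A9+notes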

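Signer, whose tail appears in the wiki-post.py hunks above, signs each
request with the project API keys: main() builds it from secret_key and
api_key, calls it with the request path and parameters, and the class
appends the resulting digest as api_signature. Only that final append is
visible in the diff, so the message layout below is an illustrative
assumption, not the verbatim implementation:

    import hmac
    import hashlib
    import urllib
    from datetime import datetime

    def sign_params(secret_key, api_key, path, params):
        # assumed layout: HMAC-SHA256 over path + sorted query string
        params = params + [('api_key', api_key),
                           ('api_timestamp', datetime.utcnow().isoformat())]
        message = path + '?' + urllib.urlencode(sorted(params))
        digest = hmac.new(secret_key, message, hashlib.sha256).hexdigest()
        params.append(('api_signature', digest))
        return params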