Posted to commits@allura.apache.org by jo...@apache.org on 2014/01/10 22:22:57 UTC

[01/36] PEP8 cleanup

Updated Branches:
  refs/heads/cj/6484 f412cebcf -> 6845ccde9 (forced update)
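
The hunks below apply the same few fixes throughout: continuation lines are re-aligned with the opening delimiter, two blank lines are inserted between top-level definitions, spaces are added after commas and around `=`/`==` where required, and single-line compound statements are split. A minimal before/after sketch of the continuation-line alignment (the logger and variable names here are illustrative only, not taken from the patch):

    import logging
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)

    preamble, count = 'Would remove', 3  # sample values

    # before the cleanup: continuation line under-indented for the
    # visual indent (pycodestyle E128)
    log.info('%s %s tickets' % (preamble,
            count))

    # after the cleanup: continuation aligned with the opening parenthesis
    log.info('%s %s tickets' % (preamble,
                                count))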


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/scrub-allura-data.py
----------------------------------------------------------------------
diff --git a/scripts/scrub-allura-data.py b/scripts/scrub-allura-data.py
index 3b0918b..3864c42 100644
--- a/scripts/scrub-allura-data.py
+++ b/scripts/scrub-allura-data.py
@@ -37,7 +37,7 @@ def public(obj, project=None):
     role_anon = M.ProjectRole.by_name(name='*anonymous', project=project)
     if not role_anon:
         log.info('Missing *anonymous role for project "%s"' %
-                project.shortname)
+                 project.shortname)
         return False
     read = M.ACE.allow(role_anon._id, 'read')
     return read in obj.acl
@@ -72,7 +72,7 @@ def scrub_project(p, options):
             if ac.options.get('TicketMonitoringEmail'):
                 log.info('%s options.TicketMonitoringEmail from the %s/%s '
                          'tool on project "%s"' % (preamble, tool_name,
-                             mount_point, p.shortname))
+                                                   mount_point, p.shortname))
                 if not options.dry_run:
                     ac.options['TicketMonitoringEmail'] = None
             for tickets in utils.chunked_find(TM.Ticket, q):
@@ -86,7 +86,7 @@ def scrub_project(p, options):
             if counter > 0:
                 log.info('%s %s tickets from the %s/%s tool on '
                          'project "%s"' % (preamble, counter, tool_name,
-                             mount_point, p.shortname))
+                                           mount_point, p.shortname))
         elif tool_name == 'discussion':
             for forums in utils.chunked_find(DM.Forum, q):
                 for f in forums:
@@ -96,7 +96,7 @@ def scrub_project(p, options):
             if counter > 0:
                 log.info('%s %s forums from the %s/%s tool on '
                          'project "%s"' % (preamble, counter, tool_name,
-                             mount_point, p.shortname))
+                                           mount_point, p.shortname))
 
 
 def main(options):
@@ -121,25 +121,25 @@ def main(options):
             (preamble, M.User.query.find().count()))
     log.info('%s monitoring_email addresses from %s Forum documents' %
             (preamble, DM.Forum.query.find({"monitoring_email":
-                    {"$nin": [None, ""]}}).count()))
+                                            {"$nin": [None, ""]}}).count()))
 
     if not options.dry_run:
         M.EmailAddress.query.remove()
         M.User.query.update({}, {"$set": {"email_addresses": []}}, multi=True)
         DM.Forum.query.update({"monitoring_email": {"$nin": [None, ""]}},
-                {"$set": {"monitoring_email": None}}, multi=True)
+                              {"$set": {"monitoring_email": None}}, multi=True)
     return 0
 
 
 def parse_options():
     import argparse
     parser = argparse.ArgumentParser(
-            description='Removes private data from the Allura MongoDB.')
+        description='Removes private data from the Allura MongoDB.')
     parser.add_argument('--dry-run', dest='dry_run', default=False,
-            action='store_true',
-            help='Run in test mode (no updates will be applied).')
+                        action='store_true',
+                        help='Run in test mode (no updates will be applied).')
     parser.add_argument('--log', dest='log_level', default='INFO',
-            help='Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).')
+                        help='Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).')
     return parser.parse_args()
 
 if __name__ == '__main__':

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/setup-scm-server.py
----------------------------------------------------------------------
diff --git a/scripts/setup-scm-server.py b/scripts/setup-scm-server.py
index 1daa2a0..050122c 100644
--- a/scripts/setup-scm-server.py
+++ b/scripts/setup-scm-server.py
@@ -22,6 +22,7 @@ from ConfigParser import ConfigParser, NoOptionError
 
 config = ConfigParser()
 
+
 def main():
     config.read('.setup-scm-cache')
     if not config.has_section('scm'):
@@ -45,15 +46,18 @@ def get_value(key, default):
         value = config.get('scm', key)
     except NoOptionError:
         value = raw_input('%s? [%s]' % key, default)
-        if not value: value = default
+        if not value:
+            value = default
         config.set('scm', key, value)
     return value
 
+
 def run(command):
     rc = os.system(command)
     assert rc == 0
     return rc
 
+
 def add_ldif(template, **values):
     fd, name = mkstemp()
     os.write(fd, template.substitute(values))
@@ -61,7 +65,7 @@ def add_ldif(template, **values):
     run('ldapadd -Y EXTERNAL -H ldapi:/// -f %s' % name)
     os.remove(name)
 
-backend_ldif=string.Template('''
+backend_ldif = string.Template('''
 # Load dynamic backend modules
 dn: cn=module,cn=config
 objectClass: olcModuleList
@@ -92,7 +96,7 @@ olcAccess: to * by dn="cn=admin,$domain" write by * read
 
 ''')
 
-frontend_ldif=string.Template('''
+frontend_ldif = string.Template('''
 # Create top-level object in domain
 dn: $domain
 objectClass: top

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/teamforge-import.py
----------------------------------------------------------------------
diff --git a/scripts/teamforge-import.py b/scripts/teamforge-import.py
index f86b063..9f601a0 100644
--- a/scripts/teamforge-import.py
+++ b/scripts/teamforge-import.py
@@ -53,20 +53,21 @@ http://www.open.collab.net/nonav/community/cif/csfe/50/javadoc/index.html?com/co
 '''
 
 options = None
-s = None # security token
-client = None # main api client
+s = None  # security token
+client = None  # main api client
 users = {}
 
 cj = CookieJar()
 loggedInOpener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
 
+
 def make_client(api_url, app):
     return Client(api_url + app + '?wsdl', location=api_url + app)
 
 
 def main():
     global options, s, client, users
-    defaults=dict(
+    defaults = dict(
         api_url=None,
         attachment_url='/sf/%s/do/%s/',
         default_wiki_text='PRODUCT NAME HERE',
@@ -86,7 +87,7 @@ def main():
         config = ConfigParser()
         config.read(options.config_file)
         defaults.update(
-            (k, eval(v)) for k,v in config.items('teamforge-import'))
+            (k, eval(v)) for k, v in config.items('teamforge-import'))
         optparser = get_parser(defaults)
         options, project_ids = optparser.parse_args()
 
@@ -99,12 +100,16 @@ def main():
         client = make_client(options.api_url, 'CollabNet')
         api_v = client.service.getApiVersion()
         if not api_v.startswith('5.4.'):
-            log.warning('Unexpected API Version %s.  May not work correctly.' % api_v)
+            log.warning('Unexpected API Version %s.  May not work correctly.' %
+                        api_v)
 
-        s = client.service.login(options.username, options.password or getpass('Password: '))
+        s = client.service.login(
+            options.username, options.password or getpass('Password: '))
         teamforge_v = client.service.getVersion(s)
         if not teamforge_v.startswith('5.4.'):
-            log.warning('Unexpected TeamForge Version %s.  May not work correctly.' % teamforge_v)
+            log.warning(
+                'Unexpected TeamForge Version %s.  May not work correctly.' %
+                teamforge_v)
 
     if options.load:
         if not options.neighborhood:
@@ -114,7 +119,8 @@ def main():
             nbhd = M.Neighborhood.query.get(name=options.neighborhood)
         except:
             log.exception('error querying mongo')
-            log.error('This should be run as "paster script production.ini ../scripts/teamforge-import.py -- ...options.."')
+            log.error(
+                'This should be run as "paster script production.ini ../scripts/teamforge-import.py -- ...options.."')
             return
         assert nbhd
 
@@ -135,7 +141,8 @@ def main():
         if options.extract:
             try:
                 project = client.service.getProjectData(s, pid)
-                log.info('Project: %s %s %s' % (project.id, project.title, project.path))
+                log.info('Project: %s %s %s' %
+                         (project.id, project.title, project.path))
                 out_dir = os.path.join(options.output_dir, project.id)
                 if not os.path.exists(out_dir):
                     os.mkdir(out_dir)
@@ -159,13 +166,15 @@ def main():
             except:
                 log.exception('Error creating %s' % pid)
 
+
 def load_users():
     ''' load the users data from file, if it hasn't been already '''
     global users
     user_filename = os.path.join(options.output_dir, 'users.json')
     if not users and os.path.exists(user_filename):
         with open(user_filename) as user_file:
-            users = json.load(user_file, object_hook=Object) # Object for attribute access
+            # Object for attribute access
+            users = json.load(user_file, object_hook=Object)
 
 
 def save_user(usernames):
@@ -179,7 +188,8 @@ def save_user(usernames):
             user_data = client.service.getUserData(s, username)
             users[username] = Object(user_data)
             if users[username].status != 'Active':
-                log.warn('user: %s status: %s' % (username, users[username].status))
+                log.warn('user: %s status: %s' %
+                         (username, users[username].status))
 
 
 def get_project(project):
@@ -187,7 +197,7 @@ def get_project(project):
     cats = make_client(options.api_url, 'CategorizationApp')
 
     data = client.service.getProjectData(s, project.id)
-    access_level = { 1: 'public', 4: 'private', 3: 'gated community'}[
+    access_level = {1: 'public', 4: 'private', 3: 'gated community'}[
         client.service.getProjectAccessLevel(s, project.id)
     ]
     admins = client.service.listProjectAdmins(s, project.id).dataRows
@@ -195,14 +205,14 @@ def get_project(project):
     groups = client.service.getProjectGroupList(s, project.id).dataRows
     categories = cats.service.getProjectCategories(s, project.id).dataRows
     save(json.dumps(dict(
-            data = dict(data),
-            access_level = access_level,
-            admins = map(dict, admins),
-            members = map(dict, members),
-            groups = map(dict, groups),
-            categories = map(dict, categories),
-        ), default=str),
-        project, project.id+'.json')
+        data=dict(data),
+        access_level=access_level,
+        admins=map(dict, admins),
+        members=map(dict, members),
+        groups=map(dict, groups),
+        categories=map(dict, categories),
+    ), default=str),
+        project, project.id + '.json')
 
     if len(groups):
         log.warn('Project has groups %s' % groups)
@@ -216,6 +226,7 @@ def get_project(project):
     save_user(u.userName for u in admins)
     save_user(u.userName for u in members)
 
+
 def get_user(orig_username):
     'returns an allura User object'
     sf_username = make_valid_sf_username(orig_username)
@@ -239,7 +250,7 @@ def get_user(orig_username):
                    email=user.email.lower().encode('utf-8'),
                    realname=user.fullName.encode('utf-8'),
                    status='A' if user.status == 'Active' else 'D',
-                   language=275, # english trove id
+                   language=275,  # english trove id
                    timezone=user.timeZone,
                    user_pw=''.join(random.sample(string.printable, 32)),
                    unix_pw=''.join(random.sample(string.printable, 32)),
@@ -247,11 +258,15 @@ def get_user(orig_username):
                    mail_siteupdates=0,
                    add_date=int(time.time()),
                    )
-        user_id = sqlalchemy.select([T.users.c.user_id], T.users.c.user_name==sf_username).execute().fetchone().user_id
+        user_id = sqlalchemy.select(
+            [T.users.c.user_id], T.users.c.user_name == sf_username).execute().fetchone().user_id
         npref = T.user_preferences.insert()
-        npref.execute(user_id=user_id, preference_name='country', preference_value='US')
-        npref.execute(user_id=user_id, preference_name='opt_research', preference_value=0)
-        npref.execute(user_id=user_id, preference_name='opt_thirdparty', preference_value=0)
+        npref.execute(user_id=user_id, preference_name='country',
+                      preference_value='US')
+        npref.execute(user_id=user_id,
+                      preference_name='opt_research', preference_value=0)
+        npref.execute(user_id=user_id,
+                      preference_name='opt_thirdparty', preference_value=0)
 
         new_audit = T.audit_trail_user.insert()
         new_audit.execute(
@@ -267,10 +282,11 @@ def get_user(orig_username):
     assert u
     return u
 
+
 def convert_project_shortname(teamforge_path):
     'convert from TeamForge to SF, and validate early'
     tf_shortname = teamforge_path.split('.')[-1]
-    sf_shortname = tf_shortname.replace('_','-')
+    sf_shortname = tf_shortname.replace('_', '-')
 
     # FIXME hardcoded translations
     sf_shortname = {
@@ -281,46 +297,51 @@ def convert_project_shortname(teamforge_path):
     }.get(sf_shortname, sf_shortname)
 
     if not 3 <= len(sf_shortname) <= 15:
-        raise ValueError('Project name length must be between 3 & 15, inclusive: %s (%s)' %
-                         (sf_shortname, len(sf_shortname)))
+        raise ValueError(
+            'Project name length must be between 3 & 15, inclusive: %s (%s)' %
+            (sf_shortname, len(sf_shortname)))
     return sf_shortname
 
 
 # FIXME hardcoded
 skip_perms_usernames = set([
-    'faisal_saeed','dsarkisian','debonairamit','nishanthiremath','Bhuvnesh','bluetooth','cnkurzke','makow2','jannes1','Joel_Hegberg','Farroc','brian_chen','eirikur',
-    'dmitry_flyorov','bipingm','MornayJo','ibv','b_weisshaar','k9srb','johnmmills','a_gomolitsky','filim','kapoor','ljzegers','jrukes','dwilson9','jlin','quickie',
-    'johnbell','nnikolenko','Gaetan','Giannetta','Katia','jackhan','jacobwangus','adwankar','dinobrusco','qbarnes','ilmojung','clifford_chan','nbaig','fhutchi1',
-    'rinofarina','baiyanbin','muralidhar','duanyiruo','bredding','mkolkey','manvith','nanduk','engyihan','deepsie','dabon','dino_jiang','mattrose','peter_j_wilhelm',
-    'emx2500','jmcguire','lfilimowski','guruppandit','abhilashisme','edwinhm','rabbi','ferrans','guna','kevin_robinson','adathiruthi','kochen','onehap','kalanithi',
-    'jamesn','obu001','chetanv','Avinash','HugoBoss','Han_Wei','mhooper','g16872','mfcarignano','jim_burke','kevin','arunkarra','adam_feng','pavan_scm','kostya_katz',
-    'ppazderka','eileenzhuang','pyammine','judyho','ashoykh','rdemento','ibrahim','min_wang','arvind_setlur','moorthy_karthik','daniel_nelson','dms','esnmurthy',
-    'rasa_bonyadlou','prashantjoshi','edkeating','billsaez','cambalindo','jims','bozkoyun','andry_deltsov','bpowers','manuel_milli','maryparsons','spriporov','yutianli',
-    'xiebin','tnemeth1','udayaps','zzzzuser','timberger','sbarve1','zarman','rwallace67','thangavelu_arum','yuhuaixie','tingup','sekchai','sasanplus','rupal','sebastien_hertz',
-    'sab8123','rony_lim','slava_kirillin','smwest','wendydu_yq','sco002','RonFred','spatnala','vd','Sunny','tthompson','sunijams','slaw','rodovich','zhangqingqi82','venki',
-    'yuntaom','xiaojin','walterciocosta','straus','Thomas','stupka','wangyu','yaowang','wisekb','tyler_louie','smartgarfield','shekar_mahalingam',
-    'venkata_akella','v_yellapragada','vavasthi','rpatel','zhengfang','sweetybala','vap','sergey','ymhuang','spatel78745'
+    'faisal_saeed', 'dsarkisian', 'debonairamit', 'nishanthiremath', 'Bhuvnesh', 'bluetooth', 'cnkurzke', 'makow2', 'jannes1', 'Joel_Hegberg', 'Farroc', 'brian_chen', 'eirikur',
+    'dmitry_flyorov', 'bipingm', 'MornayJo', 'ibv', 'b_weisshaar', 'k9srb', 'johnmmills', 'a_gomolitsky', 'filim', 'kapoor', 'ljzegers', 'jrukes', 'dwilson9', 'jlin', 'quickie',
+    'johnbell', 'nnikolenko', 'Gaetan', 'Giannetta', 'Katia', 'jackhan', 'jacobwangus', 'adwankar', 'dinobrusco', 'qbarnes', 'ilmojung', 'clifford_chan', 'nbaig', 'fhutchi1',
+    'rinofarina', 'baiyanbin', 'muralidhar', 'duanyiruo', 'bredding', 'mkolkey', 'manvith', 'nanduk', 'engyihan', 'deepsie', 'dabon', 'dino_jiang', 'mattrose', 'peter_j_wilhelm',
+    'emx2500', 'jmcguire', 'lfilimowski', 'guruppandit', 'abhilashisme', 'edwinhm', 'rabbi', 'ferrans', 'guna', 'kevin_robinson', 'adathiruthi', 'kochen', 'onehap', 'kalanithi',
+    'jamesn', 'obu001', 'chetanv', 'Avinash', 'HugoBoss', 'Han_Wei', 'mhooper', 'g16872', 'mfcarignano', 'jim_burke', 'kevin', 'arunkarra', 'adam_feng', 'pavan_scm', 'kostya_katz',
+    'ppazderka', 'eileenzhuang', 'pyammine', 'judyho', 'ashoykh', 'rdemento', 'ibrahim', 'min_wang', 'arvind_setlur', 'moorthy_karthik', 'daniel_nelson', 'dms', 'esnmurthy',
+    'rasa_bonyadlou', 'prashantjoshi', 'edkeating', 'billsaez', 'cambalindo', 'jims', 'bozkoyun', 'andry_deltsov', 'bpowers', 'manuel_milli', 'maryparsons', 'spriporov', 'yutianli',
+    'xiebin', 'tnemeth1', 'udayaps', 'zzzzuser', 'timberger', 'sbarve1', 'zarman', 'rwallace67', 'thangavelu_arum', 'yuhuaixie', 'tingup', 'sekchai', 'sasanplus', 'rupal', 'sebastien_hertz',
+    'sab8123', 'rony_lim', 'slava_kirillin', 'smwest', 'wendydu_yq', 'sco002', 'RonFred', 'spatnala', 'vd', 'Sunny', 'tthompson', 'sunijams', 'slaw', 'rodovich', 'zhangqingqi82', 'venki',
+    'yuntaom', 'xiaojin', 'walterciocosta', 'straus', 'Thomas', 'stupka', 'wangyu', 'yaowang', 'wisekb', 'tyler_louie', 'smartgarfield', 'shekar_mahalingam',
+    'venkata_akella', 'v_yellapragada', 'vavasthi', 'rpatel', 'zhengfang', 'sweetybala', 'vap', 'sergey', 'ymhuang', 'spatel78745'
 ])
 
+
 def create_project(pid, nbhd):
     M.session.artifact_orm_session._get().skip_mod_date = True
-    data = loadjson(pid, pid+'.json')
-    #pprint(data)
+    data = loadjson(pid, pid + '.json')
+    # pprint(data)
     log.info('Loading: %s %s %s' % (pid, data.data.title, data.data.path))
     shortname = convert_project_shortname(data.data.path)
 
-    project = M.Project.query.get(shortname=shortname, neighborhood_id=nbhd._id)
+    project = M.Project.query.get(
+        shortname=shortname, neighborhood_id=nbhd._id)
     if not project:
         private = (data.access_level == 'private')
         log.debug('Creating %s private=%s' % (shortname, private))
-        one_admin = [u.userName for u in data.admins if u.status == 'Active'][0]
+        one_admin = [
+            u.userName for u in data.admins if u.status == 'Active'][0]
         project = nbhd.register_project(shortname,
                                         get_user(one_admin),
                                         project_name=data.data.title,
                                         private_project=private)
     project.notifications_disabled = True
     project.short_description = data.data.description
-    project.last_updated = datetime.strptime(data.data.lastModifiedDate, '%Y-%m-%d %H:%M:%S')
+    project.last_updated = datetime.strptime(
+        data.data.lastModifiedDate, '%Y-%m-%d %H:%M:%S')
     M.main_orm_session.flush(project)
     # TODO: push last_updated to gutenberg?
     # TODO: try to set createdDate?
@@ -335,7 +356,7 @@ def create_project(pid, nbhd):
         user = get_user(admin.userName)
         c.user = user
         pr = M.ProjectRole.by_user(user, project=project, upsert=True)
-        pr.roles = [ role_admin._id ]
+        pr.roles = [role_admin._id]
         ThreadLocalORMSession.flush_all()
     role_developer = M.ProjectRole.by_name('Developer', project)
     for member in data.members:
@@ -346,19 +367,20 @@ def create_project(pid, nbhd):
             continue
         user = get_user(member.userName)
         pr = M.ProjectRole.by_user(user, project=project, upsert=True)
-        pr.roles = [ role_developer._id ]
+        pr.roles = [role_developer._id]
         ThreadLocalORMSession.flush_all()
-    project.labels = [cat.path.split('projects/categorization.root.')[1] for cat in data.categories]
+    project.labels = [cat.path.split('projects/categorization.root.')[1]
+                      for cat in data.categories]
     icon_file = 'emsignia-MOBILITY-red.png'
     if 'nsn' in project.labels or 'msi' in project.labels:
         icon_file = 'emsignia-SOLUTIONS-blue.gif'
     if project.icon:
         M.ProjectFile.remove(dict(project_id=project._id, category='icon'))
-    with open(os.path.join('..','scripts',icon_file)) as fp:
+    with open(os.path.join('..', 'scripts', icon_file)) as fp:
         M.ProjectFile.save_image(
             icon_file, fp, content_type=utils.guess_mime_type(icon_file),
-            square=True, thumbnail_size=(48,48),
-            thumbnail_meta=dict(project_id=project._id,category='icon'))
+            square=True, thumbnail_size=(48, 48),
+            thumbnail_meta=dict(project_id=project._id, category='icon'))
     ThreadLocalORMSession.flush_all()
 
     dirs = os.listdir(os.path.join(options.output_dir, pid))
@@ -378,11 +400,14 @@ def create_project(pid, nbhd):
     ThreadLocalORMSession.flush_all()
     return project
 
+
 def import_wiki(project, pid, nbhd):
     from forgewiki import model as WM
+
     def upload_attachments(page, pid, beginning):
         dirpath = os.path.join(options.output_dir, pid, 'wiki', beginning)
-        if not os.path.exists(dirpath): return
+        if not os.path.exists(dirpath):
+            return
         files = os.listdir(dirpath)
         for f in files:
             with open(os.path.join(options.output_dir, pid, 'wiki', beginning, f)) as fp:
@@ -437,11 +462,13 @@ def import_wiki(project, pid, nbhd):
                 page_data = loadjson(pid, 'wiki', page)
                 content = load(pid, 'wiki', markdown_file)
                 if page == 'HomePage.json':
-                    globals = WM.Globals.query.get(app_config_id=wiki_app.config._id)
+                    globals = WM.Globals.query.get(
+                        app_config_id=wiki_app.config._id)
                     if globals is not None:
                         globals.root = page_data.title
                     else:
-                        globals = WM.Globals(app_config_id=wiki_app.config._id, root=page_data.title)
+                        globals = WM.Globals(
+                            app_config_id=wiki_app.config._id, root=page_data.title)
                 p = WM.Page.upsert(page_data.title)
                 p.viewable_by = ['all']
                 p.text = wiki2markdown(content)
@@ -451,6 +478,7 @@ def import_wiki(project, pid, nbhd):
                     p.commit()
     ThreadLocalORMSession.flush_all()
 
+
 def import_discussion(project, pid, frs_mapping, sf_project_shortname, nbhd):
     from forgediscussion import model as DM
     discuss_app = project.app_instance('discussion')
@@ -471,21 +499,25 @@ def import_discussion(project, pid, frs_mapping, sf_project_shortname, nbhd):
         M.ACE.allow(role_admin, 'configure'),
         M.ACE.allow(role_admin, 'admin')]
     ThreadLocalORMSession.flush_all()
-    DM.Forum.query.remove(dict(app_config_id=discuss_app.config._id,shortname='general'))
+    DM.Forum.query.remove(
+        dict(app_config_id=discuss_app.config._id, shortname='general'))
     forums = os.listdir(os.path.join(options.output_dir, pid, 'forum'))
     for forum in forums:
         ending = forum[-5:]
         forum_name = forum[:-5]
         if '.json' == ending and forum_name in forums:
             forum_data = loadjson(pid, 'forum', forum)
-            fo = DM.Forum.query.get(shortname=forum_name, app_config_id=discuss_app.config._id)
+            fo = DM.Forum.query.get(
+                shortname=forum_name, app_config_id=discuss_app.config._id)
             if not fo:
-                fo = DM.Forum(app_config_id=discuss_app.config._id, shortname=forum_name)
+                fo = DM.Forum(app_config_id=discuss_app.config._id,
+                              shortname=forum_name)
             fo.name = forum_data.title
             fo.description = forum_data.description
             fo_num_topics = 0
             fo_num_posts = 0
-            topics = os.listdir(os.path.join(options.output_dir, pid, 'forum', forum_name))
+            topics = os.listdir(os.path.join(options.output_dir, pid, 'forum',
+                                forum_name))
             for topic in topics:
                 ending = topic[-5:]
                 topic_name = topic[:-5]
@@ -508,37 +540,47 @@ def import_discussion(project, pid, frs_mapping, sf_project_shortname, nbhd):
                             discussion_id=fo._id,
                             import_id=topic_data.id,
                             app_config_id=discuss_app.config._id)
-                    to.import_id=topic_data.id
+                    to.import_id = topic_data.id
                     to_num_replies = 0
                     oldest_post = None
                     newest_post = None
-                    posts = sorted(os.listdir(os.path.join(options.output_dir, pid, 'forum', forum_name, topic_name)))
+                    posts = sorted(
+                        os.listdir(os.path.join(options.output_dir, pid, 'forum', forum_name, topic_name)))
                     for post in posts:
                         ending = post[-5:]
                         post_name = post[:-5]
                         if '.json' == ending:
                             to_num_replies += 1
-                            post_data = loadjson(pid, 'forum', forum_name, topic_name, post)
+                            post_data = loadjson(pid, 'forum',
+                                                 forum_name, topic_name, post)
                             p = DM.ForumPost.query.get(
-                                _id='%s%s@import' % (post_name,str(discuss_app.config._id)),
+                                _id='%s%s@import' % (
+                                    post_name, str(discuss_app.config._id)),
                                 thread_id=to._id,
                                 discussion_id=fo._id,
                                 app_config_id=discuss_app.config._id)
 
                             if not p:
                                 p = DM.ForumPost(
-                                    _id='%s%s@import' % (post_name,str(discuss_app.config._id)),
+                                    _id='%s%s@import' % (
+                                        post_name, str(
+                                            discuss_app.config._id)),
                                     thread_id=to._id,
                                     discussion_id=fo._id,
                                     app_config_id=discuss_app.config._id)
-                            create_date = datetime.strptime(post_data.createdDate, '%Y-%m-%d %H:%M:%S')
+                            create_date = datetime.strptime(
+                                post_data.createdDate, '%Y-%m-%d %H:%M:%S')
                             p.timestamp = create_date
-                            p.author_id = str(get_user(post_data.createdByUserName)._id)
-                            p.text = convert_post_content(frs_mapping, sf_project_shortname, post_data.content, nbhd)
+                            p.author_id = str(
+                                get_user(post_data.createdByUserName)._id)
+                            p.text = convert_post_content(
+                                frs_mapping, sf_project_shortname, post_data.content, nbhd)
                             p.status = 'ok'
                             if post_data.replyToId:
-                                p.parent_id = '%s%s@import' % (post_data.replyToId,str(discuss_app.config._id))
-                            slug, full_slug = p.make_slugs(parent = p.parent, timestamp = create_date)
+                                p.parent_id = '%s%s@import' % (
+                                    post_data.replyToId, str(discuss_app.config._id))
+                            slug, full_slug = p.make_slugs(
+                                parent=p.parent, timestamp=create_date)
                             p.slug = slug
                             p.full_slug = full_slug
                             if oldest_post == None or oldest_post.timestamp > create_date:
@@ -555,6 +597,7 @@ def import_discussion(project, pid, frs_mapping, sf_project_shortname, nbhd):
             fo.num_posts = fo_num_posts
             ThreadLocalORMSession.flush_all()
 
+
 def import_news(project, pid, frs_mapping, sf_project_shortname, nbhd):
     from forgeblog import model as BM
     posts = os.listdir(os.path.join(options.output_dir, pid, 'news'))
@@ -567,7 +610,8 @@ def import_news(project, pid, frs_mapping, sf_project_shortname, nbhd):
         for post in posts:
             if '.json' == post[-5:]:
                 post_data = loadjson(pid, 'news', post)
-                create_date = datetime.strptime(post_data.createdOn, '%Y-%m-%d %H:%M:%S')
+                create_date = datetime.strptime(
+                    post_data.createdOn, '%Y-%m-%d %H:%M:%S')
                 p = BM.BlogPost.query.get(title=post_data.title,
                                           timestamp=create_date,
                                           app_config_id=news_app.config._id)
@@ -575,7 +619,8 @@ def import_news(project, pid, frs_mapping, sf_project_shortname, nbhd):
                     p = BM.BlogPost(title=post_data.title,
                                     timestamp=create_date,
                                     app_config_id=news_app.config._id)
-                p.text = convert_post_content(frs_mapping, sf_project_shortname, post_data.body, nbhd)
+                p.text = convert_post_content(
+                    frs_mapping, sf_project_shortname, post_data.body, nbhd)
                 p.mod_date = create_date
                 p.state = 'published'
                 if not p.slug:
@@ -584,15 +629,16 @@ def import_news(project, pid, frs_mapping, sf_project_shortname, nbhd):
                     p.commit()
                     ThreadLocalORMSession.flush_all()
                     M.Thread.new(discussion_id=p.app_config.discussion_id,
-                           ref_id=p.index_id(),
-                           subject='%s discussion' % p.title)
+                                 ref_id=p.index_id(),
+                                 subject='%s discussion' % p.title)
                 user = get_user(post_data.createdByUsername)
-                p.history().first().author=dict(
+                p.history().first().author = dict(
                     id=user._id,
                     username=user.username,
                     display_name=user.get_pref('display_name'))
                 ThreadLocalORMSession.flush_all()
 
+
 def check_unsupported_tools(project):
     docs = make_client(options.api_url, 'DocumentApp')
     doc_count = 0
@@ -601,21 +647,28 @@ def check_unsupported_tools(project):
             continue
         doc_count += 1
     if doc_count:
-        log.warn('Migrating documents is not supported, but found %s docs' % doc_count)
+        log.warn('Migrating documents is not supported, but found %s docs' %
+                 doc_count)
 
     scm = make_client(options.api_url, 'ScmApp')
     for repo in scm.service.getRepositoryList(s, project.id).dataRows:
-        log.warn('Migrating SCM repos is not supported, but found %s' % repo.repositoryPath)
+        log.warn('Migrating SCM repos is not supported, but found %s' %
+                 repo.repositoryPath)
 
     tasks = make_client(options.api_url, 'TaskApp')
-    task_count = len(tasks.service.getTaskList(s, project.id, filters=None).dataRows)
+    task_count = len(
+        tasks.service.getTaskList(s, project.id, filters=None).dataRows)
     if task_count:
-        log.warn('Migrating tasks is not supported, but found %s tasks' % task_count)
+        log.warn('Migrating tasks is not supported, but found %s tasks' %
+                 task_count)
 
     tracker = make_client(options.api_url, 'TrackerApp')
-    tracker_count = len(tracker.service.getArtifactList(s, project.id, filters=None).dataRows)
+    tracker_count = len(
+        tracker.service.getArtifactList(s, project.id, filters=None).dataRows)
     if tracker_count:
-        log.warn('Migrating trackers is not supported, but found %s tracker artifacts' % task_count)
+        log.warn(
+            'Migrating trackers is not supported, but found %s tracker artifacts' %
+            task_count)
 
 
 def load(project_id, *paths):
@@ -624,10 +677,12 @@ def load(project_id, *paths):
         content = input.read()
     return unicode(content, 'utf-8')
 
+
 def loadjson(*args):
     # Object for attribute access
     return json.loads(load(*args), object_hook=Object)
 
+
 def save(content, project, *paths):
     out_file = os.path.join(options.output_dir, project.id, *paths)
     if not os.path.exists(os.path.dirname(out_file)):
@@ -635,6 +690,7 @@ def save(content, project, *paths):
     with open(out_file, 'w') as out:
         out.write(content.encode('utf-8'))
 
+
 def download_file(tool, url_path, *filepaths):
     if tool == 'wiki':
         action = 'viewAttachment'
@@ -676,6 +732,8 @@ h1 = re.compile(r'^!!!', re.MULTILINE)
 h2 = re.compile(r'^!!', re.MULTILINE)
 h3 = re.compile(r'^!', re.MULTILINE)
 re_stats = re.compile(r'#+ .* [Ss]tatistics\n+(.*\[sf:.*?Statistics\].*)+')
+
+
 def wiki2markdown(markup):
     '''
     Partial implementation of http://help.collab.net/index.jsp?topic=/teamforge520/reference/wiki-wikisyntax.html
@@ -707,6 +765,8 @@ def wiki2markdown(markup):
     return markup
 
 re_rel = re.compile(r'\b(rel\d+)\b')
+
+
 def convert_post_content(frs_mapping, sf_project_shortname, text, nbhd):
     def rel_handler(matchobj):
         relno = matchobj.group(1)
@@ -728,6 +788,7 @@ def find_image_references(markup):
         if ext in ('jpg', 'gif', 'png'):
             yield snippet
 
+
 def get_news(project):
     '''
     Extracts news posts
@@ -737,9 +798,11 @@ def get_news(project):
     # find the forums
     posts = app.service.getNewsPostList(s, project.id)
     for post in posts.dataRows:
-        save(json.dumps(dict(post), default=str), project, 'news', post.id+'.json')
+        save(json.dumps(dict(post), default=str),
+             project, 'news', post.id + '.json')
         save_user(post.createdByUsername)
 
+
 def get_discussion(project):
     '''
     Extracts discussion forums and posts
@@ -751,15 +814,18 @@ def get_discussion(project):
     for forum in forums.dataRows:
         forumname = forum.path.split('.')[-1]
         log.info('Retrieving data for forum: %s' % forumname)
-        save(json.dumps(dict(forum), default=str), project, 'forum', forumname+'.json')
+        save(json.dumps(dict(forum), default=str), project, 'forum',
+             forumname + '.json')
         # topic in this forum
         topics = app.service.getTopicList(s, forum.id)
         for topic in topics.dataRows:
-            save(json.dumps(dict(topic), default=str), project, 'forum', forumname, topic.id+'.json')
+            save(json.dumps(dict(topic), default=str), project, 'forum',
+                 forumname, topic.id + '.json')
             # posts in this topic
             posts = app.service.getPostList(s, topic.id)
             for post in posts.dataRows:
-                save(json.dumps(dict(post), default=str), project, 'forum', forumname, topic.id, post.id+'.json')
+                save(json.dumps(dict(post), default=str), project, 'forum',
+                     forumname, topic.id, post.id + '.json')
                 save_user(post.createdByUserName)
 
 
@@ -774,7 +840,8 @@ def get_homepage_wiki(project):
     for wiki_page in wiki_pages.dataRows:
         wiki_page = wiki.service.getWikiPageData(s, wiki_page.id)
         pagename = wiki_page.path.split('/')[-1]
-        save(json.dumps(dict(wiki_page), default=str), project, 'wiki', pagename+'.json')
+        save(json.dumps(dict(wiki_page), default=str),
+             project, 'wiki', pagename + '.json')
         if not wiki_page.wikiText:
             log.debug('skip blank wiki page %s' % wiki_page.path)
             continue
@@ -802,20 +869,23 @@ def get_homepage_wiki(project):
                 img_url = img_ref
             else:
                 img_url = project.path + '/wiki/' + img_ref
-            download_file('wiki', img_url, project.id, 'wiki', 'homepage', filename)
+            download_file('wiki', img_url, project.id,
+                          'wiki', 'homepage', filename)
 
     for path, text in pages.iteritems():
         if options.default_wiki_text in text:
             log.debug('skipping default wiki page %s' % path)
         else:
-            save(text, project, 'wiki', path+'.markdown')
+            save(text, project, 'wiki', path + '.markdown')
             for img_ref in find_image_references(text):
                 filename = img_ref.split('/')[-1]
                 if '://' in img_ref:
                     img_url = img_ref
                 else:
                     img_url = project.path + '/wiki/' + img_ref
-                download_file('wiki', img_url, project.id, 'wiki', path, filename)
+                download_file('wiki', img_url, project.id,
+                              'wiki', path, filename)
+
 
 def _dir_sql(created_on, project, dir_name, rel_path):
     assert options.neighborhood_shortname
@@ -834,35 +904,40 @@ def _dir_sql(created_on, project, dir_name, rel_path):
     """ % (created_on, convert_project_shortname(project.path), options.neighborhood_shortname, dir_name, parent_directory)
     return sql
 
+
 def get_files(project):
     frs = make_client(options.api_url, 'FrsApp')
-    valid_pfs_filename = re.compile(r'(?![. ])[-_ +.,=#~@!()\[\]a-zA-Z0-9]+(?<! )$')
-    pfs_output_dir = os.path.join(os.path.abspath(options.output_dir), 'PFS', convert_project_shortname(project.path))
+    valid_pfs_filename = re.compile(
+        r'(?![. ])[-_ +.,=#~@!()\[\]a-zA-Z0-9]+(?<! )$')
+    pfs_output_dir = os.path.join(
+        os.path.abspath(options.output_dir), 'PFS', convert_project_shortname(project.path))
     sql_updates = ''
 
     def handle_path(obj, prev_path):
-        path_component = obj.title.strip().replace('/', ' ').replace('&','').replace(':','')
+        path_component = obj.title.strip().replace(
+            '/', ' ').replace('&', '').replace(':', '')
         path = os.path.join(prev_path, path_component)
         if not valid_pfs_filename.match(path_component):
             log.error('Invalid filename: "%s"' % path)
         save(json.dumps(dict(obj), default=str),
-            project, 'frs', path+'.json')
+             project, 'frs', path + '.json')
         return path
 
     frs_mapping = {}
 
     for pkg in frs.service.getPackageList(s, project.id).dataRows:
         pkg_path = handle_path(pkg, '')
-        pkg_details = frs.service.getPackageData(s, pkg.id) # download count
+        pkg_details = frs.service.getPackageData(s, pkg.id)  # download count
         save(json.dumps(dict(pkg_details), default=str),
-             project, 'frs', pkg_path+'_details.json')
+             project, 'frs', pkg_path + '_details.json')
 
         for rel in frs.service.getReleaseList(s, pkg.id).dataRows:
             rel_path = handle_path(rel, pkg_path)
             frs_mapping[rel['id']] = rel_path
-            rel_details = frs.service.getReleaseData(s, rel.id) # download count
+            # download count
+            rel_details = frs.service.getReleaseData(s, rel.id)
             save(json.dumps(dict(rel_details), default=str),
-                 project, 'frs', rel_path+'_details.json')
+                 project, 'frs', rel_path + '_details.json')
 
             for file in frs.service.getFrsFileList(s, rel.id).dataRows:
                 details = frs.service.getFrsFileData(s, file.id)
@@ -875,19 +950,23 @@ def get_files(project):
                                 default=str),
                      project,
                      'frs',
-                     file_path+'.json'
+                     file_path + '.json'
                      )
                 if not options.skip_frs_download:
-                    download_file('frs', rel.path + '/' + file.id, pfs_output_dir, file_path)
+                    download_file('frs', rel.path + '/' + file.id,
+                                  pfs_output_dir, file_path)
                     mtime = int(mktime(details.lastModifiedDate.timetuple()))
-                    os.utime(os.path.join(pfs_output_dir, file_path), (mtime, mtime))
+                    os.utime(os.path.join(pfs_output_dir, file_path),
+                             (mtime, mtime))
 
             # releases
             created_on = int(mktime(rel.createdOn.timetuple()))
             mtime = int(mktime(rel.lastModifiedOn.timetuple()))
             if os.path.exists(os.path.join(pfs_output_dir, rel_path)):
-                os.utime(os.path.join(pfs_output_dir, rel_path), (mtime, mtime))
-            sql_updates += _dir_sql(created_on, project, rel.title.strip(), pkg_path)
+                os.utime(os.path.join(pfs_output_dir, rel_path),
+                         (mtime, mtime))
+            sql_updates += _dir_sql(created_on, project,
+                                    rel.title.strip(), pkg_path)
         # packages
         created_on = int(mktime(pkg.createdOn.timetuple()))
         mtime = int(mktime(pkg.lastModifiedOn.timetuple()))
@@ -922,10 +1001,10 @@ def get_parser(defaults):
     optparser.add_option(
         '--api-url', dest='api_url', help='e.g. https://hostname/ce-soap50/services/')
     optparser.add_option(
-            '--attachment-url', dest='attachment_url')
+        '--attachment-url', dest='attachment_url')
     optparser.add_option(
-            '--default-wiki-text', dest='default_wiki_text',
-            help='used in determining if a wiki page text is default or changed')
+        '--default-wiki-text', dest='default_wiki_text',
+        help='used in determining if a wiki page text is default or changed')
     optparser.add_option(
         '-u', '--username', dest='username')
     optparser.add_option(
@@ -954,24 +1033,26 @@ def get_parser(defaults):
     return optparser
 
 re_username = re.compile(r"^[a-z\-0-9]+$")
+
+
 def make_valid_sf_username(orig_username):
-    sf_username = orig_username.replace('_','-').lower()
+    sf_username = orig_username.replace('_', '-').lower()
 
     # FIXME username translation is hardcoded here:
     sf_username = dict(
-        rlevy = 'ramilevy',
-        mkeisler = 'mkeisler',
-        bthale = 'bthale',
-        mmuller = 'mattjustmull',
-        MalcolmDwyer = 'slagheap',
-        tjyang = 'tjyang',
-        manaic = 'maniac76',
-        srinid = 'cnudav',
-        es = 'est016',
-        david_peyer = 'david-mmi',
-        okruse = 'ottokruse',
-        jvp = 'jvpmoto',
-        dmorelli = 'dmorelli',
+        rlevy='ramilevy',
+        mkeisler='mkeisler',
+        bthale='bthale',
+        mmuller='mattjustmull',
+        MalcolmDwyer='slagheap',
+        tjyang='tjyang',
+        manaic='maniac76',
+        srinid='cnudav',
+        es='est016',
+        david_peyer='david-mmi',
+        okruse='ottokruse',
+        jvp='jvpmoto',
+        dmorelli='dmorelli',
     ).get(sf_username, sf_username + '-mmi')
 
     if not re_username.match(sf_username):
@@ -982,7 +1063,7 @@ def make_valid_sf_username(orig_username):
                   sf_username, adjusted_username)
         sf_username = adjusted_username
     if len(sf_username) > 15:
-        adjusted_username = sf_username[0:15-4] + '-mmi'
+        adjusted_username = sf_username[0:15 - 4] + '-mmi'
         log.error('invalid sf_username length: %s   Changing it to %s',
                   sf_username, adjusted_username)
         sf_username = adjusted_username
@@ -993,19 +1074,21 @@ if __name__ == '__main__':
     log.setLevel(logging.DEBUG)
     main()
 
+
 def test_make_valid_sf_username():
     tests = {
         # basic
-        'foo':'foo-mmi',
+        'foo': 'foo-mmi',
         # lookup
-        'rlevy':'ramilevy',
+        'rlevy': 'ramilevy',
         # too long
         'u012345678901234567890': 'u0123456789-mmi',
         'foo^213': 'foo213-mmi'
-        }
-    for k,v in tests.iteritems():
+    }
+    for k, v in tests.iteritems():
         assert make_valid_sf_username(k) == v
 
+
 def test_convert_post_content():
     nbhd = Object()
     nbhd.url_prefix = '/motorola/'

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/test-branches-against-tickets.py
----------------------------------------------------------------------
diff --git a/scripts/test-branches-against-tickets.py b/scripts/test-branches-against-tickets.py
index 1905880..a742f03 100755
--- a/scripts/test-branches-against-tickets.py
+++ b/scripts/test-branches-against-tickets.py
@@ -44,12 +44,18 @@ def match_ticket_branches(target_dir=None):
 
     git('remote prune origin')
 
-    branches_for_tickets = dict() # maps ticket numbers to the actual branch e.g., int(42) -> 'origin/rc/42'
-    ticket_nums = dict() # maps ticket numbers to 'merged' or 'unmerged' according to the matching branch
-    commit_diffs = dict() # maps ticket numbers to differences in (number of) commit messages
-
-    merged_branches = [ branch[2:] for branch in git('branch -r --merged dev') if re_ticket_branch.match(branch) ]
-    unmerged_branches = [ branch[2:] for branch in git('branch -r --no-merged dev') if re_ticket_branch.match(branch) ]
+    # maps ticket numbers to the actual branch e.g., int(42) -> 'origin/rc/42'
+    branches_for_tickets = dict()
+    # maps ticket numbers to 'merged' or 'unmerged' according to the matching
+    # branch
+    ticket_nums = dict()
+    # maps ticket numbers to differences in (number of) commit messages
+    commit_diffs = dict()
+
+    merged_branches = [branch[2:]
+                       for branch in git('branch -r --merged dev') if re_ticket_branch.match(branch)]
+    unmerged_branches = [branch[2:]
+                         for branch in git('branch -r --no-merged dev') if re_ticket_branch.match(branch)]
 
     for branch in merged_branches:
         tn = int(re_ticket_branch.match(branch).group(1))
@@ -65,16 +71,18 @@ def match_ticket_branches(target_dir=None):
             ticket_nums[tn] = 'merged'
         else:
             branch_commits = git('log --oneline dev..%s' % branch)
-            # count the number of commits on dev since this branch that contain the ticket #
+            # count the number of commits on dev since this branch that contain
+            # the ticket #
             merge_base = git('merge-base', 'dev', branch)[0]
-            matching_dev_commits = git('log --oneline --grep="\[#%s\]" %s..dev' % (tn, merge_base))
+            matching_dev_commits = git(
+                'log --oneline --grep="\[#%s\]" %s..dev' % (tn, merge_base))
 
             if len(matching_dev_commits) >= len(branch_commits):
                 ticket_nums[tn] = 'merged'
             else:
                 ticket_nums[tn] = 'unmerged'
                 commit_diffs[tn] = '\t' + '\n\t'.join(['Branch has:'] + branch_commits +
-                                                 ['Dev has:'] + matching_dev_commits)
+                                                      ['Dev has:'] + matching_dev_commits)
 
     failure = False
 
@@ -82,18 +90,22 @@ def match_ticket_branches(target_dir=None):
     oauth_client = make_oauth_client()
 
     for tn in ticket_nums:
-        resp = oauth_client.request('http://sourceforge.net/rest/p/allura/tickets/%s/' % tn)
+        resp = oauth_client.request(
+            'http://sourceforge.net/rest/p/allura/tickets/%s/' % tn)
         #assert resp[0]['status'] == '200', (resp, tn)
         if resp[0]['status'] != '200':
             continue
         ticket = json.loads(resp[1])['ticket']
         if ticket is None:
             continue
-        is_closed = ticket['status'] in ('closed', 'validation', 'wont-fix', 'invalid')
+        is_closed = ticket['status'] in (
+            'closed', 'validation', 'wont-fix', 'invalid')
         is_merged = ticket_nums[tn] == 'merged'
 
         if is_closed != is_merged:
-            print('<http://sourceforge.net/p/allura/tickets/%s/> is status:"%s", but the branch "%s" is %s' % (tn, ticket['status'], branches_for_tickets[tn], ticket_nums[tn]))
+            print(
+                '<http://sourceforge.net/p/allura/tickets/%s/> is status:"%s", but the branch "%s" is %s' %
+                (tn, ticket['status'], branches_for_tickets[tn], ticket_nums[tn]))
             if tn in commit_diffs:
                 print(commit_diffs[tn])
             failure = True
@@ -117,7 +129,8 @@ def make_oauth_client():
     REQUEST_TOKEN_URL = 'http://sourceforge.net/rest/oauth/request_token'
     AUTHORIZE_URL = 'https://sourceforge.net/rest/oauth/authorize'
     ACCESS_TOKEN_URL = 'http://sourceforge.net/rest/oauth/access_token'
-    oauth_key = option('re', 'oauth_key', 'Forge API OAuth Key (https://sourceforge.net/auth/oauth/): ')
+    oauth_key = option('re', 'oauth_key',
+                       'Forge API OAuth Key (https://sourceforge.net/auth/oauth/): ')
     oauth_secret = option('re', 'oauth_secret', 'Forge API Oauth Secret: ')
     consumer = oauth.Consumer(oauth_key, oauth_secret)
 
@@ -130,7 +143,8 @@ def make_oauth_client():
         assert resp['status'] == '200', resp
 
         request_token = dict(urlparse.parse_qsl(content))
-        pin_url = "%s?oauth_token=%s" % (AUTHORIZE_URL, request_token['oauth_token'])
+        pin_url = "%s?oauth_token=%s" % (
+            AUTHORIZE_URL, request_token['oauth_token'])
         if getattr(webbrowser.get(), 'name', '') == 'links':
             # sandboxes
             print("Go to %s" % pin_url)
@@ -138,7 +152,8 @@ def make_oauth_client():
             webbrowser.open(pin_url)
         oauth_verifier = raw_input('What is the PIN? ')
 
-        token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
+        token = oauth.Token(
+            request_token['oauth_token'], request_token['oauth_token_secret'])
         token.set_verifier(oauth_verifier)
         client = oauth.Client(consumer, token)
         resp, content = client.request(ACCESS_TOKEN_URL, "GET")
@@ -154,17 +169,18 @@ def make_oauth_client():
 
 
 def git(*args, **kw):
-    if len(args)==1 and isinstance(args[0], basestring):
+    if len(args) == 1 and isinstance(args[0], basestring):
         argv = shlex.split(args[0])
     else:
         argv = list(args)
     if argv[0] != 'git':
         argv.insert(0, 'git')
-    p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    p = subprocess.Popen(argv, stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
     p.wait()
     output = p.stdout.readlines()
     if kw.get('strip_eol', True):
-        output = [ line.rstrip('\n') for line in output ]
+        output = [line.rstrip('\n') for line in output]
     return output
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/tracker-rip.py
----------------------------------------------------------------------
diff --git a/scripts/tracker-rip.py b/scripts/tracker-rip.py
index 719aaa6..aeef1eb 100755
--- a/scripts/tracker-rip.py
+++ b/scripts/tracker-rip.py
@@ -23,13 +23,13 @@ from urlparse import urljoin
 
 from allura.lib import rest_api
 
-SRC_CRED=dict(
-        api_key='c03efc6cca1cf78be9e9',
-        secret_key='575eda2f25f6490d8cfe5d02f2506c010112894d0ea10660e43157a87a7e620c61ac06397b028af1',
-        http_username=raw_input('LDAP username: '),
-        http_password=getpass.getpass('LDAP password: '))
-SRC_SERVER='https://newforge.sf.geek.net/'
-SRC_TOOL='/rest/p/forge/tickets/'
+SRC_CRED = dict(
+    api_key='c03efc6cca1cf78be9e9',
+    secret_key='575eda2f25f6490d8cfe5d02f2506c010112894d0ea10660e43157a87a7e620c61ac06397b028af1',
+    http_username=raw_input('LDAP username: '),
+    http_password=getpass.getpass('LDAP password: '))
+SRC_SERVER = 'https://newforge.sf.geek.net/'
+SRC_TOOL = '/rest/p/forge/tickets/'
 
 # Credentials for sf-overlords
 # DST_CRED=dict(
@@ -37,17 +37,17 @@ SRC_TOOL='/rest/p/forge/tickets/'
 #     secret_key='fcc48a0c31459e99a88cc42cdd7f908fad78b283ca30a86caac1ab65036ff71fc195a18e56534dc5')
 # DST_SERVER='http://sourceforge.net/'
 # DST_TOOL='/rest/p/allura/tickets/'
-DST_CRED=dict(
+DST_CRED = dict(
     api_key='aa7244645424513d9636',
     secret_key='cd1d97be98497f7b615b297aa2061177ddf6d42b95a8484193f84690486694234dbf817efc3b2d6e')
-DST_SERVER='http://localhost:8080/'
-DST_TOOL='/rest/p/test/bugs/'
+DST_SERVER = 'http://localhost:8080/'
+DST_TOOL = '/rest/p/test/bugs/'
 
-FAKE_TICKET={
+FAKE_TICKET = {
     u'created_date': u'2010-03-08 17:29:42.802000',
     u'assigned_to_id': u'',
     u'assigned_to': u'',
-    u'custom_fields': {'_component':'', '_size':0, '_priority':'', '_type':''},
+    u'custom_fields': {'_component': '', '_size': 0, '_priority': '', '_type': ''},
     u'description': u'Ticket was not present in source',
     u'milestone': u'',
     u'reported_by': u'',
@@ -57,6 +57,7 @@ FAKE_TICKET={
     u'summary': u'Placeholder ticket',
     u'super_id': u'None'}
 
+
 def main():
     src_cli = rest_api.RestClient(
         base_uri=SRC_SERVER,
@@ -81,6 +82,7 @@ def main():
             print '... migrate post %s:\n%r' % (post['slug'], post['text'])
             dst.create_post(dst_thread, post, slug_map)
 
+
 class TicketAPI(object):
 
     def __init__(self, client, path):
@@ -95,29 +97,34 @@ class TicketAPI(object):
         cur_ticket = min_ticket
         while True:
             if check and cur_ticket not in valid_tickets:
-                if cur_ticket > max_valid_ticket: break
+                if cur_ticket > max_valid_ticket:
+                    break
                 yield dict(FAKE_TICKET, ticket_num=cur_ticket)
                 cur_ticket += 1
                 continue
-            ticket = self.client.request('GET', self.ticket_path(cur_ticket))['ticket']
-            if ticket is None: break
+            ticket = self.client.request(
+                'GET', self.ticket_path(cur_ticket))['ticket']
+            if ticket is None:
+                break
             yield ticket
             cur_ticket += 1
-            if max_ticket and cur_ticket > max_ticket: break
+            if max_ticket and cur_ticket > max_ticket:
+                break
 
     def load_thread(self, ticket):
-        discussion = self.client.request('GET', self.discussion_path())['discussion']
+        discussion = self.client.request(
+            'GET', self.discussion_path())['discussion']
         for thd in discussion['threads']:
             if thd['subject'].startswith('#%d ' % ticket['ticket_num']):
                 break
         else:
             return None
         thread = self.client.request(
-            'GET',self.thread_path(thd['_id']))['thread']
+            'GET', self.thread_path(thd['_id']))['thread']
         return thread
 
     def iter_posts(self, thread):
-        for p in sorted(thread['posts'], key=lambda p:p['slug']):
+        for p in sorted(thread['posts'], key=lambda p: p['slug']):
             post = self.client.request(
                 'GET', self.post_path(thread['_id'], p['slug']))['post']
             yield post
@@ -140,7 +147,8 @@ class TicketAPI(object):
             ticket['milestone'] = ''
         if ticket['status'] not in 'open in-progress code-review validation closed'.split():
             ticket['status'] = 'open'
-        r = self.client.request('POST', self.new_ticket_path(), ticket_form=ticket)
+        r = self.client.request(
+            'POST', self.new_ticket_path(), ticket_form=ticket)
         self.client.request(
             'POST', self.ticket_path(r['ticket']['ticket_num'], 'save'),
             ticket_form=ticket)
@@ -175,17 +183,20 @@ class TicketAPI(object):
     def post_path(self, thread_id, post_slug, suffix=''):
         return '%s_discuss/thread/%s/%s/%s' % (self.path, thread_id, post_slug, suffix)
 
-def pm(etype, value, tb): # pragma no cover
-    import pdb, traceback
+
+def pm(etype, value, tb):  # pragma no cover
+    import pdb
+    import traceback
     try:
-        from IPython.ipapi import make_session; make_session()
+        from IPython.ipapi import make_session
+        make_session()
         from IPython.Debugger import Pdb
         sys.stderr.write('Entering post-mortem IPDB shell\n')
         p = Pdb(color_scheme='Linux')
         p.reset()
         p.setup(None, tb)
         p.print_stack_trace()
-        sys.stderr.write('%s: %s\n' % ( etype, value))
+        sys.stderr.write('%s: %s\n' % (etype, value))
         p.cmdloop()
         p.forget()
         # p.interaction(None, tb)
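
A note for readers skimming the hunk above: iter_tickets walks ticket numbers in
order and, when a number is missing from valid_tickets, yields a copy of
FAKE_TICKET so the destination keeps the same ticket numbering as the source.
A minimal standalone sketch of that gap-filling pattern (simplified names and a
stubbed fetch function, not the script's actual code):

    PLACEHOLDER = {'summary': 'Placeholder ticket',
                   'description': 'Ticket was not present in source'}

    def iter_with_gaps(fetch_ticket, valid_numbers, first, last):
        """Yield tickets in numeric order, substituting a placeholder
        for any number that does not exist at the source."""
        for num in range(first, last + 1):
            if num in valid_numbers:
                yield fetch_ticket(num)                    # real ticket from the API
            else:
                yield dict(PLACEHOLDER, ticket_num=num)    # keep numbering aligned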

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/wiki-copy.py
----------------------------------------------------------------------
diff --git a/scripts/wiki-copy.py b/scripts/wiki-copy.py
index 35e0077..0692852 100644
--- a/scripts/wiki-copy.py
+++ b/scripts/wiki-copy.py
@@ -35,7 +35,8 @@ def main():
                   help='URL of wiki API to copy from like http://fromserver.com/rest/p/test/wiki/')
     op.add_option('-t', '--to-wiki', action='store', dest='to_wiki',
                   help='URL of wiki API to copy to like http://toserver.com/rest/p/test/wiki/')
-    op.add_option('-D', '--debug', action='store_true', dest='debug', default=False)
+    op.add_option('-D', '--debug', action='store_true',
+                  dest='debug', default=False)
     (options, args) = op.parse_args(sys.argv[1:])
 
     base_url = options.to_wiki.split('/rest/')[0]
@@ -44,15 +45,16 @@ def main():
     wiki_data = urllib.urlopen(options.from_wiki).read()
     wiki_json = json.loads(wiki_data)['pages']
     for p in wiki_json:
-        from_url = options.from_wiki+urllib.quote(p)
-        to_url = options.to_wiki+urllib.quote(p)
+        from_url = options.from_wiki + urllib.quote(p)
+        to_url = options.to_wiki + urllib.quote(p)
         try:
             page_data = urllib.urlopen(from_url).read()
             page_json = json.loads(page_data)
             if options.debug:
                 print page_json['text']
                 break
-            resp = oauth_client.request(to_url, 'POST', body=urllib.urlencode(dict(text=page_json['text'].encode('utf-8'))))
+            resp = oauth_client.request(
+                to_url, 'POST', body=urllib.urlencode(dict(text=page_json['text'].encode('utf-8'))))
             if resp[0]['status'] == '200':
                 print "Posted {0} to {1}".format(page_json['title'], to_url)
             else:
@@ -71,11 +73,13 @@ def make_oauth_client(base_url):
     cp = ConfigParser()
     cp.read(config_file)
 
-    REQUEST_TOKEN_URL = base_url+'/rest/oauth/request_token'
-    AUTHORIZE_URL = base_url+'/rest/oauth/authorize'
-    ACCESS_TOKEN_URL = base_url+'/rest/oauth/access_token'
-    oauth_key = option(cp, base_url, 'oauth_key', 'Forge API OAuth Key (%s/auth/oauth/): ' % base_url)
-    oauth_secret = option(cp, base_url, 'oauth_secret', 'Forge API Oauth Secret: ')
+    REQUEST_TOKEN_URL = base_url + '/rest/oauth/request_token'
+    AUTHORIZE_URL = base_url + '/rest/oauth/authorize'
+    ACCESS_TOKEN_URL = base_url + '/rest/oauth/access_token'
+    oauth_key = option(cp, base_url, 'oauth_key',
+                       'Forge API OAuth Key (%s/auth/oauth/): ' % base_url)
+    oauth_secret = option(cp, base_url, 'oauth_secret',
+                          'Forge API Oauth Secret: ')
     consumer = oauth.Consumer(oauth_key, oauth_secret)
 
     try:
@@ -87,7 +91,8 @@ def make_oauth_client(base_url):
         assert resp['status'] == '200', resp
 
         request_token = dict(urlparse.parse_qsl(content))
-        pin_url = "%s?oauth_token=%s" % (AUTHORIZE_URL, request_token['oauth_token'])
+        pin_url = "%s?oauth_token=%s" % (
+            AUTHORIZE_URL, request_token['oauth_token'])
         if getattr(webbrowser.get(), 'name', '') == 'links':
             # sandboxes
             print("Go to %s" % pin_url)
@@ -95,7 +100,8 @@ def make_oauth_client(base_url):
             webbrowser.open(pin_url)
         oauth_verifier = raw_input('What is the PIN? ')
 
-        token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
+        token = oauth.Token(
+            request_token['oauth_token'], request_token['oauth_token_secret'])
         token.set_verifier(oauth_verifier)
         client = oauth.Client(consumer, token)
         resp, content = client.request(ACCESS_TOKEN_URL, "GET")
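
For context on make_oauth_client (the changes above are only re-wrapping): the
function performs the usual three-legged OAuth 1.0 dance with the oauth2
library against Allura's /rest/oauth/ endpoints. Stripped of the
~/.forge-api.ini caching and error handling, the flow is roughly:

    import urlparse
    import oauth2 as oauth

    def three_legged_client(base_url, oauth_key, oauth_secret):
        # Sketch of the flow wiki-copy.py follows; config caching omitted.
        consumer = oauth.Consumer(oauth_key, oauth_secret)
        client = oauth.Client(consumer)

        # 1. Fetch a request token.
        resp, content = client.request(
            base_url + '/rest/oauth/request_token', 'GET')
        request_token = dict(urlparse.parse_qsl(content))

        # 2. The user authorizes it in a browser and reads back a PIN.
        print 'Authorize at: %s?oauth_token=%s' % (
            base_url + '/rest/oauth/authorize', request_token['oauth_token'])
        verifier = raw_input('What is the PIN? ')

        # 3. Exchange the verified request token for an access token.
        token = oauth.Token(request_token['oauth_token'],
                            request_token['oauth_token_secret'])
        token.set_verifier(verifier)
        client = oauth.Client(consumer, token)
        resp, content = client.request(
            base_url + '/rest/oauth/access_token', 'GET')
        access_token = dict(urlparse.parse_qsl(content))

        # 4. Return a client signed with the access token for API calls.
        return oauth.Client(consumer,
                            oauth.Token(access_token['oauth_token'],
                                        access_token['oauth_token_secret']))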

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/wiki-export.py
----------------------------------------------------------------------
diff --git a/scripts/wiki-export.py b/scripts/wiki-export.py
index 55baa04..e096949 100755
--- a/scripts/wiki-export.py
+++ b/scripts/wiki-export.py
@@ -55,4 +55,4 @@ if __name__ == '__main__':
     if options.out_filename:
         out = open(options.out_filename, 'w')
 
-    exporter.export(out)
\ No newline at end of file
+    exporter.export(out)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/wiki-post.py
----------------------------------------------------------------------
diff --git a/scripts/wiki-post.py b/scripts/wiki-post.py
index c51af33..07369f5 100755
--- a/scripts/wiki-post.py
+++ b/scripts/wiki-post.py
@@ -19,7 +19,8 @@
 
 
 from sys import stdin, stdout
-import hmac, hashlib
+import hmac
+import hashlib
 from datetime import datetime
 import os
 import urllib
@@ -29,6 +30,7 @@ import urllib
 from optparse import OptionParser
 from ConfigParser import ConfigParser
 
+
 def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
     """
     Returns a bytestring version of 's', encoded as specified in 'encoding'.
@@ -48,7 +50,7 @@ def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
                 # know how to print itself properly. We shouldn't raise a
                 # further exception.
                 return ' '.join([smart_str(arg, encoding, strings_only,
-                        errors) for arg in s])
+                                           errors) for arg in s])
             return unicode(s).encode(encoding, errors)
     elif isinstance(s, unicode):
         r = s.encode(encoding, errors)
@@ -58,10 +60,12 @@ def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
     else:
         return s
 
+
 def generate_smart_str(params):
     for (key, value) in params:
         yield smart_str(key), smart_str(value)
 
+
 def urlencode(params):
     """
     A version of Python's urllib.urlencode() function that can operate on
@@ -87,6 +91,7 @@ class Signer(object):
         params.append(('api_signature', digest))
         return params
 
+
 def main():
     usage = 'usage: %prog [options] [PageName [file]]'
     op = OptionParser(usage=usage)
@@ -112,7 +117,8 @@ def main():
         markdown = f.read()
 
     config = ConfigParser()
-    config.read([str(os.path.expanduser('~/.forge-api.ini')), str(options.config)])
+    config.read(
+        [str(os.path.expanduser('~/.forge-api.ini')), str(options.config)])
 
     api_key = None
     secret_key = None
@@ -126,13 +132,13 @@ def main():
     print url
 
     sign = Signer(secret_key, api_key)
-    params = [('text', markdown)] if method=='PUT' else []
+    params = [('text', markdown)] if method == 'PUT' else []
     params = sign(urlparse(url).path, params)
     try:
-        if method=='PUT':
+        if method == 'PUT':
             result = urlopen(url, urlencode(params))
         else:
-            result = urlopen(url+'?'+urlencode(params))
+            result = urlopen(url + '?' + urlencode(params))
         stdout.write(result.read())
     except HTTPError, e:
         stdout.write(e.read())
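
The Signer class touched above is what turns the secret key from
~/.forge-api.ini into an api_signature parameter. As a hypothetical
illustration only (the exact parameter names and digest the live API checks
are not shown in this hunk), HMAC-signing a request path plus its parameters
with the hmac/hashlib modules imported above looks roughly like:

    import hashlib
    import hmac
    import urllib
    from datetime import datetime

    def sign_params(secret_key, api_key, path, params):
        # Hypothetical sketch; not necessarily the scheme Allura expects.
        params = params + [('api_key', api_key),
                           ('api_timestamp', datetime.utcnow().isoformat())]
        message = path + '?' + urllib.urlencode(sorted(params))
        digest = hmac.new(secret_key, message, hashlib.sha256).hexdigest()
        return params + [('api_signature', digest)]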


[16/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_diff.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_diff.py b/Allura/allura/tests/test_diff.py
index 2dcb8a6..20f1859 100644
--- a/Allura/allura/tests/test_diff.py
+++ b/Allura/allura/tests/test_diff.py
@@ -27,8 +27,8 @@ class TestHtmlSideBySideDiff(unittest.TestCase):
 
     def test_render_change(self):
         html = self.diff._render_change(
-                'aline', 'aline <span class="diff-add">bline</span>',
-                1, 2,'aclass', 'bclass')
+            'aline', 'aline <span class="diff-add">bline</span>',
+            1, 2, 'aclass', 'bclass')
         expected = '''
 <tr>
   <td class="lineno">1</td>

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_dispatch.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_dispatch.py b/Allura/allura/tests/test_dispatch.py
index cd779d9..19f9652 100644
--- a/Allura/allura/tests/test_dispatch.py
+++ b/Allura/allura/tests/test_dispatch.py
@@ -19,6 +19,7 @@ from allura.tests import TestController
 
 app = None
 
+
 class TestDispatch(TestController):
 
     validate_skip = True
@@ -27,10 +28,7 @@ class TestDispatch(TestController):
         r = self.app.get('/dispatch/foo/')
         assert r.body == 'index foo', r
         r = self.app.get('/dispatch/foo/bar')
-        assert r.body ==  "default(foo)(('bar',))", r
+        assert r.body == "default(foo)(('bar',))", r
         self.app.get('/not_found', status=404)
         self.app.get('/dispatch/', status=404)
         # self.app.get('/hello/foo/bar', status=404)
-
-
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_globals.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_globals.py b/Allura/allura/tests/test_globals.py
index 789af0e..6627e56 100644
--- a/Allura/allura/tests/test_globals.py
+++ b/Allura/allura/tests/test_globals.py
@@ -19,7 +19,8 @@
 
 
 import re
-import os, allura
+import os
+import allura
 import unittest
 import hashlib
 from mock import patch
@@ -47,52 +48,59 @@ def setUp():
     setup_basic_test()
     setup_with_tools()
 
+
 @td.with_wiki
 def setup_with_tools():
     setup_global_objects()
 
+
 @td.with_wiki
 def test_app_globals():
     g.oid_session()
     g.oid_session()
     with h.push_context('test', 'wiki', neighborhood='Projects'):
-        assert g.app_static('css/wiki.css') == '/nf/_static_/wiki/css/wiki.css', g.app_static('css/wiki.css')
-        assert g.url('/foo', a='foo bar') == 'http://localhost/foo?a=foo+bar', g.url('/foo', a='foo bar')
+        assert g.app_static(
+            'css/wiki.css') == '/nf/_static_/wiki/css/wiki.css', g.app_static('css/wiki.css')
+        assert g.url(
+            '/foo', a='foo bar') == 'http://localhost/foo?a=foo+bar', g.url('/foo', a='foo bar')
         assert g.url('/foo') == 'http://localhost/foo', g.url('/foo')
 
 
-@with_setup(teardown=setUp) # reset everything we changed
+@with_setup(teardown=setUp)  # reset everything we changed
 def test_macro_projects():
     file_name = 'neo-icon-set-454545-256x350.png'
-    file_path = os.path.join(allura.__path__[0],'nf','allura','images',file_name)
+    file_path = os.path.join(
+        allura.__path__[0], 'nf', 'allura', 'images', file_name)
 
     p_nbhd = M.Neighborhood.query.get(name='Projects')
     p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
     c.project = p_test
     icon_file = open(file_path)
     M.ProjectFile.save_image(
-                file_name, icon_file, content_type='image/png',
-                square=True, thumbnail_size=(48,48),
-                thumbnail_meta=dict(project_id=c.project._id,category='icon'))
+        file_name, icon_file, content_type='image/png',
+        square=True, thumbnail_size=(48, 48),
+        thumbnail_meta=dict(project_id=c.project._id, category='icon'))
     icon_file.close()
-    p_test2 = M.Project.query.get(shortname='test2', neighborhood_id=p_nbhd._id)
+    p_test2 = M.Project.query.get(
+        shortname='test2', neighborhood_id=p_nbhd._id)
     c.project = p_test2
     icon_file = open(file_path)
     M.ProjectFile.save_image(
-                file_name, icon_file, content_type='image/png',
-                square=True, thumbnail_size=(48,48),
-                thumbnail_meta=dict(project_id=c.project._id,category='icon'))
+        file_name, icon_file, content_type='image/png',
+        square=True, thumbnail_size=(48, 48),
+        thumbnail_meta=dict(project_id=c.project._id, category='icon'))
     icon_file.close()
-    p_sub1 =  M.Project.query.get(shortname='test/sub1', neighborhood_id=p_nbhd._id)
+    p_sub1 = M.Project.query.get(
+        shortname='test/sub1', neighborhood_id=p_nbhd._id)
     c.project = p_sub1
     icon_file = open(file_path)
     M.ProjectFile.save_image(
-                file_name, icon_file, content_type='image/png',
-                square=True, thumbnail_size=(48,48),
-                thumbnail_meta=dict(project_id=c.project._id,category='icon'))
+        file_name, icon_file, content_type='image/png',
+        square=True, thumbnail_size=(48, 48),
+        thumbnail_meta=dict(project_id=c.project._id, category='icon'))
     icon_file.close()
-    p_test.labels = [ 'test', 'root' ]
-    p_sub1.labels = [ 'test', 'sub1' ]
+    p_test.labels = ['test', 'root']
+    p_sub1.labels = ['test', 'sub1']
     # Make one project private
     p_test.private = False
     p_sub1.private = False
@@ -132,7 +140,8 @@ def test_macro_projects():
         assert '<img alt="A Subproject Logo"' in r, r
         r = g.markdown_wiki.convert('[[projects show_total=True sort=random]]')
         assert '<p class="macro_projects_total">3 Projects</p>' in r, r
-        r = g.markdown_wiki.convert('[[projects show_total=True private=True sort=random]]')
+        r = g.markdown_wiki.convert(
+            '[[projects show_total=True private=True sort=random]]')
         assert '<p class="macro_projects_total">1 Projects</p>' in r, r
         assert '<img alt="Test 2 Logo"' in r, r
         assert '<img alt="Test Project Logo"' not in r, r
@@ -149,14 +158,19 @@ def test_macro_download_button():
     p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
     with h.push_config(c, project=p_test):
         r = g.markdown_wiki.convert('[[download_button]]')
-    assert_equal(r, '<div class="markdown_content"><p><span class="download-button-%s" style="margin-bottom: 1em; display: block;"></span></p>\n</div>' % p_test._id)
+    assert_equal(
+        r, '<div class="markdown_content"><p><span class="download-button-%s" style="margin-bottom: 1em; display: block;"></span></p>\n</div>' %
+        p_test._id)
+
 
 def test_macro_gittip_button():
     p_nbhd = M.Neighborhood.query.get(name='Projects')
     p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
     with h.push_config(c, project=p_test):
         r = g.markdown_wiki.convert('[[gittip_button username=test]]')
-    assert_equal(r, u'<div class="markdown_content"><p><iframe height="22pt" src="https://www.gittip.com/test/widget.html" style="border: 0; margin: 0; padding: 0;" width="48pt"></iframe></p>\n</div>')
+    assert_equal(
+        r, u'<div class="markdown_content"><p><iframe height="22pt" src="https://www.gittip.com/test/widget.html" style="border: 0; margin: 0; padding: 0;" width="48pt"></iframe></p>\n</div>')
+
 
 def test_macro_neighborhood_feeds():
     p_nbhd = M.Neighborhood.query.get(name='Projects')
@@ -168,7 +182,7 @@ def test_macro_neighborhood_feeds():
         # Make project private & verify we don't see its new feed items
         anon = M.User.anonymous()
         p_test.acl.insert(0, M.ACE.deny(
-                M.ProjectRole.anonymous(p_test)._id, 'read'))
+            M.ProjectRole.anonymous(p_test)._id, 'read'))
         ThreadLocalORMSession.flush_all()
         pg = WM.Page.query.get(title='Home', app_config_id=c.app.config._id)
         pg.text = 'Change'
@@ -177,7 +191,8 @@ def test_macro_neighborhood_feeds():
         r = g.markdown_wiki.convert('[[neighborhood_feeds tool_name=wiki]]')
         new_len = len(r)
         assert new_len == orig_len
-        p = BM.BlogPost(title='test me', neighborhood_id=p_test.neighborhood_id)
+        p = BM.BlogPost(title='test me',
+                        neighborhood_id=p_test.neighborhood_id)
         p.text = 'test content'
         p.state = 'published'
         p.make_slug()
@@ -188,7 +203,8 @@ def test_macro_neighborhood_feeds():
             r = g.markdown_wiki.convert('[[neighborhood_blog_posts]]')
         assert 'test content' in r
 
-@with_setup(setUp, setUp) # start clean and reset everything we change
+
+@with_setup(setUp, setUp)  # start clean and reset everything we change
 def test_macro_members():
     p_nbhd = M.Neighborhood.query.get(name='Projects')
     p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
@@ -197,31 +213,35 @@ def test_macro_members():
     ThreadLocalORMSession.flush_all()
     r = g.markdown_wiki.convert('[[members limit=2]]')
     assert_equal(r, '<div class="markdown_content"><h6>Project Members:</h6>\n'
-        '<ul class="md-users-list">\n'
-        '<li><a href="/u/test-admin/">Test Admin</a> (admin)</li><li><a href="/u/test-user/">Test User</a></li>\n'
-        '<li class="md-users-list-more"><a href="/p/test/_members">All Members</a></li>\n'
-        '</ul>\n'
-        '</div>')
+                 '<ul class="md-users-list">\n'
+                 '<li><a href="/u/test-admin/">Test Admin</a> (admin)</li><li><a href="/u/test-user/">Test User</a></li>\n'
+                 '<li class="md-users-list-more"><a href="/p/test/_members">All Members</a></li>\n'
+                 '</ul>\n'
+                 '</div>')
+
 
-@with_setup(teardown=setUp) # reset everything we changed
+@with_setup(teardown=setUp)  # reset everything we changed
 def test_macro_members_escaping():
     user = M.User.by_username('test-admin')
     user.display_name = u'Test Admin <script>'
     r = g.markdown_wiki.convert('[[members]]')
     assert_equal(r, u'<div class="markdown_content"><h6>Project Members:</h6>\n'
-        u'<ul class="md-users-list">\n'
-        u'<li><a href="/u/test-admin/">Test Admin &lt;script&gt;</a> (admin)</li>\n'
-        u'</ul>\n</div>')
+                 u'<ul class="md-users-list">\n'
+                 u'<li><a href="/u/test-admin/">Test Admin &lt;script&gt;</a> (admin)</li>\n'
+                 u'</ul>\n</div>')
 
-@with_setup(teardown=setUp) # reset everything we changed
+
+@with_setup(teardown=setUp)  # reset everything we changed
 def test_macro_project_admins():
     user = M.User.by_username('test-admin')
     user.display_name = u'Test Ådmin <script>'
     with h.push_context('test', neighborhood='Projects'):
         r = g.markdown_wiki.convert('[[project_admins]]')
-    assert_equal(r, u'<div class="markdown_content"><h6>Project Admins:</h6>\n<ul class="md-users-list">\n<li><a href="/u/test-admin/">Test \xc5dmin &lt;script&gt;</a></li>\n</ul>\n</div>')
+    assert_equal(
+        r, u'<div class="markdown_content"><h6>Project Admins:</h6>\n<ul class="md-users-list">\n<li><a href="/u/test-admin/">Test \xc5dmin &lt;script&gt;</a></li>\n</ul>\n</div>')
 
-@with_setup(teardown=setUp) # reset everything we changed
+
+@with_setup(teardown=setUp)  # reset everything we changed
 def test_macro_project_admins_one_br():
     p_nbhd = M.Neighborhood.query.get(name='Projects')
     p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
@@ -266,10 +286,12 @@ def test_macro_include_extra_br():
 
 
 def test_macro_embed():
-    r = g.markdown_wiki.convert('[[embed url=http://www.youtube.com/watch?v=kOLpSPEA72U]]')
+    r = g.markdown_wiki.convert(
+        '[[embed url=http://www.youtube.com/watch?v=kOLpSPEA72U]]')
     assert '''<div class="grid-20"><iframe height="270" src="http://www.youtube.com/embed/kOLpSPEA72U?feature=oembed" width="480"></iframe></div>''' in r
     r = g.markdown_wiki.convert('[[embed url=http://vimeo.com/46163090]]')
-    assert_equal(r, '<div class="markdown_content"><p>[[embed url=http://vimeo.com/46163090]]</p></div>')
+    assert_equal(
+        r, '<div class="markdown_content"><p>[[embed url=http://vimeo.com/46163090]]</p></div>')
 
 
 def test_markdown_toc():
@@ -299,15 +321,19 @@ def test_wiki_artifact_links():
         text = g.markdown.convert('See [test:wiki:Home]')
         assert '<a class="alink" href="/p/test/wiki/Home/">[test:wiki:Home]</a>' in text, text
 
+
 def test_markdown_links():
-    text = g.markdown.convert('Read [here](http://foobar.sf.net/) about our project')
+    text = g.markdown.convert(
+        'Read [here](http://foobar.sf.net/) about our project')
     assert_in('href="http://foobar.sf.net/">here</a> about', text)
 
     text = g.markdown.convert('Read [here](/p/foobar/blah) about our project')
     assert_in('href="/p/foobar/blah">here</a> about', text)
 
     text = g.markdown.convert('Read <http://foobar.sf.net/> about our project')
-    assert_in('href="http://foobar.sf.net/">http://foobar.sf.net/</a> about', text)
+    assert_in(
+        'href="http://foobar.sf.net/">http://foobar.sf.net/</a> about', text)
+
 
 def test_markdown_and_html():
     with h.push_context('test', neighborhood='Projects'):
@@ -317,11 +343,13 @@ def test_markdown_and_html():
 
 def test_markdown_within_html():
     with h.push_context('test', neighborhood='Projects'):
-        r = g.markdown_wiki.convert('<div style="float:left" markdown>**blah**</div>')
+        r = g.markdown_wiki.convert(
+            '<div style="float:left" markdown>**blah**</div>')
     assert '''<div style="float: left;">
 <p><strong>blah</strong></p>
 </div>''' in r, r
 
+
 def test_markdown_with_html_comments():
     text = g.markdown.convert('test <!-- comment -->')
     assert '<div class="markdown_content"><p>test <!-- comment --></p></div>' == text, text
@@ -342,7 +370,8 @@ def test_markdown_basics():
         text = g.markdown.convert('# Foo!\n[Rooted]')
         assert '<a href=' not in text, text
 
-    assert '<br' in g.markdown.convert('Multi\nLine'), g.markdown.convert('Multi\nLine')
+    assert '<br' in g.markdown.convert(
+        'Multi\nLine'), g.markdown.convert('Multi\nLine')
     assert '<br' not in g.markdown.convert('Multi\n\nLine')
 
     g.markdown.convert("<class 'foo'>")  # should not raise an exception
@@ -354,7 +383,7 @@ Some text in a regular paragraph
     for i in range(10):
         print i
 ''')
-    assert 'http://localhost/' in  g.forge_markdown(email=True).convert('[Home]')
+    assert 'http://localhost/' in g.forge_markdown(email=True).convert('[Home]')
     assert 'class="codehilite"' in g.markdown.convert('''
 ~~~~
 def foo(): pass
@@ -370,7 +399,8 @@ def test_markdown_autolink():
     # beginning of doc
     assert_in('<a href=', g.markdown.convert('http://sf.net abc'))
     # beginning of a line
-    assert_in('<br />\n<a href="http://', g.markdown.convert('foobar\nhttp://sf.net abc'))
+    assert_in('<br />\n<a href="http://',
+              g.markdown.convert('foobar\nhttp://sf.net abc'))
     # no conversion of these urls:
     assert_in('a blahttp://sdf.com z',
               g.markdown.convert('a blahttp://sdf.com z'))
@@ -384,7 +414,8 @@ def test_markdown_autolink():
 def test_markdown_autolink_with_escape():
     # \_ is unnecessary but valid markdown escaping and should be considered as a regular underscore
     # (it occurs during html2text conversion during project migrations)
-    r = g.markdown.convert('a http://www.phpmyadmin.net/home\_page/security/\#target b')
+    r = g.markdown.convert(
+        'a http://www.phpmyadmin.net/home\_page/security/\#target b')
     assert 'href="http://www.phpmyadmin.net/home_page/security/#target"' in r, r
 
 
@@ -428,7 +459,8 @@ def test_sort_updated():
     with h.push_context(p_nbhd.neighborhood_project._id):
         r = g.markdown_wiki.convert('[[projects sort=last_updated]]')
         project_names = get_project_names(r)
-        updated_at = get_projects_property_in_the_same_order(project_names, 'last_updated')
+        updated_at = get_projects_property_in_the_same_order(
+            project_names, 'last_updated')
         assert updated_at == sorted(updated_at, reverse=True)
 
 
@@ -445,7 +477,8 @@ def test_filtering():
     with h.push_config(c,
                        project=p_nbhd.neighborhood_project,
                        user=M.User.by_username('test-admin')):
-        r = g.markdown_wiki.convert('[[projects category="%s"]]' % random_trove.fullpath)
+        r = g.markdown_wiki.convert(
+            '[[projects category="%s"]]' % random_trove.fullpath)
         project_names = get_project_names(r)
         assert_equal([test_project.name], project_names)
 
@@ -464,9 +497,11 @@ def test_projects_macro():
         assert two_column_style not in r
 
         # test project download button
-        r = g.markdown_wiki.convert('[[projects display_mode=list show_download_button=True]]')
+        r = g.markdown_wiki.convert(
+            '[[projects display_mode=list show_download_button=True]]')
         assert 'download-button' in r
-        r = g.markdown_wiki.convert('[[projects display_mode=list show_download_button=False]]')
+        r = g.markdown_wiki.convert(
+            '[[projects display_mode=list show_download_button=False]]')
         assert 'download-button' not in r
 
 
@@ -481,6 +516,7 @@ def test_limit_tools_macro():
         r = g.markdown_wiki.convert('[[projects grid_view_tools=wiki,admin]]')
         assert '<span>Admin</span>' in r
 
+
 @td.with_user_project('test-admin')
 @td.with_user_project('test-user-1')
 def test_myprojects_macro():
@@ -512,7 +548,8 @@ def test_hideawards_macro():
     award.full = u'Award full'
     award.created_by_neighborhood_id = p_nbhd._id
 
-    project = M.Project.query.get(neighborhood_id=p_nbhd._id, shortname=u'test')
+    project = M.Project.query.get(
+        neighborhood_id=p_nbhd._id, shortname=u'test')
 
     award_grant = M.AwardGrant(award=award,
                                granted_by_neighborhood=p_nbhd,
@@ -526,6 +563,7 @@ def test_hideawards_macro():
         r = g.markdown_wiki.convert('[[projects show_awards_banner=False]]')
         assert '<div class="feature">Award short</div>' not in r, r
 
+
 def get_project_names(r):
     """
     Extracts a list of project names from a wiki page HTML.
@@ -536,6 +574,7 @@ def get_project_names(r):
     re_proj_names = re.compile('<h2><a[^>]+>(.+)<\/a><\/h2>')
     return [e for e in re_proj_names.findall(r)]
 
+
 def get_projects_property_in_the_same_order(names, prop):
     """
     Returns a list of projects properties `prop` in the same order as
@@ -543,11 +582,12 @@ def get_projects_property_in_the_same_order(names, prop):
     It is required because results of the query are not in the same order as names.
     """
     projects = M.Project.query.find(dict(name={'$in': names})).all()
-    projects_dict = dict([(p['name'],p[prop]) for p in projects])
+    projects_dict = dict([(p['name'], p[prop]) for p in projects])
     return [projects_dict[name] for name in names]
 
 
 class TestCachedMarkdown(unittest.TestCase):
+
     def setUp(self):
         self.md = ForgeMarkdown()
         self.post = M.Post()
@@ -555,7 +595,8 @@ class TestCachedMarkdown(unittest.TestCase):
         self.expected_html = u'<p><strong>bold</strong></p>'
 
     def test_bad_source_field_name(self):
-        self.assertRaises(AttributeError, self.md.cached_convert, self.post, 'no_such_field')
+        self.assertRaises(AttributeError, self.md.cached_convert,
+                          self.post, 'no_such_field')
 
     def test_missing_cache_field(self):
         delattr(self.post, 'text_cache')
@@ -577,7 +618,7 @@ class TestCachedMarkdown(unittest.TestCase):
         self.assertEqual(html, self.expected_html)
         self.assertEqual(html, self.post.text_cache.html)
         self.assertEqual(hashlib.md5(self.post.text).hexdigest(),
-                self.post.text_cache.md5)
+                         self.post.text_cache.md5)
         self.assertTrue(self.post.text_cache.render_time > 0)
 
     @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='0')
@@ -588,7 +629,7 @@ class TestCachedMarkdown(unittest.TestCase):
         self.assertNotEqual(old, html)
         self.assertEqual(html, self.post.text_cache.html)
         self.assertEqual(hashlib.md5(self.post.text).hexdigest(),
-                self.post.text_cache.md5)
+                         self.post.text_cache.md5)
         self.assertTrue(self.post.text_cache.render_time > 0)
 
     @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='0')
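
The TestCachedMarkdown assertions above encode the caching contract: the
rendered HTML is stored next to an md5 of the source text and a render_time,
and is only reused while that md5 still matches. Under the same field names
the tests use, the check is roughly (threshold and timing policy omitted):

    import hashlib
    import time

    def cached_convert_sketch(md, artifact, field_name):
        source = getattr(artifact, field_name)       # AttributeError if missing
        cache = getattr(artifact, field_name + '_cache')
        md5 = hashlib.md5(source).hexdigest()
        if cache.html is not None and cache.md5 == md5:
            return cache.html                         # cache still fresh
        start = time.time()
        html = md.convert(source)                     # re-render and refresh cache
        cache.md5, cache.html = md5, html
        cache.render_time = time.time() - start
        return html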

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_helpers.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_helpers.py b/Allura/allura/tests/test_helpers.py
index e00231f..259ecba 100644
--- a/Allura/allura/tests/test_helpers.py
+++ b/Allura/allura/tests/test_helpers.py
@@ -46,6 +46,7 @@ def setUp(self):
 
 
 class TestMakeSafePathPortion(TestCase):
+
     def setUp(self):
         self.f = h.make_safe_path_portion
 
@@ -78,7 +79,8 @@ def test_really_unicode():
     here_dir = path.dirname(__file__)
     s = h.really_unicode('\xef\xbb\xbf<?xml version="1.0" encoding="utf-8" ?>')
     assert s.startswith(u'\ufeff')
-    s = h.really_unicode(open(path.join(here_dir, 'data/unicode_test.txt')).read())
+    s = h.really_unicode(
+        open(path.join(here_dir, 'data/unicode_test.txt')).read())
     assert isinstance(s, unicode)
     # try non-ascii string in legacy 8bit encoding
     h.really_unicode(u'\u0410\u0401'.encode('cp1251'))
@@ -86,12 +88,14 @@ def test_really_unicode():
     s = h._attempt_encodings('foo', ['LKDJFLDK'])
     assert isinstance(s, unicode)
 
+
 def test_render_genshi_plaintext():
     here_dir = path.dirname(__file__)
     tpl = path.join(here_dir, 'data/genshi_hello_tmpl')
     text = h.render_genshi_plaintext(tpl, object='world')
     eq_(u'Hello, world!\n', text)
 
+
 def test_find_project():
     proj, rest = h.find_project('/p/test/foo')
     assert_equals(proj.shortname, 'test')
@@ -99,15 +103,18 @@ def test_find_project():
     proj, rest = h.find_project('/p/testable/foo')
     assert proj is None
 
+
 def test_make_users():
     r = h.make_users([None]).next()
     assert r.username == '*anonymous', r
 
+
 def test_make_roles():
     h.set_context('test', 'wiki', neighborhood='Projects')
     pr = M.ProjectRole.anonymous()
     assert h.make_roles([pr._id]).next() == pr
 
+
 @td.with_wiki
 def test_make_app_admin_only():
     h.set_context('test', 'wiki', neighborhood='Projects')
@@ -139,6 +146,7 @@ def test_make_app_admin_only():
     assert not c.app.is_visible_to(dev)
     assert c.app.is_visible_to(admin)
 
+
 @td.with_wiki
 def test_context_setters():
     h.set_context('test', 'wiki', neighborhood='Projects')
@@ -172,15 +180,17 @@ def test_context_setters():
     assert not hasattr(c, 'project')
     assert not hasattr(c, 'app')
 
+
 def test_encode_keys():
-    kw = h.encode_keys({u'foo':5})
+    kw = h.encode_keys({u'foo': 5})
     assert type(kw.keys()[0]) != unicode
 
+
 def test_ago():
     from datetime import datetime, timedelta
     import time
     assert_equals(h.ago(datetime.utcnow() - timedelta(days=2)), '2 days ago')
-    assert_equals(h.ago_ts(time.time() - 60*60*2), '2 hours ago')
+    assert_equals(h.ago_ts(time.time() - 60 * 60 * 2), '2 hours ago')
     d_str = (datetime.utcnow() - timedelta(hours=3)).isoformat()
     assert_equals(h.ago_string(d_str), '3 hours ago')
     assert_equals(h.ago_string('bad format'), 'unknown')
@@ -197,9 +207,11 @@ def test_urlquote_unicode():
     h.urlquote(u'\u0410')
     h.urlquoteplus(u'\u0410')
 
+
 def test_sharded_path():
     assert_equals(h.sharded_path('foobar'), 'f/fo')
 
+
 def test_paging_sanitizer():
     test_data = {
         # input (limit, page, total, zero-based): output (limit, page)
@@ -215,12 +227,17 @@ def test_paging_sanitizer():
     for input, output in test_data.iteritems():
         assert (h.paging_sanitizer(*input)) == output
 
+
 def test_render_any_markup_empty():
     assert_equals(h.render_any_markup('foo', ''), '<p><em>Empty File</em></p>')
 
+
 def test_render_any_markup_plain():
-    assert_equals(h.render_any_markup('readme.txt', '<b>blah</b>\n<script>alert(1)</script>\nfoo'),
-                  '<pre>&lt;b&gt;blah&lt;/b&gt;\n&lt;script&gt;alert(1)&lt;/script&gt;\nfoo</pre>')
+    assert_equals(
+        h.render_any_markup(
+            'readme.txt', '<b>blah</b>\n<script>alert(1)</script>\nfoo'),
+        '<pre>&lt;b&gt;blah&lt;/b&gt;\n&lt;script&gt;alert(1)&lt;/script&gt;\nfoo</pre>')
+
 
 def test_render_any_markup_formatting():
     assert_equals(h.render_any_markup('README.md', '### foo\n'
@@ -270,6 +287,7 @@ def test_get_first():
     assert_equals(h.get_first({'title': []}, 'title'), None)
     assert_equals(h.get_first({'title': ['Value']}, 'title'), 'Value')
 
+
 @patch('allura.lib.search.c')
 def test_inject_user(context):
     user = Mock(username='user01')
@@ -283,7 +301,9 @@ def test_inject_user(context):
     assert_equals(result, 'reported_by_s:"admin1" OR assigned_to_s:"admin1"')
     context.user = Mock(username='*anonymous')
     result = inject_user('reported_by_s:$USER OR assigned_to_s:$USER')
-    assert_equals(result, 'reported_by_s:"*anonymous" OR assigned_to_s:"*anonymous"')
+    assert_equals(
+        result, 'reported_by_s:"*anonymous" OR assigned_to_s:"*anonymous"')
+
 
 def test_datetimeformat():
     from datetime import date
@@ -291,8 +311,10 @@ def test_datetimeformat():
 
 
 def test_split_select_field_options():
-    assert_equals(h.split_select_field_options('"test message" test2'), ['test message', 'test2'])
-    assert_equals(h.split_select_field_options('"test message test2'), ['test', 'message', 'test2'])
+    assert_equals(h.split_select_field_options('"test message" test2'),
+                  ['test message', 'test2'])
+    assert_equals(h.split_select_field_options('"test message test2'),
+                  ['test', 'message', 'test2'])
 
 
 def test_notifications_disabled():
@@ -331,14 +353,20 @@ http://blah.com/?x=y&a=b - not escaped either
 
     dd.assert_equal(h.plain2markdown(text), expected)
 
-    dd.assert_equal(h.plain2markdown('a foo  bar\n\n    code here?', preserve_multiple_spaces=True),
-                'a foo&nbsp; bar\n\n&nbsp;&nbsp;&nbsp; code here?')
+    dd.assert_equal(
+        h.plain2markdown('a foo  bar\n\n    code here?',
+                         preserve_multiple_spaces=True),
+        'a foo&nbsp; bar\n\n&nbsp;&nbsp;&nbsp; code here?')
 
-    dd.assert_equal(h.plain2markdown('\ttab before (stuff)', preserve_multiple_spaces=True),
-                 '&nbsp;&nbsp;&nbsp; tab before \(stuff\)')
+    dd.assert_equal(
+        h.plain2markdown('\ttab before (stuff)',
+                         preserve_multiple_spaces=True),
+        '&nbsp;&nbsp;&nbsp; tab before \(stuff\)')
 
-    dd.assert_equal(h.plain2markdown('\ttab before (stuff)', preserve_multiple_spaces=False),
-                 'tab before \(stuff\)')
+    dd.assert_equal(
+        h.plain2markdown('\ttab before (stuff)',
+                         preserve_multiple_spaces=False),
+        'tab before \(stuff\)')
 
 
 @td.without_module('html2text')
@@ -376,17 +404,24 @@ back\\\\\-slash escaped
 
     dd.assert_equal(h.plain2markdown(text), expected)
 
-    dd.assert_equal(h.plain2markdown('a foo  bar\n\n    code here?', preserve_multiple_spaces=True),
-                'a foo&nbsp; bar\n\n&nbsp;&nbsp;&nbsp; code here?')
+    dd.assert_equal(
+        h.plain2markdown('a foo  bar\n\n    code here?',
+                         preserve_multiple_spaces=True),
+        'a foo&nbsp; bar\n\n&nbsp;&nbsp;&nbsp; code here?')
 
-    dd.assert_equal(h.plain2markdown('\ttab before (stuff)', preserve_multiple_spaces=True),
-                 '&nbsp;&nbsp;&nbsp; tab before \(stuff\)')
+    dd.assert_equal(
+        h.plain2markdown('\ttab before (stuff)',
+                         preserve_multiple_spaces=True),
+        '&nbsp;&nbsp;&nbsp; tab before \(stuff\)')
 
-    dd.assert_equal(h.plain2markdown('\ttab before (stuff)', preserve_multiple_spaces=False),
-                 'tab before \(stuff\)')
+    dd.assert_equal(
+        h.plain2markdown('\ttab before (stuff)',
+                         preserve_multiple_spaces=False),
+        'tab before \(stuff\)')
 
 
 class TestUrlOpen(TestCase):
+
     @patch('allura.lib.helpers.urllib2')
     def test_no_error(self, urllib2):
         r = h.urlopen('myurl')
@@ -396,6 +431,7 @@ class TestUrlOpen(TestCase):
     @patch('allura.lib.helpers.urllib2.urlopen')
     def test_socket_timeout(self, urlopen):
         import socket
+
         def side_effect(url, timeout=None):
             raise socket.timeout()
         urlopen.side_effect = side_effect
@@ -405,6 +441,7 @@ class TestUrlOpen(TestCase):
     @patch('allura.lib.helpers.urllib2.urlopen')
     def test_handled_http_error(self, urlopen):
         from urllib2 import HTTPError
+
         def side_effect(url, timeout=None):
             raise HTTPError('url', 408, 'timeout', None, None)
         urlopen.side_effect = side_effect
@@ -414,6 +451,7 @@ class TestUrlOpen(TestCase):
     @patch('allura.lib.helpers.urllib2.urlopen')
     def test_unhandled_http_error(self, urlopen):
         from urllib2 import HTTPError
+
         def side_effect(url, timeout=None):
             raise HTTPError('url', 404, 'timeout', None, None)
         urlopen.side_effect = side_effect
@@ -428,12 +466,15 @@ def test_absurl_no_request():
 @patch.object(h, 'request',
               new=Request.blank('/p/test/foobar', base_url='https://www.mysite.com/p/test/foobar'))
 def test_absurl_with_request():
-    assert_equals(h.absurl('/p/test/foobar'), 'https://www.mysite.com/p/test/foobar')
+    assert_equals(h.absurl('/p/test/foobar'),
+                  'https://www.mysite.com/p/test/foobar')
 
 
 def test_daterange():
-    assert_equals(list(h.daterange(datetime(2013, 1, 1), datetime(2013, 1, 4))),
-                 [datetime(2013, 1, 1), datetime(2013, 1, 2), datetime(2013, 1, 3)])
+    assert_equals(
+        list(h.daterange(datetime(2013, 1, 1), datetime(2013, 1, 4))),
+        [datetime(2013, 1, 1), datetime(2013, 1, 2), datetime(2013, 1, 3)])
+
 
 @patch.object(h, 'request',
               new=Request.blank('/p/test/foobar', base_url='https://www.mysite.com/p/test/foobar'))
@@ -446,7 +487,9 @@ def test_login_overlay():
         with h.login_overlay(exceptions=['foobar']):
             raise HTTPUnauthorized()
 
+
 class TestIterEntryPoints(TestCase):
+
     def _make_ep(self, name, cls):
         m = Mock()
         m.name = name
@@ -457,17 +500,20 @@ class TestIterEntryPoints(TestCase):
     @patch.dict(h.tg.config, {'disable_entry_points.allura': 'myapp'})
     def test_disabled(self, pkg_resources):
         pkg_resources.iter_entry_points.return_value = [
-                self._make_ep('myapp', object)]
+            self._make_ep('myapp', object)]
         self.assertEqual([], list(h.iter_entry_points('allura')))
 
     @patch('allura.lib.helpers.pkg_resources')
     def test_subclassed_ep(self, pkg_resources):
-        class App(object): pass
-        class BetterApp(App): pass
+        class App(object):
+            pass
+
+        class BetterApp(App):
+            pass
 
         pkg_resources.iter_entry_points.return_value = [
-                self._make_ep('myapp', App),
-                self._make_ep('myapp', BetterApp)]
+            self._make_ep('myapp', App),
+            self._make_ep('myapp', BetterApp)]
 
         eps = list(h.iter_entry_points('allura'))
         self.assertEqual(len(eps), 1)
@@ -475,16 +521,21 @@ class TestIterEntryPoints(TestCase):
 
     @patch('allura.lib.helpers.pkg_resources')
     def test_ambiguous_eps(self, pkg_resources):
-        class App(object): pass
-        class BetterApp(App): pass
-        class BestApp(object): pass
+        class App(object):
+            pass
+
+        class BetterApp(App):
+            pass
+
+        class BestApp(object):
+            pass
 
         pkg_resources.iter_entry_points.return_value = [
-                self._make_ep('myapp', App),
-                self._make_ep('myapp', BetterApp),
-                self._make_ep('myapp', BestApp)]
+            self._make_ep('myapp', App),
+            self._make_ep('myapp', BetterApp),
+            self._make_ep('myapp', BestApp)]
 
         self.assertRaisesRegexp(ImportError,
-                'Ambiguous \[allura\] entry points detected. '
-                'Multiple entry points with name "myapp".',
-                list, h.iter_entry_points('allura'))
+                                'Ambiguous \[allura\] entry points detected. '
+                                'Multiple entry points with name "myapp".',
+                                list, h.iter_entry_points('allura'))
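
The TestIterEntryPoints cases above pin down the de-duplication rule for
same-named entry points: the most-derived class wins, and unrelated duplicates
are an error. A rough sketch of that rule (the config-driven
disable_entry_points filtering also covered by the tests is left out):

    def disambiguate(entry_points):
        by_name = {}
        for ep in entry_points:
            by_name.setdefault(ep.name, []).append(ep)
        result = []
        for name, group in by_name.items():
            # keep an entry point only if its class subclasses every
            # other candidate registered under the same name
            winners = [ep for ep in group
                       if all(issubclass(ep.load(), other.load())
                              for other in group)]
            if len(winners) != 1:
                raise ImportError(
                    'Ambiguous [allura] entry points detected. '
                    'Multiple entry points with name "%s".' % name)
            result.extend(winners)
        return result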

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_mail_util.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_mail_util.py b/Allura/allura/tests/test_mail_util.py
index f88b46c..8473cec 100644
--- a/Allura/allura/tests/test_mail_util.py
+++ b/Allura/allura/tests/test_mail_util.py
@@ -30,13 +30,13 @@ from alluratest.controller import setup_basic_test, setup_global_objects
 from allura.lib.utils import ConfigProxy
 
 from allura.lib.mail_util import (
-        parse_address,
-        parse_message,
-        Header,
-        is_autoreply,
-        identify_sender,
-        _parse_message_id,
-    )
+    parse_address,
+    parse_message,
+    Header,
+    is_autoreply,
+    identify_sender,
+    _parse_message_id,
+)
 from allura.lib.exceptions import AddressException
 from allura.tests import decorators as td
 
@@ -44,6 +44,7 @@ config = ConfigProxy(
     common_suffix='forgemail.domain',
     return_path='forgemail.return_path')
 
+
 class TestReactor(unittest.TestCase):
 
     def setUp(self):
@@ -70,7 +71,8 @@ class TestReactor(unittest.TestCase):
 
     @td.with_wiki
     def test_parse_address_good(self):
-        topic, project, app = parse_address('foo@wiki.test.p' + config.common_suffix)
+        topic, project, app = parse_address(
+            'foo@wiki.test.p' + config.common_suffix)
         assert_equal(topic, 'foo')
         assert_equal(project.shortname, 'test')
         assert_equal(app.__class__.__name__, 'ForgeWikiApp')
@@ -96,15 +98,15 @@ class TestReactor(unittest.TestCase):
 Дворцов и башен; корабли
 Толпой со всех концов земли
 К богатым пристаням стремятся;'''.encode(charset),
-                        'plain',
-                        charset)
+                      'plain',
+                      charset)
         p2 = MIMEText(u'''<p>По оживлённым берегам
 Громады стройные теснятся
 Дворцов и башен; корабли
 Толпой со всех концов земли
 К богатым пристаням стремятся;</p>'''.encode(charset),
-                        'plain',
-                        charset)
+                      'plain',
+                      charset)
         msg1 = MIMEMultipart()
         msg1['Message-ID'] = '<fo...@bar.com>'
         msg1.attach(p1)
@@ -112,7 +114,8 @@ class TestReactor(unittest.TestCase):
         s_msg = msg1.as_string()
         msg2 = parse_message(s_msg)
         for part in msg2['parts']:
-            if part['payload'] is None: continue
+            if part['payload'] is None:
+                continue
             assert isinstance(part['payload'], unicode)
 
 
@@ -133,7 +136,8 @@ class TestHeader(object):
 
     def test_name_addr(self):
         our_header = Header(u'"теснятся"', u'<da...@b.com>')
-        assert_equal(str(our_header), '=?utf-8?b?ItGC0LXRgdC90Y/RgtGB0Y8i?= <da...@b.com>')
+        assert_equal(str(our_header),
+                     '=?utf-8?b?ItGC0LXRgdC90Y/RgtGB0Y8i?= <da...@b.com>')
 
 
 class TestIsAutoreply(object):
@@ -181,27 +185,34 @@ class TestIsAutoreply(object):
         self.msg['headers']['Return-Path'] = '<>'
         assert_true(is_autoreply(self.msg))
 
+
 class TestIdentifySender(object):
+
     @mock.patch('allura.model.EmailAddress')
     def test_arg(self, EA):
-        EA.canonical = lambda e:e
-        EA.query.get.side_effect = [mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
+        EA.canonical = lambda e: e
+        EA.query.get.side_effect = [
+            mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
         assert_equal(identify_sender(None, 'arg', None, None), 'user')
         EA.query.get.assert_called_once_with(_id='arg')
 
     @mock.patch('allura.model.EmailAddress')
     def test_header(self, EA):
-        EA.canonical = lambda e:e
-        EA.query.get.side_effect = [None, mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
-        assert_equal(identify_sender(None, 'arg', {'From': 'from'}, None), 'user')
-        assert_equal(EA.query.get.call_args_list, [mock.call(_id='arg'), mock.call(_id='from')])
+        EA.canonical = lambda e: e
+        EA.query.get.side_effect = [
+            None, mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
+        assert_equal(
+            identify_sender(None, 'arg', {'From': 'from'}, None), 'user')
+        assert_equal(EA.query.get.call_args_list,
+                     [mock.call(_id='arg'), mock.call(_id='from')])
 
     @mock.patch('allura.model.User')
     @mock.patch('allura.model.EmailAddress')
     def test_no_header(self, EA, User):
         anon = User.anonymous()
-        EA.canonical = lambda e:e
-        EA.query.get.side_effect = [None, mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
+        EA.canonical = lambda e: e
+        EA.query.get.side_effect = [
+            None, mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
         assert_equal(identify_sender(None, 'arg', {}, None), anon)
         assert_equal(EA.query.get.call_args_list, [mock.call(_id='arg')])
 
@@ -209,14 +220,16 @@ class TestIdentifySender(object):
     @mock.patch('allura.model.EmailAddress')
     def test_no_match(self, EA, User):
         anon = User.anonymous()
-        EA.canonical = lambda e:e
+        EA.canonical = lambda e: e
         EA.query.get.side_effect = [None, None]
-        assert_equal(identify_sender(None, 'arg', {'From': 'from'}, None), anon)
-        assert_equal(EA.query.get.call_args_list, [mock.call(_id='arg'), mock.call(_id='from')])
+        assert_equal(
+            identify_sender(None, 'arg', {'From': 'from'}, None), anon)
+        assert_equal(EA.query.get.call_args_list,
+                     [mock.call(_id='arg'), mock.call(_id='from')])
 
 
 def test_parse_message_id():
     assert_equal(_parse_message_id('<de...@libjpeg-turbo.p.sourceforge.net>, </p...@libjpeg-turbo.p.sourceforge.net>'), [
-            'de31888f6be2d87dc377d9e713876bb514548625.patches@libjpeg-turbo.p.sourceforge.net',
-            'de31888f6be2d87dc377d9e713876bb514548625.patches@libjpeg-turbo.p.sourceforge.net',
-        ])
+        'de31888f6be2d87dc377d9e713876bb514548625.patches@libjpeg-turbo.p.sourceforge.net',
+        'de31888f6be2d87dc377d9e713876bb514548625.patches@libjpeg-turbo.p.sourceforge.net',
+    ])
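
The TestIdentifySender cases above spell out the lookup order: the address
argument is tried first, then the From header, and anything unclaimed falls
back to the anonymous user. Schematically (EmailAddress and the anonymous user
are passed in here rather than imported from allura.model):

    def identify_sender_sketch(email_address, headers, EmailAddress, anonymous):
        for candidate in (email_address, headers.get('From')):
            if not candidate:
                continue
            addr = EmailAddress.query.get(_id=EmailAddress.canonical(candidate))
            if addr and addr.claimed_by_user_id:
                return addr.claimed_by_user()
        return anonymous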

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_markdown.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_markdown.py b/Allura/allura/tests/test_markdown.py
index a398087..7c6d164 100644
--- a/Allura/allura/tests/test_markdown.py
+++ b/Allura/allura/tests/test_markdown.py
@@ -24,6 +24,7 @@ from allura.lib import markdown_extensions as mde
 
 
 class TestTracRef1(unittest.TestCase):
+
     @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
     def test_no_such_artifact(self, lookup):
         lookup.return_value = None
@@ -42,11 +43,14 @@ class TestTracRef1(unittest.TestCase):
         shortlink = mock.Mock(url='/p/project/tool/artifact')
         shortlink.ref.artifact.deleted = False
         lookup.return_value = shortlink
-        self.assertEqual(mde.TracRef1().sub('#100'), '[#100](/p/project/tool/artifact)')
-        self.assertEqual(mde.TracRef1().sub('r123'), '[r123](/p/project/tool/artifact)')
+        self.assertEqual(mde.TracRef1().sub('#100'),
+                         '[#100](/p/project/tool/artifact)')
+        self.assertEqual(mde.TracRef1().sub('r123'),
+                         '[r123](/p/project/tool/artifact)')
 
 
 class TestTracRef2(unittest.TestCase):
+
     @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
     def test_no_such_artifact(self, lookup):
         lookup.return_value = None
@@ -63,32 +67,37 @@ class TestTracRef2(unittest.TestCase):
         lookup.return_value = shortlink
         pattern = mde.TracRef2()
         pattern.get_comment_slug = lambda *args: 'abc'
-        self.assertEqual(pattern.sub('ticket:100'), '[ticket:100](/p/project/tool/artifact/)')
-        self.assertEqual(pattern.sub('[ticket:100]'), '[[ticket:100](/p/project/tool/artifact/)]')
+        self.assertEqual(pattern.sub('ticket:100'),
+                         '[ticket:100](/p/project/tool/artifact/)')
+        self.assertEqual(pattern.sub('[ticket:100]'),
+                         '[[ticket:100](/p/project/tool/artifact/)]')
         self.assertEqual(pattern.sub('comment:13:ticket:100'),
-                '[comment:13:ticket:100](/p/project/tool/artifact/#abc)')
+                         '[comment:13:ticket:100](/p/project/tool/artifact/#abc)')
         pattern.get_comment_slug = lambda *args: None
         self.assertEqual(pattern.sub('comment:13:ticket:100'),
-                '[comment:13:ticket:100](/p/project/tool/artifact/)')
+                         '[comment:13:ticket:100](/p/project/tool/artifact/)')
 
 
 class TestTracRef3(unittest.TestCase):
+
     def test_no_app_context(self):
-        self.assertEqual(mde.TracRef3(None).sub('source:file.py'), 'source:file.py')
+        self.assertEqual(mde.TracRef3(None)
+                         .sub('source:file.py'), 'source:file.py')
 
     def test_legit_refs(self):
         app = mock.Mock(url='/p/project/tool/')
         self.assertEqual(mde.TracRef3(app).sub('source:file.py'),
-                '[source:file.py](/p/project/tool/HEAD/tree/file.py)')
+                         '[source:file.py](/p/project/tool/HEAD/tree/file.py)')
         self.assertEqual(mde.TracRef3(app).sub('source:file.py@123'),
-                '[source:file.py@123](/p/project/tool/123/tree/file.py)')
+                         '[source:file.py@123](/p/project/tool/123/tree/file.py)')
         self.assertEqual(mde.TracRef3(app).sub('source:file.py@123#L456'),
-                '[source:file.py@123#L456](/p/project/tool/123/tree/file.py#l456)')
+                         '[source:file.py@123#L456](/p/project/tool/123/tree/file.py#l456)')
         self.assertEqual(mde.TracRef3(app).sub('source:file.py#L456'),
-                '[source:file.py#L456](/p/project/tool/HEAD/tree/file.py#l456)')
+                         '[source:file.py#L456](/p/project/tool/HEAD/tree/file.py#l456)')
 
 
 class TestPatternReplacingProcessor(unittest.TestCase):
+
     @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
     def test_run(self, lookup):
         shortlink = mock.Mock(url='/p/project/tool/artifact')
@@ -102,6 +111,7 @@ class TestPatternReplacingProcessor(unittest.TestCase):
 
 
 class TestCommitMessageExtension(unittest.TestCase):
+
     @mock.patch('allura.lib.markdown_extensions.TracRef2.get_comment_slug')
     @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
     def test_convert(self, lookup, get_comment_slug):
@@ -132,6 +142,7 @@ Not *strong* or _underlined_."""
 * <a href="/p/project/tool/2/tree/test.py#l3">source:test.py@2#L3</a></p>
 <p>Not *strong* or _underlined_.</p></div>"""
 
-        md = ForgeMarkdown(extensions=[mde.CommitMessageExtension(app), 'nl2br'],
-                output_format='html4')
+        md = ForgeMarkdown(
+            extensions=[mde.CommitMessageExtension(app), 'nl2br'],
+            output_format='html4')
         self.assertEqual(md.convert(text), expected_html)
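
The TracRef3 expectations above reduce to one rewrite: a Trac-style
source:path[@rev][#Lnnn] reference becomes a markdown link into the tool's
tree view, defaulting to HEAD and lower-casing the line anchor. A small sketch
of just that rewrite (the real pattern classes also handle ticket and comment
references):

    import re

    def trac_source_link(app_url, ref):
        m = re.match(r'source:([^@#]+)(?:@(\w+))?(?:#L(\d+))?$', ref)
        if not m:
            return ref                      # leave non-matching text alone
        path, rev, line = m.group(1), m.group(2) or 'HEAD', m.group(3)
        url = '%s%s/tree/%s' % (app_url, rev, path)
        if line:
            url += '#l%s' % line            # '#L456' -> '#l456'
        return '[%s](%s)' % (ref, url)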

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_plugin.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_plugin.py b/Allura/allura/tests/test_plugin.py
index 66d4715..1c00f61 100644
--- a/Allura/allura/tests/test_plugin.py
+++ b/Allura/allura/tests/test_plugin.py
@@ -49,7 +49,7 @@ class TestProjectRegistrationProvider(object):
     def test_suggest_name(self):
         f = self.provider.suggest_name
         assert_equals(f('A More Than Fifteen Character Name', Mock()),
-                'amorethanfifteencharactername')
+                      'amorethanfifteencharactername')
 
     @patch('allura.model.Project')
     def test_shortname_validator(self, Project):
@@ -58,14 +58,18 @@ class TestProjectRegistrationProvider(object):
         v = self.provider.shortname_validator.to_python
 
         v('thisislegit', neighborhood=nbhd)
-        assert_raises(ProjectShortnameInvalid, v, 'not valid', neighborhood=nbhd)
-        assert_raises(ProjectShortnameInvalid, v, 'this-is-valid-but-too-long', neighborhood=nbhd)
-        assert_raises(ProjectShortnameInvalid, v, 'this is invalid and too long', neighborhood=nbhd)
+        assert_raises(ProjectShortnameInvalid, v,
+                      'not valid', neighborhood=nbhd)
+        assert_raises(ProjectShortnameInvalid, v,
+                      'this-is-valid-but-too-long', neighborhood=nbhd)
+        assert_raises(ProjectShortnameInvalid, v,
+                      'this is invalid and too long', neighborhood=nbhd)
         Project.query.get.return_value = Mock()
         assert_raises(ProjectConflict, v, 'thisislegit', neighborhood=nbhd)
 
 
 class TestThemeProvider(object):
+
     @patch('allura.model.notification.SiteNotification')
     @patch('pylons.response')
     @patch('pylons.request')
@@ -103,7 +107,8 @@ class TestThemeProvider(object):
         note.impressions = 2
         request.cookies = {'site-notification': 'deadbeef-1-false'}
         assert_is(ThemeProvider().get_site_notification(), note)
-        response.set_cookie.assert_called_once_with('site-notification', 'deadbeef-2-False', max_age=timedelta(days=365))
+        response.set_cookie.assert_called_once_with(
+            'site-notification', 'deadbeef-2-False', max_age=timedelta(days=365))
 
     @patch('allura.model.notification.SiteNotification')
     @patch('pylons.response')
@@ -124,7 +129,8 @@ class TestThemeProvider(object):
         note.impressions = 1
         request.cookies = {'site-notification': '0ddba11-1000-true'}
         assert_is(ThemeProvider().get_site_notification(), note)
-        response.set_cookie.assert_called_once_with('site-notification', 'deadbeef-1-False', max_age=timedelta(days=365))
+        response.set_cookie.assert_called_once_with(
+            'site-notification', 'deadbeef-1-False', max_age=timedelta(days=365))
 
     @patch('allura.model.notification.SiteNotification')
     @patch('pylons.response')
@@ -135,7 +141,8 @@ class TestThemeProvider(object):
         note.impressions = 0
         request.cookies = {}
         assert_is(ThemeProvider().get_site_notification(), note)
-        response.set_cookie.assert_called_once_with('site-notification', 'deadbeef-1-False', max_age=timedelta(days=365))
+        response.set_cookie.assert_called_once_with(
+            'site-notification', 'deadbeef-1-False', max_age=timedelta(days=365))
 
     @patch('allura.model.notification.SiteNotification')
     @patch('pylons.response')
@@ -146,4 +153,5 @@ class TestThemeProvider(object):
         note.impressions = 0
         request.cookies = {'site-notification': 'deadbeef-1000-true-bad'}
         assert_is(ThemeProvider().get_site_notification(), note)
-        response.set_cookie.assert_called_once_with('site-notification', 'deadbeef-1-False', max_age=timedelta(days=365))
+        response.set_cookie.assert_called_once_with(
+            'site-notification', 'deadbeef-1-False', max_age=timedelta(days=365))
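
The ThemeProvider tests above revolve around a 'site-notification' cookie of
the form '<note-id>-<view-count>-<flag>' (the flag presumably marking
dismissal): a cookie matching the current notification gets its view count
bumped, while a missing, stale, or unparsable cookie is reset to
'<id>-1-False'. A rough sketch of that bookkeeping (the checks that actually
hide a dismissed or over-shown notification are omitted):

    from datetime import timedelta

    def site_notification_cookie(note, cookie_value):
        try:
            note_id, views, dismissed = cookie_value.split('-')
            views, dismissed = int(views), dismissed == 'true'
        except (AttributeError, ValueError):
            note_id, views, dismissed = None, 0, False
        if note_id == str(note._id):
            views += 1                      # same notification seen again
        else:
            views, dismissed = 1, False     # new (or no) notification: reset
        new_value = '%s-%s-%s' % (note._id, views, dismissed)
        return new_value, timedelta(days=365)   # cookie value and max_age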

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_scripttask.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_scripttask.py b/Allura/allura/tests/test_scripttask.py
index d97f9ef..6273ee0 100644
--- a/Allura/allura/tests/test_scripttask.py
+++ b/Allura/allura/tests/test_scripttask.py
@@ -22,9 +22,11 @@ from allura.scripts.scripttask import ScriptTask
 
 
 class TestScriptTask(unittest.TestCase):
+
     def setUp(self):
         class TestScriptTask(ScriptTask):
             _parser = mock.Mock()
+
             @classmethod
             def parser(cls):
                 return cls._parser

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_security.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_security.py b/Allura/allura/tests/test_security.py
index 0acc5ca..1445e91 100644
--- a/Allura/allura/tests/test_security.py
+++ b/Allura/allura/tests/test_security.py
@@ -32,16 +32,19 @@ def _allow(obj, role, perm):
     ThreadLocalODMSession.flush_all()
     Credentials.get().clear()
 
+
 def _deny(obj, role, perm):
     obj.acl.insert(0, M.ACE.deny(role._id, perm))
     ThreadLocalODMSession.flush_all()
     Credentials.get().clear()
 
+
 def _add_to_group(user, role):
     M.ProjectRole.by_user(user, upsert=True).roles.append(role._id)
     ThreadLocalODMSession.flush_all()
     Credentials.get().clear()
 
+
 class TestSecurity(TestController):
 
     validate_skip = True
@@ -50,21 +53,28 @@ class TestSecurity(TestController):
     def test_anon(self):
         self.app.get('/security/*anonymous/forbidden', status=302)
         self.app.get('/security/*anonymous/needs_auth', status=302)
-        self.app.get('/security/*anonymous/needs_project_access_fail', status=302)
-        self.app.get('/security/*anonymous/needs_artifact_access_fail', status=302)
+        self.app.get('/security/*anonymous/needs_project_access_fail',
+                     status=302)
+        self.app.get(
+            '/security/*anonymous/needs_artifact_access_fail', status=302)
 
     @td.with_wiki
     def test_auth(self):
         self.app.get('/security/test-admin/forbidden', status=403)
         self.app.get('/security/test-admin/needs_auth', status=200)
-        self.app.get('/security/test-admin/needs_project_access_fail', status=403)
-        self.app.get('/security/test-admin/needs_project_access_ok', status=200)
+        self.app.get('/security/test-admin/needs_project_access_fail',
+                     status=403)
+        self.app.get('/security/test-admin/needs_project_access_ok',
+                     status=200)
         # This should fail b/c test-user doesn't have the permission
-        self.app.get('/security/test-user/needs_artifact_access_fail', extra_environ=dict(username='test-user'), status=403)
+        self.app.get('/security/test-user/needs_artifact_access_fail',
+                     extra_environ=dict(username='test-user'), status=403)
         # This should succeed b/c users with the 'admin' permission on a
         # project implicitly have all permissions to everything in the project
-        self.app.get('/security/test-admin/needs_artifact_access_fail', status=200)
-        self.app.get('/security/test-admin/needs_artifact_access_ok', status=200)
+        self.app.get(
+            '/security/test-admin/needs_artifact_access_fail', status=200)
+        self.app.get('/security/test-admin/needs_artifact_access_ok',
+                     status=200)
 
     @td.with_wiki
     def test_all_allowed(self):
@@ -77,21 +87,29 @@ class TestSecurity(TestController):
         anon_role = M.ProjectRole.by_name('*anonymous')
         test_user = M.User.by_username('test-user')
 
-        assert_equal(all_allowed(wiki, admin_role), set(['configure', 'read', 'create', 'edit', 'unmoderated_post', 'post', 'moderate', 'admin', 'delete']))
-        assert_equal(all_allowed(wiki, dev_role), set(['read', 'create', 'edit', 'unmoderated_post', 'post', 'moderate', 'delete']))
-        assert_equal(all_allowed(wiki, member_role), set(['read', 'create', 'edit', 'unmoderated_post', 'post']))
-        assert_equal(all_allowed(wiki, auth_role), set(['read', 'post', 'unmoderated_post']))
+        assert_equal(all_allowed(wiki, admin_role), set(
+            ['configure', 'read', 'create', 'edit', 'unmoderated_post', 'post', 'moderate', 'admin', 'delete']))
+        assert_equal(all_allowed(wiki, dev_role), set(
+            ['read', 'create', 'edit', 'unmoderated_post', 'post', 'moderate', 'delete']))
+        assert_equal(all_allowed(wiki, member_role),
+                     set(['read', 'create', 'edit', 'unmoderated_post', 'post']))
+        assert_equal(all_allowed(wiki, auth_role),
+                     set(['read', 'post', 'unmoderated_post']))
         assert_equal(all_allowed(wiki, anon_role), set(['read']))
-        assert_equal(all_allowed(wiki, test_user), set(['read', 'post', 'unmoderated_post']))
+        assert_equal(all_allowed(wiki, test_user),
+                     set(['read', 'post', 'unmoderated_post']))
 
         _add_to_group(test_user, member_role)
 
-        assert_equal(all_allowed(wiki, test_user), set(['read', 'create', 'edit', 'unmoderated_post', 'post']))
+        assert_equal(all_allowed(wiki, test_user),
+                     set(['read', 'create', 'edit', 'unmoderated_post', 'post']))
 
         _deny(wiki, auth_role, 'unmoderated_post')
 
-        assert_equal(all_allowed(wiki, member_role), set(['read', 'create', 'edit', 'post']))
-        assert_equal(all_allowed(wiki, test_user), set(['read', 'create', 'edit', 'post']))
+        assert_equal(all_allowed(wiki, member_role),
+                     set(['read', 'create', 'edit', 'post']))
+        assert_equal(all_allowed(wiki, test_user),
+                     set(['read', 'create', 'edit', 'post']))
 
     @td.with_wiki
     def test_deny_vs_allow(self):
@@ -104,7 +122,6 @@ class TestSecurity(TestController):
         auth_role = M.ProjectRole.by_name('*authenticated')
         test_user = M.User.by_username('test-user')
 
-
         # confirm that *anon has expected access
         assert has_access(page, 'read', anon_role)()
         assert has_access(page, 'post', anon_role)()
@@ -114,7 +131,8 @@ class TestSecurity(TestController):
         assert has_access(page, 'read', test_user)()
         assert has_access(page, 'post', test_user)()
         assert has_access(page, 'unmoderated_post', test_user)()
-        assert_equal(all_allowed(page, test_user), set(['read', 'post', 'unmoderated_post']))
+        assert_equal(all_allowed(page, test_user),
+                     set(['read', 'post', 'unmoderated_post']))
 
         _deny(page, auth_role, 'read')
 
@@ -126,11 +144,11 @@ class TestSecurity(TestController):
         # FIXME: all_allowed doesn't respect blocked user feature
         #assert_equal(all_allowed(page, test_user), set(['post', 'unmoderated_post']))
 
-
         assert has_access(wiki, 'read', test_user)()
         assert has_access(wiki, 'post', test_user)()
         assert has_access(wiki, 'unmoderated_post', test_user)()
-        assert_equal(all_allowed(wiki, test_user), set(['read', 'post', 'unmoderated_post']))
+        assert_equal(all_allowed(wiki, test_user),
+                     set(['read', 'post', 'unmoderated_post']))
 
         _deny(wiki, anon_role, 'read')
         _allow(wiki, auth_role, 'read')
@@ -169,6 +187,7 @@ class TestSecurity(TestController):
         wiki = c.project.app_instance('wiki')
         user = M.User.by_username('test-user')
         assert has_access(wiki, 'read', user)()
-        wiki.acl.append(M.ACE.deny(M.ProjectRole.by_user(user, upsert=True)._id, 'read', 'Spammer'))
+        wiki.acl.append(
+            M.ACE.deny(M.ProjectRole.by_user(user, upsert=True)._id, 'read', 'Spammer'))
         Credentials.get().clear()
         assert not has_access(wiki, 'read', user)()
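
The changes in this file are almost entirely line-wrapping: calls that ran past 79 columns are broken either with a visual indent (continuation arguments aligned under the first argument) or a hanging indent (the whole argument list pushed to the next line and indented one level). A tiny self-contained illustration of the two styles, using a throwaway helper rather than the test's own calls:

    def check_access(path, status, username=None):
        """Stand-in for the wrapped self.app.get(...) calls above."""
        return (path, status, username)

    # Visual indent: continuation lines align under the opening parenthesis.
    check_access('/security/test-admin/needs_project_access_fail',
                 403)

    # Hanging indent: nothing follows the "(" on the first line; the whole
    # argument list moves down one level.
    check_access(
        '/security/*anonymous/needs_artifact_access_fail', 302)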

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_tasks.py b/Allura/allura/tests/test_tasks.py
index d908252..101ed8a 100644
--- a/Allura/allura/tests/test_tasks.py
+++ b/Allura/allura/tests/test_tasks.py
@@ -53,6 +53,7 @@ from allura.lib.decorators import event_handler, task
 
 
 class TestRepoTasks(unittest.TestCase):
+
     @mock.patch('allura.tasks.repo_tasks.c.app')
     @mock.patch('allura.tasks.repo_tasks.g.post_event')
     def test_clone_posts_event_on_failure(self, post_event, app):
@@ -73,7 +74,7 @@ class TestEventTasks(unittest.TestCase):
 
     def test_fire_event(self):
         event_tasks.event('my_event', self, 1, 2, a=5)
-        assert self.called_with == [((1,2), {'a':5}) ], self.called_with
+        assert self.called_with == [((1, 2), {'a': 5})], self.called_with
 
     def test_compound_error(self):
         '''test_compound_exception -- make sure our multi-exception return works
@@ -83,7 +84,7 @@ class TestEventTasks(unittest.TestCase):
         setup_global_objects()
         t = raise_exc.post()
         with LogCapture(level=logging.ERROR) as l, \
-             mock.patch.dict(tg.config, {'monq.raise_errors': False}):  # match normal non-test behavior
+                mock.patch.dict(tg.config, {'monq.raise_errors': False}):  # match normal non-test behavior
             t()
         # l.check() would be nice, but string is too detailed to check
         assert_equal(l.records[0].name, 'allura.model.monq_model')
@@ -95,6 +96,7 @@ class TestEventTasks(unittest.TestCase):
         for x in range(10):
             assert ('assert %d' % x) in t.result
 
+
 class TestIndexTasks(unittest.TestCase):
 
     def setUp(self):
@@ -109,33 +111,36 @@ class TestIndexTasks(unittest.TestCase):
 
             old_shortlinks = M.Shortlink.query.find().count()
             old_solr_size = len(g.solr.db)
-            artifacts = [ _TestArtifact() for x in range(5) ]
+            artifacts = [_TestArtifact() for x in range(5)]
             for i, a in enumerate(artifacts):
                 a._shorthand_id = 't%d' % i
                 a.text = 'This is a reference to [t3]'
-            arefs = [ M.ArtifactReference.from_artifact(a) for a in artifacts ]
-            ref_ids = [ r._id for r in arefs ]
+            arefs = [M.ArtifactReference.from_artifact(a) for a in artifacts]
+            ref_ids = [r._id for r in arefs]
             M.artifact_orm_session.flush()
             index_tasks.add_artifacts(ref_ids)
             new_shortlinks = M.Shortlink.query.find().count()
             new_solr_size = len(g.solr.db)
-            assert old_shortlinks + 5 == new_shortlinks, 'Shortlinks not created'
-            assert old_solr_size + 5 == new_solr_size, "Solr additions didn't happen"
+            assert old_shortlinks + \
+                5 == new_shortlinks, 'Shortlinks not created'
+            assert old_solr_size + \
+                5 == new_solr_size, "Solr additions didn't happen"
             M.main_orm_session.flush()
             M.main_orm_session.clear()
             a = _TestArtifact.query.get(_shorthand_id='t3')
             assert len(a.backrefs) == 5, a.backrefs
             assert_equal(find_slinks.call_args_list,
-                    [mock.call(a.index().get('text')) for a in artifacts])
+                         [mock.call(a.index().get('text')) for a in artifacts])
 
     @td.with_wiki
     @mock.patch('allura.tasks.index_tasks.g.solr')
     def test_del_artifacts(self, solr):
         old_shortlinks = M.Shortlink.query.find().count()
-        artifacts = [ _TestArtifact(_shorthand_id='ta_%s' % x) for x in range(5) ]
+        artifacts = [_TestArtifact(_shorthand_id='ta_%s' % x)
+                     for x in range(5)]
         M.artifact_orm_session.flush()
-        arefs = [ M.ArtifactReference.from_artifact(a) for a in artifacts ]
-        ref_ids = [ r._id for r in arefs ]
+        arefs = [M.ArtifactReference.from_artifact(a) for a in artifacts]
+        ref_ids = [r._id for r in arefs]
         M.artifact_orm_session.flush()
         index_tasks.add_artifacts(ref_ids)
         M.main_orm_session.flush()
@@ -145,9 +150,9 @@ class TestIndexTasks(unittest.TestCase):
         assert solr.add.call_count == 1
         sort_key = operator.itemgetter('id')
         assert_equal(
-                sorted(solr.add.call_args[0][0], key=sort_key),
-                sorted([search.solarize(ref.artifact) for ref in arefs],
-                        key=sort_key))
+            sorted(solr.add.call_args[0][0], key=sort_key),
+            sorted([search.solarize(ref.artifact) for ref in arefs],
+                   key=sort_key))
         index_tasks.del_artifacts(ref_ids)
         M.main_orm_session.flush()
         M.main_orm_session.clear()
@@ -164,14 +169,15 @@ class TestMailTasks(unittest.TestCase):
         setup_global_objects()
 
     # these tests go down through the mail_util.SMTPClient.sendmail method
-    # since usage is generally through the task, and not using mail_util directly
+    # since usage is generally through the task, and not using mail_util
+    # directly
 
     def test_send_email_ascii_with_user_lookup(self):
         c.user = M.User.by_username('test-admin')
         with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
             mail_tasks.sendmail(
                 fromaddr=str(c.user._id),
-                destinations=[ str(c.user._id) ],
+                destinations=[str(c.user._id)],
                 text=u'This is a test',
                 reply_to=u'noreply@sf.net',
                 subject=u'Test subject',
@@ -187,13 +193,14 @@ class TestMailTasks(unittest.TestCase):
             # plain
             assert_in('This is a test', body)
             # html
-            assert_in('<div class="markdown_content"><p>This is a test</p></div>', body)
+            assert_in(
+                '<div class="markdown_content"><p>This is a test</p></div>', body)
 
     def test_send_email_nonascii(self):
         with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
             mail_tasks.sendmail(
                 fromaddr=u'"По" <fo...@bar.com>',
-                destinations=[ 'blah@blah.com' ],
+                destinations=['blah@blah.com'],
                 text=u'Громады стройные теснятся',
                 reply_to=u'noreply@sf.net',
                 subject=u'По оживлённым берегам',
@@ -207,12 +214,15 @@ class TestMailTasks(unittest.TestCase):
 
             # The address portion must not be encoded, only the name portion can be.
             # Also it is apparently not necessary to have the double-quote separators present
-            #   when the name portion is encoded.  That is, the encoding below is just По and not "По"
+            # when the name portion is encoded.  That is, the encoding below is
+            # just По and not "По"
             assert_in('From: =?utf-8?b?0J/Qvg==?= <fo...@bar.com>', body)
-            assert_in('Subject: =?utf-8?b?0J/QviDQvtC20LjQstC70ZHQvdC90YvQvCDQsdC10YDQtdCz0LDQvA==?=', body)
+            assert_in(
+                'Subject: =?utf-8?b?0J/QviDQvtC20LjQstC70ZHQvdC90YvQvCDQsdC10YDQtdCz0LDQvA==?=', body)
             assert_in('Content-Type: text/plain; charset="utf-8"', body)
             assert_in('Content-Transfer-Encoding: base64', body)
-            assert_in(b64encode(u'Громады стройные теснятся'.encode('utf-8')), body)
+            assert_in(
+                b64encode(u'Громады стройные теснятся'.encode('utf-8')), body)
 
     def test_send_email_with_disabled_user(self):
         c.user = M.User.by_username('test-admin')
@@ -223,7 +233,7 @@ class TestMailTasks(unittest.TestCase):
         with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
             mail_tasks.sendmail(
                 fromaddr=str(c.user._id),
-                destinations=[ str(destination_user._id) ],
+                destinations=[str(destination_user._id)],
                 text=u'This is a test',
                 reply_to=u'noreply@sf.net',
                 subject=u'Test subject',
@@ -242,7 +252,7 @@ class TestMailTasks(unittest.TestCase):
         with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
             mail_tasks.sendmail(
                 fromaddr=str(c.user._id),
-                destinations=[ str(destination_user._id) ],
+                destinations=[str(destination_user._id)],
                 text=u'This is a test',
                 reply_to=u'noreply@sf.net',
                 subject=u'Test subject',
@@ -299,7 +309,7 @@ class TestMailTasks(unittest.TestCase):
             _client.reset_mock()
             mail_tasks.sendmail(
                 fromaddr=str(c.user._id),
-                destinations=[ str(c.user._id) ],
+                destinations=[str(c.user._id)],
                 text=u'This is a test',
                 reply_to=u'123@tickets.test.p.sf.net',
                 subject=u'Test subject',
@@ -332,7 +342,7 @@ class TestMailTasks(unittest.TestCase):
             _client.reset_mock()
             mail_tasks.sendmail(
                 fromaddr=str(c.user._id),
-                destinations=[ str(c.user._id) ],
+                destinations=[str(c.user._id)],
                 text=u'This is a test',
                 reply_to=u'noreply@sf.net',
                 subject=u'Test subject',
@@ -409,10 +419,12 @@ class TestNotificationTasks(unittest.TestCase):
                 assert deliver.called_with('42', '52', 'none')
                 assert fire_ready.called_with()
 
+
 @event_handler('my_event')
 def _my_event(event_type, testcase, *args, **kwargs):
     testcase.called_with.append((args, kwargs))
 
+
 @task
 def raise_exc():
     errs = []
@@ -423,12 +435,17 @@ def raise_exc():
             errs.append(sys.exc_info())
     raise CompoundError(*errs)
 
+
 class _TestArtifact(M.Artifact):
     _shorthand_id = FieldProperty(str)
     text = FieldProperty(str)
-    def url(self): return ''
+
+    def url(self):
+        return ''
+
     def shorthand_id(self):
         return getattr(self, '_shorthand_id', self._id)
+
     def index(self):
         return dict(
             super(_TestArtifact, self).index(),
@@ -451,11 +468,13 @@ class TestExportTasks(unittest.TestCase):
         exportable = mock.Mock(exportable=True)
         not_exportable = mock.Mock(exportable=False)
         BE = export_tasks.BulkExport()
-        self.assertEqual(BE.filter_exportable([None, exportable, not_exportable]), [exportable])
+        self.assertEqual(
+            BE.filter_exportable([None, exportable, not_exportable]), [exportable])
 
     def test_bulk_export_filter_successful(self):
         BE = export_tasks.BulkExport()
-        self.assertEqual(BE.filter_successful(['foo', None, '0']), ['foo', '0'])
+        self.assertEqual(
+            BE.filter_successful(['foo', None, '0']), ['foo', '0'])
 
     def test_get_export_path(self):
         BE = export_tasks.BulkExport()
@@ -481,7 +500,8 @@ class TestExportTasks(unittest.TestCase):
         tasks = M.MonQTask.query.find(
             dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
         assert_equal(len(tasks), 1)
-        assert_equal(tasks[0].kwargs['subject'], 'Bulk export for project test completed')
+        assert_equal(tasks[0].kwargs['subject'],
+                     'Bulk export for project test completed')
         assert_equal(tasks[0].kwargs['fromaddr'], 'noreply@sourceforge.net')
         assert_equal(tasks[0].kwargs['reply_to'], 'noreply@sourceforge.net')
         text = tasks[0].kwargs['text']
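
One re-wrap in this file is worth flagging: breaking `old_shortlinks + 5 == new_shortlinks` with a backslash right after the `+` is valid but easy to misread. When such an assert has to wrap, parentheses around the comparison keep the grouping obvious. A small runnable comparison, reusing the counter names purely for illustration:

    old_shortlinks, new_shortlinks = 10, 15

    # Backslash continuation, as produced by the mechanical re-wrap above.
    assert old_shortlinks + \
        5 == new_shortlinks, 'Shortlinks not created'

    # Equivalent, but the parentheses make the intended grouping explicit
    # and survive re-indentation without a stray backslash.
    assert (old_shortlinks + 5 == new_shortlinks), 'Shortlinks not created'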

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_utils.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_utils.py b/Allura/allura/tests/test_utils.py
index b8b5de7..4812cb7 100644
--- a/Allura/allura/tests/test_utils.py
+++ b/Allura/allura/tests/test_utils.py
@@ -36,6 +36,7 @@ from allura.lib import utils
 
 @patch.dict('allura.lib.utils.tg.config', clear=True, foo='bar', baz='true')
 class TestConfigProxy(unittest.TestCase):
+
     def setUp(self):
         self.cp = utils.ConfigProxy(mybaz="baz")
 
@@ -68,7 +69,8 @@ class TestChunkedIterator(unittest.TestCase):
         assert len(chunks[0]) == 2, chunks[0]
 
     def test_filter_on_sort_key(self):
-        query = {'username': {'$in': ['sample-user-1', 'sample-user-2', 'sample-user-3']}}
+        query = {'username':
+                 {'$in': ['sample-user-1', 'sample-user-2', 'sample-user-3']}}
         chunks = list(utils.chunked_find(M.User,
                                          query,
                                          2,
@@ -82,6 +84,7 @@ class TestChunkedIterator(unittest.TestCase):
 
 
 class TestChunkedList(unittest.TestCase):
+
     def test_chunked_list(self):
         l = range(10)
         chunks = list(utils.chunked_list(l, 3))
@@ -116,7 +119,7 @@ class TestAntispam(unittest.TestCase):
         self.assertRaises(
             ValueError,
             utils.AntiSpam.validate_request,
-            r, now=time.time()+24*60*60+1)
+            r, now=time.time() + 24 * 60 * 60 + 1)
 
     def test_invalid_future(self):
         form = dict(a='1', b='2')
@@ -124,7 +127,7 @@ class TestAntispam(unittest.TestCase):
         self.assertRaises(
             ValueError,
             utils.AntiSpam.validate_request,
-            r, now=time.time()-10)
+            r, now=time.time() - 10)
 
     def test_invalid_spinner(self):
         form = dict(a='1', b='2')
@@ -141,14 +144,16 @@ class TestAntispam(unittest.TestCase):
 
     def _encrypt_form(self, **kwargs):
         encrypted_form = dict(
-            (self.a.enc(k), v) for k,v in kwargs.items())
+            (self.a.enc(k), v) for k, v in kwargs.items())
         encrypted_form.setdefault(self.a.enc('honey0'), '')
         encrypted_form.setdefault(self.a.enc('honey1'), '')
         encrypted_form['spinner'] = self.a.spinner_text
         encrypted_form['timestamp'] = self.a.timestamp_text
         return encrypted_form
 
+
 class TestTruthyCallable(unittest.TestCase):
+
     def test_everything(self):
         def wrapper_func(bool_flag):
             def predicate(bool_flag=bool_flag):
@@ -184,6 +189,7 @@ class TestCaseInsensitiveDict(unittest.TestCase):
 
 
 class TestLineAnchorCodeHtmlFormatter(unittest.TestCase):
+
     def test_render(self):
         code = '#!/usr/bin/env python\n'\
                'print "Hello, world!"'
@@ -198,6 +204,7 @@ class TestLineAnchorCodeHtmlFormatter(unittest.TestCase):
 
 
 class TestIsTextFile(unittest.TestCase):
+
     def test_is_text_file(self):
         here_dir = path.dirname(__file__)
         assert utils.is_text_file(open(path.join(
@@ -216,7 +223,7 @@ class TestCodeStats(unittest.TestCase):
     def test_generate_code_stats(self):
         blob = Mock()
         blob.text = \
-"""class Person(object):
+            """class Person(object):
 
     def __init__(self, name='Alice'):
         self.name = name
@@ -231,6 +238,7 @@ class TestCodeStats(unittest.TestCase):
         assert stats['data_line_count'] == 5
         assert stats['code_size'] == len(blob.text)
 
+
 class TestHTMLSanitizer(unittest.TestCase):
 
     def test_html_sanitizer_iframe(self):
@@ -240,5 +248,7 @@ class TestHTMLSanitizer(unittest.TestCase):
 
     def test_html_sanitizer_youtube_iframe(self):
         p = utils.ForgeHTMLSanitizer('utf-8', '')
-        p.feed('<div><iframe src="http://www.youtube.com/embed/kOLpSPEA72U?feature=oembed"></iframe></div>')
-        assert_equal(p.output(),'<div><iframe src="http://www.youtube.com/embed/kOLpSPEA72U?feature=oembed"></iframe></div>')
+        p.feed(
+            '<div><iframe src="http://www.youtube.com/embed/kOLpSPEA72U?feature=oembed"></iframe></div>')
+        assert_equal(
+            p.output(), '<div><iframe src="http://www.youtube.com/embed/kOLpSPEA72U?feature=oembed"></iframe></div>')
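
The `now=time.time() + 24 * 60 * 60 + 1` and `now=time.time() - 10` arguments in the AntiSpam tests encode the form's validity window: judging from those values, a submission is rejected if it is validated more than about a day after the form was generated, or at a time before the form existed at all. A minimal sketch of that kind of timestamp check, assuming a one-day window; this is an illustration of the idea, not Allura's AntiSpam.validate_request:

    import time

    ONE_DAY = 24 * 60 * 60  # seconds

    def timestamp_ok(created, now=None, window=ONE_DAY):
        """Accept a form timestamp only inside [created, created + window]."""
        now = time.time() if now is None else now
        return created <= now <= created + window

    created = time.time()
    assert timestamp_ok(created, now=created + 10)               # fresh: ok
    assert not timestamp_ok(created, now=created + ONE_DAY + 1)  # too old
    assert not timestamp_ok(created, now=created - 10)           # from the future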

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_validators.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_validators.py b/Allura/allura/tests/test_validators.py
index f3ba1ee..2e6bb02 100644
--- a/Allura/allura/tests/test_validators.py
+++ b/Allura/allura/tests/test_validators.py
@@ -50,6 +50,7 @@ class TestJsonFile(unittest.TestCase):
     val = v.JsonFile
 
     class FieldStorage(object):
+
         def __init__(self, content):
             self.value = content
 
@@ -65,6 +66,7 @@ class TestUserMapFile(unittest.TestCase):
     val = v.UserMapJsonFile()
 
     class FieldStorage(object):
+
         def __init__(self, content):
             self.value = content
 
@@ -86,7 +88,8 @@ class TestUserValidator(unittest.TestCase):
     val = v.UserValidator
 
     def test_valid(self):
-        self.assertEqual(M.User.by_username('root'), self.val.to_python('root'))
+        self.assertEqual(M.User.by_username('root'),
+                         self.val.to_python('root'))
 
     def test_invalid(self):
         with self.assertRaises(fe.Invalid) as cm:
@@ -158,7 +161,8 @@ class TestTaskValidator(unittest.TestCase):
     val = v.TaskValidator
 
     def test_valid(self):
-        self.assertEqual(dummy_task, self.val.to_python('allura.tests.test_validators.dummy_task'))
+        self.assertEqual(
+            dummy_task, self.val.to_python('allura.tests.test_validators.dummy_task'))
 
     def test_invalid_name(self):
         with self.assertRaises(fe.Invalid) as cm:
@@ -168,7 +172,8 @@ class TestTaskValidator(unittest.TestCase):
     def test_import_failure(self):
         with self.assertRaises(fe.Invalid) as cm:
             self.val.to_python('allura.does.not.exist')
-        self.assertEqual(str(cm.exception), 'Could not import "allura.does.not.exist"')
+        self.assertEqual(str(cm.exception),
+                         'Could not import "allura.does.not.exist"')
 
     def test_attr_lookup_failure(self):
         with self.assertRaises(fe.Invalid) as cm:
@@ -178,7 +183,8 @@ class TestTaskValidator(unittest.TestCase):
     def test_not_a_task(self):
         with self.assertRaises(fe.Invalid) as cm:
             self.val.to_python('allura.tests.test_validators.setUp')
-        self.assertEqual(str(cm.exception), '"allura.tests.test_validators.setUp" is not a task.')
+        self.assertEqual(str(cm.exception),
+                         '"allura.tests.test_validators.setUp" is not a task.')
 
 
 class TestPathValidator(unittest.TestCase):
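
The TaskValidator tests pin down the failure modes of resolving a dotted path: the module cannot be imported, the attribute is missing, or the object found is not a task. A minimal dotted-path resolver covering the first two cases, shown only to make the expected behaviour concrete; the helper and its messages are illustrative, the real validator lives in allura.lib.validators:

    from importlib import import_module

    def resolve_dotted(path):
        """Split 'pkg.module.attr', import the module, return the attribute."""
        module_name, _, attr = path.rpartition('.')
        try:
            module = import_module(module_name)
        except ImportError:
            raise ValueError('Could not import "%s"' % path)
        try:
            return getattr(module, attr)
        except AttributeError:
            raise ValueError('Module has no attribute "%s"' % attr)

    print(resolve_dotted('os.path.join'))  # resolves to the stdlib function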

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_zarkov_helpers.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_zarkov_helpers.py b/Allura/allura/tests/test_zarkov_helpers.py
index 125aac0..36a14d4 100644
--- a/Allura/allura/tests/test_zarkov_helpers.py
+++ b/Allura/allura/tests/test_zarkov_helpers.py
@@ -26,6 +26,7 @@ import mock
 
 from allura.lib import zarkov_helpers as zh
 
+
 class TestZarkovClient(unittest.TestCase):
 
     def setUp(self):
@@ -33,9 +34,9 @@ class TestZarkovClient(unittest.TestCase):
         ctx = mock.Mock()
         self.socket = mock.Mock()
         ctx.socket = mock.Mock(return_value=self.socket)
-        PUSH=mock.Mock()
+        PUSH = mock.Mock()
         with mock.patch('allura.lib.zarkov_helpers.zmq') as zmq:
-            zmq.PUSH=PUSH
+            zmq.PUSH = PUSH
             zmq.Context.instance.return_value = ctx
             self.client = zh.ZarkovClient(addr)
         zmq.Context.instance.assert_called_once_with()
@@ -45,11 +46,12 @@ class TestZarkovClient(unittest.TestCase):
     def test_event(self):
         self.client.event('test', dict(user='testuser'))
         obj = bson.BSON.encode(dict(
-                type='test',
-                context=dict(user='testuser'),
-                extra=None))
+            type='test',
+            context=dict(user='testuser'),
+            extra=None))
         self.socket.send.assert_called_once_with(obj)
 
+
 class TestZeroFill(unittest.TestCase):
 
     def setUp(self):
@@ -61,11 +63,11 @@ class TestZeroFill(unittest.TestCase):
         self.ts_ms_end = ts_end * 1000.0
         self.zarkov_data = dict(
             a=dict(
-                a1=[ (self.ts_ms_begin, 1000), (self.ts_ms_end, 1000) ],
-                a2=[ (self.ts_ms_begin, 1000), (self.ts_ms_end, 1000) ] ),
+                a1=[(self.ts_ms_begin, 1000), (self.ts_ms_end, 1000)],
+                a2=[(self.ts_ms_begin, 1000), (self.ts_ms_end, 1000)]),
             b=dict(
-                b1=[ (self.ts_ms_begin, 2000), (self.ts_ms_end, 2000) ],
-                b2=[ (self.ts_ms_begin, 2000), (self.ts_ms_end, 2000) ] ))
+                b1=[(self.ts_ms_begin, 2000), (self.ts_ms_end, 2000)],
+                b2=[(self.ts_ms_begin, 2000), (self.ts_ms_end, 2000)]))
 
     def test_to_utc_timestamp(self):
         self.assertEqual(
@@ -86,9 +88,9 @@ class TestZeroFill(unittest.TestCase):
         self.assertEqual(result[-3][1], 1000)
         days_ms = 24 * 3600 * 1000
         min_delta = 28 * days_ms
-        max_delta= 31 * days_ms
+        max_delta = 31 * days_ms
         for p1, p2 in zip(result, result[1:]):
-            delta = p2[0]-p1[0]
+            delta = p2[0] - p1[0]
             assert min_delta <= delta <= max_delta, delta
 
     def test_zero_fill_time_series_date(self):
@@ -98,7 +100,7 @@ class TestZeroFill(unittest.TestCase):
         self.assertEqual(len(result), 489)
         days_ms = 24 * 3600 * 1000
         for p1, p2 in zip(result, result[1:]):
-            delta = p2[0]-p1[0]
+            delta = p2[0] - p1[0]
             assert delta == days_ms
 
     def test_zero_fill_zarkov_month_dt(self):
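
The zero-fill assertions rest on simple millisecond arithmetic: Zarkov timestamps are in ms, so one day is 24 * 3600 * 1000 = 86,400,000 ms, and a "monthly" gap must fall between 28 and 31 of those. A toy daily zero-fill that satisfies the same invariants the tests check; a sketch of the idea only, not zarkov_helpers' implementation:

    DAY_MS = 24 * 3600 * 1000  # 86,400,000 ms per day

    def zero_fill_daily(points, begin_ms, end_ms):
        """Return one (timestamp, value) pair per day; missing days become 0."""
        known = dict(points)
        filled, ts = [], begin_ms
        while ts <= end_ms:
            filled.append((ts, known.get(ts, 0)))
            ts += DAY_MS
        return filled

    series = zero_fill_daily([(0, 1000), (3 * DAY_MS, 1000)], 0, 3 * DAY_MS)
    assert [v for _, v in series] == [1000, 0, 0, 1000]
    assert all(b[0] - a[0] == DAY_MS for a, b in zip(series, series[1:]))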

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/__init__.py b/Allura/allura/tests/unit/__init__.py
index 9e9e73d..27eb1e5 100644
--- a/Allura/allura/tests/unit/__init__.py
+++ b/Allura/allura/tests/unit/__init__.py
@@ -37,6 +37,7 @@ class MockPatchTestCase(object):
 
 
 class WithDatabase(MockPatchTestCase):
+
     def setUp(self):
         super(WithDatabase, self).setUp()
         clear_all_database_tables()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/controllers/test_discussion_moderation_controller.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/controllers/test_discussion_moderation_controller.py b/Allura/allura/tests/unit/controllers/test_discussion_moderation_controller.py
index 520f691..e56f2bc 100644
--- a/Allura/allura/tests/unit/controllers/test_discussion_moderation_controller.py
+++ b/Allura/allura/tests/unit/controllers/test_discussion_moderation_controller.py
@@ -36,7 +36,7 @@ class TestWhenModerating(WithDatabase):
         super(TestWhenModerating, self).setUp()
         post = create_post('mypost')
         discussion_controller = Mock(
-            discussion = Mock(_id=post.discussion_id),
+            discussion=Mock(_id=post.discussion_id),
         )
         self.controller = ModerationController(discussion_controller)
 
@@ -45,7 +45,8 @@ class TestWhenModerating(WithDatabase):
         self.moderate_post(approve=True)
         post = self.get_post()
         assert_equal(post.status, 'ok')
-        assert_equal(post.thread.last_post_date.strftime("%Y-%m-%d %H:%M:%S"), mod_date.strftime("%Y-%m-%d %H:%M:%S"))
+        assert_equal(post.thread.last_post_date.strftime("%Y-%m-%d %H:%M:%S"),
+                     mod_date.strftime("%Y-%m-%d %H:%M:%S"))
 
     def test_that_it_can_mark_as_spam(self):
         self.moderate_post(spam=True)
@@ -56,8 +57,9 @@ class TestWhenModerating(WithDatabase):
         assert_equal(self.get_post(), None)
 
     def moderate_post(self, **kwargs):
-        self.controller.save_moderation(post=[dict(checked=True, _id=self.get_post()._id)],
-                                 **kwargs)
+        self.controller.save_moderation(
+            post=[dict(checked=True, _id=self.get_post()._id)],
+            **kwargs)
         ThreadLocalORMSession.flush_all()
 
     def get_post(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/factories.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/factories.py b/Allura/allura/tests/unit/factories.py
index 20c33d1..6acb54f 100644
--- a/Allura/allura/tests/unit/factories.py
+++ b/Allura/allura/tests/unit/factories.py
@@ -66,7 +66,7 @@ def create_post(slug):
     author = create_user(username='someguy')
     return Post(slug=slug,
                 thread_id=thread._id,
-                full_slug='%s:%s' % (thread._id,slug),
+                full_slug='%s:%s' % (thread._id, slug),
                 discussion_id=discussion._id,
                 author_id=author._id)
 
@@ -84,4 +84,3 @@ def create_discussion():
 @flush_on_return
 def create_user(**kw):
     return User(**kw)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/patches.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/patches.py b/Allura/allura/tests/unit/patches.py
index 128d406..41cb403 100644
--- a/Allura/allura/tests/unit/patches.py
+++ b/Allura/allura/tests/unit/patches.py
@@ -50,4 +50,3 @@ def fake_redirect_patch(test_case):
 
 def fake_request_patch(test_case):
     return patch('allura.controllers.discuss.request')
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/spam/test_akismet.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/spam/test_akismet.py b/Allura/allura/tests/unit/spam/test_akismet.py
index 571eb92..1b7215a 100644
--- a/Allura/allura/tests/unit/spam/test_akismet.py
+++ b/Allura/allura/tests/unit/spam/test_akismet.py
@@ -27,9 +27,11 @@ from allura.lib.spam.akismetfilter import AKISMET_AVAILABLE, AkismetSpamFilter
 
 @unittest.skipIf(not AKISMET_AVAILABLE, "Akismet not available")
 class TestAkismet(unittest.TestCase):
+
     @mock.patch('allura.lib.spam.akismetfilter.akismet')
     def setUp(self, akismet_lib):
         self.akismet = AkismetSpamFilter({})
+
         def side_effect(*args, **kw):
             # side effect to test that data being sent to
             # akismet can be successfully urlencoded
@@ -37,7 +39,7 @@ class TestAkismet(unittest.TestCase):
         self.akismet.service.comment_check = mock.Mock(side_effect=side_effect)
         self.fake_artifact = mock.Mock(**{'url.return_value': 'artifact url'})
         self.fake_user = mock.Mock(display_name=u'Søme User',
-                email_addresses=['user@domain'])
+                                   email_addresses=['user@domain'])
         self.fake_headers = dict(
             REMOTE_ADDR='fallback ip',
             X_FORWARDED_FOR='some ip',
@@ -56,10 +58,11 @@ class TestAkismet(unittest.TestCase):
     def test_check(self, request, c):
         request.headers = self.fake_headers
         c.user = None
-        self.akismet.service.comment_check.side_effect({'side_effect':''})
+        self.akismet.service.comment_check.side_effect({'side_effect': ''})
         self.akismet.check(self.content)
-        self.akismet.service.comment_check.assert_called_once_with(self.content,
-                data=self.expected_data, build_data=False)
+        self.akismet.service.comment_check.assert_called_once_with(
+            self.content,
+            data=self.expected_data, build_data=False)
 
     @mock.patch('allura.lib.spam.akismetfilter.c')
     @mock.patch('allura.lib.spam.akismetfilter.request')
@@ -68,8 +71,9 @@ class TestAkismet(unittest.TestCase):
         c.user = None
         self.akismet.check(self.content, content_type='some content type')
         self.expected_data['comment_type'] = 'some content type'
-        self.akismet.service.comment_check.assert_called_once_with(self.content,
-                data=self.expected_data, build_data=False)
+        self.akismet.service.comment_check.assert_called_once_with(
+            self.content,
+            data=self.expected_data, build_data=False)
 
     @mock.patch('allura.lib.spam.akismetfilter.c')
     @mock.patch('allura.lib.spam.akismetfilter.request')
@@ -79,8 +83,9 @@ class TestAkismet(unittest.TestCase):
         self.akismet.check(self.content, artifact=self.fake_artifact)
         expected_data = self.expected_data
         expected_data['permalink'] = 'artifact url'
-        self.akismet.service.comment_check.assert_called_once_with(self.content,
-                data=expected_data, build_data=False)
+        self.akismet.service.comment_check.assert_called_once_with(
+            self.content,
+            data=expected_data, build_data=False)
 
     @mock.patch('allura.lib.spam.akismetfilter.c')
     @mock.patch('allura.lib.spam.akismetfilter.request')
@@ -90,9 +95,10 @@ class TestAkismet(unittest.TestCase):
         self.akismet.check(self.content, user=self.fake_user)
         expected_data = self.expected_data
         expected_data.update(comment_author=u'Søme User'.encode('utf8'),
-                comment_author_email='user@domain')
-        self.akismet.service.comment_check.assert_called_once_with(self.content,
-                data=expected_data, build_data=False)
+                             comment_author_email='user@domain')
+        self.akismet.service.comment_check.assert_called_once_with(
+            self.content,
+            data=expected_data, build_data=False)
 
     @mock.patch('allura.lib.spam.akismetfilter.c')
     @mock.patch('allura.lib.spam.akismetfilter.request')
@@ -102,9 +108,10 @@ class TestAkismet(unittest.TestCase):
         self.akismet.check(self.content)
         expected_data = self.expected_data
         expected_data.update(comment_author=u'Søme User'.encode('utf8'),
-                comment_author_email='user@domain')
-        self.akismet.service.comment_check.assert_called_once_with(self.content,
-                data=expected_data, build_data=False)
+                             comment_author_email='user@domain')
+        self.akismet.service.comment_check.assert_called_once_with(
+            self.content,
+            data=expected_data, build_data=False)
 
     @mock.patch('allura.lib.spam.akismetfilter.c')
     @mock.patch('allura.lib.spam.akismetfilter.request')
@@ -115,8 +122,9 @@ class TestAkismet(unittest.TestCase):
         request.remote_addr = self.fake_headers['REMOTE_ADDR']
         c.user = None
         self.akismet.check(self.content)
-        self.akismet.service.comment_check.assert_called_once_with(self.content,
-                data=self.expected_data, build_data=False)
+        self.akismet.service.comment_check.assert_called_once_with(
+            self.content,
+            data=self.expected_data, build_data=False)
 
     @mock.patch('allura.lib.spam.akismetfilter.c')
     @mock.patch('allura.lib.spam.akismetfilter.request')
@@ -124,7 +132,8 @@ class TestAkismet(unittest.TestCase):
         request.headers = self.fake_headers
         c.user = None
         self.akismet.submit_spam(self.content)
-        self.akismet.service.submit_spam.assert_called_once_with(self.content, data=self.expected_data, build_data=False)
+        self.akismet.service.submit_spam.assert_called_once_with(
+            self.content, data=self.expected_data, build_data=False)
 
     @mock.patch('allura.lib.spam.akismetfilter.c')
     @mock.patch('allura.lib.spam.akismetfilter.request')
@@ -132,4 +141,5 @@ class TestAkismet(unittest.TestCase):
         request.headers = self.fake_headers
         c.user = None
         self.akismet.submit_ham(self.content)
-        self.akismet.service.submit_ham.assert_called_once_with(self.content, data=self.expected_data, build_data=False)
+        self.akismet.service.submit_ham.assert_called_once_with(
+            self.content, data=self.expected_data, build_data=False)
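
All of these Akismet tests converge on the same call shape: check(), submit_spam() and submit_ham() pass the raw text plus a data dict built from the request headers, the acting user and, optionally, the artifact, with build_data=False so the library sends exactly what it is given. A rough sketch of assembling such a payload; the field names follow the standard Akismet API and the test's expected values, but the helper itself is illustrative, not AkismetSpamFilter's code:

    def spam_payload(headers, user=None, artifact=None, content_type='comment'):
        """Collect the metadata Akismet wants alongside the text being checked."""
        data = {
            'user_ip': headers.get('X_FORWARDED_FOR', headers.get('REMOTE_ADDR')),
            'user_agent': headers.get('USER_AGENT'),
            'referrer': headers.get('REFERER'),
            'comment_type': content_type,
        }
        if user is not None:
            data['comment_author'] = user.display_name.encode('utf-8')
            data['comment_author_email'] = user.email_addresses[0]
        if artifact is not None:
            data['permalink'] = artifact.url()
        return data

    class _User(object):
        display_name = u'Søme User'
        email_addresses = ['user@domain']

    print(spam_payload({'REMOTE_ADDR': 'fallback ip'}, user=_User()))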


[14/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeActivity/forgeactivity/tests/functional/test_root.py
----------------------------------------------------------------------
diff --git a/ForgeActivity/forgeactivity/tests/functional/test_root.py b/ForgeActivity/forgeactivity/tests/functional/test_root.py
index eda71be..cafdfe4 100644
--- a/ForgeActivity/forgeactivity/tests/functional/test_root.py
+++ b/ForgeActivity/forgeactivity/tests/functional/test_root.py
@@ -29,6 +29,7 @@ from allura.tests import decorators as td
 
 
 class TestActivityController(TestController):
+
     def setUp(self, *args, **kwargs):
         super(TestActivityController, self).setUp(*args, **kwargs)
         self._enabled = config.get('activitystream.enabled', 'false')
@@ -58,37 +59,37 @@ class TestActivityController(TestController):
         from activitystream.storage.base import StoredActivity
         from bson import ObjectId
         director.get_timeline.return_value = [StoredActivity(**{
-            "_id" : ObjectId("529fa331033c5e6406d8b338"),
-            "obj" : {
-                    "activity_extras" : {
-                            "allura_id" : "Post:971389ad979eaafa658beb807bf4629d30f5f642.tickets@test.p.sourceforge.net",
-                            "summary" : "Just wanted to leave a comment on this..."
-                    },
-                    "activity_url" : "/p/test/tickets/_discuss/thread/08e74efd/ed7c/",
-                    "activity_name" : "a comment"
+            "_id": ObjectId("529fa331033c5e6406d8b338"),
+            "obj": {
+                "activity_extras": {
+                    "allura_id": "Post:971389ad979eaafa658beb807bf4629d30f5f642.tickets@test.p.sourceforge.net",
+                    "summary": "Just wanted to leave a comment on this..."
+                },
+                "activity_url": "/p/test/tickets/_discuss/thread/08e74efd/ed7c/",
+                "activity_name": "a comment"
             },
-            "target" : {
-                    "activity_extras" : {
-                            "allura_id" : "Ticket:529f57a6033c5e5985db2efa",
-                            "summary" : "Make activitystream timeline look better"
-                    },
-                    "activity_url" : "/p/test/tickets/34/",
-                    "activity_name" : "ticket #34"
+            "target": {
+                "activity_extras": {
+                    "allura_id": "Ticket:529f57a6033c5e5985db2efa",
+                    "summary": "Make activitystream timeline look better"
+                },
+                "activity_url": "/p/test/tickets/34/",
+                "activity_name": "ticket #34"
             },
-            "actor" : {
-                    "activity_extras" : {
-                            "icon_url" : "/u/test-admin/user_icon",
-                            "allura_id" : "User:521f96cb033c5e2587adbdff"
-                    },
-                    "activity_url" : "/u/test-admin/",
-                    "activity_name" : "Administrator 1",
-                    "node_id" : "User:521f96cb033c5e2587adbdff"
+            "actor": {
+                "activity_extras": {
+                    "icon_url": "/u/test-admin/user_icon",
+                    "allura_id": "User:521f96cb033c5e2587adbdff"
+                },
+                "activity_url": "/u/test-admin/",
+                "activity_name": "Administrator 1",
+                "node_id": "User:521f96cb033c5e2587adbdff"
             },
-            "verb" : "posted",
-            "published" : dateutil.parser.parse("2013-12-04T21:48:19.817"),
-            "score" : 1386193699,
-            "node_id" : "Project:527a6584033c5e62126f5a60",
-            "owner_id" : "Project:527a6584033c5e62126f5a60"
+            "verb": "posted",
+            "published": dateutil.parser.parse("2013-12-04T21:48:19.817"),
+            "score": 1386193699,
+            "node_id": "Project:527a6584033c5e62126f5a60",
+            "owner_id": "Project:527a6584033c5e62126f5a60"
         })]
         r = self.app.get('/p/test/activity/')
         timeline = r.html.find('ul', 'timeline')
@@ -156,7 +157,7 @@ class TestActivityController(TestController):
     @td.with_user_project('test-user-1')
     def test_background_aggregation(self):
         self.app.get('/u/test-admin/activity/follow?follow=True',
-                extra_environ=dict(username='test-user-1'))
+                     extra_environ=dict(username='test-user-1'))
         # new ticket, creates activity
         d = {'ticket_form.summary': 'New Ticket'}
         self.app.post('/bugs/save_ticket', params=d)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeActivity/forgeactivity/widgets/follow.py
----------------------------------------------------------------------
diff --git a/ForgeActivity/forgeactivity/widgets/follow.py b/ForgeActivity/forgeactivity/widgets/follow.py
index ce05ef7..f5a15d7 100644
--- a/ForgeActivity/forgeactivity/widgets/follow.py
+++ b/ForgeActivity/forgeactivity/widgets/follow.py
@@ -22,8 +22,8 @@ import ew.jinja2_ew as ew
 
 
 class FollowToggle(ew.SimpleForm):
-    template='jinja:forgeactivity:templates/widgets/follow.html'
-    defaults=dict(
+    template = 'jinja:forgeactivity:templates/widgets/follow.html'
+    defaults = dict(
         ew.SimpleForm.defaults,
         thing='project',
         action='follow',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeActivity/setup.py
----------------------------------------------------------------------
diff --git a/ForgeActivity/setup.py b/ForgeActivity/setup.py
index bbc5372..45c546e 100644
--- a/ForgeActivity/setup.py
+++ b/ForgeActivity/setup.py
@@ -22,7 +22,8 @@ setup(name='ForgeActivity',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/command/base.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/command/base.py b/ForgeBlog/forgeblog/command/base.py
index c7bf329..8e67cc9 100644
--- a/ForgeBlog/forgeblog/command/base.py
+++ b/ForgeBlog/forgeblog/command/base.py
@@ -17,5 +17,6 @@
 
 from allura.command.base import Command
 
+
 class BlogCommand(Command):
     group_name = 'ForgeBlog'

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/command/rssfeeds.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/command/rssfeeds.py b/ForgeBlog/forgeblog/command/rssfeeds.py
index eeac4b2..ce02487 100644
--- a/ForgeBlog/forgeblog/command/rssfeeds.py
+++ b/ForgeBlog/forgeblog/command/rssfeeds.py
@@ -36,8 +36,8 @@ from allura.lib import exceptions
 from allura.lib.helpers import exceptionless
 from allura.lib.helpers import plain2markdown
 
-## Everything in this file depends on html2text,
-## so import attempt is placed in global scope.
+# Everything in this file depends on html2text,
+# so import attempt is placed in global scope.
 try:
     import html2text
 except ImportError:
@@ -46,6 +46,7 @@ except ImportError:
 
 html2text.BODY_WIDTH = 0
 
+
 class RssFeedsCommand(base.BlogCommand):
     summary = 'Rss feed client'
     parser = base.BlogCommand.standard_parser(verbose=True)
@@ -60,12 +61,15 @@ class RssFeedsCommand(base.BlogCommand):
         # activity, User.url() will be called. This method defers to an
         # AuthenticationProvider, which depends on a request being setup in
         # the current thread. So, we set one up here.
-        import pylons, webob
+        import pylons
+        import webob
         pylons.request._push_object(webob.Request.blank('/'))
 
         self.basic_setup()
-        self.process_feed = exceptionless(None, log=allura_base.log)(self.process_feed)
-        self.process_entry = exceptionless(None, log=allura_base.log)(self.process_entry)
+        self.process_feed = exceptionless(
+            None, log=allura_base.log)(self.process_feed)
+        self.process_entry = exceptionless(
+            None, log=allura_base.log)(self.process_entry)
 
         user = M.User.query.get(username=self.options.username)
         c.user = user
@@ -78,10 +82,11 @@ class RssFeedsCommand(base.BlogCommand):
     def prepare_feeds(self):
         feed_dict = {}
         if self.options.appid != '':
-            gl_app = BM.Globals.query.get(app_config_id=ObjectId(self.options.appid))
+            gl_app = BM.Globals.query.get(
+                app_config_id=ObjectId(self.options.appid))
             if not gl_app:
-                raise exceptions.NoSuchGlobalsError("The globals %s " \
-                     "could not be found in the database" % self.options.appid)
+                raise exceptions.NoSuchGlobalsError("The globals %s "
+                                                    "could not be found in the database" % self.options.appid)
             if len(gl_app.external_feeds) > 0:
                 feed_dict[gl_app.app_config_id] = gl_app.external_feeds
         else:
@@ -111,7 +116,8 @@ class RssFeedsCommand(base.BlogCommand):
     def process_entry(self, e, appid):
         title = e.title
         allura_base.log.info(" ...entry '%s'", title)
-        parsed_content = filter(None, e.get('content') or [e.get('summary_detail')])
+        parsed_content = filter(
+            None, e.get('content') or [e.get('summary_detail')])
         if parsed_content:
             content = u''
             for ct in parsed_content:
@@ -124,18 +130,19 @@ class RssFeedsCommand(base.BlogCommand):
                     content += markdown_content
         else:
             content = plain2markdown(getattr(e, 'summary',
-                                        getattr(e, 'subtitle',
-                                            getattr(e, 'title'))))
+                                             getattr(e, 'subtitle',
+                                                     getattr(e, 'title'))))
 
         content += u' [link](%s)' % e.link
         updated = datetime.utcfromtimestamp(calendar.timegm(e.updated_parsed))
 
         base_slug = BM.BlogPost.make_base_slug(title, updated)
-        b_count = BM.BlogPost.query.find(dict(slug=base_slug, app_config_id=appid)).count()
+        b_count = BM.BlogPost.query.find(
+            dict(slug=base_slug, app_config_id=appid)).count()
         if b_count == 0:
             post = BM.BlogPost(title=title, text=content, timestamp=updated,
-                            app_config_id=appid,
-                            state='published')
-            post.neighborhood_id=c.project.neighborhood_id
+                               app_config_id=appid,
+                               state='published')
+            post.neighborhood_id = c.project.neighborhood_id
             post.make_slug()
             post.commit()
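
The command wraps process_feed and process_entry with `exceptionless(None, log=allura_base.log)`, so a single broken feed or entry is logged and skipped instead of aborting the whole run. A minimal sketch of what a decorator used that way typically does; this is an assumption about the shape of the helper, not Allura's exact implementation:

    import logging

    def exceptionless(error_result, log=None):
        """Decorator factory: swallow exceptions, log them, return error_result."""
        def decorator(func):
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if log is not None:
                        log.exception('error in %s', func.__name__)
                    return error_result
            return wrapper
        return decorator

    @exceptionless(None, log=logging.getLogger(__name__))
    def process_feed(url):
        raise RuntimeError('feed unreachable')

    assert process_feed('http://example.com/feed') is None  # logged, not raised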

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/main.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/main.py b/ForgeBlog/forgeblog/main.py
index bd0f821..1bb4ced 100644
--- a/ForgeBlog/forgeblog/main.py
+++ b/ForgeBlog/forgeblog/main.py
@@ -57,8 +57,9 @@ from forgeblog import widgets
 
 log = logging.getLogger(__name__)
 
+
 class W:
-    thread=w.Thread(
+    thread = w.Thread(
         page=None, limit=None, page_size=None, count=None,
         style='linear')
     pager = widgets.BlogPager()
@@ -73,30 +74,31 @@ class W:
     search_results = SearchResults()
     help_modal = SearchHelp()
 
+
 class ForgeBlogApp(Application):
     __version__ = version.__version__
-    tool_label='Blog'
-    tool_description="""
+    tool_label = 'Blog'
+    tool_description = """
         Share exciting news and progress updates with your
         community.
     """
-    default_mount_label='Blog'
-    default_mount_point='blog'
+    default_mount_label = 'Blog'
+    default_mount_point = 'blog'
     permissions = ['configure', 'read', 'write',
-                    'unmoderated_post', 'post', 'moderate', 'admin']
+                   'unmoderated_post', 'post', 'moderate', 'admin']
     permissions_desc = {
         'read': 'View blog entries.',
         'write': 'Create new blog entry.',
         'admin': 'Set permissions. Enable/disable commenting.',
     }
-    ordinal=14
+    ordinal = 14
     exportable = True
     config_options = Application.config_options
     default_external_feeds = []
-    icons={
-        24:'images/blog_24.png',
-        32:'images/blog_32.png',
-        48:'images/blog_48.png'
+    icons = {
+        24: 'images/blog_24.png',
+        32: 'images/blog_32.png',
+        48: 'images/blog_48.png'
     }
 
     def __init__(self, project, config):
@@ -114,12 +116,14 @@ class ForgeBlogApp(Application):
             else:
                 external_feeds = self.default_external_feeds
             return external_feeds
+
         def fset(self, new_external_feeds):
             globals = BM.Globals.query.get(app_config_id=self.config._id)
             if globals is not None:
                 globals.external_feeds = new_external_feeds
             elif len(new_external_feeds) > 0:
-                globals = BM.Globals(app_config_id=self.config._id, external_feeds=new_external_feeds)
+                globals = BM.Globals(
+                    app_config_id=self.config._id, external_feeds=new_external_feeds)
             if globals is not None:
                 session(globals).flush()
 
@@ -132,7 +136,7 @@ class ForgeBlogApp(Application):
         menu_id = self.config.options.mount_label
         with h.push_config(c, app=self):
             return [
-                SitemapEntry(menu_id, '.')[self.sidebar_menu()] ]
+                SitemapEntry(menu_id, '.')[self.sidebar_menu()]]
 
     @property
     def show_discussion(self):
@@ -147,21 +151,23 @@ class ForgeBlogApp(Application):
         links = [
             SitemapEntry('Home', base),
             SitemapEntry('Search', base + 'search'),
-            ]
+        ]
         if has_access(self, 'write')():
-            links += [ SitemapEntry('New Post', base + 'new') ]
+            links += [SitemapEntry('New Post', base + 'new')]
         return links
 
     def admin_menu(self):
         import sys
-        admin_url = c.project.url() + 'admin/' + self.config.options.mount_point + '/'
+        admin_url = c.project.url() + 'admin/' + \
+            self.config.options.mount_point + '/'
         # temporarily disabled until some bugs are fixed
         links = super(ForgeBlogApp, self).admin_menu(force_options=True)
         # We don't want external feeds in menu unless they're enabled
         if asbool(config.get('forgeblog.exfeed', 'false')):
-            links.insert(0, SitemapEntry('External feeds', admin_url + 'exfeed', className='admin_modal'))
+            links.insert(0, SitemapEntry('External feeds',
+                         admin_url + 'exfeed', className='admin_modal'))
         return links
-        #return super(ForgeBlogApp, self).admin_menu(force_options=True)
+        # return super(ForgeBlogApp, self).admin_menu(force_options=True)
 
     def install(self, project):
         'Set up any default permissions and roles here'
@@ -180,7 +186,7 @@ class ForgeBlogApp(Application):
             M.ACE.allow(role_developer, 'moderate'),
             M.ACE.allow(role_admin, 'configure'),
             M.ACE.allow(role_admin, 'admin'),
-            ]
+        ]
 
     def uninstall(self, project):
         "Remove all the tool's artifacts from the database"
@@ -198,6 +204,7 @@ class ForgeBlogApp(Application):
             json.dump(post, f, cls=jsonify.GenericJSON, indent=2)
         f.write(']}')
 
+
 class RootController(BaseController, FeedController):
 
     def __init__(self):
@@ -264,7 +271,6 @@ class RootController(BaseController, FeedController):
         post = BM.BlogPost.new(**kw)
         redirect(h.really_unicode(post.url()).encode('utf-8'))
 
-
     @with_trailing_slash
     @expose('jinja:allura:templates/markdown_syntax_dialog.html')
     def markdown_syntax_dialog(self, **kw):
@@ -281,6 +287,7 @@ class RootController(BaseController, FeedController):
             raise exc.HTTPNotFound()
         return PostController(post), rest
 
+
 class PostController(BaseController, FeedController):
 
     def __init__(self, post):
@@ -341,7 +348,7 @@ class PostController(BaseController, FeedController):
             self.post.delete()
             flash('Post deleted', 'info')
             redirect(h.really_unicode(c.app.url).encode('utf-8'))
-        for k,v in kw.iteritems():
+        for k, v in kw.iteritems():
             setattr(self.post, k, v)
         self.post.commit()
         redirect('.')
@@ -379,13 +386,16 @@ class PostController(BaseController, FeedController):
             self.post.url())
 
     def _get_version(self, version):
-        if not version: return self.post
+        if not version:
+            return self.post
         try:
             return self.post.get_version(version)
         except ValueError:
             raise exc.HTTPNotFound()
 
+
 class BlogAdminController(DefaultAdminController):
+
     def __init__(self, app):
         self.app = app
 
@@ -399,9 +409,11 @@ class BlogAdminController(DefaultAdminController):
     @expose()
     @require_post()
     def set_options(self, show_discussion=False):
-        self.app.config.options['show_discussion'] = show_discussion and True or False
+        self.app.config.options[
+            'show_discussion'] = show_discussion and True or False
         flash('Blog options updated')
-        redirect(h.really_unicode(c.project.url()+'admin/tools').encode('utf-8'))
+        redirect(h.really_unicode(c.project.url() + 'admin/tools')
+                 .encode('utf-8'))
 
     @without_trailing_slash
     @expose('jinja:forgeblog:templates/blog/admin_exfeed.html')
@@ -442,12 +454,14 @@ class BlogAdminController(DefaultAdminController):
         self.app.external_feeds_list = exfeed_list
         flash('External feeds updated')
         if len(invalid_list) > 0:
-            flash('Invalid link(s): %s' % ','.join(link for link in invalid_list), 'error')
+            flash('Invalid link(s): %s' %
+                  ','.join(link for link in invalid_list), 'error')
 
-        redirect(c.project.url()+'admin/tools')
+        redirect(c.project.url() + 'admin/tools')
 
 
 class RootRestController(BaseController):
+
     def __init__(self):
         self._discuss = AppDiscussionRestController()
 
@@ -472,7 +486,8 @@ class RootRestController(BaseController):
             post_titles = []
             for post in posts:
                 if has_access(post, 'read')():
-                    post_titles.append({'title': post.title, 'url': h.absurl('/rest' + post.url())})
+                    post_titles.append(
+                        {'title': post.title, 'url': h.absurl('/rest' + post.url())})
             return dict(posts=post_titles, count=result['count'], limit=result['limit'], page=result['page'])
 
     @expose()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/model/blog.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/model/blog.py b/ForgeBlog/forgeblog/model/blog.py
index 7e34aea..f520265 100644
--- a/ForgeBlog/forgeblog/model/blog.py
+++ b/ForgeBlog/forgeblog/model/blog.py
@@ -36,23 +36,26 @@ from allura.lib import utils
 config = utils.ConfigProxy(
     common_suffix='forgemail.domain')
 
+
 class Globals(MappedClass):
 
     class __mongometa__:
         name = 'blog-globals'
         session = M.project_orm_session
-        indexes = [ 'app_config_id' ]
+        indexes = ['app_config_id']
 
     type_s = 'BlogGlobals'
     _id = FieldProperty(schema.ObjectId)
-    app_config_id = ForeignIdProperty('AppConfig', if_missing=lambda:c.app.config._id)
-    external_feeds=FieldProperty([str])
+    app_config_id = ForeignIdProperty(
+        'AppConfig', if_missing=lambda: c.app.config._id)
+    external_feeds = FieldProperty([str])
 
 
 class BlogPostSnapshot(M.Snapshot):
+
     class __mongometa__:
-        name='blog_post_snapshot'
-    type_s='Blog Post Snapshot'
+        name = 'blog_post_snapshot'
+    type_s = 'Blog Post Snapshot'
 
     def original(self):
         return BlogPost.query.get(_id=self.artifact_id)
@@ -92,14 +95,18 @@ class BlogPostSnapshot(M.Snapshot):
             return None
         return orig.email_address
 
+
 class BlogPost(M.VersionedArtifact, ActivityObject):
+
     class __mongometa__:
-        name='blog_post'
+        name = 'blog_post'
         history_class = BlogPostSnapshot
-        unique_indexes = [ ('app_config_id', 'slug') ]
+        unique_indexes = [('app_config_id', 'slug')]
         indexes = [
-            ('app_config_id', 'state', 'timestamp'),  # for [[project_blog_posts]] macro
-            ('neighborhood_id', 'state', 'timestamp'),  # for [[neighborhood_blog_posts]] macro
+            # for [[project_blog_posts]] macro
+            ('app_config_id', 'state', 'timestamp'),
+            # for [[neighborhood_blog_posts]] macro
+            ('neighborhood_id', 'state', 'timestamp'),
         ]
 
     type_s = 'Blog Post'
@@ -109,7 +116,8 @@ class BlogPost(M.VersionedArtifact, ActivityObject):
     text_cache = FieldProperty(MarkdownCache)
     timestamp = FieldProperty(datetime, if_missing=datetime.utcnow)
     slug = FieldProperty(str)
-    state = FieldProperty(schema.OneOf('draft', 'published'), if_missing='draft')
+    state = FieldProperty(
+        schema.OneOf('draft', 'published'), if_missing='draft')
     neighborhood_id = ForeignIdProperty('Neighborhood', if_missing=None)
 
     @property
@@ -128,12 +136,14 @@ class BlogPost(M.VersionedArtifact, ActivityObject):
 
     def _get_date(self):
         return self.timestamp.date()
+
     def _set_date(self, value):
         self.timestamp = datetime.combine(value, self.time)
     date = property(_get_date, _set_date)
 
     def _get_time(self):
         return self.timestamp.time()
+
     def _set_time(self, value):
         self.timestamp = datetime.combine(self.date, value)
     time = property(_get_time, _set_time)
@@ -162,19 +172,20 @@ class BlogPost(M.VersionedArtifact, ActivityObject):
         # first and *then* truncating doesn't work either, because the
         # ellipsis tag ends up orphaned from the main text.
         ellipsis = '... [read more](%s)' % self.url()
-        paragraphs = self.text.replace('\r','').split('\n\n')
+        paragraphs = self.text.replace('\r', '').split('\n\n')
         total_length = 0
         for i, p in enumerate(paragraphs):
             total_length += len(p)
             if total_length >= 400:
                 break
-        text = '\n\n'.join(paragraphs[:i+1])
+        text = '\n\n'.join(paragraphs[:i + 1])
         return g.markdown.convert(text + (ellipsis if i + 1 < len(paragraphs)
-                                                   else ''))
+                                          else ''))
 
     @property
     def email_address(self):
-        domain = '.'.join(reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
+        domain = '.'.join(
+            reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
         return '%s@%s%s' % (self.title.replace('/', '.'), domain, config.common_suffix)
 
     @staticmethod
@@ -184,8 +195,8 @@ class BlogPost(M.VersionedArtifact, ActivityObject):
             for ch in title.replace(' ', '-')
             if ch.isalnum() or ch == '-')
         return '%s/%s' % (
-                timestamp.strftime('%Y/%m'),
-                slugsafe)
+            timestamp.strftime('%Y/%m'),
+            slugsafe)
 
     def make_slug(self):
         base = BlogPost.make_base_slug(self.title, self.timestamp)
@@ -195,7 +206,7 @@ class BlogPost(M.VersionedArtifact, ActivityObject):
                 session(self).insert_now(self, state(self))
                 return self.slug
             except DuplicateKeyError:
-                self.slug = base + '-%.3d' % randint(0,999)
+                self.slug = base + '-%.3d' % randint(0, 999)
 
     def url(self):
         return self.app.url + self.slug + '/'
@@ -215,26 +226,27 @@ class BlogPost(M.VersionedArtifact, ActivityObject):
 
     def get_version(self, version):
         HC = self.__mongometa__.history_class
-        return HC.query.find({'artifact_id':self._id, 'version':int(version)}).one()
+        return HC.query.find({'artifact_id': self._id, 'version': int(version)}).one()
 
     def commit(self):
         activity = functools.partial(g.director.create_activity, c.user,
-                target=c.project)
+                                     target=c.project)
         self.subscribe()
         super(BlogPost, self).commit()
         if self.version > 1:
-            v1 = self.get_version(self.version-1)
+            v1 = self.get_version(self.version - 1)
             v2 = self
-            la = [ line + '\n'  for line in v1.text.splitlines() ]
-            lb = [ line + '\n'  for line in v2.text.splitlines() ]
+            la = [line + '\n' for line in v1.text.splitlines()]
+            lb = [line + '\n' for line in v2.text.splitlines()]
             diff = ''.join(difflib.unified_diff(
-                    la, lb,
-                    'v%d' % v1.version,
-                    'v%d' % v2.version))
+                la, lb,
+                'v%d' % v1.version,
+                'v%d' % v2.version))
             description = diff
             if v1.state != 'published' and v2.state == 'published':
                 activity('created', self)
-                M.Feed.post(self, self.title, self.text, author=self.author(), pubdate=self.get_version(1).timestamp)
+                M.Feed.post(self, self.title, self.text, author=self.author(),
+                            pubdate=self.get_version(1).timestamp)
                 description = self.text
                 subject = '%s created post %s' % (
                     c.user.username, self.title)
@@ -252,7 +264,8 @@ class BlogPost(M.VersionedArtifact, ActivityObject):
                 c.user.username, self.title)
             if self.state == 'published':
                 activity('created', self)
-                M.Feed.post(self, self.title, self.text, author=self.author(), pubdate=self.timestamp)
+                M.Feed.post(self, self.title, self.text,
+                            author=self.author(), pubdate=self.timestamp)
         if self.state == 'published':
             M.Notification.post(
                 artifact=self, topic='metadata', text=description, subject=subject)
@@ -282,10 +295,11 @@ class BlogPost(M.VersionedArtifact, ActivityObject):
 
 
 class Attachment(M.BaseAttachment):
-    ArtifactClass=BlogPost
+    ArtifactClass = BlogPost
+
     class __mongometa__:
-        polymorphic_identity='BlogAttachment'
-    attachment_type=FieldProperty(str, if_missing='BlogAttachment')
+        polymorphic_identity = 'BlogAttachment'
+    attachment_type = FieldProperty(str, if_missing='BlogAttachment')
 
 
 Mapper.compile_all()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/tests/functional/test_rest.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/tests/functional/test_rest.py b/ForgeBlog/forgeblog/tests/functional/test_rest.py
index b1b3bc7..05e06c8 100644
--- a/ForgeBlog/forgeblog/tests/functional/test_rest.py
+++ b/ForgeBlog/forgeblog/tests/functional/test_rest.py
@@ -44,7 +44,9 @@ class TestBlogApi(TestRestApiBase):
             'labels': 'label1, label2'
         }
         r = self.api_post('/rest/p/test/blog/', **data)
-        assert_equal(r.location, 'http://localhost/rest/p/test/blog/%s/%s/test/' % (date.today().strftime("%Y"), date.today().strftime("%m")))
+        assert_equal(
+            r.location, 'http://localhost/rest/p/test/blog/%s/%s/test/' %
+            (date.today().strftime("%Y"), date.today().strftime("%m")))
         assert_equal(r.status_int, 201)
         url = '/rest' + BM.BlogPost.query.find().first().url()
         r = self.api_get('/rest/p/test/blog/')
@@ -58,8 +60,6 @@ class TestBlogApi(TestRestApiBase):
         assert_equal(r.json['state'], data['state'])
         assert_equal(r.json['labels'], data['labels'].split(','))
 
-
-
     def test_update_post(self):
         data = {
             'title': 'test',
@@ -100,8 +100,10 @@ class TestBlogApi(TestRestApiBase):
         assert_equal(r.status_int, 404)
 
     def test_read_permissons(self):
-        self.api_post('/rest/p/test/blog/', title='test', text='test text', state='published')
-        self.app.get('/rest/p/test/blog/', extra_environ={'username': '*anonymous'}, status=200)
+        self.api_post('/rest/p/test/blog/', title='test',
+                      text='test text', state='published')
+        self.app.get('/rest/p/test/blog/',
+                     extra_environ={'username': '*anonymous'}, status=200)
         p = M.Project.query.get(shortname='test')
         acl = p.app_instance('blog').config.acl
         anon = M.ProjectRole.by_name('*anonymous')._id
@@ -113,7 +115,8 @@ class TestBlogApi(TestRestApiBase):
 
     def test_new_post_permissons(self):
         self.app.post('/rest/p/test/blog/',
-                      params=dict(title='test', text='test text', state='published'),
+                      params=dict(title='test', text='test text',
+                                  state='published'),
                       extra_environ={'username': '*anonymous'},
                       status=401)
         p = M.Project.query.get(shortname='test')
@@ -122,15 +125,18 @@ class TestBlogApi(TestRestApiBase):
         anon_write = M.ACE.allow(anon, 'write')
         acl.append(anon_write)
         self.app.post('/rest/p/test/blog/',
-                      params=dict(title='test', text='test text', state='published'),
+                      params=dict(title='test', text='test text',
+                                  state='published'),
                       extra_environ={'username': '*anonymous'},
                       status=201)
 
     def test_update_post_permissons(self):
-        self.api_post('/rest/p/test/blog/', title='test', text='test text', state='published')
+        self.api_post('/rest/p/test/blog/', title='test',
+                      text='test text', state='published')
         url = '/rest' + BM.BlogPost.query.find().first().url()
         self.app.post(url.encode('utf-8'),
-                      params=dict(title='test2', text='test text2', state='published'),
+                      params=dict(title='test2', text='test text2',
+                                  state='published'),
                       extra_environ={'username': '*anonymous'},
                       status=401)
         p = M.Project.query.get(shortname='test')
@@ -139,7 +145,8 @@ class TestBlogApi(TestRestApiBase):
         anon_write = M.ACE.allow(anon, 'write')
         acl.append(anon_write)
         self.app.post(url.encode('utf-8'),
-                      params=dict(title='test2', text='test text2', state='published'),
+                      params=dict(title='test2', text='test text2',
+                                  state='published'),
                       extra_environ={'username': '*anonymous'},
                       status=200)
         r = self.api_get(url)
@@ -148,12 +155,15 @@ class TestBlogApi(TestRestApiBase):
         assert_equal(r.json['state'], 'published')
 
     def test_permission_draft_post(self):
-        self.api_post('/rest/p/test/blog/', title='test', text='test text', state='draft')
-        r = self.app.get('/rest/p/test/blog/', extra_environ={'username': '*anonymous'})
+        self.api_post('/rest/p/test/blog/', title='test',
+                      text='test text', state='draft')
+        r = self.app.get('/rest/p/test/blog/',
+                         extra_environ={'username': '*anonymous'})
         assert_equal(r.json['posts'], [])
         url = '/rest' + BM.BlogPost.query.find().first().url()
         self.app.post(url.encode('utf-8'),
-                      params=dict(title='test2', text='test text2', state='published'),
+                      params=dict(title='test2', text='test text2',
+                                  state='published'),
                       extra_environ={'username': '*anonymous'},
                       status=401)
         p = M.Project.query.get(shortname='test')
@@ -161,22 +171,29 @@ class TestBlogApi(TestRestApiBase):
         anon = M.ProjectRole.by_name('*anonymous')._id
         anon_write = M.ACE.allow(anon, 'write')
         acl.append(anon_write)
-        r = self.app.get('/rest/p/test/blog/', extra_environ={'username': '*anonymous'})
+        r = self.app.get('/rest/p/test/blog/',
+                         extra_environ={'username': '*anonymous'})
         assert_equal(r.json['posts'][0]['title'], 'test')
 
     def test_draft_post(self):
-        self.api_post('/rest/p/test/blog/', title='test', text='test text', state='draft')
-        r = self.app.get('/rest/p/test/blog/', extra_environ={'username': '*anonymous'})
+        self.api_post('/rest/p/test/blog/', title='test',
+                      text='test text', state='draft')
+        r = self.app.get('/rest/p/test/blog/',
+                         extra_environ={'username': '*anonymous'})
         assert_equal(r.json['posts'], [])
         url = '/rest' + BM.BlogPost.query.find().first().url()
         self.api_post(url, state='published')
-        r = self.app.get('/rest/p/test/blog/', extra_environ={'username': '*anonymous'})
+        r = self.app.get('/rest/p/test/blog/',
+                         extra_environ={'username': '*anonymous'})
         assert_equal(r.json['posts'][0]['title'], 'test')
 
     def test_pagination(self):
-        self.api_post('/rest/p/test/blog/', title='test1', text='test text1', state='published')
-        self.api_post('/rest/p/test/blog/', title='test2', text='test text2', state='published')
-        self.api_post('/rest/p/test/blog/', title='test3', text='test text3', state='published')
+        self.api_post('/rest/p/test/blog/', title='test1',
+                      text='test text1', state='published')
+        self.api_post('/rest/p/test/blog/', title='test2',
+                      text='test text2', state='published')
+        self.api_post('/rest/p/test/blog/', title='test3',
+                      text='test text3', state='published')
         r = self.api_get('/rest/p/test/blog/', limit='1', page='0')
         assert_equal(r.json['posts'][0]['title'], 'test3')
         assert_equal(len(r.json['posts']), 1)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/tests/functional/test_root.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/tests/functional/test_root.py b/ForgeBlog/forgeblog/tests/functional/test_root.py
index 96a8113..d3eb932 100644
--- a/ForgeBlog/forgeblog/tests/functional/test_root.py
+++ b/ForgeBlog/forgeblog/tests/functional/test_root.py
@@ -32,14 +32,15 @@ from allura import model as M
 # CommentController methods exposed:
 #     reply, delete
 
+
 class Test(TestController):
 
     def _post(self, slug='', **kw):
         d = {
-                'title':'My Post',
-                'text':'Nothing to see here',
-                'labels':'',
-                'state':'published'}
+            'title': 'My Post',
+            'text': 'Nothing to see here',
+            'labels': '',
+            'state': 'published'}
         d.update(kw)
         r = self.app.post('/blog%s/save' % slug, params=d)
         return r
@@ -210,7 +211,7 @@ class Test(TestController):
         self._post(title='two', text='[blog:%s/one]' % d)
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
-        r= self.app.get('/blog/%s/one/' % d)
+        r = self.app.get('/blog/%s/one/' % d)
         assert 'Related' in r
         assert 'Blog Post: %s/two' % d in r
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/tests/test_app.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/tests/test_app.py b/ForgeBlog/forgeblog/tests/test_app.py
index 895bb06..bab8f22 100644
--- a/ForgeBlog/forgeblog/tests/test_app.py
+++ b/ForgeBlog/forgeblog/tests/test_app.py
@@ -62,10 +62,13 @@ class TestBulkExport(object):
         blog.bulk_export(f)
         f.seek(0)
         blog = json.loads(f.read())
-        blog['posts'] = sorted(blog['posts'], key=lambda x: x['title'], reverse=True)
+        blog['posts'] = sorted(
+            blog['posts'], key=lambda x: x['title'], reverse=True)
         assert_equal(blog['posts'][0]['title'], 'Test2 title')
         assert_equal(blog['posts'][0]['text'], 'test2 post')
         assert_equal(blog['posts'][1]['title'], 'Test title')
         assert_equal(blog['posts'][1]['text'], 'test post')
-        assert_equal(blog['posts'][1]['labels'], ['the firstlabel', 'the second label'])
-        assert_equal(blog['posts'][1]['discussion_thread']['posts'][0]['text'], 'test comment')
+        assert_equal(blog['posts'][1]['labels'],
+                     ['the firstlabel', 'the second label'])
+        assert_equal(blog['posts'][1]['discussion_thread']
+                     ['posts'][0]['text'], 'test comment')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/tests/test_commands.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/tests/test_commands.py b/ForgeBlog/forgeblog/tests/test_commands.py
index 569470e..a086519 100644
--- a/ForgeBlog/forgeblog/tests/test_commands.py
+++ b/ForgeBlog/forgeblog/tests/test_commands.py
@@ -30,15 +30,18 @@ from allura import model as M
 from forgeblog import model as BM
 
 
-test_config = pkg_resources.resource_filename('allura', '../test.ini') + '#main'
+test_config = pkg_resources.resource_filename(
+    'allura', '../test.ini') + '#main'
 
 
 def setUp():
     setup_basic_test()
     setup_global_objects()
 
+
 def _mock_feed(*entries):
     class attrdict(dict):
+
         def __getattr__(self, name):
             return self[name]
 
@@ -53,11 +56,12 @@ def _mock_feed(*entries):
             subtitle='',
             summary='',
             link='http://example.com/',
-            updated=datetime.utcnow()+timedelta(days=_mock_feed.i - 100))
+            updated=datetime.utcnow() + timedelta(days=_mock_feed.i - 100))
         entry.update(e)
         entry['updated_parsed'] = entry['updated'].timetuple()
         if 'content' in entry:
-            entry['content'] = [attrdict(type=entry['content_type'], value=entry['content'])]
+            entry['content'] = [
+                attrdict(type=entry['content_type'], value=entry['content'])]
         if 'summary_detail' in entry:
             entry['summary_detail'] = attrdict(entry['summary_detail'])
         feed.entries.append(entry)
@@ -65,6 +69,7 @@ def _mock_feed(*entries):
     return feed
 _mock_feed.i = 0
 
+
 @skipif(module_not_available('html2text'))
 @mock.patch.object(feedparser, 'parse')
 def test_pull_rss_feeds(parsefeed):
@@ -82,11 +87,11 @@ def test_pull_rss_feeds(parsefeed):
     )
 
     rendered_html_content = "\n".join([
-       r"1\. foo",
+        r"1\. foo",
         "",
-       r"\#foo bar [baz](baz) foo bar ",
+        r"\#foo bar [baz](baz) foo bar ",
         "",
-       r"\#foo bar [ baz ](baz)",
+        r"\#foo bar [ baz ](baz)",
         " [link](http://example.com/)",
     ])
 
@@ -97,13 +102,14 @@ def test_pull_rss_feeds(parsefeed):
         dict(summary_detail=dict(type='text/html', value=html_content)),
     )
 
-    base_app =  M.AppConfig.query.find().all()[0]
-    tmp_app = M.AppConfig(tool_name=u'Blog', discussion_id=base_app.discussion_id,
-                          project_id=base_app.project_id,
-                          options={u'ordinal': 0, u'show_right_bar': True,
-                                    u'project_name': base_app.project.name,
-                                    u'mount_point': u'blog',
-                                    u'mount_label': u'Blog'})
+    base_app = M.AppConfig.query.find().all()[0]
+    tmp_app = M.AppConfig(
+        tool_name=u'Blog', discussion_id=base_app.discussion_id,
+        project_id=base_app.project_id,
+        options={u'ordinal': 0, u'show_right_bar': True,
+                 u'project_name': base_app.project.name,
+                 u'mount_point': u'blog',
+                 u'mount_label': u'Blog'})
     new_external_feeds = ['http://example.com/news/feed/']
     BM.Globals(app_config_id=tmp_app._id, external_feeds=new_external_feeds)
     ThreadLocalORMSession.flush_all()
@@ -113,7 +119,8 @@ def test_pull_rss_feeds(parsefeed):
     cmd.run([test_config, '-a', tmp_app._id])
     cmd.command()
     parsefeed.assert_called_with('http://example.com/news/feed/')
-    posts = BM.BlogPost.query.find({'app_config_id': tmp_app._id}).sort('timestamp', 1)
+    posts = BM.BlogPost.query.find(
+        {'app_config_id': tmp_app._id}).sort('timestamp', 1)
     assert_equal(posts.count(), 4)
     posts = posts.all()
     assert_equal(posts[0].title, 'Test')
@@ -125,6 +132,7 @@ def test_pull_rss_feeds(parsefeed):
     assert_equal(posts[3].title, 'Default Title 4')
     assert_equal(posts[3].text, rendered_html_content)
 
+
 @skipif(module_not_available('html2text'))
 def test_plaintext_preprocessor():
     from html2text import html2text
@@ -140,10 +148,11 @@ def test_plaintext_preprocessor():
     )
     html = g.markdown.convert(text)
     assert_equal(html,
-        '<div class="markdown_content"><p>1. foo '
-        '#foo bar <a class="" href="../baz">baz</a> foo bar '
-        '#foo bar <a class="" href="../baz"> baz </a></p></div>'
-    )
+                 '<div class="markdown_content"><p>1. foo '
+                 '#foo bar <a class="" href="../baz">baz</a> foo bar '
+                 '#foo bar <a class="" href="../baz"> baz </a></p></div>'
+                 )
+
 
 @skipif(module_not_available('html2text'))
 def test_plaintext_preprocessor_wrapped():
@@ -162,7 +171,7 @@ def test_plaintext_preprocessor_wrapped():
     )
     html = g.markdown.convert(text)
     assert_equal(html,
-        '<div class="markdown_content"><p>1. foo</p>\n'
-        '<p>#foo bar <a class="" href="../baz">baz</a> foo bar </p>\n'
-        '<p>#foo bar <a class="" href="../baz"> baz </a></p></div>'
-    )
+                 '<div class="markdown_content"><p>1. foo</p>\n'
+                 '<p>#foo bar <a class="" href="../baz">baz</a> foo bar </p>\n'
+                 '<p>#foo bar <a class="" href="../baz"> baz </a></p></div>'
+                 )

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/tests/test_roles.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/tests/test_roles.py b/ForgeBlog/forgeblog/tests/test_roles.py
index a352305..e270bec 100644
--- a/ForgeBlog/forgeblog/tests/test_roles.py
+++ b/ForgeBlog/forgeblog/tests/test_roles.py
@@ -22,6 +22,7 @@ from allura import model as M
 from allura.lib import security
 from allura.lib import helpers as h
 
+
 def setUp():
     setup_basic_test()
     setup_global_objects()
@@ -29,10 +30,12 @@ def setUp():
     c.project.install_app('blog', 'blog')
     g.set_app('blog')
 
+
 def test_role_assignments():
     admin = M.User.by_username('test-admin')
     user = M.User.by_username('test-user')
     anon = M.User.anonymous()
+
     def check_access(perm):
         pred = security.has_access(c.app, perm)
         return pred(user=admin), pred(user=user), pred(user=anon)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/tests/unit/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/tests/unit/__init__.py b/ForgeBlog/forgeblog/tests/unit/__init__.py
index 86e891f..dbef2b5 100644
--- a/ForgeBlog/forgeblog/tests/unit/__init__.py
+++ b/ForgeBlog/forgeblog/tests/unit/__init__.py
@@ -28,16 +28,18 @@ from alluratest.controller import setup_basic_test
 def setUp():
     setup_basic_test()
 
+
 class BlogTestWithModel(object):
+
     def setUp(self):
         bootstrap.wipe_database()
         project_reg = plugin.ProjectRegistrationProvider.get()
         c.user = bootstrap.create_user('Test User')
         neighborhood = M.Neighborhood(name='Projects', url_prefix='/p/',
-            features=dict(private_projects = False,
-                          max_projects = None,
-                          css = 'none',
-                          google_analytics = False))
+                                      features=dict(private_projects=False,
+                                                    max_projects=None,
+                                                    css='none',
+                                                    google_analytics=False))
         project_reg.register_neighborhood_project(neighborhood, [c.user])
         c.project = neighborhood.register_project('test', c.user)
         c.project.install_app('Blog', 'blog')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/tests/unit/test_blog_post.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/tests/unit/test_blog_post.py b/ForgeBlog/forgeblog/tests/unit/test_blog_post.py
index f2274d9..fadfcf0 100644
--- a/ForgeBlog/forgeblog/tests/unit/test_blog_post.py
+++ b/ForgeBlog/forgeblog/tests/unit/test_blog_post.py
@@ -23,13 +23,16 @@ from forgeblog import model as M
 from forgeblog.tests.unit import BlogTestWithModel
 from allura.model import Feed
 
+
 def wrapped(s):
     return '<div class="markdown_content"><p>%s</p></div>' % s
 
 
 class TestBlogPost(BlogTestWithModel):
+
     def test_new(self):
-        post = M.BlogPost.new(title='test', text='test message', state='published')
+        post = M.BlogPost.new(
+            title='test', text='test message', state='published')
         assert_equal(post.title, 'test')
         assert_equal(post.text, 'test message')
         assert_equal(post.state, 'published')
@@ -38,6 +41,7 @@ class TestBlogPost(BlogTestWithModel):
 
 
 class TestFeed(BlogTestWithModel):
+
     def testd(self):
         post = M.BlogPost()
         post.title = 'test'
@@ -57,6 +61,7 @@ class TestFeed(BlogTestWithModel):
 
 
 class TestHtmlPreview(BlogTestWithModel):
+
     def _make_post(self, text):
         post = M.BlogPost()
         post.text = text

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/forgeblog/widgets.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/forgeblog/widgets.py b/ForgeBlog/forgeblog/widgets.py
index ea763ad..9d54903 100644
--- a/ForgeBlog/forgeblog/widgets.py
+++ b/ForgeBlog/forgeblog/widgets.py
@@ -24,42 +24,50 @@ from allura.lib.widgets import form_fields as ffw
 from allura.lib.widgets import forms
 from allura import model as M
 
+
 class BlogPager(ffw.PageList):
-    template='jinja:forgeblog:templates/blog_widgets/page_list.html'
+    template = 'jinja:forgeblog:templates/blog_widgets/page_list.html'
+
 
 class NewPostForm(forms.ForgeForm):
-    template='jinja:forgeblog:templates/blog_widgets/post_form.html'
+    template = 'jinja:forgeblog:templates/blog_widgets/post_form.html'
+
     class fields(ew_core.NameList):
         title = ew.TextField(validator=fev.UnicodeString(not_empty=True,
-                             messages={'empty':"You must provide a Title"}),
+                             messages={'empty': "You must provide a Title"}),
                              attrs=dict(placeholder='Enter your title here',
                                         title='Enter your title here',
                                         style='width: 425px'))
         text = ffw.MarkdownEdit(show_label=False,
-                                attrs=dict(placeholder='Enter your content here',
-                                           title='Enter your content here'))
+                                attrs=dict(
+                                    placeholder='Enter your content here',
+                                    title='Enter your content here'))
         state = ew.SingleSelectField(
             options=[
                 ew.Option(py_value='draft', label='Draft'),
-                ew.Option(py_value='published', label='Published') ])
+                ew.Option(py_value='published', label='Published')])
         labels = ffw.LabelEdit(placeholder='Add labels here',
                                title='Add labels here')
 
     def resources(self):
-        for r in super(NewPostForm, self).resources(): yield r
+        for r in super(NewPostForm, self).resources():
+            yield r
         yield ew.JSScript('''
             $(function() {
                 $('input[name="title"]').focus();
             });
         ''')
 
+
 class EditPostForm(NewPostForm):
+
     class buttons(ew_core.NameList):
         delete = ew.SubmitButton(label='Delete')
 
+
 class ViewPostForm(ew_core.Widget):
-    template='jinja:forgeblog:templates/blog_widgets/view_post.html'
-    defaults=dict(
+    template = 'jinja:forgeblog:templates/blog_widgets/view_post.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         value=None,
         subscribed=None,
@@ -71,8 +79,9 @@ class ViewPostForm(ew_core.Widget):
             M.Mailbox.subscribed(artifact=kw.get('value'))
         return kw
 
+
 class PreviewPostForm(ew_core.Widget):
-    template='jinja:forgeblog:templates/blog_widgets/preview_post.html'
-    defaults=dict(
+    template = 'jinja:forgeblog:templates/blog_widgets/preview_post.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         value=None)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeBlog/setup.py
----------------------------------------------------------------------
diff --git a/ForgeBlog/setup.py b/ForgeBlog/setup.py
index 10f3fae..40befd6 100644
--- a/ForgeBlog/setup.py
+++ b/ForgeBlog/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgeblog.version import __version__
 
@@ -25,7 +26,8 @@ setup(name='ForgeBlog',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeChat/forgechat/command.py
----------------------------------------------------------------------
diff --git a/ForgeChat/forgechat/command.py b/ForgeChat/forgechat/command.py
index fdba6e2..e667843 100644
--- a/ForgeChat/forgechat/command.py
+++ b/ForgeChat/forgechat/command.py
@@ -38,9 +38,10 @@ from allura import model as M
 
 from forgechat import model as CM
 
+
 class IRCBotCommand(allura.command.Command):
-    min_args=1
-    max_args=1
+    min_args = 1
+    max_args = 1
     usage = '<ini file>'
     summary = 'Connect to all configured IRC servers and relay messages'
     parser = command.Command.standard_parser(verbose=True)
@@ -58,11 +59,13 @@ class IRCBotCommand(allura.command.Command):
                     asint(tg.config.get('forgechat.port', '6667')))
                 asyncore.loop()
             except Exception:
-                base.log.exception('Error in ircbot asyncore.loop(), restart in 5s')
+                base.log.exception(
+                    'Error in ircbot asyncore.loop(), restart in 5s')
                 time.sleep(5)
 
+
 class IRCBot(asynchat.async_chat):
-    TIME_BETWEEN_CONFIGS=timedelta(minutes=1)
+    TIME_BETWEEN_CONFIGS = timedelta(minutes=1)
 
     def __init__(self, host, port, nick='sfbot'):
         self.logger = logging.getLogger(__name__)
@@ -91,7 +94,7 @@ class IRCBot(asynchat.async_chat):
     def found_terminator(self):
         request = ''.join(self.data)
         self.logger.debug('RECV %s', request)
-        self.data=[]
+        self.data = []
         if request.startswith(':'):
             sender, cmd, rest = request[1:].split(' ', 2)
             sender = sender.split('!', 1)
@@ -114,7 +117,7 @@ class IRCBot(asynchat.async_chat):
 
     def check_configure(self):
         if (datetime.utcnow() - self.last_configured
-            > self.TIME_BETWEEN_CONFIGS):
+                > self.TIME_BETWEEN_CONFIGS):
             self.configure()
 
     def say(self, s):
@@ -131,7 +134,8 @@ class IRCBot(asynchat.async_chat):
         ThreadLocalORMSession.flush_all()
 
     def handle_command(self, sender, cmd, rest):
-        if cmd == 'NOTICE': pass
+        if cmd == 'NOTICE':
+            pass
         elif cmd == '433':
             self.set_nick()
             self.channels = {}
@@ -140,10 +144,13 @@ class IRCBot(asynchat.async_chat):
             self.say('PONG ' + rest)
         elif cmd in ('NOTICE', 'PRIVMSG'):
             rcpt, msg = rest.split(' ', 1)
-            if not self.set_context(rcpt): return
-            if msg.startswith(':'): msg = msg[1:]
+            if not self.set_context(rcpt):
+                return
+            if msg.startswith(':'):
+                msg = msg[1:]
             self.log_channel(sender, cmd, rcpt, msg)
-            if cmd == 'NOTICE': return
+            if cmd == 'NOTICE':
+                return
             for lnk in search.find_shortlinks(msg):
                 self.handle_shortlink(lnk, sender, rcpt)
         ThreadLocalORMSession.flush_all()
@@ -152,9 +159,11 @@ class IRCBot(asynchat.async_chat):
         ThreadLocalORMSession.close_all()
 
     def set_context(self, rcpt):
-        if rcpt == self.nick: return False
+        if rcpt == self.nick:
+            return False
         chan = self.channels.get(rcpt, None)
-        if not chan: return False
+        if not chan:
+            return False
         h.set_context(chan.project_id,
                       app_config_id=chan.app_config_id)
         return True
@@ -164,8 +173,9 @@ class IRCBot(asynchat.async_chat):
         if security.has_access(art, 'read', user=M.User.anonymous())():
             index = art.index()
             text = index['snippet_s'] or h.get_first(index, 'title')
-            url = urljoin(tg.config.get('base_url', 'http://sourceforge.net'), index['url_s'])
-            self.notice(rcpt, '[%s] - [%s](%s)' % (lnk.link, text,url))
+            url = urljoin(
+                tg.config.get('base_url', 'http://sourceforge.net'), index['url_s'])
+            self.notice(rcpt, '[%s] - [%s](%s)' % (lnk.link, text, url))
 
     def log_channel(self, sender, cmd, rcpt, rest):
         if cmd not in ('NOTICE', 'PRIVMSG'):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeChat/forgechat/main.py
----------------------------------------------------------------------
diff --git a/ForgeChat/forgechat/main.py b/ForgeChat/forgechat/main.py
index 1e7fb7b..c579ea5 100644
--- a/ForgeChat/forgechat/main.py
+++ b/ForgeChat/forgechat/main.py
@@ -43,25 +43,26 @@ from forgechat import version
 
 log = logging.getLogger(__name__)
 
+
 class ForgeChatApp(Application):
     __version__ = version.__version__
-    tool_label='Chat'
-    status='alpha'
-    default_mount_label='Chat'
-    default_mount_point='chat'
-    ordinal=13
-    permissions = ['configure', 'read' ]
+    tool_label = 'Chat'
+    status = 'alpha'
+    default_mount_label = 'Chat'
+    default_mount_point = 'chat'
+    ordinal = 13
+    permissions = ['configure', 'read']
     permissions_desc = {
         'configure': 'Set monitored IRC channel. Requires admin permission.',
         'read': 'View chat logs.',
     }
     config_options = Application.config_options + [
         ConfigOption('channel', str, ''),
-        ]
-    icons={
-        24:'images/chat_24.png',
-        32:'images/chat_32.png',
-        48:'images/chat_48.png'
+    ]
+    icons = {
+        24: 'images/chat_24.png',
+        32: 'images/chat_32.png',
+        48: 'images/chat_48.png'
     }
 
     def __init__(self, project, config):
@@ -79,14 +80,14 @@ class ForgeChatApp(Application):
         menu_id = self.config.options.mount_label
         with h.push_config(c, app=self):
             return [
-                SitemapEntry(menu_id, '.')[self.sidebar_menu()] ]
+                SitemapEntry(menu_id, '.')[self.sidebar_menu()]]
 
     @h.exceptionless([], log)
     def sidebar_menu(self):
         return [
             SitemapEntry('Home', '.'),
             SitemapEntry('Search', 'search'),
-            ]
+        ]
 
     def admin_menu(self):
         return super(ForgeChatApp, self).admin_menu()
@@ -99,7 +100,7 @@ class ForgeChatApp(Application):
         self.config.acl = [
             M.ACE.allow(role_anon, 'read'),
             M.ACE.allow(role_admin, 'configure'),
-            ]
+        ]
         CM.ChatChannel(
             project_id=self.config.project_id,
             app_config_id=self.config._id,
@@ -108,15 +109,16 @@ class ForgeChatApp(Application):
     def uninstall(self, project):
         "Remove all the tool's artifacts from the database"
         CM.ChatChannel.query.remove(dict(
-                project_id=self.config.project_id,
-                app_config_id=self.config._id))
+            project_id=self.config.project_id,
+            app_config_id=self.config._id))
         super(ForgeChatApp, self).uninstall(project)
 
+
 class AdminController(DefaultAdminController):
 
     @with_trailing_slash
     def index(self, **kw):
-        redirect(c.project.url()+'admin/tools')
+        redirect(c.project.url() + 'admin/tools')
 
     @expose()
     @require_post()
@@ -130,6 +132,7 @@ class AdminController(DefaultAdminController):
         flash('Chat options updated')
         super(AdminController, self).configure(channel=channel)
 
+
 class RootController(BaseController):
 
     @expose()
@@ -159,8 +162,9 @@ class RootController(BaseController):
 
     @expose()
     def _lookup(self, y, m, d, *rest):
-        y,m,d = int(y), int(m), int(d)
-        return DayController(date(y,m,d)), rest
+        y, m, d = int(y), int(m), int(d)
+        return DayController(date(y, m, d)), rest
+
 
 class DayController(RootController):
 
@@ -171,8 +175,8 @@ class DayController(RootController):
     def index(self, **kw):
         q = dict(
             timestamp={
-                '$gte':datetime.combine(self.day, time.min),
-                '$lte':datetime.combine(self.day, time.max)})
+                '$gte': datetime.combine(self.day, time.min),
+                '$lte': datetime.combine(self.day, time.max)})
         messages = CM.ChatMessage.query.find(q).sort('timestamp').all()
         prev = c.app.url + (self.day - timedelta(days=1)).strftime('%Y/%m/%d/')
         next = c.app.url + (self.day + timedelta(days=1)).strftime('%Y/%m/%d/')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeChat/forgechat/model/chat.py
----------------------------------------------------------------------
diff --git a/ForgeChat/forgechat/model/chat.py b/ForgeChat/forgechat/model/chat.py
index d2baeda..3907101 100644
--- a/ForgeChat/forgechat/model/chat.py
+++ b/ForgeChat/forgechat/model/chat.py
@@ -31,18 +31,20 @@ class ChatChannel(MappedClass):
         name = 'globals'
         session = M.main_orm_session
         indexes = ['project_id']
-        unique_indexes = [ 'channel' ]
+        unique_indexes = ['channel']
 
     _id = FieldProperty(S.ObjectId)
     project_id = FieldProperty(S.ObjectId)
     app_config_id = FieldProperty(S.ObjectId)
     channel = FieldProperty(str)
-    
+
+
 class ChatMessage(M.Artifact):
+
     class __mongometa__:
-        name='chat_message'
-        indexes = [ 'timestamp' ]
-    type_s='Chat Message'
+        name = 'chat_message'
+        indexes = ['timestamp']
+    type_s = 'Chat Message'
 
     timestamp = FieldProperty(datetime, if_missing=datetime.utcnow)
     sender = FieldProperty(str, if_missing='')
@@ -50,7 +52,6 @@ class ChatMessage(M.Artifact):
     text = FieldProperty(str, if_missing='')
     text_cache = FieldProperty(MarkdownCache)
 
-
     def index_id(self):
         id = 'Chat-%s:%s:%s.%s' % (
             self.channel,
@@ -73,7 +74,7 @@ class ChatMessage(M.Artifact):
                 + str(self._id))
 
     def shorthand_id(self):
-        return str(self._id) # pragma no cover
+        return str(self._id)  # pragma no cover
 
     @property
     def sender_short(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeChat/setup.py
----------------------------------------------------------------------
diff --git a/ForgeChat/setup.py b/ForgeChat/setup.py
index 4f01472..6cb2c59 100644
--- a/ForgeChat/setup.py
+++ b/ForgeChat/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgechat.version import __version__
 
@@ -25,7 +26,8 @@ setup(name='ForgeChat',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/controllers/forum.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/controllers/forum.py b/ForgeDiscussion/forgediscussion/controllers/forum.py
index 50ed7fb..fd2b816 100644
--- a/ForgeDiscussion/forgediscussion/controllers/forum.py
+++ b/ForgeDiscussion/forgediscussion/controllers/forum.py
@@ -39,27 +39,31 @@ from forgediscussion import tasks
 
 log = logging.getLogger(__name__)
 
+
 class pass_validator(object):
+
     def validate(self, v, s):
         return v
-pass_validator=pass_validator()
+pass_validator = pass_validator()
+
 
 class ModelConfig(object):
-    Discussion=DM.Forum
-    Thread=DM.ForumThread
-    Post=DM.ForumPost
-    Attachment=M.DiscussionAttachment
+    Discussion = DM.Forum
+    Thread = DM.ForumThread
+    Post = DM.ForumPost
+    Attachment = M.DiscussionAttachment
+
 
 class WidgetConfig(object):
     # Forms
     subscription_form = DW.SubscriptionForm()
-    subscribe_form=SubscribeForm()
+    subscribe_form = SubscribeForm()
     edit_post = DW.EditPost(show_subject=True)
     moderate_post = FW.ModeratePost()
     moderate_thread = FW.ModerateThread()
     flag_post = DW.FlagPost()
     post_filter = DW.PostFilter()
-    moderate_posts=DW.ModeratePosts()
+    moderate_posts = DW.ModeratePosts()
     # Other widgets
     discussion = FW.Forum()
     thread = FW.Thread()
@@ -68,9 +72,10 @@ class WidgetConfig(object):
     announcements_table = FW.AnnouncementsTable()
     discussion_header = FW.ForumHeader()
 
+
 class ForumController(DiscussionController):
-    M=ModelConfig
-    W=WidgetConfig
+    M = ModelConfig
+    W = WidgetConfig
 
     def _check_security(self):
         require_access(self.discussion, 'read')
@@ -98,13 +103,14 @@ class ForumController(DiscussionController):
                    limit=validators.Int(if_empty=25, if_invalid=25)))
     def index(self, threads=None, limit=25, page=0, count=0, **kw):
         if self.discussion.deleted:
-            redirect(self.discussion.url()+'deleted')
+            redirect(self.discussion.url() + 'deleted')
         limit, page, start = g.handle_paging(limit, page)
-        c.subscribed=M.Mailbox.subscribed(artifact=self.discussion)
+        c.subscribed = M.Mailbox.subscribed(artifact=self.discussion)
         threads = DM.ForumThread.query.find(dict(discussion_id=self.discussion._id, num_replies={'$gt': 0})) \
                                       .sort([('flags', pymongo.DESCENDING), ('last_post_date', pymongo.DESCENDING)])
-        response =  super(ForumController, self).index(threads=threads.skip(start).limit(int(limit)).all(),
-                                                       limit=limit, page=page, count=threads.count(), **kw)
+        response = super(
+            ForumController, self).index(threads=threads.skip(start).limit(int(limit)).all(),
+                                         limit=limit, page=page, count=threads.count(), **kw)
         c.discussion_header = self.W.discussion_header
         c.whole_forum_subscription_form = self.W.subscribe_form
         return response
@@ -134,7 +140,7 @@ class ForumThreadController(ThreadController):
                    limit=validators.Int(if_empty=25, if_invalid=25)))
     def index(self, limit=25, page=0, count=0, **kw):
         if self.thread.discussion.deleted and not has_access(c.app, 'configure')():
-            redirect(self.thread.discussion.url()+'deleted')
+            redirect(self.thread.discussion.url() + 'deleted')
         return super(ForumThreadController, self).index(limit=limit, page=page, count=count, show_moderate=True, **kw)
 
     @h.vardec
@@ -144,7 +150,7 @@ class ForumThreadController(ThreadController):
     def moderate(self, **kw):
         require_access(self.thread, 'moderate')
         if self.thread.discussion.deleted and not has_access(c.app, 'configure')():
-            redirect(self.thread.discussion.url()+'deleted')
+            redirect(self.thread.discussion.url() + 'deleted')
         args = self.W.moderate_thread.validate(kw, None)
         tasks.calc_forum_stats.post(self.thread.discussion.shortname)
         if args.pop('delete', None):
@@ -158,6 +164,7 @@ class ForumThreadController(ThreadController):
         self.thread.flags = args.pop('flags', [])
         redirect(self.thread.url())
 
+
 class ForumPostController(PostController):
 
     @h.vardec
@@ -166,7 +173,7 @@ class ForumPostController(PostController):
     @utils.AntiSpam.validate('Spambot protection engaged')
     def index(self, **kw):
         if self.thread.discussion.deleted and not has_access(c.app, 'configure')():
-            redirect(self.thread.discussion.url()+'deleted')
+            redirect(self.thread.discussion.url() + 'deleted')
         return super(ForumPostController, self).index(**kw)
 
     @expose()
@@ -175,7 +182,7 @@ class ForumPostController(PostController):
     def moderate(self, **kw):
         require_access(self.post.thread, 'moderate')
         if self.thread.discussion.deleted and not has_access(c.app, 'configure')():
-            redirect(self.thread.discussion.url()+'deleted')
+            redirect(self.thread.discussion.url() + 'deleted')
         args = self.W.moderate_post.validate(kw, None)
         tasks.calc_thread_stats.post(self.post.thread._id)
         tasks.calc_forum_stats(self.post.discussion.shortname)
@@ -185,5 +192,6 @@ class ForumPostController(PostController):
             redirect(request.referer)
         super(ForumPostController, self).moderate(**kw)
 
+
 class ForumModerationController(ModerationController):
     PostModel = DM.ForumPost

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/controllers/root.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/controllers/root.py b/ForgeDiscussion/forgediscussion/controllers/root.py
index d0526dc..82f4187 100644
--- a/ForgeDiscussion/forgediscussion/controllers/root.py
+++ b/ForgeDiscussion/forgediscussion/controllers/root.py
@@ -51,13 +51,14 @@ from forgediscussion.widgets.admin import AddForumShort
 
 log = logging.getLogger(__name__)
 
+
 class RootController(BaseController, DispatchIndex, FeedController):
 
     class W(object):
-        forum_subscription_form=FW.ForumSubscriptionForm()
-        new_topic=DW.NewTopicPost(submit_text='Post')
-        announcements_table=FW.AnnouncementsTable()
-        add_forum=AddForumShort()
+        forum_subscription_form = FW.ForumSubscriptionForm()
+        new_topic = DW.NewTopicPost(submit_text='Post')
+        announcements_table = FW.AnnouncementsTable()
+        add_forum = AddForumShort()
         search_results = SearchResults()
         search_help = SearchHelp(comments=False, history=False)
 
@@ -71,13 +72,13 @@ class RootController(BaseController, DispatchIndex, FeedController):
         c.new_topic = self.W.new_topic
         c.add_forum = self.W.add_forum
         c.announcements_table = self.W.announcements_table
-        announcements=model.ForumThread.query.find(dict(
-                app_config_id=c.app.config._id,
-                flags='Announcement',
-                )).all()
+        announcements = model.ForumThread.query.find(dict(
+            app_config_id=c.app.config._id,
+            flags='Announcement',
+        )).all()
         forums = model.Forum.query.find(dict(
-                        app_config_id=c.app.config._id,
-                        parent_id=None, deleted=False)).all()
+            app_config_id=c.app.config._id,
+            parent_id=None, deleted=False)).all()
         forums = [f for f in forums if h.has_access(f, 'read')()]
         return dict(forums=forums,
                     announcements=announcements,
@@ -105,7 +106,8 @@ class RootController(BaseController, DispatchIndex, FeedController):
                                              deleted=False))
         c.new_topic = self.W.new_topic
         my_forums = []
-        forum_name = h.really_unicode(unquote(forum_name)) if forum_name else None
+        forum_name = h.really_unicode(unquote(
+            forum_name)) if forum_name else None
         current_forum = None
         for f in forums:
             if forum_name == f.shortname:
@@ -128,7 +130,7 @@ class RootController(BaseController, DispatchIndex, FeedController):
             redirect(request.referrer)
         require_access(discussion, 'post')
         thd = discussion.get_discussion_thread(dict(
-                headers=dict(Subject=subject)))[0]
+            headers=dict(Subject=subject)))[0]
         post = thd.post(subject, text)
         flash('Message posted')
         redirect(thd.url())
@@ -190,9 +192,10 @@ class RootController(BaseController, DispatchIndex, FeedController):
         thread = kw.pop('thread', [])
         objs = []
         for data in forum:
-            objs.append(dict(obj=model.Forum.query.get(shortname=data['shortname'],
-                                                       app_config_id=c.app.config._id),
-                             subscribed=bool(data.get('subscribed'))))
+            objs.append(
+                dict(obj=model.Forum.query.get(shortname=data['shortname'],
+                                               app_config_id=c.app.config._id),
+                     subscribed=bool(data.get('subscribed'))))
         for data in thread:
             objs.append(dict(obj=model.Thread.query.get(_id=data['id']),
                              subscribed=bool(data.get('subscribed'))))
@@ -212,7 +215,7 @@ class RootController(BaseController, DispatchIndex, FeedController):
         """
         return FeedArgs(
             dict(project_id=project._id, app_config_id=app.config._id),
-             'Recent posts to %s' % app.config.options.mount_label,
+            'Recent posts to %s' % app.config.options.mount_label,
             app.url)
 
     @without_trailing_slash
@@ -278,7 +281,8 @@ class RootController(BaseController, DispatchIndex, FeedController):
 
             next_expected_date = begin
             for d in mongo_data:
-                this_date = datetime(d['_id']['year'], d['_id']['month'], d['_id']['day'])
+                this_date = datetime(
+                    d['_id']['year'], d['_id']['month'], d['_id']['day'])
                 for day in h.daterange(next_expected_date, this_date):
                     yield item(day, 0)
                 yield item(this_date, d['posts'])
@@ -306,9 +310,9 @@ class RootRestController(BaseController):
     def index(self, limit=100, page=0, **kw):
         limit, page, start = g.handle_paging(int(limit), int(page))
         forums = model.Forum.query.find(dict(
-                        app_config_id=c.app.config._id,
-                        parent_id=None, deleted=False)
-                ).sort([('shortname', pymongo.ASCENDING)]).skip(start).limit(limit)
+            app_config_id=c.app.config._id,
+            parent_id=None, deleted=False)
+        ).sort([('shortname', pymongo.ASCENDING)]).skip(start).limit(limit)
         count = forums.count()
         json = dict(forums=[dict(_id=f._id,
                                  name=f.name,
@@ -326,10 +330,12 @@ class RootRestController(BaseController):
     @expose('json:')
     def validate_import(self, doc=None, username_mapping=None, **kw):
         require_access(c.project, 'admin')
-        if username_mapping is None: username_mapping = {}
+        if username_mapping is None:
+            username_mapping = {}
         try:
             doc = json.loads(doc)
-            warnings, doc = import_support.validate_import(doc, username_mapping)
+            warnings, doc = import_support.validate_import(
+                doc, username_mapping)
             return dict(warnings=warnings, errors=[])
         except Exception, e:
             raise
@@ -338,12 +344,14 @@ class RootRestController(BaseController):
 
     @expose('json:')
     def perform_import(
-        self, doc=None, username_mapping=None, default_username=None, create_users=False,
-        **kw):
+            self, doc=None, username_mapping=None, default_username=None, create_users=False,
+            **kw):
         require_access(c.project, 'admin')
-        if username_mapping is None: username_mapping = '{}'
+        if username_mapping is None:
+            username_mapping = '{}'
         if c.api_token.get_capability('import') != [c.project.neighborhood.name, c.project.shortname]:
-            log.error('Import capability is not enabled for %s', c.project.shortname)
+            log.error('Import capability is not enabled for %s',
+                      c.project.shortname)
             raise exc.HTTPForbidden(detail='Import is not allowed')
         try:
             doc = json.loads(doc)
@@ -372,7 +380,8 @@ class ForumRestController(BaseController):
     @expose('json:')
     def index(self, limit=100, page=0, **kw):
         limit, page, start = g.handle_paging(int(limit), int(page))
-        topics = model.Forum.thread_class().query.find(dict(discussion_id=self.forum._id))
+        topics = model.Forum.thread_class().query.find(
+            dict(discussion_id=self.forum._id))
         topics = topics.sort([('flags', pymongo.DESCENDING),
                               ('last_post_date', pymongo.DESCENDING)])
         topics = topics.skip(start).limit(limit)
@@ -404,6 +413,7 @@ class ForumRestController(BaseController):
                 return ForumTopicRestController(self.forum, topic), remainder
         raise exc.HTTPNotFound()
 
+
 class ForumTopicRestController(BaseController):
 
     def __init__(self, forum, topic):
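
For reference, most of the hunks above only change continuation-line indentation. PEP 8 accepts two layouts for wrapped calls and literals, and this cleanup normalizes ad-hoc indents to one of them. A minimal illustrative snippet, not taken from the Allura source:

    def wrapped_call(log, name):
        # Style 1: the continuation is aligned with the opening parenthesis.
        log.info('recent posts to %s were re-indexed',
                 name)

    def wrapped_literal(query, app_config_id):
        # Style 2: hanging indent, nothing after the opening delimiter and
        # the wrapped block indented one extra level.
        return query.find(dict(
            app_config_id=app_config_id,
            parent_id=None,
            deleted=False,
        ))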

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/forum_main.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/forum_main.py b/ForgeDiscussion/forgediscussion/forum_main.py
index 1e55a2b..b9f033c 100644
--- a/ForgeDiscussion/forgediscussion/forum_main.py
+++ b/ForgeDiscussion/forgediscussion/forum_main.py
@@ -48,13 +48,16 @@ from widgets.admin import OptionsAdmin, AddForum
 
 log = logging.getLogger(__name__)
 
+
 class W:
     options_admin = OptionsAdmin()
     add_forum = AddForum()
 
+
 class ForgeDiscussionApp(Application):
     __version__ = version.__version__
-    permissions = ['configure', 'read', 'unmoderated_post', 'post', 'moderate', 'admin']
+    permissions = ['configure', 'read',
+                   'unmoderated_post', 'post', 'moderate', 'admin']
     permissions_desc = {
         'configure': 'Create new forums.',
         'read': 'View posts.',
@@ -63,22 +66,22 @@ class ForgeDiscussionApp(Application):
     config_options = Application.config_options + [
         ConfigOption('PostingPolicy',
                      schema.OneOf('ApproveOnceModerated', 'ModerateAll'), 'ApproveOnceModerated')
-        ]
-    PostClass=DM.ForumPost
-    AttachmentClass=DM.ForumAttachment
-    searchable=True
-    exportable=True
-    tool_label='Discussion'
-    tool_description="""
+    ]
+    PostClass = DM.ForumPost
+    AttachmentClass = DM.ForumAttachment
+    searchable = True
+    exportable = True
+    tool_label = 'Discussion'
+    tool_description = """
         Collaborate with your community in your forum.
     """
-    default_mount_label='Discussion'
-    default_mount_point='discussion'
-    ordinal=7
-    icons={
-        24:'images/forums_24.png',
-        32:'images/forums_32.png',
-        48:'images/forums_48.png'
+    default_mount_label = 'Discussion'
+    default_mount_point = 'discussion'
+    ordinal = 7
+    icons = {
+        24: 'images/forums_24.png',
+        32: 'images/forums_32.png',
+        48: 'images/forums_48.png'
     }
 
     def __init__(self, project, config):
@@ -98,7 +101,7 @@ class ForgeDiscussionApp(Application):
         log.info('Message from %s (%s)',
                  topic, self.config.options.mount_point)
         log.info('Headers are: %s', message['headers'])
-        shortname=urllib.unquote_plus(topic.replace('.', '/'))
+        shortname = urllib.unquote_plus(topic.replace('.', '/'))
         forum = DM.Forum.query.get(
             shortname=shortname, app_config_id=self.config._id)
         if forum is None:
@@ -110,9 +113,9 @@ class ForgeDiscussionApp(Application):
         '''Apps should provide their entries to be added to the main nav
         :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
         '''
-        return [ SitemapEntry(
-                self.config.options.mount_label,
-                '.')]
+        return [SitemapEntry(
+            self.config.options.mount_label,
+            '.')]
 
     @property
     @h.exceptionless([], log)
@@ -120,7 +123,7 @@ class ForgeDiscussionApp(Application):
         menu_id = self.config.options.mount_label
         with h.push_config(c, app=self):
             return [
-                SitemapEntry(menu_id, '.')[self.sidebar_menu()] ]
+                SitemapEntry(menu_id, '.')[self.sidebar_menu()]]
 
     @property
     def forums(self):
@@ -132,12 +135,13 @@ class ForgeDiscussionApp(Application):
 
     def subforums_of(self, parent_id):
         return DM.Forum.query.find(dict(
-                app_config_id=self.config._id,
-                parent_id=parent_id,
-                )).all()
+            app_config_id=self.config._id,
+            parent_id=parent_id,
+        )).all()
 
     def admin_menu(self):
-        admin_url = c.project.url() + 'admin/' + self.config.options.mount_point + '/'
+        admin_url = c.project.url() + 'admin/' + \
+            self.config.options.mount_point + '/'
         links = []
         if has_access(self, 'configure')():
             links.append(SitemapEntry('Forums', admin_url + 'forums'))
@@ -150,35 +154,44 @@ class ForgeDiscussionApp(Application):
             moderate_link = None
             forum_links = []
             forums = DM.Forum.query.find(dict(
-                            app_config_id=c.app.config._id,
-                            parent_id=None, deleted=False))
+                app_config_id=c.app.config._id,
+                parent_id=None, deleted=False))
             for f in forums:
-                if has_access(f,'read')():
+                if has_access(f, 'read')():
                     if f.url() in request.url and h.has_access(f, 'moderate')():
-                        moderate_link = SitemapEntry('Moderate', "%smoderate/" % f.url(), ui_icon=g.icons['pencil'],
-                        small = DM.ForumPost.query.find({'discussion_id':f._id, 'status':{'$ne': 'ok'}}).count())
-                    forum_links.append(SitemapEntry(f.name, f.url(), small=f.num_topics))
+                        moderate_link = SitemapEntry(
+                            'Moderate', "%smoderate/" % f.url(), ui_icon=g.icons['pencil'],
+                            small=DM.ForumPost.query.find({'discussion_id': f._id, 'status': {'$ne': 'ok'}}).count())
+                    forum_links.append(
+                        SitemapEntry(f.name, f.url(), small=f.num_topics))
             url = c.app.url + 'create_topic/'
-            url = h.urlquote(url + c.forum.shortname if getattr(c, 'forum', None) and c.forum else url)
-            l.append(SitemapEntry('Create Topic', url, ui_icon=g.icons['plus']))
+            url = h.urlquote(
+                url + c.forum.shortname if getattr(c, 'forum', None) and c.forum else url)
+            l.append(
+                SitemapEntry('Create Topic', url, ui_icon=g.icons['plus']))
             if has_access(c.app, 'configure')():
-                l.append(SitemapEntry('Add Forum', c.app.url + 'new_forum', ui_icon=g.icons['conversation']))
-                l.append(SitemapEntry('Admin Forums', c.project.url()+'admin/'+self.config.options.mount_point+'/forums', ui_icon=g.icons['pencil']))
+                l.append(SitemapEntry('Add Forum', c.app.url +
+                         'new_forum', ui_icon=g.icons['conversation']))
+                l.append(SitemapEntry('Admin Forums', c.project.url() + 'admin/' +
+                         self.config.options.mount_point + '/forums', ui_icon=g.icons['pencil']))
             if moderate_link:
                 l.append(moderate_link)
-            # if we are in a thread and not anonymous, provide placeholder links to use in js
+            # if we are in a thread and not anonymous, provide placeholder
+            # links to use in js
             if '/thread/' in request.url and c.user not in (None, M.User.anonymous()):
                 l.append(SitemapEntry(
-                        'Mark as Spam', 'flag_as_spam',
-                        ui_icon=g.icons['flag'], className='sidebar_thread_spam'))
-            l.append(SitemapEntry('Stats Graph', c.app.url + 'stats', ui_icon=g.icons['stats']))
+                    'Mark as Spam', 'flag_as_spam',
+                    ui_icon=g.icons['flag'], className='sidebar_thread_spam'))
+            l.append(SitemapEntry('Stats Graph', c.app.url +
+                     'stats', ui_icon=g.icons['stats']))
             if forum_links:
                 l.append(SitemapEntry('Forums'))
                 l = l + forum_links
             l.append(SitemapEntry('Help'))
-            l.append(SitemapEntry('Formatting Help', c.app.url + 'markdown_syntax'))
+            l.append(
+                SitemapEntry('Formatting Help', c.app.url + 'markdown_syntax'))
             return l
-        except: # pragma no cover
+        except:  # pragma no cover
             log.exception('sidebar_menu')
             return []
 
@@ -198,7 +211,7 @@ class ForgeDiscussionApp(Application):
             M.ACE.allow(role_developer, 'moderate'),
             M.ACE.allow(role_admin, 'configure'),
             M.ACE.allow(role_admin, 'admin'),
-            ]
+        ]
 
         utils.create_forum(self, new_forum=dict(
             shortname='general',
@@ -226,6 +239,7 @@ class ForgeDiscussionApp(Application):
             json.dump(forum, f, cls=jsonify.GenericJSON, indent=2)
         f.write(']}')
 
+
 class ForumAdminController(DefaultAdminController):
 
     def _check_security(self):
@@ -252,13 +266,14 @@ class ForumAdminController(DefaultAdminController):
     @expose()
     @require_post()
     def update_forums(self, forum=None, **kw):
-        if forum is None: forum = []
+        if forum is None:
+            forum = []
         for f in forum:
             forum = DM.Forum.query.get(_id=ObjectId(str(f['id'])))
             if f.get('delete'):
-                forum.deleted=True
+                forum.deleted = True
             elif f.get('undelete'):
-                forum.deleted=False
+                forum.deleted = False
             else:
                 if '.' in f['shortname'] or '/' in f['shortname'] or ' ' in f['shortname']:
                     flash('Shortname cannot contain space . or /', 'error')
@@ -269,7 +284,8 @@ class ForumAdminController(DefaultAdminController):
                 forum.monitoring_email = f['monitoring_email']
                 if 'members_only' in f:
                     if 'anon_posts' in f:
-                        flash('You cannot have anonymous posts in a members only forum.', 'warning')
+                        flash(
+                            'You cannot have anonymous posts in a members only forum.', 'warning')
                         forum.anon_posts = False
                         del f['anon_posts']
                     forum.members_only = True
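
Beyond re-indentation, the forum_main.py hunks apply two other PEP 8 rules seen throughout this series: spaces around "=" and after ":" in class attributes and dict literals (E225/E231), and no statement on the same line as an "if" (E701). A small self-contained sketch of the before/after shape, not from the codebase:

    class BeforeCleanup(object):
        searchable=True
        icons={24:'tool_24.png'}

        def update_forums(self, forum=None):
            if forum is None: forum = []
            return forum

    class AfterCleanup(object):
        searchable = True
        icons = {24: 'tool_24.png'}

        def update_forums(self, forum=None):
            if forum is None:
                forum = []
            return forum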

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/import_support.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/import_support.py b/ForgeDiscussion/forgediscussion/import_support.py
index 60aff57..5318ec5 100644
--- a/ForgeDiscussion/forgediscussion/import_support.py
+++ b/ForgeDiscussion/forgediscussion/import_support.py
@@ -30,14 +30,17 @@ from forgediscussion import model as DM
 
 log = logging.getLogger(__name__)
 
+
 def validate_import(json, username_mapping, default_username=None):
     warnings = []
     schema = make_schema(username_mapping, default_username, warnings)
     json = schema.validate(json)
     return warnings, json
 
+
 def perform_import(json, username_mapping, default_username=None, create_users=False):
-    if create_users: default_username=create_user
+    if create_users:
+        default_username = create_user
 
     # Validate the import, create missing users
     warnings, json = validate_import(json, username_mapping, default_username)
@@ -69,42 +72,44 @@ def perform_import(json, username_mapping, default_username=None, create_users=F
                 subject=head['subject'])
             for p in posts:
                 p = create_post(f._id, t._id, p)
-            t.first_post_id=p._id
+            t.first_post_id = p._id
             ThreadLocalORMSession.flush_all()
             t.update_stats()
         ThreadLocalORMSession.flush_all()
         f.update_stats()
     return warnings
 
+
 def make_schema(user_name_map, default_username, warnings):
     USER = AlluraUser(user_name_map, default_username, warnings)
     TIMESTAMP = TimeStamp()
 
     POST = {
-        'msg_id':str,
-        'is_followup_to':str,
-        'is_deleted':str,
-        'thread_id':str,
-        'poster_name':str,
-        'poster_user':USER,
-        'subject':str,
-        'date':TIMESTAMP,
-        'body':str,
-        }
+        'msg_id': str,
+        'is_followup_to': str,
+        'is_deleted': str,
+        'thread_id': str,
+        'poster_name': str,
+        'poster_user': USER,
+        'subject': str,
+        'date': TIMESTAMP,
+        'body': str,
+    }
 
     FORUM = {
         'name': str,
         'description': str,
-        'threads': { str: [ POST ] },
-        }
+        'threads': {str: [POST]},
+    }
 
     result = S.SchemaItem.make({
-            'class':str,
-            'trackers':[None],
-            'forums': { str: FORUM }
-            })
+        'class': str,
+        'trackers': [None],
+        'forums': {str: FORUM}
+    })
     return result
 
+
 class AlluraUser(S.FancySchemaItem):
 
     def __init__(self, mapping, default_username, warnings, **kw):
@@ -131,6 +136,7 @@ class AlluraUser(S.FancySchemaItem):
     def _from_python(self, value, state):
         return value.username
 
+
 class TimeStamp(S.FancySchemaItem):
 
     def _validate(self, value, **kwargs):
@@ -141,6 +147,7 @@ class TimeStamp(S.FancySchemaItem):
         value = datetime.utcfromtimestamp(value)
         return value
 
+
 def create_user(json_username):
     allura_username = c.project.shortname + '-' + json_username
     while True:
@@ -156,6 +163,7 @@ def create_user(json_username):
             raise
     return allura_username
 
+
 def create_post(discussion_id, thread_id, json_post):
     p = DM.ForumPost(
         _id='%s@import' % (json_post.msg_id),


[04/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/widgets/admin_custom_fields.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/widgets/admin_custom_fields.py b/ForgeTracker/forgetracker/widgets/admin_custom_fields.py
index 52af881..2938685 100644
--- a/ForgeTracker/forgetracker/widgets/admin_custom_fields.py
+++ b/ForgeTracker/forgetracker/widgets/admin_custom_fields.py
@@ -25,32 +25,34 @@ from pylons import tmpl_context as c
 from forgetracker import model
 from formencode import validators as fev
 
+
 class MilestonesAdmin(ffw.SortableTable):
-    defaults=dict(
+    defaults = dict(
         ffw.SortableTable.defaults,
         button=ffw.AdminField(field=ew.InputField(
-                css_class='add', field_type='button',
-                value='New Milestone')),
+            css_class='add', field_type='button',
+            value='New Milestone')),
         empty_msg='No milestones have been created.',
         nonempty_msg='Drag and drop the milestones to reorder.',
         repetitions=0)
     fields = [
         ew.HiddenField(name='old_name'),
-        ffw.Radio(name='default', label='Default', css_class='default-milestone'),
+        ffw.Radio(name='default', label='Default',
+                  css_class='default-milestone'),
         ew.Checkbox(name='complete', show_label=True, suppress_label=True),
         ew.TextField(name='name',
-        attrs={'style':'width: 80px'}),
+                     attrs={'style': 'width: 80px'}),
         ffw.DateField(name='due_date',
-        attrs={'style':'width: 80px'}),
+                      attrs={'style': 'width: 80px'}),
         ffw.AutoResizeTextarea(
             name='description',
-            attrs={'style':'height:1em; width: 150px'}),
+            attrs={'style': 'height:1em; width: 150px'}),
         ew.InputField(
             label='Delete',
             field_type='button',
-            attrs={'class':'delete', 'value':'Delete'}),
-        ]
-    button =  ew.InputField(
+            attrs={'class': 'delete', 'value': 'Delete'}),
+    ]
+    button = ew.InputField(
         css_class='add', field_type='button', value='New Milestone')
 
     def prepare_context(self, context):
@@ -62,38 +64,41 @@ class MilestonesAdmin(ffw.SortableTable):
         return response
 
     def resources(self):
-        for r in super(MilestonesAdmin, self).resources(): yield r
+        for r in super(MilestonesAdmin, self).resources():
+            yield r
         yield ew.CSSScript('''div.state-field table{ width: 700px; }''')
 
+
 class CustomFieldAdminDetail(ffw.StateField):
-    template='jinja:forgetracker:templates/tracker_widgets/custom_field_admin_detail.html'
-    defaults=dict(
+    template = 'jinja:forgetracker:templates/tracker_widgets/custom_field_admin_detail.html'
+    defaults = dict(
         ffw.StateField.defaults,
         selector=ffw.AdminField(field=ew.SingleSelectField(
-                name='type',
-                options=[
-                    ew.Option(py_value='string', label='Text'),
-                    ew.Option(py_value='number', label='Number'),
-                    ew.Option(py_value='boolean', label='Boolean'),
-                    ew.Option(py_value='select', label='Select'),
-                    ew.Option(py_value='milestone', label='Milestone'),
-                    ew.Option(py_value='user', label='User'),
-                    ],
-                )),
+            name='type',
+            options=[
+                ew.Option(py_value='string', label='Text'),
+                ew.Option(py_value='number', label='Number'),
+                ew.Option(py_value='boolean', label='Boolean'),
+                ew.Option(py_value='select', label='Select'),
+                ew.Option(py_value='milestone', label='Milestone'),
+                ew.Option(py_value='user', label='User'),
+            ],
+        )),
         states=dict(
             select=ffw.FieldCluster(
                 fields=[
                     ffw.AdminField(field=ew.TextField(name='options',
-                        label='Options (separate with spaces; quote if containing spaces; prefix with * to set a default)',
-                        )) ],
+                                                      label='Options (separate with spaces; quote if containing spaces; prefix with * to set a default)',
+                                                      ))],
                 show_labels=False),
             milestone=ffw.FieldCluster(
                 # name='milestones',
-                fields=[ MilestonesAdmin(name='milestones') ])
-            ))
+                fields=[MilestonesAdmin(name='milestones')])
+        ))
+
 
 class CustomFieldAdmin(ew.CompoundField):
-    template='jinja:forgetracker:templates/tracker_widgets/custom_field_admin.html'
+    template = 'jinja:forgetracker:templates/tracker_widgets/custom_field_admin.html'
 
     def resources(self):
         for r in super(CustomFieldAdmin, self).resources():
@@ -108,14 +113,17 @@ class CustomFieldAdmin(ew.CompoundField):
             label='Show in list view',
             show_label=True,
             suppress_label=True),
-        CustomFieldAdminDetail() ]
+        CustomFieldAdminDetail()]
+
 
 class TrackerFieldAdmin(f.ForgeForm):
-    submit_text=None
+    submit_text = None
+
     class fields(ew_core.NameList):
         open_status_names = ew.TextField(label='Open Statuses')
         closed_status_names = ew.TextField(label='Closed Statuses')
         custom_fields = ffw.SortableRepeatedField(field=CustomFieldAdmin())
+
     class buttons(ew_core.NameList):
         save = ew.SubmitButton(label='Save')
         cancel = ew.SubmitButton(
@@ -127,18 +135,23 @@ class TrackerFieldAdmin(f.ForgeForm):
         for rr in self.fields['custom_fields'].resources():
             yield rr
 
+
 class CustomFieldDisplay(ew.CompoundField):
-    template='jinja:forgetracker:templates/tracker_widgets/custom_field_display.html'
+    template = 'jinja:forgetracker:templates/tracker_widgets/custom_field_display.html'
+
 
 class CustomFieldsDisplay(ew.RepeatedField):
-    template='jinja:forgetracker:templates/tracker_widgets/custom_fields_display.html'
+    template = 'jinja:forgetracker:templates/tracker_widgets/custom_fields_display.html'
+
 
 class TrackerFieldDisplay(f.ForgeForm):
+
     class fields(ew_core.NameList):
         milestone_names = ew.TextField()
         open_status_names = ew.TextField(label='Open Statuses')
         closed_status_names = ew.TextField(label='Open Statuses')
         custom_fields = CustomFieldsDisplay()
+
     def resources(self):
         for rr in self.fields['custom_fields'].resources():
             yield rr

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/widgets/bin_form.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/widgets/bin_form.py b/ForgeTracker/forgetracker/widgets/bin_form.py
index 9777dcf..a62026e 100644
--- a/ForgeTracker/forgetracker/widgets/bin_form.py
+++ b/ForgeTracker/forgetracker/widgets/bin_form.py
@@ -22,19 +22,21 @@ from allura.lib import validators as V
 from forgetracker import model
 from formencode import validators as fev
 
+
 class BinForm(ew.SimpleForm):
-    template='jinja:forgetracker:templates/tracker_widgets/bin_form.html'
-    defaults=dict(
+    template = 'jinja:forgetracker:templates/tracker_widgets/bin_form.html'
+    defaults = dict(
         ew.SimpleForm.defaults,
-        submit_text = "Save Bin")
+        submit_text="Save Bin")
 
     class hidden_fields(ew.NameList):
-        _id=jinja2_ew.HiddenField(validator=V.Ming(model.Bin), if_missing=None)
+        _id = jinja2_ew.HiddenField(
+            validator=V.Ming(model.Bin), if_missing=None)
 
     class fields(ew.NameList):
-        summary=jinja2_ew.TextField(
+        summary = jinja2_ew.TextField(
             label='Bin Name',
             validator=fev.UnicodeString(not_empty=True))
-        terms=jinja2_ew.TextField(
+        terms = jinja2_ew.TextField(
             label='Search Terms',
             validator=fev.UnicodeString(not_empty=True))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/widgets/ticket_form.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/widgets/ticket_form.py b/ForgeTracker/forgetracker/widgets/ticket_form.py
index 2bdf61b..0a0d816 100644
--- a/ForgeTracker/forgetracker/widgets/ticket_form.py
+++ b/ForgeTracker/forgetracker/widgets/ticket_form.py
@@ -25,8 +25,9 @@ from allura import model as M
 from allura.lib.widgets import form_fields as ffw
 from allura.lib import helpers as h
 
+
 class TicketCustomFields(ew.CompoundField):
-    template='jinja:forgetracker:templates/tracker_widgets/ticket_custom_fields.html'
+    template = 'jinja:forgetracker:templates/tracker_widgets/ticket_custom_fields.html'
 
     def __init__(self, *args, **kwargs):
         super(TicketCustomFields, self).__init__(*args, **kwargs)
@@ -49,8 +50,9 @@ class TicketCustomFields(ew.CompoundField):
                     self._fields.append(TicketCustomField.make(cf))
         return self._fields
 
+
 class GenericTicketForm(ew.SimpleForm):
-    defaults=dict(
+    defaults = dict(
         ew.SimpleForm.defaults,
         name="ticket_form",
         submit_text='Save',
@@ -74,7 +76,8 @@ class GenericTicketForm(ew.SimpleForm):
 
         display = field.display(**ctx)
         if ctx['errors'] and field.show_errors and not ignore_errors:
-            display = "%s<div class='error'>%s</div>" % (display, ctx['errors'])
+            display = "%s<div class='error'>%s</div>" % (display,
+                                                         ctx['errors'])
         return display
 
     def _add_current_value_to_user_field(self, field, user):
@@ -97,21 +100,30 @@ class GenericTicketForm(ew.SimpleForm):
     def fields(self):
         fields = [
             ew.TextField(name='summary', label='Title',
-                attrs={'style':'width: 425px','placeholder':'Title'},
-                validator=fev.UnicodeString(not_empty=True, messages={'empty':"You must provide a Title"})),
-            ffw.MarkdownEdit(label='Description',name='description',
-                    attrs={'style':'width: 95%'}),
+                         attrs={'style': 'width: 425px',
+                                'placeholder': 'Title'},
+                         validator=fev.UnicodeString(
+                             not_empty=True, messages={
+                                 'empty': "You must provide a Title"})),
+            ffw.MarkdownEdit(label='Description', name='description',
+                             attrs={'style': 'width: 95%'}),
             ew.SingleSelectField(name='status', label='Status',
-                options=lambda: c.app.globals.all_status_names.split()),
+                                 options=lambda: c.app.globals.all_status_names.split(
+                                 )),
             ffw.ProjectUserCombo(name='assigned_to', label='Owner'),
-            ffw.LabelEdit(label='Labels',name='labels', className='ticket_form_tags'),
-            ew.Checkbox(name='private', label='Mark as Private', attrs={'class':'unlabeled'}),
-            ew.InputField(name='attachment', label='Attachment', field_type='file', attrs={'multiple': 'True'}, validator=fev.FieldStorageUploadConverter(if_missing=None)),
+            ffw.LabelEdit(label='Labels', name='labels',
+                          className='ticket_form_tags'),
+            ew.Checkbox(name='private', label='Mark as Private',
+                        attrs={'class': 'unlabeled'}),
+            ew.InputField(name='attachment', label='Attachment', field_type='file', attrs={
+                          'multiple': 'True'}, validator=fev.FieldStorageUploadConverter(if_missing=None)),
             ffw.MarkdownEdit(name='comment', label='Comment',
-                        attrs={'style':'min-height:7em; width:97%'}),
-            ew.SubmitButton(label=self.submit_text,name='submit',
-                attrs={'class':"ui-button ui-widget ui-state-default ui-button-text-only"}),
-            ew.HiddenField(name='ticket_num', validator=fev.Int(if_missing=None)),
+                             attrs={'style': 'min-height:7em; width:97%'}),
+            ew.SubmitButton(label=self.submit_text, name='submit',
+                            attrs={
+                                'class': "ui-button ui-widget ui-state-default ui-button-text-only"}),
+            ew.HiddenField(name='ticket_num',
+                           validator=fev.Int(if_missing=None)),
         ]
         # milestone is kind of special because of the layout
         # add it to the main form rather than handle with the other customs
@@ -122,8 +134,10 @@ class GenericTicketForm(ew.SimpleForm):
                     break
         return ew_core.NameList(fields)
 
+
 class TicketForm(GenericTicketForm):
-    template='jinja:forgetracker:templates/tracker_widgets/ticket_form.html'
+    template = 'jinja:forgetracker:templates/tracker_widgets/ticket_form.html'
+
     @property
     def fields(self):
         fields = ew_core.NameList(super(TicketForm, self).fields)
@@ -132,7 +146,8 @@ class TicketForm(GenericTicketForm):
         return fields
 
     def resources(self):
-        for r in super(TicketForm, self).resources(): yield r
+        for r in super(TicketForm, self).resources():
+            yield r
         yield ew.JSScript('''
         $(function(){
             $('#show_attach').click(function(evt) {
@@ -153,18 +168,21 @@ class TicketForm(GenericTicketForm):
             });
         });''')
 
+
 class TicketCustomField(object):
 
     def _select(field):
         options = []
-        field_options = h.split_select_field_options(h.really_unicode(field.options))
+        field_options = h.split_select_field_options(
+            h.really_unicode(field.options))
 
         for opt in field_options:
             selected = False
             if opt.startswith('*'):
                 opt = opt[1:]
                 selected = True
-            options.append(ew.Option(label=opt,html_value=opt,py_value=opt,selected=selected))
+            options.append(
+                ew.Option(label=opt, html_value=opt, py_value=opt, selected=selected))
         return ew.SingleSelectField(label=field.label, name=str(field.name), options=options)
 
     def _milestone(field):
@@ -206,8 +224,9 @@ class TicketCustomField(object):
         factory = cls.SELECTOR.get(field.get('type'), cls._default)
         return factory(field)
 
+
 class MilestoneField(ew.SingleSelectField):
-    template=ew.Snippet('''<select {{widget.j2_attrs({
+    template = ew.Snippet('''<select {{widget.j2_attrs({
                'id':id,
                'name':rendered_name,
                'multiple':multiple,
@@ -229,8 +248,10 @@ class MilestoneField(ew.SingleSelectField):
         context = super(MilestoneField, self).prepare_context(context)
 
         # group open / closed milestones
-        context['open_milestones'] = [opt for opt in self.options if not opt.complete]
-        context['closed_milestones'] = [opt for opt in self.options if opt.complete]
+        context['open_milestones'] = [
+            opt for opt in self.options if not opt.complete]
+        context['closed_milestones'] = [
+            opt for opt in self.options if opt.complete]
 
         # filter closed milestones entirely
         #value = context['value']

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/widgets/ticket_search.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/widgets/ticket_search.py b/ForgeTracker/forgetracker/widgets/ticket_search.py
index 2369512..453ed68 100644
--- a/ForgeTracker/forgetracker/widgets/ticket_search.py
+++ b/ForgeTracker/forgetracker/widgets/ticket_search.py
@@ -25,9 +25,10 @@ import ew.jinja2_ew as ew
 from allura.lib.widgets import form_fields as ffw
 from allura.lib.widgets import forms
 
+
 class TicketSearchResults(ew_core.SimpleForm):
-    template='jinja:forgetracker:templates/tracker_widgets/ticket_search_results.html'
-    defaults=dict(
+    template = 'jinja:forgetracker:templates/tracker_widgets/ticket_search_results.html'
+    defaults = dict(
         ew_core.SimpleForm.defaults,
         solr_error=None,
         count=None,
@@ -40,9 +41,9 @@ class TicketSearchResults(ew_core.SimpleForm):
         columns=None)
 
     class fields(ew_core.NameList):
-        page_list=ffw.PageList()
-        page_size=ffw.PageSize()
-        lightbox=ffw.Lightbox(name='col_list',trigger='#col_menu')
+        page_list = ffw.PageList()
+        page_size = ffw.PageSize()
+        lightbox = ffw.Lightbox(name='col_list', trigger='#col_menu')
 
     def resources(self):
         yield ew.JSLink('tracker_js/ticket-list.js')
@@ -50,9 +51,10 @@ class TicketSearchResults(ew_core.SimpleForm):
         for r in super(TicketSearchResults, self).resources():
             yield r
 
+
 class MassEdit(ew_core.SimpleForm):
-    template='jinja:forgetracker:templates/tracker_widgets/mass_edit.html'
-    defaults=dict(
+    template = 'jinja:forgetracker:templates/tracker_widgets/mass_edit.html'
+    defaults = dict(
         ew_core.SimpleForm.defaults,
         count=None,
         limit=None,
@@ -62,9 +64,9 @@ class MassEdit(ew_core.SimpleForm):
         sort=None)
 
     class fields(ew_core.NameList):
-        page_list=ffw.PageList()
-        page_size=ffw.PageSize()
-        lightbox=ffw.Lightbox(name='col_list',trigger='#col_menu')
+        page_list = ffw.PageList()
+        page_size = ffw.PageSize()
+        lightbox = ffw.Lightbox(name='col_list', trigger='#col_menu')
 
     def resources(self):
         yield ew.JSLink('tracker_js/ticket-list.js')
@@ -72,9 +74,10 @@ class MassEdit(ew_core.SimpleForm):
         for r in super(MassEdit, self).resources():
             yield r
 
+
 class MassEditForm(ew_core.Widget):
-    template='jinja:forgetracker:templates/tracker_widgets/mass_edit_form.html'
-    defaults=dict(
+    template = 'jinja:forgetracker:templates/tracker_widgets/mass_edit_form.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         globals=None,
         query=None,
@@ -85,8 +88,9 @@ class MassEditForm(ew_core.Widget):
     def resources(self):
         yield ew.JSLink('tracker_js/mass-edit.js')
 
+
 class MassMoveForm(forms.MoveTicketForm):
-    defaults=dict(
+    defaults = dict(
         forms.MoveTicketForm.defaults,
         action='.')
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/setup.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/setup.py b/ForgeTracker/setup.py
index 1934cc2..71b069c 100644
--- a/ForgeTracker/setup.py
+++ b/ForgeTracker/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgetracker.version import __version__
 
@@ -25,7 +26,8 @@ setup(name='ForgeTracker',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',
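
The setup.py hunk above splits "import sys, os" into separate statements. PEP 8 (checker code E401) wants one module per import line, although importing several names from a single module on one line remains allowed. An illustrative snippet, not from the codebase:

    # Discouraged (E401): several modules in one statement.
    #   import sys, os

    # Preferred: one module per import statement.
    import os
    import sys

    # Still fine: multiple names from one module.
    from os.path import exists, join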

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/__init__.py b/ForgeUserStats/forgeuserstats/__init__.py
index 77505f1..144e298 100644
--- a/ForgeUserStats/forgeuserstats/__init__.py
+++ b/ForgeUserStats/forgeuserstats/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/controllers/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/controllers/__init__.py b/ForgeUserStats/forgeuserstats/controllers/__init__.py
index 77505f1..144e298 100644
--- a/ForgeUserStats/forgeuserstats/controllers/__init__.py
+++ b/ForgeUserStats/forgeuserstats/controllers/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/controllers/userstats.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/controllers/userstats.py b/ForgeUserStats/forgeuserstats/controllers/userstats.py
index 0624a31..c0ecfc3 100644
--- a/ForgeUserStats/forgeuserstats/controllers/userstats.py
+++ b/ForgeUserStats/forgeuserstats/controllers/userstats.py
@@ -29,14 +29,15 @@ from allura.lib import validators as V
 
 stats_preferences_form = StatsPreferencesForm()
 
+
 class ForgeUserStatsCatController(BaseController):
 
     @expose()
     def _lookup(self, category, *remainder):
         cat = M.TroveCategory.query.get(shortname=category)
-        return ForgeUserStatsCatController(category = cat), remainder
+        return ForgeUserStatsCatController(category=cat), remainder
 
-    def __init__(self, category = None):
+    def __init__(self, category=None):
         self.category = category
         super(ForgeUserStatsCatController, self).__init__()
 
@@ -59,6 +60,7 @@ class ForgeUserStatsCatController(BaseController):
         ret_dict['category'] = self.category
         return ret_dict
 
+
 class ForgeUserStatsController(BaseController):
 
     category = ForgeUserStatsCatController()
@@ -74,9 +76,9 @@ class ForgeUserStatsController(BaseController):
         if not self.user.stats:
             UserStats.create(self.user)
         return dict(
-            user = self.user,
-            form = StatsPreferencesForm(
-                action = c.project.url() + 'userstats/change_settings'))
+            user=self.user,
+            form=StatsPreferencesForm(
+                action=c.project.url() + 'userstats/change_settings'))
 
     @expose()
     @require_post()
@@ -115,30 +117,30 @@ class ForgeUserStatsController(BaseController):
         ret_dict['last_login'] = stats.last_login
         if stats.last_login:
             ret_dict['last_login_days'] = \
-                (datetime.utcnow()-stats.last_login).days
+                (datetime.utcnow() - stats.last_login).days
 
         categories = {}
         for p in self.user.my_projects():
             for cat in p.trove_topic:
-                cat = M.TroveCategory.query.get(_id = cat)
+                cat = M.TroveCategory.query.get(_id=cat)
                 if categories.get(cat):
                     categories[cat] += 1
                 else:
                     categories[cat] = 1
-        categories = sorted(categories.items(), key=lambda (x,y): y,reverse=True)
+        categories = sorted(categories.items(),
+                            key=lambda (x, y): y, reverse=True)
 
         ret_dict['lastmonth_logins'] = stats.getLastMonthLogins()
         ret_dict['categories'] = categories
         days = ret_dict['days']
         if days >= 30:
             ret_dict['permonthlogins'] = \
-                round(stats.tot_logins_count*30.0/days,2)
+                round(stats.tot_logins_count * 30.0 / days, 2)
         else:
             ret_dict['permonthlogins'] = 'n/a'
 
         return ret_dict
 
-
     @expose('jinja:forgeuserstats:templates/commits.html')
     @with_trailing_slash
     def commits(self, **kw):
@@ -154,8 +156,8 @@ class ForgeUserStatsController(BaseController):
 
         commits = stats.getCommitsByCategory()
         return dict(
-            user = self.user,
-            data = commits)
+            user=self.user,
+            data=commits)
 
     @expose('jinja:forgeuserstats:templates/artifacts.html')
     @with_trailing_slash
@@ -173,8 +175,8 @@ class ForgeUserStatsController(BaseController):
         stats = self.user.stats
         artifacts = stats.getArtifactsByCategory(detailed=True)
         return dict(
-            user = self.user,
-            data = artifacts)
+            user=self.user,
+            data=artifacts)
 
     @expose('jinja:forgeuserstats:templates/tickets.html')
     @with_trailing_slash
@@ -215,22 +217,22 @@ def _getDataForCategory(category, stats):
     days = (datetime.utcnow() - stats.start_date).days
     if days >= 30:
         pmartifacts = dict(
-            created = round(totartifacts['created']*30.0/days,2),
-            modified=round(totartifacts['modified']*30.0/days,2))
+            created=round(totartifacts['created'] * 30.0 / days, 2),
+            modified=round(totartifacts['modified'] * 30.0 / days, 2))
         pmcommits = dict(
-            number=round(totcommits['number']*30.0/days,2),
-            lines=round(totcommits['lines']*30.0/days,2))
+            number=round(totcommits['number'] * 30.0 / days, 2),
+            lines=round(totcommits['lines'] * 30.0 / days, 2))
         pmtickets = dict(
-            assigned=round(tottickets['assigned']*30.0/days,2),
-            revoked=round(tottickets['revoked']*30.0/days,2),
-            solved=round(tottickets['solved']*30.0/days,2),
+            assigned=round(tottickets['assigned'] * 30.0 / days, 2),
+            revoked=round(tottickets['revoked'] * 30.0 / days, 2),
+            solved=round(tottickets['solved'] * 30.0 / days, 2),
             averagesolvingtime='n/a')
         for key in artifacts_by_type:
             value = artifacts_by_type[key]
             artifacts_by_type[key]['pmcreated'] = \
-                round(value['created']*30.0/days,2)
-            artifacts_by_type[key]['pmmodified']= \
-                round(value['modified']*30.0/days,2)
+                round(value['created'] * 30.0 / days, 2)
+            artifacts_by_type[key]['pmmodified'] = \
+                round(value['modified'] * 30.0 / days, 2)
     else:
         pmartifacts = dict(created='n/a', modified='n/a')
         pmcommits = dict(number='n/a', lines='n/a')
@@ -241,18 +243,18 @@ def _getDataForCategory(category, stats):
             averagesolvingtime='n/a')
         for key in artifacts_by_type:
             artifacts_by_type[key]['pmcreated'] = 'n/a'
-            artifacts_by_type[key]['pmmodified']= 'n/a'
+            artifacts_by_type[key]['pmmodified'] = 'n/a'
 
     return dict(
-        days = days,
-        totcommits = totcommits,
-        lastmonthcommits = lmcommits,
-        lastmonthtickets = lm_tickets,
-        tottickets = tottickets,
-        permonthcommits = pmcommits,
-        totartifacts = totartifacts,
-        lastmonthartifacts = lm_totartifacts,
-        permonthartifacts = pmartifacts,
-        artifacts_by_type = artifacts_by_type,
-        lastmonth_artifacts_by_type = lm_artifacts_by_type,
-        permonthtickets = pmtickets)
+        days=days,
+        totcommits=totcommits,
+        lastmonthcommits=lmcommits,
+        lastmonthtickets=lm_tickets,
+        tottickets=tottickets,
+        permonthcommits=pmcommits,
+        totartifacts=totartifacts,
+        lastmonthartifacts=lm_totartifacts,
+        permonthartifacts=pmartifacts,
+        artifacts_by_type=artifacts_by_type,
+        lastmonth_artifacts_by_type=lm_artifacts_by_type,
+        permonthtickets=pmtickets)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/main.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/main.py b/ForgeUserStats/forgeuserstats/main.py
index 62aba4b..44e606b 100644
--- a/ForgeUserStats/forgeuserstats/main.py
+++ b/ForgeUserStats/forgeuserstats/main.py
@@ -38,7 +38,9 @@ from ming.orm import session
 
 log = logging.getLogger(__name__)
 
+
 class UserStatsListener(EventsListener):
+
     def newArtifact(self, art_type, art_datetime, project, user):
         stats = user.stats
         if not stats:
@@ -67,7 +69,8 @@ class UserStatsListener(EventsListener):
         elif event_type == "revoked":
             stats.addRevokedTicket(ticket.mod_date, project)
         elif event_type == "closed":
-            stats.addClosedTicket(ticket.created_date,ticket.mod_date,project)
+            stats.addClosedTicket(
+                ticket.created_date, ticket.mod_date, project)
 
     def newCommit(self, newcommit, project, user):
         stats = user.stats
@@ -86,25 +89,26 @@ class UserStatsListener(EventsListener):
     def newOrganization(self, organization):
         pass
 
+
 class ForgeUserStatsApp(Application):
     __version__ = version.__version__
-    tool_label='UserStats'
-    default_mount_label='Stats'
-    default_mount_point='stats'
+    tool_label = 'UserStats'
+    default_mount_label = 'Stats'
+    default_mount_point = 'stats'
     permissions = ['configure', 'read', 'write',
-                    'unmoderated_post', 'post', 'moderate', 'admin']
+                   'unmoderated_post', 'post', 'moderate', 'admin']
     permissions_desc = {
         'read': 'View user stats.',
         'admin': 'Toggle stats visibility.',
     }
     max_instances = 0
-    ordinal=15
+    ordinal = 15
     config_options = Application.config_options
     default_external_feeds = []
-    icons={
-        24:'userstats/images/stats_24.png',
-        32:'userstats/images/stats_32.png',
-        48:'userstats/images/stats_48.png'
+    icons = {
+        24: 'userstats/images/stats_24.png',
+        32: 'userstats/images/stats_32.png',
+        48: 'userstats/images/stats_48.png'
     }
     root = ForgeUserStatsController()
 
@@ -129,7 +133,7 @@ class ForgeUserStatsApp(Application):
         menu_id = self.config.options.mount_label
         with h.push_config(c, app=self):
             return [
-                SitemapEntry(menu_id, '.')[self.sidebar_menu()] ]
+                SitemapEntry(menu_id, '.')[self.sidebar_menu()]]
 
     @property
     def show_discussion(self):
@@ -149,16 +153,16 @@ class ForgeUserStatsApp(Application):
 
     def admin_menu(self):
         links = [SitemapEntry(
-                     'Settings', c.project.url() + 'userstats/settings')]
+            'Settings', c.project.url() + 'userstats/settings')]
         return links
 
     def install(self, project):
-        #It doesn't make any sense to install the tool twice on the same
-        #project therefore, if it already exists, it doesn't install it
-        #a second time.
+        # It doesn't make any sense to install the tool twice on the same
+        # project therefore, if it already exists, it doesn't install it
+        # a second time.
         for tool in project.app_configs:
             if tool.tool_name == 'userstats':
-                if self.config.options.mount_point!=tool.options.mount_point:
+                if self.config.options.mount_point != tool.options.mount_point:
                     project.uninstall_app(self.config.options.mount_point)
                     return
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/model/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/model/__init__.py b/ForgeUserStats/forgeuserstats/model/__init__.py
index 77505f1..144e298 100644
--- a/ForgeUserStats/forgeuserstats/model/__init__.py
+++ b/ForgeUserStats/forgeuserstats/model/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/model/stats.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/model/stats.py b/ForgeUserStats/forgeuserstats/model/stats.py
index 493b1c3..03d6e83 100644
--- a/ForgeUserStats/forgeuserstats/model/stats.py
+++ b/ForgeUserStats/forgeuserstats/model/stats.py
@@ -25,25 +25,27 @@ from allura.lib import plugin
 from allura.model.session import main_orm_session
 from allura.model import Stats
 
+
 class UserStats(Stats):
+
     class __mongometa__:
-        name='userstats'
+        name = 'userstats'
         session = main_orm_session
-        unique_indexes = [ '_id', 'user_id']
+        unique_indexes = ['_id', 'user_id']
 
-    tot_logins_count = FieldProperty(int, if_missing = 0)
+    tot_logins_count = FieldProperty(int, if_missing=0)
     last_login = FieldProperty(datetime)
-    lastmonthlogins=FieldProperty([datetime])
+    lastmonthlogins = FieldProperty([datetime])
     user_id = FieldProperty(S.ObjectId)
 
     @classmethod
     def create(cls, user):
         auth_provider = plugin.AuthenticationProvider.get(request)
         reg_date = auth_provider.user_registration_date(user)
-        stats = cls.query.get(user_id = user._id)
+        stats = cls.query.get(user_id=user._id)
         if stats:
             return stats
-        stats = cls(user_id=user._id, registration_date = reg_date)
+        stats = cls(user_id=user._id, registration_date=reg_date)
         user.stats_id = stats._id
         return stats
 
@@ -64,5 +66,5 @@ class UserStats(Stats):
         self.tot_logins_count += 1
         self.lastmonthlogins.append(login_datetime)
         self.checkOldArtifacts()
-        
+
 Mapper.compile_all()
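
Changes of this shape can be generated or checked mechanically. The commit message does not say whether a tool was used here, but autopep8 produces exactly this class of edit; a minimal sketch, assuming the autopep8 package is installed and using its documented fix_code() helper:

    import autopep8

    # A deliberately non-conformant snippet, roughly matching the patterns
    # fixed in the diffs above (missing spaces, a one-line "if").
    source = (
        "class UserStats(object):\n"
        "    tot_logins_count=0\n"
        "\n"
        "    def add_login(self, when=None):\n"
        "        if when is None: return\n"
        "        self.tot_logins_count+=1\n"
    )

    print(autopep8.fix_code(source))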

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/tests/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/tests/__init__.py b/ForgeUserStats/forgeuserstats/tests/__init__.py
index 77505f1..144e298 100644
--- a/ForgeUserStats/forgeuserstats/tests/__init__.py
+++ b/ForgeUserStats/forgeuserstats/tests/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/tests/test_model.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/tests/test_model.py b/ForgeUserStats/forgeuserstats/tests/test_model.py
index 6940385..aef4db7 100644
--- a/ForgeUserStats/forgeuserstats/tests/test_model.py
+++ b/ForgeUserStats/forgeuserstats/tests/test_model.py
@@ -31,8 +31,12 @@ from allura import model as M
 
 from forgeuserstats.model import stats as USM
 
-test_project_with_repo = 'test2'  # important to be distinct from 'test' which ForgeGit uses, so that the tests can run in parallel and not clobber each other
-with_git = td.with_tool(test_project_with_repo, 'Git', 'src-git', 'Git', type='git')
+# important to be distinct from 'test' which ForgeGit uses, so that the
+# tests can run in parallel and not clobber each other
+test_project_with_repo = 'test2'
+with_git = td.with_tool(test_project_with_repo, 'Git',
+                        'src-git', 'Git', type='git')
+
 
 class TestUserStats(unittest.TestCase):
 
@@ -96,12 +100,16 @@ class TestUserStats(unittest.TestCase):
         assert artifacts['modified'] == init_art['modified']
         assert art_wiki['created'] == init_art_wiki['created'] + 1
         assert art_wiki['modified'] == init_art_wiki['modified']
-        assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created'] + 1
-        assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified']
-        assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created'] + 1
-        assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified']
-
-        #In that case, last month stats should not be changed
+        assert art_by_type['Wiki'][
+            'created'] == init_art_by_type['Wiki']['created'] + 1
+        assert art_by_type['Wiki'][
+            'modified'] == init_art_by_type['Wiki']['modified']
+        assert lm_art_by_type['Wiki'][
+            'created'] == init_lm_art_by_type['Wiki']['created'] + 1
+        assert lm_art_by_type['Wiki'][
+            'modified'] == init_lm_art_by_type['Wiki']['modified']
+
+        # In that case, last month stats should not be changed
         new_date = datetime.utcnow() + timedelta(-32)
         self.user.stats.addNewArtifact('Wiki', new_date, p)
         lm_art = self.user.stats.getLastMonthArtifacts()
@@ -116,10 +124,14 @@ class TestUserStats(unittest.TestCase):
         assert artifacts['modified'] == init_art['modified']
         assert art_wiki['created'] == init_art_wiki['created'] + 2
         assert art_wiki['modified'] == init_art_wiki['modified']
-        assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created'] + 2
-        assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified']
-        assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created'] + 1
-        assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified']
+        assert art_by_type['Wiki'][
+            'created'] == init_art_by_type['Wiki']['created'] + 2
+        assert art_by_type['Wiki'][
+            'modified'] == init_art_by_type['Wiki']['modified']
+        assert lm_art_by_type['Wiki'][
+            'created'] == init_lm_art_by_type['Wiki']['created'] + 1
+        assert lm_art_by_type['Wiki'][
+            'modified'] == init_lm_art_by_type['Wiki']['modified']
 
         p.trove_topic = [topic._id]
 
@@ -138,15 +150,21 @@ class TestUserStats(unittest.TestCase):
         assert artifacts['modified'] == init_art['modified']
         assert art_wiki['created'] == init_art_wiki['created'] + 3
         assert art_wiki['modified'] == init_art_wiki['modified']
-        assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created'] + 3
-        assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified']
-        assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created'] + 2
-        assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified']
+        assert art_by_type['Wiki'][
+            'created'] == init_art_by_type['Wiki']['created'] + 3
+        assert art_by_type['Wiki'][
+            'modified'] == init_art_by_type['Wiki']['modified']
+        assert lm_art_by_type['Wiki'][
+            'created'] == init_lm_art_by_type['Wiki']['created'] + 2
+        assert lm_art_by_type['Wiki'][
+            'modified'] == init_lm_art_by_type['Wiki']['modified']
         assert art_sci['created'] == init_art_sci['created'] + 1
         assert art_sci['modified'] == init_art_sci['modified']
-        assert dict(messagetype='Wiki', created= 1, modified = 0) in art_by_cat[topic]
+        assert dict(messagetype='Wiki', created=1,
+                    modified=0) in art_by_cat[topic]
         art_by_cat = self.user.stats.getArtifactsByCategory(detailed=False)
-        assert art_by_cat[topic]['created'] == 1 and art_by_cat[topic]['modified'] == 0
+        assert art_by_cat[topic]['created'] == 1 and art_by_cat[
+            topic]['modified'] == 0
 
     @td.with_user_project('test-user-2')
     def test_modify_artifact_stats(self):
@@ -173,12 +191,16 @@ class TestUserStats(unittest.TestCase):
         assert artifacts['modified'] == init_art['modified'] + 1
         assert art_wiki['created'] == init_art_wiki['created']
         assert art_wiki['modified'] == init_art_wiki['modified'] + 1
-        assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created']
-        assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified'] + 1
-        assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created']
-        assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified'] + 1
-
-        #In that case, last month stats should not be changed
+        assert art_by_type['Wiki'][
+            'created'] == init_art_by_type['Wiki']['created']
+        assert art_by_type['Wiki'][
+            'modified'] == init_art_by_type['Wiki']['modified'] + 1
+        assert lm_art_by_type['Wiki'][
+            'created'] == init_lm_art_by_type['Wiki']['created']
+        assert lm_art_by_type['Wiki'][
+            'modified'] == init_lm_art_by_type['Wiki']['modified'] + 1
+
+        # In that case, last month stats should not be changed
         new_date = datetime.utcnow() + timedelta(-32)
         self.user.stats.addModifiedArtifact('Wiki', new_date, p)
         lm_art = self.user.stats.getLastMonthArtifacts()
@@ -193,10 +215,14 @@ class TestUserStats(unittest.TestCase):
         assert artifacts['modified'] == init_art['modified'] + 2
         assert art_wiki['created'] == init_art_wiki['created']
         assert art_wiki['modified'] == init_art_wiki['modified'] + 2
-        assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created']
-        assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified'] + 2
-        assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created']
-        assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified'] + 1
+        assert art_by_type['Wiki'][
+            'created'] == init_art_by_type['Wiki']['created']
+        assert art_by_type['Wiki'][
+            'modified'] == init_art_by_type['Wiki']['modified'] + 2
+        assert lm_art_by_type['Wiki'][
+            'created'] == init_lm_art_by_type['Wiki']['created']
+        assert lm_art_by_type['Wiki'][
+            'modified'] == init_lm_art_by_type['Wiki']['modified'] + 1
 
         p.trove_topic = [topic._id]
 
@@ -215,15 +241,21 @@ class TestUserStats(unittest.TestCase):
         assert artifacts['modified'] == init_art['modified'] + 3
         assert art_wiki['created'] == init_art_wiki['created']
         assert art_wiki['modified'] == init_art_wiki['modified'] + 3
-        assert art_by_type['Wiki']['created'] == init_art_by_type['Wiki']['created']
-        assert art_by_type['Wiki']['modified'] == init_art_by_type['Wiki']['modified'] + 3
-        assert lm_art_by_type['Wiki']['created'] == init_lm_art_by_type['Wiki']['created']
-        assert lm_art_by_type['Wiki']['modified'] == init_lm_art_by_type['Wiki']['modified'] +2
+        assert art_by_type['Wiki'][
+            'created'] == init_art_by_type['Wiki']['created']
+        assert art_by_type['Wiki'][
+            'modified'] == init_art_by_type['Wiki']['modified'] + 3
+        assert lm_art_by_type['Wiki'][
+            'created'] == init_lm_art_by_type['Wiki']['created']
+        assert lm_art_by_type['Wiki'][
+            'modified'] == init_lm_art_by_type['Wiki']['modified'] + 2
         assert art_sci['created'] == init_art_sci['created']
         assert art_sci['modified'] == init_art_sci['modified'] + 1
-        assert dict(messagetype='Wiki', created=0, modified=1) in art_by_cat[topic]
+        assert dict(messagetype='Wiki', created=0,
+                    modified=1) in art_by_cat[topic]
         art_by_cat = self.user.stats.getArtifactsByCategory(detailed=False)
-        assert art_by_cat[topic]['created'] == 0 and art_by_cat[topic]['modified'] == 1
+        assert art_by_cat[topic]['created'] == 0 and art_by_cat[
+            topic]['modified'] == 1
 
     @td.with_user_project('test-user-2')
     def test_ticket_stats(self):
@@ -231,14 +263,16 @@ class TestUserStats(unittest.TestCase):
         topic = TroveCategory.query.get(shortname='scientific')
         create_time = datetime.utcnow() + timedelta(-5)
 
-        init_lm_tickets_art = self.user.stats.getLastMonthArtifacts(art_type='Ticket')
+        init_lm_tickets_art = self.user.stats.getLastMonthArtifacts(
+            art_type='Ticket')
         init_tickets_art = self.user.stats.getArtifacts(art_type='Ticket')
         init_tickets_sci_art = self.user.stats.getArtifacts(category=topic._id)
         init_tickets = self.user.stats.getTickets()
         init_lm_tickets = self.user.stats.getLastMonthTickets()
 
         self.user.stats.addNewArtifact('Ticket', create_time, p)
-        lm_tickets_art = self.user.stats.getLastMonthArtifacts(art_type='Ticket')
+        lm_tickets_art = self.user.stats.getLastMonthArtifacts(
+            art_type='Ticket')
         tickets_art = self.user.stats.getArtifacts(art_type='Ticket')
         tickets_sci_art = self.user.stats.getArtifacts(category=topic._id)
 
@@ -276,7 +310,8 @@ class TestUserStats(unittest.TestCase):
         assert lm_tickets['solved'] == init_lm_tickets['solved']
         assert lm_tickets['averagesolvingtime'] is None
 
-        self.user.stats.addClosedTicket(create_time, create_time + timedelta(1), p)
+        self.user.stats.addClosedTicket(
+            create_time, create_time + timedelta(1), p)
         tickets = self.user.stats.getTickets()
         lm_tickets = self.user.stats.getLastMonthTickets()
 
@@ -284,7 +319,7 @@ class TestUserStats(unittest.TestCase):
         assert tickets['revoked'] == init_tickets['revoked'] + 1
         assert tickets['solved'] == init_tickets['solved'] + 1
 
-        solving_time = dict(seconds=0,minutes=0,days=1,hours=0)
+        solving_time = dict(seconds=0, minutes=0, days=1, hours=0)
         assert tickets['averagesolvingtime'] == solving_time
         assert lm_tickets['assigned'] == init_lm_tickets['assigned'] + 1
         assert lm_tickets['revoked'] == init_lm_tickets['revoked']
@@ -292,11 +327,12 @@ class TestUserStats(unittest.TestCase):
         assert lm_tickets['averagesolvingtime'] == solving_time
 
         p.trove_topic = []
-        self.user.stats.addClosedTicket(create_time, create_time + timedelta(3), p)
+        self.user.stats.addClosedTicket(
+            create_time, create_time + timedelta(3), p)
         tickets = self.user.stats.getTickets()
         lm_tickets = self.user.stats.getLastMonthTickets()
 
-        solving_time = dict(seconds=0,minutes=0,days=2,hours=0)
+        solving_time = dict(seconds=0, minutes=0, days=2, hours=0)
 
         assert tickets['assigned'] == init_tickets['assigned'] + 1
         assert tickets['revoked'] == init_tickets['revoked'] + 1
@@ -309,7 +345,7 @@ class TestUserStats(unittest.TestCase):
 
         by_cat = self.user.stats.getTicketsByCategory()
         lm_by_cat = self.user.stats.getLastMonthTicketsByCategory()
-        solving_time=dict(days=1,hours=0,minutes=0,seconds=0)
+        solving_time = dict(days=1, hours=0, minutes=0, seconds=0)
 
         assert by_cat[topic]['assigned'] == 1
         assert by_cat[topic]['revoked'] == 1
@@ -360,7 +396,8 @@ class TestUserStats(unittest.TestCase):
         assert lm_by_cat[topic]['number'] == 1
         assert lm_by_cat[topic]['lines'] == 1
 
-        self.user.stats.addCommit(commit, datetime.utcnow() + timedelta(-40), p)
+        self.user.stats.addCommit(
+            commit, datetime.utcnow() + timedelta(-40), p)
         commits = self.user.stats.getCommits()
         assert commits['number'] == init_commits['number'] + 2
         assert commits['lines'] == init_commits['lines'] + 2
@@ -385,48 +422,55 @@ class TestUserStats(unittest.TestCase):
         lm_logins = self.user.stats.getLastMonthLogins()
         assert logins == init_logins + 1
         assert lm_logins == init_lm_logins + 1
-        assert abs(self.user.stats.last_login - login_datetime) < timedelta(seconds=1)
+        assert abs(self.user.stats.last_login -
+                   login_datetime) < timedelta(seconds=1)
 
         self.user.stats.addLogin(datetime.utcnow() + timedelta(-32))
         logins = self.user.stats.tot_logins_count
         lm_logins = self.user.stats.getLastMonthLogins()
         assert logins == init_logins + 2
         assert lm_logins == init_lm_logins + 1
-        assert abs(self.user.stats.last_login - login_datetime) < timedelta(seconds=1)
+        assert abs(self.user.stats.last_login -
+                   login_datetime) < timedelta(seconds=1)
 
     def test_start_date(self):
-        stats = USM.UserStats(registration_date=datetime(2012,04,01))
-        self.assertEqual(stats.start_date, datetime(2012,04,01))
+        stats = USM.UserStats(registration_date=datetime(2012, 04, 01))
+        self.assertEqual(stats.start_date, datetime(2012, 04, 01))
         with h.push_config(config, **{'userstats.start_date': '2013-04-01'}):
-            self.assertEqual(stats.start_date, datetime(2013,04,01))
+            self.assertEqual(stats.start_date, datetime(2013, 04, 01))
         with h.push_config(config, **{'userstats.start_date': '2011-04-01'}):
-            self.assertEqual(stats.start_date, datetime(2012,04,01))
+            self.assertEqual(stats.start_date, datetime(2012, 04, 01))
 
     @mock.patch('allura.model.stats.difflib.unified_diff')
     def test_count_loc(self, unified_diff):
         stats = USM.UserStats()
         newcommit = mock.Mock(
-                parent_ids=['deadbeef'],
-                diffs=mock.Mock(
-                    changed=[mock.MagicMock()],
-                    copied=[mock.MagicMock()],
-                    added=[mock.MagicMock()],
-                ),
-            )
-        unified_diff.return_value = ['+++','---','+line']
+            parent_ids=['deadbeef'],
+            diffs=mock.Mock(
+                changed=[mock.MagicMock()],
+                copied=[mock.MagicMock()],
+                added=[mock.MagicMock()],
+            ),
+        )
+        unified_diff.return_value = ['+++', '---', '+line']
         newcommit.tree.get_blob_by_path.return_value = mock.MagicMock()
-        newcommit.tree.get_blob_by_path.return_value.__iter__.return_value = ['one']
-        newcommit.repo.commit().tree.get_blob_by_path.return_value = mock.MagicMock()
-        newcommit.repo.commit().tree.get_blob_by_path.return_value.__iter__.return_value = ['two']
+        newcommit.tree.get_blob_by_path.return_value.__iter__.return_value = [
+            'one']
+        newcommit.repo.commit(
+        ).tree.get_blob_by_path.return_value = mock.MagicMock()
+        newcommit.repo.commit().tree.get_blob_by_path.return_value.__iter__.return_value = [
+            'two']
         commit_datetime = datetime.utcnow()
         project = mock.Mock(
-                trove_topic=[],
-                trove_language=[],
-            )
+            trove_topic=[],
+            trove_language=[],
+        )
         stats.addCommit(newcommit, commit_datetime, project)
-        self.assertEqual(stats.general[0].commits[0], {'lines': 3, 'number': 1, 'language': None})
+        self.assertEqual(stats.general[0].commits[0],
+                         {'lines': 3, 'number': 1, 'language': None})
         unified_diff.reset_mock()
         with h.push_config(config, **{'userstats.count_lines_of_code': 'false'}):
             stats.addCommit(newcommit, commit_datetime, project)
-        self.assertEqual(stats.general[0].commits[0], {'lines': 3, 'number': 2, 'language': None})
+        self.assertEqual(stats.general[0].commits[0],
+                         {'lines': 3, 'number': 2, 'language': None})
         unified_diff.assert_not_called()
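
The mocked test above suggests the commit counter simply tallies every line that
difflib.unified_diff yields (the three-element return value produces lines == 3).
A minimal sketch under that assumption, not the actual allura.model.stats
implementation:

import difflib

def count_changed_lines(old_lines, new_lines):
    # Tally every line the unified diff emits, matching what the mocked
    # test above expects (3 for ['+++', '---', '+line']).
    diff = difflib.unified_diff(old_lines, new_lines, 'old', 'new', lineterm='')
    return sum(1 for _ in diff)

n = count_changed_lines(['one'], ['two'])
# n == 5: two file headers, one hunk header, one '-' line and one '+' line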

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/tests/test_stats.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/tests/test_stats.py b/ForgeUserStats/forgeuserstats/tests/test_stats.py
index 0f8efa9..20fbc3b 100644
--- a/ForgeUserStats/forgeuserstats/tests/test_stats.py
+++ b/ForgeUserStats/forgeuserstats/tests/test_stats.py
@@ -31,8 +31,11 @@ from forgewiki import model as WM
 from forgetracker import model as TM
 
 
-test_project_with_repo = 'test2'  # important to be distinct from 'test' which ForgeGit uses, so that the tests can run in parallel and not clobber each other
-with_git = td.with_tool(test_project_with_repo, 'Git', 'src-git', 'Git', type='git')
+# important to be distinct from 'test' which ForgeGit uses, so that the
+# tests can run in parallel and not clobber each other
+test_project_with_repo = 'test2'
+with_git = td.with_tool(test_project_with_repo, 'Git',
+                        'src-git', 'Git', type='git')
 
 
 class TestStats(TestController):
@@ -46,7 +49,7 @@ class TestStats(TestController):
         user = User.by_username('test-user')
         init_logins = user.stats.tot_logins_count
         r = self.app.post('/auth/do_login', params=dict(
-                username=user.username, password='foo'))
+            username=user.username, password='foo'))
 
         assert user.stats.tot_logins_count == 1 + init_logins
         assert user.stats.getLastMonthLogins() == 1 + init_logins
@@ -57,8 +60,8 @@ class TestStats(TestController):
         initial_artifacts = c.user.stats.getArtifacts()
         initial_wiki = c.user.stats.getArtifacts(art_type="Wiki")
         self.app.post('/wiki/TestPage/update',
-            params=dict(title='TestPage', text='some text'),
-            extra_environ=dict(username=str(c.user.username)))
+                      params=dict(title='TestPage', text='some text'),
+                      extra_environ=dict(username=str(c.user.username)))
 
         artifacts = c.user.stats.getArtifacts()
         wiki = c.user.stats.getArtifacts(art_type="Wiki")
@@ -69,8 +72,8 @@ class TestStats(TestController):
         assert wiki['modified'] == initial_wiki['modified']
 
         self.app.post('/wiki/TestPage2/update',
-            params=dict(title='TestPage2', text='some text'),
-            extra_environ=dict(username=str(c.user.username)))
+                      params=dict(title='TestPage2', text='some text'),
+                      extra_environ=dict(username=str(c.user.username)))
 
         artifacts = c.user.stats.getArtifacts()
         wiki = c.user.stats.getArtifacts(art_type="Wiki")
@@ -81,8 +84,9 @@ class TestStats(TestController):
         assert wiki['modified'] == initial_wiki['modified']
 
         self.app.post('/wiki/TestPage2/update',
-            params=dict(title='TestPage2', text='some modified text'),
-            extra_environ=dict(username=str(c.user.username)))
+                      params=dict(title='TestPage2',
+                                  text='some modified text'),
+                      extra_environ=dict(username=str(c.user.username)))
 
         artifacts = c.user.stats.getArtifacts()
         wiki = c.user.stats.getArtifacts(art_type="Wiki")
@@ -95,12 +99,13 @@ class TestStats(TestController):
     @td.with_tool('test', 'tickets', mount_point='tickets', mount_label='tickets', username='test-admin')
     def test_tracker_stats(self):
         initial_tickets = c.user.stats.getTickets()
-        initial_tickets_artifacts = c.user.stats.getArtifacts(art_type="Ticket")
+        initial_tickets_artifacts = c.user.stats.getArtifacts(
+            art_type="Ticket")
 
         r = self.app.post('/tickets/save_ticket',
-            params={'ticket_form.summary':'test',
-                    'ticket_form.assigned_to' : str(c.user.username)},
-            extra_environ=dict(username=str(c.user.username)))
+                          params={'ticket_form.summary': 'test',
+                                  'ticket_form.assigned_to': str(c.user.username)},
+                          extra_environ=dict(username=str(c.user.username)))
 
         ticketnum = str(TM.Ticket.query.get(summary='test').ticket_num)
 
@@ -110,14 +115,16 @@ class TestStats(TestController):
         assert tickets['assigned'] == initial_tickets['assigned'] + 1
         assert tickets['solved'] == initial_tickets['solved']
         assert tickets['revoked'] == initial_tickets['revoked']
-        assert tickets_artifacts['created'] == initial_tickets_artifacts['created'] + 1
-        assert tickets_artifacts['modified'] == initial_tickets_artifacts['modified']
+        assert tickets_artifacts[
+            'created'] == initial_tickets_artifacts['created'] + 1
+        assert tickets_artifacts[
+            'modified'] == initial_tickets_artifacts['modified']
 
         r = self.app.post('/tickets/%s/update_ticket_from_widget' % ticketnum,
-            params={'ticket_form.ticket_num' : ticketnum,
-                    'ticket_form.summary':'footext3',
-                    'ticket_form.status' : 'closed'},
-            extra_environ=dict(username=str(c.user.username)))
+                          params={'ticket_form.ticket_num': ticketnum,
+                                  'ticket_form.summary': 'footext3',
+                                  'ticket_form.status': 'closed'},
+                          extra_environ=dict(username=str(c.user.username)))
 
         tickets = c.user.stats.getTickets()
         tickets_artifacts = c.user.stats.getArtifacts(art_type="Ticket")
@@ -125,12 +132,14 @@ class TestStats(TestController):
         assert tickets['assigned'] == initial_tickets['assigned'] + 1
         assert tickets['solved'] == initial_tickets['solved'] + 1
         assert tickets['revoked'] == initial_tickets['revoked']
-        assert tickets_artifacts['created'] == initial_tickets_artifacts['created'] + 1
-        assert tickets_artifacts['modified'] == initial_tickets_artifacts['modified'] + 1
+        assert tickets_artifacts[
+            'created'] == initial_tickets_artifacts['created'] + 1
+        assert tickets_artifacts[
+            'modified'] == initial_tickets_artifacts['modified'] + 1
 
         r = self.app.post('/tickets/save_ticket',
-            params={'ticket_form.summary':'test2'},
-            extra_environ=dict(username=str(c.user.username)))
+                          params={'ticket_form.summary': 'test2'},
+                          extra_environ=dict(username=str(c.user.username)))
 
         ticketnum = str(TM.Ticket.query.get(summary='test2').ticket_num)
 
@@ -140,14 +149,16 @@ class TestStats(TestController):
         assert tickets['assigned'] == initial_tickets['assigned'] + 1
         assert tickets['solved'] == initial_tickets['solved'] + 1
         assert tickets['revoked'] == initial_tickets['revoked']
-        assert tickets_artifacts['created'] == initial_tickets_artifacts['created'] + 2
-        assert tickets_artifacts['modified'] == initial_tickets_artifacts['modified'] + 1
+        assert tickets_artifacts[
+            'created'] == initial_tickets_artifacts['created'] + 2
+        assert tickets_artifacts[
+            'modified'] == initial_tickets_artifacts['modified'] + 1
 
         r = self.app.post('/tickets/%s/update_ticket_from_widget' % ticketnum,
-            params={'ticket_form.ticket_num' : ticketnum,
-                    'ticket_form.summary':'test2',
-                    'ticket_form.assigned_to' : str(c.user.username)},
-            extra_environ=dict(username=str(c.user.username)))
+                          params={'ticket_form.ticket_num': ticketnum,
+                                  'ticket_form.summary': 'test2',
+                                  'ticket_form.assigned_to': str(c.user.username)},
+                          extra_environ=dict(username=str(c.user.username)))
 
         tickets = c.user.stats.getTickets()
         tickets_artifacts = c.user.stats.getArtifacts(art_type="Ticket")
@@ -155,14 +166,16 @@ class TestStats(TestController):
         assert tickets['assigned'] == initial_tickets['assigned'] + 2
         assert tickets['solved'] == initial_tickets['solved'] + 1
         assert tickets['revoked'] == initial_tickets['revoked']
-        assert tickets_artifacts['created'] == initial_tickets_artifacts['created'] + 2
-        assert tickets_artifacts['modified'] == initial_tickets_artifacts['modified'] + 2
+        assert tickets_artifacts[
+            'created'] == initial_tickets_artifacts['created'] + 2
+        assert tickets_artifacts[
+            'modified'] == initial_tickets_artifacts['modified'] + 2
 
         r = self.app.post('/tickets/%s/update_ticket_from_widget' % ticketnum,
-            params={'ticket_form.ticket_num' : ticketnum,
-                    'ticket_form.summary':'test2',
-                    'ticket_form.assigned_to' : 'test-user'},
-            extra_environ=dict(username=str(c.user.username)))
+                          params={'ticket_form.ticket_num': ticketnum,
+                                  'ticket_form.summary': 'test2',
+                                  'ticket_form.assigned_to': 'test-user'},
+                          extra_environ=dict(username=str(c.user.username)))
 
         tickets = c.user.stats.getTickets()
         tickets_artifacts = c.user.stats.getArtifacts(art_type="Ticket")
@@ -170,8 +183,11 @@ class TestStats(TestController):
         assert tickets['assigned'] == initial_tickets['assigned'] + 2
         assert tickets['solved'] == initial_tickets['solved'] + 1
         assert tickets['revoked'] == initial_tickets['revoked'] + 1
-        assert tickets_artifacts['created'] == initial_tickets_artifacts['created'] + 2
-        assert tickets_artifacts['modified'] == initial_tickets_artifacts['modified'] + 3
+        assert tickets_artifacts[
+            'created'] == initial_tickets_artifacts['created'] + 2
+        assert tickets_artifacts[
+            'modified'] == initial_tickets_artifacts['modified'] + 3
+
 
 class TestGitCommit(TestController, unittest.TestCase):
 
@@ -189,7 +205,8 @@ class TestGitCommit(TestController, unittest.TestCase):
     @td.with_wiki
     def setup_with_tools(self):
         setup_global_objects()
-        h.set_context(test_project_with_repo, 'src-git', neighborhood='Projects')
+        h.set_context(test_project_with_repo, 'src-git',
+                      neighborhood='Projects')
         repo_dir = pkg_resources.resource_filename(
             'forgeuserstats', 'tests/data')
         c.app.repo.fs_path = repo_dir

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/widgets/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/widgets/__init__.py b/ForgeUserStats/forgeuserstats/widgets/__init__.py
index 77505f1..144e298 100644
--- a/ForgeUserStats/forgeuserstats/widgets/__init__.py
+++ b/ForgeUserStats/forgeuserstats/widgets/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/forgeuserstats/widgets/forms.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/forgeuserstats/widgets/forms.py b/ForgeUserStats/forgeuserstats/widgets/forms.py
index 0ea3a20..c7b987f 100644
--- a/ForgeUserStats/forgeuserstats/widgets/forms.py
+++ b/ForgeUserStats/forgeuserstats/widgets/forms.py
@@ -23,17 +23,17 @@ from formencode import validators as fev
 import ew as ew_core
 import ew.jinja2_ew as ew
 
+
 class StatsPreferencesForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults)
+    defaults = dict(ForgeForm.defaults)
 
     class fields(ew_core.NameList):
         visible = ew.Checkbox(
             label='Make my personal statistics visible to other users.')
-            
+
     def display(self, **kw):
         if kw.get('user').stats.visible:
-            self.fields['visible'].attrs = {'checked':'true'}      
+            self.fields['visible'].attrs = {'checked': 'true'}
         else:
-            self.fields['visible'].attrs = {}    
+            self.fields['visible'].attrs = {}
         return super(ForgeForm, self).display(**kw)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeUserStats/setup.py
----------------------------------------------------------------------
diff --git a/ForgeUserStats/setup.py b/ForgeUserStats/setup.py
index c8cb39b..ab57945 100644
--- a/ForgeUserStats/setup.py
+++ b/ForgeUserStats/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgeuserstats.version import __version__
 
@@ -25,7 +26,8 @@ setup(name='ForgeUserStats',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/converters.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/converters.py b/ForgeWiki/forgewiki/converters.py
index 4439d08..95b88f7 100644
--- a/ForgeWiki/forgewiki/converters.py
+++ b/ForgeWiki/forgewiki/converters.py
@@ -64,9 +64,9 @@ def mediawiki2markdown(source):
         raise ImportError("""This operation requires GPL libraries:
         "mediawiki" (https://github.com/zikzakmedia/python-mediawiki.git)
         "html2text" (https://github.com/aaronsw/html2text.git)""")
-    
+
     html2text.BODY_WIDTH = 0
-    
+
     wiki_content = wiki2html(source, True)
     wiki_content = _convert_toc(wiki_content)
     markdown_text = html2text.html2text(wiki_content)
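
For context, mediawiki2markdown(source) converts MediaWiki markup by going
through HTML (wiki2html) and then html2text. A hedged usage sketch, assuming the
optional GPL dependencies python-mediawiki and html2text are installed; the
sample markup is made up:

from forgewiki.converters import mediawiki2markdown

source = "== Heading ==\n''emphasis'' and [http://example.com a link]"
print(mediawiki2markdown(source))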

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/model/wiki.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/model/wiki.py b/ForgeWiki/forgewiki/model/wiki.py
index 11ad589..18bfb85 100644
--- a/ForgeWiki/forgewiki/model/wiki.py
+++ b/ForgeWiki/forgewiki/model/wiki.py
@@ -17,7 +17,8 @@
 
 from datetime import datetime
 import difflib
-from pylons import app_globals as g #g is a namespace for globally accessable app helpers
+# g is a namespace for globally accessable app helpers
+from pylons import app_globals as g
 from pylons import tmpl_context as context
 
 from ming import schema
@@ -25,16 +26,16 @@ from ming.orm import FieldProperty, ForeignIdProperty, Mapper, session
 from ming.orm.declarative import MappedClass
 
 from allura.model import (
-        VersionedArtifact,
-        Snapshot,
-        Feed,
-        Thread,
-        Post,
-        User,
-        BaseAttachment,
-        Notification,
-        project_orm_session,
-        Shortlink,
+    VersionedArtifact,
+    Snapshot,
+    Feed,
+    Thread,
+    Post,
+    User,
+    BaseAttachment,
+    Notification,
+    project_orm_session,
+    Shortlink,
 )
 from allura.model.timeline import ActivityObject
 from allura.model.types import MarkdownCache
@@ -45,22 +46,25 @@ from allura.lib import utils
 config = utils.ConfigProxy(
     common_suffix='forgemail.domain')
 
+
 class Globals(MappedClass):
 
     class __mongometa__:
         name = 'wiki-globals'
         session = project_orm_session
-        indexes = [ 'app_config_id' ]
+        indexes = ['app_config_id']
 
     type_s = 'WikiGlobals'
     _id = FieldProperty(schema.ObjectId)
-    app_config_id = ForeignIdProperty('AppConfig', if_missing=lambda:context.app.config._id)
+    app_config_id = ForeignIdProperty(
+        'AppConfig', if_missing=lambda: context.app.config._id)
     root = FieldProperty(str)
 
 
 class PageHistory(Snapshot):
+
     class __mongometa__:
-        name='page_history'
+        name = 'page_history'
 
     def original(self):
         return Page.query.get(_id=self.artifact_id)
@@ -92,17 +96,18 @@ class PageHistory(Snapshot):
     def email_address(self):
         return self.original().email_address
 
+
 class Page(VersionedArtifact, ActivityObject):
+
     class __mongometa__:
-        name='page'
+        name = 'page'
         history_class = PageHistory
         unique_indexes = [('app_config_id', 'title')]
 
-
-    title=FieldProperty(str)
-    text=FieldProperty(schema.String, if_missing='')
+    title = FieldProperty(str)
+    text = FieldProperty(schema.String, if_missing='')
     text_cache = FieldProperty(MarkdownCache)
-    viewable_by=FieldProperty([str])
+    viewable_by = FieldProperty([str])
     type_s = 'Wiki'
 
     @property
@@ -127,14 +132,14 @@ class Page(VersionedArtifact, ActivityObject):
         ss = VersionedArtifact.commit(self)
         session(self).flush()
         if self.version > 1:
-            v1 = self.get_version(self.version-1)
+            v1 = self.get_version(self.version - 1)
             v2 = self
-            la = [ line + '\n'  for line in v1.text.splitlines() ]
-            lb = [ line + '\n'  for line in v2.text.splitlines() ]
+            la = [line + '\n' for line in v1.text.splitlines()]
+            lb = [line + '\n' for line in v2.text.splitlines()]
             diff = ''.join(difflib.unified_diff(
-                    la, lb,
-                    'v%d' % v1.version,
-                    'v%d' % v2.version))
+                la, lb,
+                'v%d' % v1.version,
+                'v%d' % v2.version))
             description = '<pre>' + diff + '</pre>'
             if v1.title != v2.title:
                 subject = '%s renamed page %s to %s' % (
@@ -153,7 +158,8 @@ class Page(VersionedArtifact, ActivityObject):
 
     @property
     def email_address(self):
-        domain = '.'.join(reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
+        domain = '.'.join(
+            reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
         return '%s@%s%s' % (self.title.replace('/', '.'), domain, config.common_suffix)
 
     @property
@@ -161,7 +167,8 @@ class Page(VersionedArtifact, ActivityObject):
         return 'Discussion for %s page' % self.title
 
     def url(self):
-        s = self.app_config.url() + h.urlquote(self.title.encode('utf-8')) + '/'
+        s = self.app_config.url() + \
+            h.urlquote(self.title.encode('utf-8')) + '/'
         if self.deleted:
             s += '?deleted=True'
         return s
@@ -182,7 +189,7 @@ class Page(VersionedArtifact, ActivityObject):
     def upsert(cls, title, version=None):
         """Update page with `title` or insert new page with that name"""
         if version is None:
-            #Check for existing page object
+            # Check for existing page object
             obj = cls.query.get(
                 app_config_id=context.app.config._id,
                 title=title)
@@ -190,14 +197,15 @@ class Page(VersionedArtifact, ActivityObject):
                 obj = cls(
                     title=title,
                     app_config_id=context.app.config._id,
-                    )
+                )
                 Thread.new(discussion_id=obj.app_config.discussion_id,
                            ref_id=obj.index_id())
             return obj
         else:
             pg = cls.upsert(title)
             HC = cls.__mongometa__.history_class
-            ss = HC.query.find({'artifact_id':pg._id, 'version':int(version)}).one()
+            ss = HC.query.find(
+                {'artifact_id': pg._id, 'version': int(version)}).one()
             return ss
 
     @classmethod
@@ -226,7 +234,7 @@ class Page(VersionedArtifact, ActivityObject):
                 t[user.username] = user.id
             return t.values()
         user_ids = uniq([r.author for r in self.history().all()])
-        return User.query.find({'_id':{'$in':user_ids}}).all()
+        return User.query.find({'_id': {'$in': user_ids}}).all()
 
     def delete(self):
         Shortlink.query.remove(dict(ref_id=self.index_id()))
@@ -234,10 +242,12 @@ class Page(VersionedArtifact, ActivityObject):
         suffix = " {:%Y-%m-%d %H:%M:%S.%f}".format(datetime.utcnow())
         self.title += suffix
 
+
 class WikiAttachment(BaseAttachment):
-    ArtifactType=Page
+    ArtifactType = Page
+
     class __mongometa__:
-        polymorphic_identity='WikiAttachment'
-    attachment_type=FieldProperty(str, if_missing='WikiAttachment')
+        polymorphic_identity = 'WikiAttachment'
+    attachment_type = FieldProperty(str, if_missing='WikiAttachment')
 
 Mapper.compile_all()
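
The commit() hook above builds an HTML <pre> description from a unified diff of
the previous and current page versions. The same construction with plain strings
standing in for real Page versions (the text values are made up):

import difflib

old_text = 'first line\nsecond line'
new_text = 'first line\nsecond line, edited'
la = [line + '\n' for line in old_text.splitlines()]
lb = [line + '\n' for line in new_text.splitlines()]
diff = ''.join(difflib.unified_diff(la, lb, 'v1', 'v2'))
description = '<pre>' + diff + '</pre>'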

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py b/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py
index 7815ba9..b70672c 100644
--- a/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py
+++ b/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py
@@ -25,6 +25,7 @@ log = logging.getLogger(__name__)
 
 
 class MediawikiExtractor(object):
+
     """Base class for MediaWiki data provider"""
 
     def __init__(self, options):
@@ -40,6 +41,7 @@ class MediawikiExtractor(object):
 
 
 class MySQLExtractor(MediawikiExtractor):
+
     """Extract MediaWiki data to json.
 
     Use connection to MySQL database as a data source.
@@ -60,7 +62,8 @@ class MySQLExtractor(MediawikiExtractor):
         try:
             import MySQLdb
         except ImportError:
-            raise ImportError('GPL library MySQL-python is required for this operation')
+            raise ImportError(
+                'GPL library MySQL-python is required for this operation')
 
         if not self._connection:
             self._connection = MySQLdb.connect(**self.db_options)
@@ -147,7 +150,7 @@ class MySQLExtractor(MediawikiExtractor):
             # so we need to build path to file as follows
             md5 = hashlib.md5(name).hexdigest()
             path = os.path.join(self.options.attachments_dir,
-                               md5[:1], md5[:2], name)
+                                md5[:1], md5[:2], name)
             if os.path.isfile(path):
                 yield path
 
@@ -185,4 +188,5 @@ class MySQLExtractor(MediawikiExtractor):
         for filepath in self._attachments(page_id):
             self._save_attachment(filepath, 'pages', str(page_id),
                                   'attachments')
-        log.info('Extracted attachments for page %s (%s)', page_id, page['title'])
+        log.info('Extracted attachments for page %s (%s)',
+                 page_id, page['title'])
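
The attachment lookup above relies on MediaWiki's upload layout, which shards
files by the first hex digits of the md5 of the file name. A worked example with
a made-up attachments directory (Python 2, like the rest of the codebase):

import hashlib
import os

attachments_dir = '/tmp/mediawiki-images'  # hypothetical location
name = 'Example.png'
md5 = hashlib.md5(name).hexdigest()
path = os.path.join(attachments_dir, md5[:1], md5[:2], name)
# -> /tmp/mediawiki-images/<x>/<xy>/Example.png, where x and y are the
#    first two hex digits of the md5 of the file name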

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py b/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py
index 588fa64..a3e1e29 100644
--- a/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py
+++ b/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py
@@ -34,6 +34,7 @@ log = logging.getLogger(__name__)
 
 
 class MediawikiLoader(object):
+
     """Load MediaWiki data from json to Allura wiki tool"""
     TIMESTAMP_FMT = '%Y%m%d%H%M%S'
 
@@ -42,13 +43,13 @@ class MediawikiLoader(object):
         self.nbhd = M.Neighborhood.query.get(name=options.nbhd)
         if not self.nbhd:
             raise ValueError("Can't find neighborhood with name %s"
-                                  % options.nbhd)
+                             % options.nbhd)
         self.project = M.Project.query.get(shortname=options.project,
                                            neighborhood_id=self.nbhd._id)
         if not self.project:
             raise ValueError("Can't find project with shortname %s "
-                                  "and neighborhood_id %s"
-                                  % (options.project, self.nbhd._id))
+                             "and neighborhood_id %s"
+                             % (options.project, self.nbhd._id))
 
         self.wiki = self.project.app_instance('wiki')
         if not self.wiki:
@@ -132,10 +133,10 @@ class MediawikiLoader(object):
                 p = WM.Page.upsert(page['title'])
                 p.viewable_by = ['all']
                 p.text = mediawiki_internal_links2markdown(
-                            mediawiki2markdown(page['text']),
-                            page['title'])
+                    mediawiki2markdown(page['text']),
+                    page['title'])
                 timestamp = datetime.datetime.strptime(page['timestamp'],
-                                                        self.TIMESTAMP_FMT)
+                                                       self.TIMESTAMP_FMT)
                 p.mod_date = timestamp
                 c.user = (M.User.query.get(username=page['username'].lower())
                           or M.User.anonymous())
@@ -147,7 +148,8 @@ class MediawikiLoader(object):
                 gl = WM.Globals.query.get(app_config_id=self.wiki.config._id)
                 if gl is not None:
                     gl.root = page['title']
-            log.info('Loaded history of page %s (%s)', page['page_id'], page['title'])
+            log.info('Loaded history of page %s (%s)',
+                     page['page_id'], page['title'])
 
             self.load_talk(page_dir, page['title'])
             self.load_attachments(page_dir, page['title'])
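
The page history loader above parses MediaWiki timestamps with
TIMESTAMP_FMT = '%Y%m%d%H%M%S'. A quick check with a made-up value:

import datetime

TIMESTAMP_FMT = '%Y%m%d%H%M%S'
ts = datetime.datetime.strptime('20140110222257', TIMESTAMP_FMT)
# ts == datetime.datetime(2014, 1, 10, 22, 22, 57)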

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py b/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py
index 7d5b2b4..8a6e79e 100644
--- a/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py
+++ b/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py
@@ -32,43 +32,46 @@ log = logging.getLogger(__name__)
 
 
 class Wiki2Markdown(ScriptTask):
+
     """Import MediaWiki to Allura Wiki tool"""
     @classmethod
     def parser(cls):
         parser = argparse.ArgumentParser(description='Import wiki from'
-            'mediawiki-dump to allura wiki')
+                                         'mediawiki-dump to allura wiki')
         parser.add_argument('-e', '--extract-only', action='store_true',
-                          dest='extract',
-                          help='Store data from the mediawiki-dump '
-                          'on the local filesystem; not load into Allura')
-        parser.add_argument('-l', '--load-only', action='store_true', dest='load',
-                    help='Load into Allura previously-extracted data')
+                            dest='extract',
+                            help='Store data from the mediawiki-dump '
+                            'on the local filesystem; not load into Allura')
+        parser.add_argument(
+            '-l', '--load-only', action='store_true', dest='load',
+            help='Load into Allura previously-extracted data')
         parser.add_argument('-d', '--dump-dir', dest='dump_dir', default='',
-                    help='Directory for dump files')
+                            help='Directory for dump files')
         parser.add_argument('-n', '--neighborhood', dest='nbhd', default='',
-                    help='Neighborhood name to load data')
+                            help='Neighborhood name to load data')
         parser.add_argument('-p', '--project', dest='project', default='',
-                    help='Project shortname to load data into')
+                            help='Project shortname to load data into')
         parser.add_argument('-a', '--attachments-dir', dest='attachments_dir',
-                    help='Path to directory with mediawiki attachments dump',
-                    default='')
+                            help='Path to directory with mediawiki attachments dump',
+                            default='')
         parser.add_argument('--db_config_prefix', dest='db_config_prefix',
-                          help='Key prefix (e.g. "legacy.") in ini file to '
-                          'use instead of commandline db params')
+                            help='Key prefix (e.g. "legacy.") in ini file to '
+                            'use instead of commandline db params')
         parser.add_argument('-s', '--source', dest='source', default='mysql',
-                    help='Database type to extract from (only mysql for now)')
+                            help='Database type to extract from (only mysql for now)')
         parser.add_argument('--db_name', dest='db_name', default='mediawiki',
-                    help='Database name')
+                            help='Database name')
         parser.add_argument('--host', dest='host', default='localhost',
-                    help='Database host')
+                            help='Database host')
         parser.add_argument('--port', dest='port', type=int, default=0,
-                    help='Database port')
+                            help='Database port')
         parser.add_argument('--user', dest='user', default='',
-                    help='User for database connection')
+                            help='User for database connection')
         parser.add_argument('--password', dest='password', default='',
-                    help='Password for database connection')
-        parser.add_argument('--keep-dumps', action='store_true', dest='keep_dumps',
-                    help='Leave dump files on disk after run')
+                            help='Password for database connection')
+        parser.add_argument(
+            '--keep-dumps', action='store_true', dest='keep_dumps',
+            help='Leave dump files on disk after run')
         return parser
 
     @classmethod
@@ -93,14 +96,15 @@ class Wiki2Markdown(ScriptTask):
 
         if not options.dump_dir:
             if options.load and not options.extract:
-                raise ValueError('You must specify directory containing dump files')
+                raise ValueError(
+                    'You must specify directory containing dump files')
             else:
                 options.dump_dir = tempfile.mkdtemp()
                 log.info("Writing temp files to %s", options.dump_dir)
 
         if options.load and (not options.project or not options.nbhd):
             raise ValueError('You must specify neighborhood and project '
-                                  'to load data')
+                             'to load data')
 
         if options.extract:
             if options.db_config_prefix:
@@ -112,12 +116,14 @@ class Wiki2Markdown(ScriptTask):
             if options.source == 'mysql':
                 pass
             elif options.source in ('sqlite', 'postgres', 'sql-dump'):
-                raise ValueError('This source not implemented yet. Only mysql for now')
+                raise ValueError(
+                    'This source not implemented yet. Only mysql for now')
             else:
                 raise ValueError('You must specify a valid data source')
 
             if not options.attachments_dir:
-                raise ValueError('You must specify path to directory with mediawiki attachmets dump.')
+                raise ValueError(
+                    'You must specify path to directory with mediawiki attachmets dump.')
 
         return options
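
A hedged sketch of driving the parser defined above from Python; the flag names
come from parser(), while the argument values here are made up:

from forgewiki.scripts.wiki2markdown.wiki2markdown import Wiki2Markdown

parser = Wiki2Markdown.parser()
options = parser.parse_args([
    '-e',                            # extract only
    '--source', 'mysql',             # the only implemented source
    '--db_name', 'mediawiki',
    '-a', '/tmp/mediawiki-attachments',
    '-d', '/tmp/mediawiki-dump',
])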
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py b/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py
index 8d3f8b7..625362c 100644
--- a/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py
+++ b/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py
@@ -15,4 +15,4 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-from .wiki_from_trac import WikiFromTrac
\ No newline at end of file
+from .wiki_from_trac import WikiFromTrac

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py b/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py
index 6415bbf..7f146e6 100644
--- a/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py
+++ b/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py
@@ -129,7 +129,7 @@ class WikiExporter(object):
         if type is None:
             return url
         glue = '&' if '?' in suburl else '?'
-        return  url + glue + 'format=' + type
+        return url + glue + 'format=' + type
 
     def fetch(self, url):
         return urlopen(url)
@@ -201,6 +201,7 @@ class WikiExporter(object):
         internal_url = urlsplit(self.base_url).path + 'wiki/'
         internal_link_re = r'\[([^]]+)\]\(%s([^)]*)\)' % internal_url
         internal_link = re.compile(internal_link_re, re.UNICODE)
+
         def sub(match):
             caption = match.group(1)
             page = self.convert_title(match.group(2))
@@ -232,8 +233,10 @@ class WikiExporter(object):
         a = []
         for line in text.split('\n'):
             if not line.startswith('    '):
-                line = re.sub(r'\[(https?://[^\s\[\]]+)\s([^\[\]]+)\]', r'[\2](\1)', line)
-                line = re.sub(r'\[(wiki:[^\s\[\]]+)\s([^\[\]]+)\]', r'[\2](/\1/)', line)
+                line = re.sub(
+                    r'\[(https?://[^\s\[\]]+)\s([^\[\]]+)\]', r'[\2](\1)', line)
+                line = re.sub(r'\[(wiki:[^\s\[\]]+)\s([^\[\]]+)\]',
+                              r'[\2](/\1/)', line)
                 line = re.sub(r'\!(([A-Z][a-z0-9]+){2,})', r'\1', line)
                 line = re.sub(r'\'\'\'(.*?)\'\'\'', r'*\1*', line)
                 line = re.sub(r'\'\'(.*?)\'\'', r'_\1_', line)
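
A worked example of the Trac-to-Markdown substitutions above, applied in the same
order to a single made-up line (only the regexes shown in the diff are used; the
real code skips lines starting with four spaces):

import re

line = "See [http://example.com the site], '''bold''' and !CamelCase"
line = re.sub(r'\[(https?://[^\s\[\]]+)\s([^\[\]]+)\]', r'[\2](\1)', line)
line = re.sub(r'\[(wiki:[^\s\[\]]+)\s([^\[\]]+)\]', r'[\2](/\1/)', line)
line = re.sub(r'\!(([A-Z][a-z0-9]+){2,})', r'\1', line)
line = re.sub(r"'''(.*?)'''", r'*\1*', line)
line = re.sub(r"''(.*?)''", r'_\1_', line)
# -> "See [the site](http://example.com), *bold* and CamelCase"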

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py b/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py
index 55e4480..45d056c 100644
--- a/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py
+++ b/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py
@@ -41,13 +41,15 @@ def load_data(doc_file_name=None, optparser=None, options=None):
                 if not isinstance(k, basestring) or not isinstance(v, basestring):
                     raise ValueError
         except ValueError:
-            optparser.error('--user-map should specify JSON file with format {"original_user": "sf_user", ...}')
+            optparser.error(
+                '--user-map should specify JSON file with format {"original_user": "sf_user", ...}')
         finally:
             f.close()
 
     import_options['user_map'] = user_map
 
-    cli = AlluraImportApiClient(options.base_url, options.api_key, options.secret_key, options.verbose)
+    cli = AlluraImportApiClient(
+        options.base_url, options.api_key, options.secret_key, options.verbose)
     doc_txt = open(doc_file_name).read()
 
     if options.wiki:
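
The --user-map option above expects a JSON object mapping original Trac usernames
to forge usernames, with string keys and values. A hedged sketch of that format
and the same validation (file name and contents are hypothetical; basestring is
used because the codebase is Python 2):

import json

with open('user-map.json') as f:
    user_map = json.load(f)  # e.g. {"original_user": "sf_user"}

for k, v in user_map.items():
    if not isinstance(k, basestring) or not isinstance(v, basestring):
        raise ValueError('--user-map must map strings to strings')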


[18/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_home.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_home.py b/Allura/allura/tests/functional/test_home.py
index 1a5bb57..f350b29 100644
--- a/Allura/allura/tests/functional/test_home.py
+++ b/Allura/allura/tests/functional/test_home.py
@@ -34,9 +34,10 @@ class TestProjectHome(TestController):
     def test_project_nav(self):
         response = self.app.get('/p/test/_nav.json')
         root = self.app.get('/p/test/wiki/').follow()
-        assert re.search(r'<!-- Server: \S+ -->', str(root.html)), 'Missing Server comment'
+        assert re.search(r'<!-- Server: \S+ -->',
+                         str(root.html)), 'Missing Server comment'
         nav_links = root.html.find('div', dict(id='top_nav')).findAll('a')
-        assert len(nav_links) ==  len(response.json['menu'])
+        assert len(nav_links) == len(response.json['menu'])
         for nl, entry in zip(nav_links, response.json['menu']):
             assert nl['href'] == entry['url']
 
@@ -52,12 +53,14 @@ class TestProjectHome(TestController):
         menu = response.json['menu']
         wikis = menu[-2]['children']
         assert_equal(len(wikis), 2)
-        assert {u'url': u'/p/test/wiki/', u'name': u'Wiki', u'icon': u'tool-wiki', 'tool_name': 'wiki'} in wikis, wikis
-        assert {u'url': u'/p/test/wiki2/', u'name': u'wiki2', u'icon': u'tool-wiki', 'tool_name': 'wiki'} in wikis, wikis
+        assert {u'url': u'/p/test/wiki/', u'name': u'Wiki', u'icon':
+                u'tool-wiki', 'tool_name': 'wiki'} in wikis, wikis
+        assert {u'url': u'/p/test/wiki2/', u'name': u'wiki2', u'icon':
+                u'tool-wiki', 'tool_name': 'wiki'} in wikis, wikis
 
     @td.with_wiki
     def test_project_group_nav_more_than_ten(self):
-        for i in range(1,15):
+        for i in range(1, 15):
             tool_name = "wiki%s" % str(i)
             c.user = M.User.by_username('test-admin')
             p = M.Project.query.get(shortname='test')
@@ -67,7 +70,8 @@ class TestProjectHome(TestController):
         response = self.app.get('/p/test/_nav.json')
         menu = response.json['menu']
         assert_equal(len(menu[0]['children']), 11)
-        assert {u'url': u'/p/test/_list/wiki', u'name': u'More...', u'icon': u'tool-wiki', 'tool_name': 'wiki'} in menu[0]['children']
+        assert {u'url': u'/p/test/_list/wiki', u'name': u'More...',
+                u'icon': u'tool-wiki', 'tool_name': 'wiki'} in menu[0]['children']
 
     @td.with_wiki
     def test_neighborhood_home(self):
@@ -117,7 +121,8 @@ class TestProjectHome(TestController):
     def test_members(self):
         nbhd = M.Neighborhood.query.get(name='Projects')
         self.app.post('/admin/groups/create', params={'name': 'B_role'})
-        test_project = M.Project.query.get(shortname='test', neighborhood_id=nbhd._id)
+        test_project = M.Project.query.get(
+            shortname='test', neighborhood_id=nbhd._id)
         test_project.add_user(M.User.by_username('test-user-1'), ['B_role'])
         test_project.add_user(M.User.by_username('test-user'), ['Developer'])
         test_project.add_user(M.User.by_username('test-user-0'), ['Member'])
@@ -140,9 +145,9 @@ class TestProjectHome(TestController):
         assert "<td>Test User 1</td>" in str(tr[6])
         assert "<td>Test User 2</td>" in str(tr[7])
 
-
     def test_members_anonymous(self):
-        r = self.app.get('/p/test/_members/', extra_environ=dict(username='*anonymous'))
+        r = self.app.get('/p/test/_members/',
+                         extra_environ=dict(username='*anonymous'))
         assert '<td>Test Admin</td>' in r
         assert '<td><a href="/u/test-admin/">test-admin</a></td>' in r
         assert '<td>Admin</td>' in r

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_neighborhood.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_neighborhood.py b/Allura/allura/tests/functional/test_neighborhood.py
index 7fe3916..3d71949 100644
--- a/Allura/allura/tests/functional/test_neighborhood.py
+++ b/Allura/allura/tests/functional/test_neighborhood.py
@@ -40,8 +40,9 @@ class TestNeighborhood(TestController):
         assert r.location.endswith('/adobe/wiki/Home/')
         r = r.follow()
         assert 'This is the "Adobe" neighborhood' in str(r), str(r)
-        r = self.app.get('/adobe/admin/', extra_environ=dict(username='test-user'),
-                         status=403)
+        r = self.app.get(
+            '/adobe/admin/', extra_environ=dict(username='test-user'),
+            status=403)
 
     def test_redirect(self):
         r = self.app.post('/adobe/_admin/update',
@@ -52,15 +53,19 @@ class TestNeighborhood(TestController):
 
     def test_admin(self):
         r = self.app.get('/adobe/_admin/', extra_environ=dict(username='root'))
-        r = self.app.get('/adobe/_admin/overview', extra_environ=dict(username='root'))
-        r = self.app.get('/adobe/_admin/accolades', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/overview',
+                         extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/accolades',
+                         extra_environ=dict(username='root'))
         neighborhood = M.Neighborhood.query.get(name='Adobe')
         neighborhood.features['google_analytics'] = True
         r = self.app.post('/adobe/_admin/update',
-                          params=dict(name='Mozq1', css='', homepage='# MozQ1!', tracking_id='U-123456'),
+                          params=dict(name='Mozq1', css='',
+                                      homepage='# MozQ1!', tracking_id='U-123456'),
                           extra_environ=dict(username='root'))
         r = self.app.post('/adobe/_admin/update',
-                          params=dict(name='Mozq1', css='', homepage='# MozQ1!\n[Root]'),
+                          params=dict(name='Mozq1', css='',
+                                      homepage='# MozQ1!\n[Root]'),
                           extra_environ=dict(username='root'))
         # make sure project_template is validated as proper json
         r = self.app.post('/adobe/_admin/update',
@@ -144,11 +149,12 @@ class TestNeighborhood(TestController):
 
         r = self.app.get('/p/test/admin/tools')
         assert '<div class="fleft isnt_sorted">' in r
-        delete_tool = r.html.find('a', {'class':'mount_delete'})
+        delete_tool = r.html.find('a', {'class': 'mount_delete'})
         assert_equal(len(delete_tool), 1)
 
     def test_show_title(self):
-        r = self.app.get('/adobe/_admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/overview',
+                         extra_environ=dict(username='root'))
         neighborhood = M.Neighborhood.query.get(name='Adobe')
         # if not set show_title must be True
         assert neighborhood.show_title
@@ -172,13 +178,13 @@ class TestNeighborhood(TestController):
                          extra_environ=dict(username='root'))
         assert 'class="project_title"' in str(r)
 
-
     def test_admin_stats_del_count(self):
         neighborhood = M.Neighborhood.query.get(name='Adobe')
         proj = M.Project.query.get(neighborhood_id=neighborhood._id)
         proj.deleted = True
         ThreadLocalORMSession.flush_all()
-        r = self.app.get('/adobe/_admin/stats/', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/stats/',
+                         extra_environ=dict(username='root'))
         assert 'Deleted: 1' in r
         assert 'Private: 0' in r
 
@@ -188,7 +194,8 @@ class TestNeighborhood(TestController):
         proj.deleted = False
         proj.private = True
         ThreadLocalORMSession.flush_all()
-        r = self.app.get('/adobe/_admin/stats/', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/stats/',
+                         extra_environ=dict(username='root'))
         assert 'Deleted: 0' in r
         assert 'Private: 1' in r
 
@@ -197,15 +204,19 @@ class TestNeighborhood(TestController):
         proj = M.Project.query.get(neighborhood_id=neighborhood._id)
         proj.private = False
         ThreadLocalORMSession.flush_all()
-        r = self.app.get('/adobe/_admin/stats/adminlist', extra_environ=dict(username='root'))
-        pq = M.Project.query.find(dict(neighborhood_id=neighborhood._id, deleted=False))
+        r = self.app.get('/adobe/_admin/stats/adminlist',
+                         extra_environ=dict(username='root'))
+        pq = M.Project.query.find(
+            dict(neighborhood_id=neighborhood._id, deleted=False))
         pq.sort('name')
         projects = pq.skip(0).limit(int(25)).all()
         for proj in projects:
-            admin_role = M.ProjectRole.query.get(project_id=proj.root_project._id, name='Admin')
+            admin_role = M.ProjectRole.query.get(
+                project_id=proj.root_project._id, name='Admin')
             if admin_role is None:
                 continue
-            user_role_list = M.ProjectRole.query.find(dict(project_id=proj.root_project._id, name=None)).all()
+            user_role_list = M.ProjectRole.query.find(
+                dict(project_id=proj.root_project._id, name=None)).all()
             for ur in user_role_list:
                 if ur.user is not None and admin_role._id in ur.roles:
                     assert proj.name in r
@@ -213,13 +224,15 @@ class TestNeighborhood(TestController):
 
     def test_icon(self):
         file_name = 'neo-icon-set-454545-256x350.png'
-        file_path = os.path.join(allura.__path__[0], 'nf', 'allura', 'images', file_name)
+        file_path = os.path.join(
+            allura.__path__[0], 'nf', 'allura', 'images', file_name)
         file_data = file(file_path).read()
         upload = ('icon', file_name, file_data)
 
         r = self.app.get('/adobe/_admin/', extra_environ=dict(username='root'))
         r = self.app.post('/adobe/_admin/update',
-                          params=dict(name='Mozq1', css='', homepage='# MozQ1'),
+                          params=dict(name='Mozq1', css='',
+                                      homepage='# MozQ1'),
                           extra_environ=dict(username='root'), upload_files=[upload])
         r = self.app.get('/adobe/icon')
         image = PIL.Image.open(StringIO(r.body))
@@ -231,27 +244,34 @@ class TestNeighborhood(TestController):
         # analytics allowed
         neighborhood = M.Neighborhood.query.get(name='Adobe')
         neighborhood.features['google_analytics'] = True
-        r = self.app.get('/adobe/_admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/overview',
+                         extra_environ=dict(username='root'))
         assert 'Analytics Tracking ID' in r
-        r = self.app.get('/adobe/adobe-1/admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/adobe-1/admin/overview',
+                         extra_environ=dict(username='root'))
         assert 'Analytics Tracking ID' in r
         r = self.app.post('/adobe/_admin/update',
-                          params=dict(name='Adobe', css='', homepage='# MozQ1', tracking_id='U-123456'),
+                          params=dict(name='Adobe', css='',
+                                      homepage='# MozQ1', tracking_id='U-123456'),
                           extra_environ=dict(username='root'))
         r = self.app.post('/adobe/adobe-1/admin/update',
                           params=dict(tracking_id='U-654321'),
                           extra_environ=dict(username='root'))
-        r = self.app.get('/adobe/adobe-1/admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/adobe-1/admin/overview',
+                         extra_environ=dict(username='root'))
         assert "_add_tracking('nbhd', 'U-123456');" in r
         assert "_add_tracking('proj', 'U-654321');" in r
         # analytics not allowed
         neighborhood = M.Neighborhood.query.get(name='Adobe')
         neighborhood.features['google_analytics'] = False
-        r = self.app.get('/adobe/_admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/overview',
+                         extra_environ=dict(username='root'))
         assert 'Analytics Tracking ID' not in r
-        r = self.app.get('/adobe/adobe-1/admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/adobe-1/admin/overview',
+                         extra_environ=dict(username='root'))
         assert 'Analytics Tracking ID' not in r
-        r = self.app.get('/adobe/adobe-1/admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/adobe-1/admin/overview',
+                         extra_environ=dict(username='root'))
         assert "_add_tracking('nbhd', 'U-123456');" not in r
         assert "_add_tracking('proj', 'U-654321');" not in r
 
@@ -264,7 +284,8 @@ class TestNeighborhood(TestController):
         neighborhood.features['css'] = 'none'
         r = self.app.get('/adobe/')
         assert test_css not in r
-        r = self.app.get('/adobe/_admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/overview',
+                         extra_environ=dict(username='root'))
         assert custom_css not in r
 
         neighborhood = M.Neighborhood.query.get(name='Adobe')
@@ -273,7 +294,8 @@ class TestNeighborhood(TestController):
         while isinstance(r.response, HTTPFound):
             r = r.follow()
         assert test_css in r
-        r = self.app.get('/adobe/_admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/overview',
+                         extra_environ=dict(username='root'))
         assert custom_css in r
 
         neighborhood = M.Neighborhood.query.get(name='Adobe')
@@ -282,14 +304,16 @@ class TestNeighborhood(TestController):
         while isinstance(r.response, HTTPFound):
             r = r.follow()
         assert test_css in r
-        r = self.app.get('/adobe/_admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/overview',
+                         extra_environ=dict(username='root'))
         assert custom_css in r
 
     def test_picker_css(self):
         neighborhood = M.Neighborhood.query.get(name='Adobe')
         neighborhood.features['css'] = 'picker'
 
-        r = self.app.get('/adobe/_admin/overview', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/overview',
+                         extra_environ=dict(username='root'))
         assert 'Project title, font' in r
         assert 'Project title, color' in r
         assert 'Bar on top' in r
@@ -321,7 +345,9 @@ class TestNeighborhood(TestController):
         neighborhood = M.Neighborhood.query.get(name='Projects')
         neighborhood.features['max_projects'] = None
         r = self.app.post('/p/register',
-                          params=dict(project_unixname='maxproject1', project_name='Max project1', project_description='', neighborhood='Projects'),
+                          params=dict(
+                              project_unixname='maxproject1', project_name='Max project1',
+                              project_description='', neighborhood='Projects'),
                           antispam=True,
                           extra_environ=dict(username='root'), status=302)
         assert '/p/maxproject1/admin' in r.location
@@ -330,7 +356,9 @@ class TestNeighborhood(TestController):
         neighborhood = M.Neighborhood.query.get(name='Projects')
         neighborhood.features['max_projects'] = 0
         r = self.app.post('/p/register',
-                          params=dict(project_unixname='maxproject2', project_name='Max project2', project_description='', neighborhood='Projects'),
+                          params=dict(
+                              project_unixname='maxproject2', project_name='Max project2',
+                              project_description='', neighborhood='Projects'),
                           antispam=True,
                           extra_environ=dict(username='root'))
         while isinstance(r.response, HTTPFound):
@@ -341,7 +369,9 @@ class TestNeighborhood(TestController):
         # Set rate limit to unlimit
         with h.push_config(config, **{'project.rate_limits': '{}'}):
             r = self.app.post('/p/register',
-                              params=dict(project_unixname='rateproject1', project_name='Rate project1', project_description='', neighborhood='Projects'),
+                              params=dict(
+                                  project_unixname='rateproject1', project_name='Rate project1',
+                                  project_description='', neighborhood='Projects'),
                               antispam=True,
                               extra_environ=dict(username='test-user-1'), status=302)
             assert '/p/rateproject1/admin' in r.location
@@ -349,7 +379,9 @@ class TestNeighborhood(TestController):
         # Set rate limit to 1 in first hour of user account
         with h.push_config(config, **{'project.rate_limits': '{"3600": 1}'}):
             r = self.app.post('/p/register',
-                              params=dict(project_unixname='rateproject2', project_name='Rate project2', project_description='', neighborhood='Projects'),
+                              params=dict(
+                                  project_unixname='rateproject2', project_name='Rate project2',
+                                  project_description='', neighborhood='Projects'),
                               antispam=True,
                               extra_environ=dict(username='test-user-1'))
             while isinstance(r.response, HTTPFound):
@@ -360,7 +392,9 @@ class TestNeighborhood(TestController):
         # Set rate limit to unlimit
         with h.push_config(config, **{'project.rate_limits': '{}'}):
             r = self.app.post('/p/register',
-                              params=dict(project_unixname='rateproject1', project_name='Rate project1', project_description='', neighborhood='Projects'),
+                              params=dict(
+                                  project_unixname='rateproject1', project_name='Rate project1',
+                                  project_description='', neighborhood='Projects'),
                               antispam=True,
                               extra_environ=dict(username='root'), status=302)
             assert '/p/rateproject1/admin' in r.location
@@ -368,55 +402,66 @@ class TestNeighborhood(TestController):
         # Set rate limit to 1 in first hour of user account
         with h.push_config(config, **{'project.rate_limits': '{"3600": 1}'}):
             r = self.app.post('/p/register',
-                              params=dict(project_unixname='rateproject2', project_name='Rate project2', project_description='', neighborhood='Projects'),
+                              params=dict(
+                                  project_unixname='rateproject2', project_name='Rate project2',
+                                  project_description='', neighborhood='Projects'),
                               antispam=True,
                               extra_environ=dict(username='root'))
             assert '/p/rateproject2/admin' in r.location
 
     def test_invite(self):
         p_nbhd_id = str(M.Neighborhood.query.get(name='Projects')._id)
-        r = self.app.get('/adobe/_moderate/', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_moderate/',
+                         extra_environ=dict(username='root'))
         r = self.app.post('/adobe/_moderate/invite',
-                          params=dict(pid='adobe-1', invite='on', neighborhood_id=p_nbhd_id),
+                          params=dict(pid='adobe-1', invite='on',
+                                      neighborhood_id=p_nbhd_id),
                           extra_environ=dict(username='root'))
         r = self.app.get(r.location, extra_environ=dict(username='root'))
         assert 'error' in r
         r = self.app.post('/adobe/_moderate/invite',
-                          params=dict(pid='no_such_user', invite='on', neighborhood_id=p_nbhd_id),
+                          params=dict(pid='no_such_user',
+                                      invite='on', neighborhood_id=p_nbhd_id),
                           extra_environ=dict(username='root'))
         r = self.app.get(r.location, extra_environ=dict(username='root'))
         assert 'error' in r
         r = self.app.post('/adobe/_moderate/invite',
-                          params=dict(pid='test', invite='on', neighborhood_id=p_nbhd_id),
+                          params=dict(pid='test', invite='on',
+                                      neighborhood_id=p_nbhd_id),
                           extra_environ=dict(username='root'))
         r = self.app.get(r.location, extra_environ=dict(username='root'))
         assert 'invited' in r, r
         assert 'warning' not in r
         r = self.app.post('/adobe/_moderate/invite',
-                          params=dict(pid='test', invite='on', neighborhood_id=p_nbhd_id),
+                          params=dict(pid='test', invite='on',
+                                      neighborhood_id=p_nbhd_id),
                           extra_environ=dict(username='root'))
         r = self.app.get(r.location, extra_environ=dict(username='root'))
         assert 'warning' in r
         r = self.app.post('/adobe/_moderate/invite',
-                          params=dict(pid='test', uninvite='on', neighborhood_id=p_nbhd_id),
+                          params=dict(pid='test', uninvite='on',
+                                      neighborhood_id=p_nbhd_id),
                           extra_environ=dict(username='root'))
         r = self.app.get(r.location, extra_environ=dict(username='root'))
         assert 'uninvited' in r
         assert 'warning' not in r
         r = self.app.post('/adobe/_moderate/invite',
-                          params=dict(pid='test', uninvite='on', neighborhood_id=p_nbhd_id),
+                          params=dict(pid='test', uninvite='on',
+                                      neighborhood_id=p_nbhd_id),
                           extra_environ=dict(username='root'))
         r = self.app.get(r.location, extra_environ=dict(username='root'))
         assert 'warning' in r
         r = self.app.post('/adobe/_moderate/invite',
-                          params=dict(pid='test', invite='on', neighborhood_id=p_nbhd_id),
+                          params=dict(pid='test', invite='on',
+                                      neighborhood_id=p_nbhd_id),
                           extra_environ=dict(username='root'))
         r = self.app.get(r.location, extra_environ=dict(username='root'))
         assert 'invited' in r
         assert 'warning' not in r
 
     def test_evict(self):
-        r = self.app.get('/adobe/_moderate/', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_moderate/',
+                         extra_environ=dict(username='root'))
         r = self.app.post('/adobe/_moderate/evict',
                           params=dict(pid='test'),
                           extra_environ=dict(username='root'))
@@ -434,27 +479,40 @@ class TestNeighborhood(TestController):
     def test_register(self):
         r = self.app.get('/adobe/register', status=405)
         r = self.app.post('/adobe/register',
-                          params=dict(project_unixname='', project_name='Nothing', project_description='', neighborhood='Adobe'),
+                          params=dict(
+                              project_unixname='', project_name='Nothing',
+                              project_description='', neighborhood='Adobe'),
                           antispam=True,
                           extra_environ=dict(username='root'))
-        assert r.html.find('div', {'class':'error'}).string == 'Please use only letters, numbers, and dashes 3-15 characters long.'
+        assert r.html.find('div', {'class': 'error'}
+                           ).string == 'Please use only letters, numbers, and dashes 3-15 characters long.'
         r = self.app.post('/adobe/register',
-                          params=dict(project_unixname='mymoz', project_name='My Moz', project_description='', neighborhood='Adobe'),
+                          params=dict(
+                              project_unixname='mymoz', project_name='My Moz',
+                              project_description='', neighborhood='Adobe'),
                           antispam=True,
                           extra_environ=dict(username='*anonymous'),
                           status=302)
         r = self.app.post('/adobe/register',
-                          params=dict(project_unixname='foo.mymoz', project_name='My Moz', project_description='', neighborhood='Adobe'),
+                          params=dict(
+                              project_unixname='foo.mymoz', project_name='My Moz',
+                              project_description='', neighborhood='Adobe'),
                           antispam=True,
                           extra_environ=dict(username='root'))
-        assert r.html.find('div', {'class':'error'}).string == 'Please use only letters, numbers, and dashes 3-15 characters long.'
+        assert r.html.find('div', {'class': 'error'}
+                           ).string == 'Please use only letters, numbers, and dashes 3-15 characters long.'
         r = self.app.post('/p/register',
-                          params=dict(project_unixname='test', project_name='Tester', project_description='', neighborhood='Projects'),
+                          params=dict(
+                              project_unixname='test', project_name='Tester',
+                              project_description='', neighborhood='Projects'),
                           antispam=True,
                           extra_environ=dict(username='root'))
-        assert r.html.find('div', {'class':'error'}).string == 'This project name is taken.'
+        assert r.html.find('div', {'class': 'error'}
+                           ).string == 'This project name is taken.'
         r = self.app.post('/adobe/register',
-                          params=dict(project_unixname='mymoz', project_name='My Moz', project_description='', neighborhood='Adobe'),
+                          params=dict(
+                              project_unixname='mymoz', project_name='My Moz',
+                              project_description='', neighborhood='Adobe'),
                           antispam=True,
                           extra_environ=dict(username='root'),
                           status=302)
@@ -506,7 +564,8 @@ class TestNeighborhood(TestController):
         flash_msg_cookie = urllib2.unquote(r.headers['Set-Cookie'])
         assert 'Internal Error.' in flash_msg_cookie
 
-        proj = M.Project.query.get(shortname='myprivate1', neighborhood_id=neighborhood._id)
+        proj = M.Project.query.get(
+            shortname='myprivate1', neighborhood_id=neighborhood._id)
         assert proj is None
 
         # Turn on private
@@ -526,7 +585,8 @@ class TestNeighborhood(TestController):
             antispam=True,
             extra_environ=dict(username='root'))
 
-        proj = M.Project.query.get(shortname='myprivate2', neighborhood_id=neighborhood._id)
+        proj = M.Project.query.get(
+            shortname='myprivate2', neighborhood_id=neighborhood._id)
         assert proj.private
 
     def test_register_private_ok(self):
@@ -542,7 +602,8 @@ class TestNeighborhood(TestController):
             antispam=True,
             extra_environ=dict(username='root'),
             status=302)
-        assert config.get('auth.login_url', '/auth/') not in r.location, r.location
+        assert config.get('auth.login_url',
+                          '/auth/') not in r.location, r.location
         r = self.app.get(
             '/p/mymoz/wiki/',
             extra_environ=dict(username='root')).follow(extra_environ=dict(username='root'), status=200)
@@ -557,29 +618,31 @@ class TestNeighborhood(TestController):
             status=403)
 
     def test_project_template(self):
-        icon_url = 'file://' + os.path.join(allura.__path__[0], 'nf', 'allura', 'images', 'neo-icon-set-454545-256x350.png')
+        icon_url = 'file://' + \
+            os.path.join(allura.__path__[0], 'nf', 'allura',
+                         'images', 'neo-icon-set-454545-256x350.png')
         test_groups = [{
-            "name": "Viewer", # group will be created, all params are valid
+            "name": "Viewer",  # group will be created, all params are valid
             "permissions": ["read"],
             "usernames": ["user01"]
         }, {
-            "name": "", # group won't be created - invalid name
+            "name": "",  # group won't be created - invalid name
             "permissions": ["read"],
             "usernames": ["user01"]
         }, {
-            "name": "TestGroup1", # group won't be created - invalid perm name
+            "name": "TestGroup1",  # group won't be created - invalid perm name
             "permissions": ["foobar"],
             "usernames": ["user01"]
         }, {
-            "name": "TestGroup2", # will be created; 'inspect' perm ignored
+            "name": "TestGroup2",  # will be created; 'inspect' perm ignored
             "permissions": ["read", "inspect"],
             "usernames": ["user01", "user02"]
         }, {
-            "name": "TestGroup3", # will be created with no users in group
+            "name": "TestGroup3",  # will be created with no users in group
             "permissions": ["admin"]
         }]
         r = self.app.post('/adobe/_admin/update', params=dict(name='Mozq1',
-            css='', homepage='# MozQ1!\n[Root]', project_template="""{
+                                                              css='', homepage='# MozQ1!\n[Root]', project_template="""{
                 "private":true,
                 "icon":{
                     "url":"%s",
@@ -625,15 +688,23 @@ class TestNeighborhood(TestController):
             status=302).follow()
         p = M.Project.query.get(shortname='testtemp')
         # make sure the correct tools got installed in the right order
-        top_nav = r.html.find('div', {'id':'top_nav'})
-        assert top_nav.contents[1].contents[1].contents[1]['href'] == '/adobe/testtemp/wiki/'
-        assert 'Wiki' in top_nav.contents[1].contents[1].contents[1].contents[0]
-        assert top_nav.contents[1].contents[3].contents[1]['href'] == '/adobe/testtemp/discussion/'
-        assert 'Discussion' in top_nav.contents[1].contents[3].contents[1].contents[0]
-        assert top_nav.contents[1].contents[5].contents[1]['href'] == '/adobe/testtemp/news/'
-        assert 'News' in top_nav.contents[1].contents[5].contents[1].contents[0]
-        assert top_nav.contents[1].contents[7].contents[1]['href'] == '/adobe/testtemp/admin/'
-        assert 'Admin' in top_nav.contents[1].contents[7].contents[1].contents[0]
+        top_nav = r.html.find('div', {'id': 'top_nav'})
+        assert top_nav.contents[1].contents[1].contents[
+            1]['href'] == '/adobe/testtemp/wiki/'
+        assert 'Wiki' in top_nav.contents[
+            1].contents[1].contents[1].contents[0]
+        assert top_nav.contents[1].contents[3].contents[
+            1]['href'] == '/adobe/testtemp/discussion/'
+        assert 'Discussion' in top_nav.contents[
+            1].contents[3].contents[1].contents[0]
+        assert top_nav.contents[1].contents[5].contents[
+            1]['href'] == '/adobe/testtemp/news/'
+        assert 'News' in top_nav.contents[
+            1].contents[5].contents[1].contents[0]
+        assert top_nav.contents[1].contents[7].contents[
+            1]['href'] == '/adobe/testtemp/admin/'
+        assert 'Admin' in top_nav.contents[
+            1].contents[7].contents[1].contents[0]
         # make sure project is private
         r = self.app.get(
             '/adobe/testtemp/wiki/',
@@ -677,16 +748,17 @@ class TestNeighborhood(TestController):
                 for username in usernames:
                     user = M.User.by_username(username)
                     if user and user._id:
-                        assert role in M.ProjectRole.by_user(user, project=p).roles
+                        assert role in M.ProjectRole.by_user(
+                            user, project=p).roles
             # confirm roles with invalid json data are not created
             if name in ('', 'TestGroup1'):
                 assert name not in roles
 
     def test_projects_anchored_tools(self):
         r = self.app.post('/adobe/_admin/update', params=dict(name='Adobe',
-            css='',
-            homepage='# Adobe!\n[Root]',
-            project_template="""{
+                                                              css='',
+                                                              homepage='# Adobe!\n[Root]',
+                                                              project_template="""{
                 "private":true,
                 "tools":{
                     "wiki":{
@@ -705,9 +777,9 @@ class TestNeighborhood(TestController):
                 "tool_order":["wiki","admin"],
 
                 }""" ),
-            extra_environ=dict(username='root'))
+                          extra_environ=dict(username='root'))
         neighborhood = M.Neighborhood.query.get(name='Adobe')
-        neighborhood.anchored_tools ='wiki:Wiki'
+        neighborhood.anchored_tools = 'wiki:Wiki'
         r = self.app.post(
             '/adobe/register',
             params=dict(
@@ -719,8 +791,10 @@ class TestNeighborhood(TestController):
             antispam=True,
             extra_environ=dict(username='root'))
         r = self.app.get('/adobe/testtemp/admin/tools')
-        assert r.html.find('div', id='top_nav').find('a', href='/adobe/testtemp/wiki/'), r.html
-        assert r.html.find('div', id='top_nav').find('a', href='/adobe/testtemp/admin/'), r.html
+        assert r.html.find('div', id='top_nav').find(
+            'a', href='/adobe/testtemp/wiki/'), r.html
+        assert r.html.find('div', id='top_nav').find(
+            'a', href='/adobe/testtemp/admin/'), r.html
 
     def test_name_suggest(self):
         r = self.app.get('/p/suggest_name?project_name=My+Moz')
@@ -730,12 +804,17 @@ class TestNeighborhood(TestController):
 
     def test_name_check(self):
         for name in ('My+Moz', 'Te%st!', 'ab', 'a' * 16):
-            r = self.app.get('/p/check_names?neighborhood=Projects&project_unixname=%s' % name)
-            assert_equal(r.json, {'project_unixname': 'Please use only letters, numbers, and dashes 3-15 characters long.'})
-        r = self.app.get('/p/check_names?neighborhood=Projects&project_unixname=mymoz')
+            r = self.app.get(
+                '/p/check_names?neighborhood=Projects&project_unixname=%s' % name)
+            assert_equal(
+                r.json, {'project_unixname': 'Please use only letters, numbers, and dashes 3-15 characters long.'})
+        r = self.app.get(
+            '/p/check_names?neighborhood=Projects&project_unixname=mymoz')
         assert_equal(r.json, {})
-        r = self.app.get('/p/check_names?neighborhood=Projects&project_unixname=test')
-        assert_equal(r.json, {'project_unixname': 'This project name is taken.'})
+        r = self.app.get(
+            '/p/check_names?neighborhood=Projects&project_unixname=test')
+        assert_equal(r.json,
+                     {'project_unixname': 'This project name is taken.'})
 
     @td.with_tool('test/sub1', 'Wiki', 'wiki')
     def test_neighborhood_project(self):
@@ -748,44 +827,54 @@ class TestNeighborhood(TestController):
         # p/test exists, so try creating adobe/test
         self.app.get('/adobe/test/wiki/', status=404)
         r = self.app.post('/adobe/register',
-                          params=dict(project_unixname='test', project_name='Test again', project_description='', neighborhood='Adobe', tools='wiki'),
+                          params=dict(
+                              project_unixname='test', project_name='Test again',
+                              project_description='', neighborhood='Adobe', tools='wiki'),
                           antispam=True,
                           extra_environ=dict(username='root'))
-        assert r.status_int == 302, r.html.find('div', {'class':'error'}).string
+        assert r.status_int == 302, r.html.find(
+            'div', {'class': 'error'}).string
         r = self.app.get('/adobe/test/wiki/').follow(status=200)
 
     def test_neighborhood_awards(self):
         file_name = 'adobe_icon.png'
-        file_path = os.path.join(allura.__path__[0], 'public', 'nf', 'images', file_name)
+        file_path = os.path.join(
+            allura.__path__[0], 'public', 'nf', 'images', file_name)
         file_data = file(file_path).read()
         upload = ('icon', file_name, file_data)
 
-        r = self.app.get('/adobe/_admin/awards', extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/awards',
+                         extra_environ=dict(username='root'))
         r = self.app.post('/adobe/_admin/awards/create',
                           params=dict(short='FOO', full='A basic foo award'),
                           extra_environ=dict(username='root'), upload_files=[upload])
         r = self.app.post('/adobe/_admin/awards/create',
-                          params=dict(short='BAR', full='A basic bar award with no icon'),
+                          params=dict(short='BAR',
+                                      full='A basic bar award with no icon'),
                           extra_environ=dict(username='root'))
         foo_id = str(M.Award.query.find(dict(short='FOO')).first()._id)
         bar_id = str(M.Award.query.find(dict(short='BAR')).first()._id)
         r = self.app.post('/adobe/_admin/awards/%s/update' % bar_id,
-                          params=dict(short='BAR2', full='Updated description.'),
+                          params=dict(short='BAR2',
+                                      full='Updated description.'),
                           extra_environ=dict(username='root')).follow().follow()
         assert 'BAR2' in r
         assert 'Updated description.' in r
-        r = self.app.get('/adobe/_admin/awards/%s' % foo_id, extra_environ=dict(username='root'))
-        r = self.app.get('/adobe/_admin/awards/%s/icon' % foo_id, extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/awards/%s' %
+                         foo_id, extra_environ=dict(username='root'))
+        r = self.app.get('/adobe/_admin/awards/%s/icon' %
+                         foo_id, extra_environ=dict(username='root'))
         image = PIL.Image.open(StringIO(r.body))
         assert image.size == (48, 48)
         self.app.post('/adobe/_admin/awards/grant',
-                          params=dict(grant='FOO', recipient='adobe-1'),
-                          extra_environ=dict(username='root'))
-        self.app.get('/adobe/_admin/awards/%s/adobe-1' % foo_id, extra_environ=dict(username='root'))
+                      params=dict(grant='FOO', recipient='adobe-1'),
+                      extra_environ=dict(username='root'))
+        self.app.get('/adobe/_admin/awards/%s/adobe-1' %
+                     foo_id, extra_environ=dict(username='root'))
         self.app.post('/adobe/_admin/awards/%s/adobe-1/revoke' % foo_id,
-                          extra_environ=dict(username='root'))
+                      extra_environ=dict(username='root'))
         self.app.post('/adobe/_admin/awards/%s/delete' % foo_id,
-                          extra_environ=dict(username='root'))
+                      extra_environ=dict(username='root'))
 
     def test_add_a_project_link(self):
         from pylons import tmpl_context as c
@@ -806,13 +895,16 @@ class TestNeighborhood(TestController):
         assert 'Add a Project' in r
 
     def test_help(self):
-        r = self.app.get('/p/_admin/help/', extra_environ=dict(username='root'))
+        r = self.app.get('/p/_admin/help/',
+                         extra_environ=dict(username='root'))
         assert 'macro' in r
 
     @td.with_user_project('test-user')
     def test_profile_topnav_menu(self):
-        r = self.app.get('/u/test-user/', extra_environ=dict(username='test-user')).follow()
-        assert r.html.find('div', id='top_nav').find('a', href='/u/test-user/profile/'), r.html
+        r = self.app.get('/u/test-user/',
+                         extra_environ=dict(username='test-user')).follow()
+        assert r.html.find('div', id='top_nav').find(
+            'a', href='/u/test-user/profile/'), r.html
 
     def test_user_project_creates_on_demand(self):
         M.User.register(dict(username='donald-duck'), make_project=False)
@@ -822,12 +914,13 @@ class TestNeighborhood(TestController):
     def test_disabled_user_has_no_user_project(self):
         user = M.User.register(dict(username='donald-duck'))
         self.app.get('/u/donald-duck/')  # assert it's there
-        M.User.query.update(dict(username='donald-duck'), {'$set': {'disabled': True}})
+        M.User.query.update(dict(username='donald-duck'),
+                            {'$set': {'disabled': True}})
         self.app.get('/u/donald-duck/', status=404)
 
-
     def test_more_projects_link(self):
         r = self.app.get('/adobe/adobe-1/admin/')
-        link = r.html.find('div', {'class':'neighborhood_title_link'}).find('a')
+        link = r.html.find(
+            'div', {'class': 'neighborhood_title_link'}).find('a')
         assert 'View More Projects' in str(link)
         assert link['href'] == '/adobe/'
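
The icon assertions in the hunks above decode the raw response body with PIL via StringIO and check the resulting image size. A minimal self-contained sketch of that pattern, using a synthetic in-memory PNG in place of an HTTP response body (PIL and StringIO assumed available, as in the tests):

    from StringIO import StringIO
    import PIL.Image

    # Build an in-memory PNG that stands in for r.body in the tests above.
    buf = StringIO()
    PIL.Image.new('RGB', (48, 48)).save(buf, format='PNG')
    body = buf.getvalue()

    # Decode it the same way the tests do and assert on the pixel size.
    image = PIL.Image.open(StringIO(body))
    assert image.size == (48, 48)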

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_rest.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_rest.py b/Allura/allura/tests/functional/test_rest.py
index 6829434..328e117 100644
--- a/Allura/allura/tests/functional/test_rest.py
+++ b/Allura/allura/tests/functional/test_rest.py
@@ -43,7 +43,8 @@ class TestRestHome(TestRestApiBase):
         assert r.status_int == 403
 
     def test_bad_timestamp(self):
-        r = self.api_post('/rest/p/test/wiki/', api_timestamp=(datetime.utcnow() + timedelta(days=1)).isoformat())
+        r = self.api_post('/rest/p/test/wiki/',
+                          api_timestamp=(datetime.utcnow() + timedelta(days=1)).isoformat())
         assert r.status_int == 403
 
     @mock.patch('allura.controllers.rest.M.OAuthAccessToken')
@@ -80,22 +81,22 @@ class TestRestHome(TestRestApiBase):
     def test_bearer_token_valid(self, request):
         user = M.User.by_username('test-admin')
         consumer_token = M.OAuthConsumerToken(
-                name='foo',
-                description='foo app',
-            )
+            name='foo',
+            description='foo app',
+        )
         request_token = M.OAuthRequestToken(
-                consumer_token_id=consumer_token._id,
-                user_id=user._id,
-                callback='manual',
-                validation_pin=h.nonce(20),
-                is_bearer=True,
-            )
+            consumer_token_id=consumer_token._id,
+            user_id=user._id,
+            callback='manual',
+            validation_pin=h.nonce(20),
+            is_bearer=True,
+        )
         access_token = M.OAuthAccessToken(
-                consumer_token_id=consumer_token._id,
-                request_token_id=request_token._id,
-                user_id=user._id,
-                is_bearer=True,
-            )
+            consumer_token_id=consumer_token._id,
+            request_token_id=request_token._id,
+            user_id=user._id,
+            is_bearer=True,
+        )
         ThreadLocalODMSession.flush_all()
         request.params = {'access_token': access_token.api_key}
         request.scheme = 'https'
@@ -146,7 +147,8 @@ class TestRestHome(TestRestApiBase):
         # Deny anonymous to see 'private-bugs' tool
         role = M.ProjectRole.by_name('*anonymous')._id
         read_permission = M.ACE.allow(role, 'read')
-        app = M.Project.query.get(shortname='test').app_instance('private-bugs')
+        app = M.Project.query.get(
+            shortname='test').app_instance('private-bugs')
         if read_permission in app.config.acl:
             app.config.acl.remove(read_permission)
 
@@ -158,7 +160,8 @@ class TestRestHome(TestRestApiBase):
         assert_in('private-bugs', tool_mounts)
 
         # anonymous sees only non-private tool
-        r = self.app.get('/rest/p/test/', extra_environ={'username': '*anonymous'})
+        r = self.app.get('/rest/p/test/',
+                         extra_environ={'username': '*anonymous'})
         assert_equal(r.json['shortname'], 'test')
         tool_mounts = [t['mount_point'] for t in r.json['tools']]
         assert_in('bugs', tool_mounts)
@@ -168,10 +171,10 @@ class TestRestHome(TestRestApiBase):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         r = self.api_get('/rest/p/test/wiki/tést/')
         assert r.status_int == 200
         assert r.json['title'].encode('utf-8') == 'tést', r.json
@@ -179,8 +182,10 @@ class TestRestHome(TestRestApiBase):
     @td.with_wiki
     def test_deny_access(self):
         wiki = M.Project.query.get(shortname='test').app_instance('wiki')
-        anon_read_perm = M.ACE.allow(M.ProjectRole.by_name('*anonymous')._id, 'read')
-        auth_read_perm = M.ACE.allow(M.ProjectRole.by_name('*authenticated')._id, 'read')
+        anon_read_perm = M.ACE.allow(
+            M.ProjectRole.by_name('*anonymous')._id, 'read')
+        auth_read_perm = M.ACE.allow(
+            M.ProjectRole.by_name('*authenticated')._id, 'read')
         acl = wiki.config.acl
         if anon_read_perm in acl:
             acl.remove(anon_read_perm)
@@ -195,26 +200,27 @@ class TestRestHome(TestRestApiBase):
 
     def test_index(self):
         eps = {
-                'site_stats': {
-                    'foo_24hr': lambda: 42,
-                    'bar_24hr': lambda: 84,
-                    'qux_24hr': lambda: 0,
-                },
-            }
+            'site_stats': {
+                'foo_24hr': lambda: 42,
+                'bar_24hr': lambda: 84,
+                'qux_24hr': lambda: 0,
+            },
+        }
         with mock.patch.dict(g.entry_points, eps):
             response = self.app.get('/rest/')
             assert_equal(response.json, {
                 'site_stats': {
-                        'foo_24hr': 42,
-                        'bar_24hr': 84,
-                        'qux_24hr': 0,
-                    },
-                })
+                    'foo_24hr': 42,
+                    'bar_24hr': 84,
+                    'qux_24hr': 0,
+                },
+            })
 
     def test_name_validation(self):
         r = self.api_get('/rest/p/test/')
         assert r.status_int == 200
         with mock.patch('allura.lib.plugin.ProjectRegistrationProvider') as Provider:
-            Provider.get().shortname_validator.to_python.side_effect = Invalid('name', 'value', {})
+            Provider.get().shortname_validator.to_python.side_effect = Invalid(
+                'name', 'value', {})
             r = self.api_get('/rest/p/test/')
             assert r.status_int == 404
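
test_index above overlays fake site_stats entry points with mock.patch.dict and relies on the original mapping being restored when the with-block exits. A minimal sketch of that mechanism on a plain dict (standard mock API; the registry name here is illustrative, not Allura's g.entry_points):

    import mock

    registry = {'existing': 'value'}
    with mock.patch.dict(registry, {'site_stats': {'foo_24hr': lambda: 42}}):
        # Inside the block the patched-in key is visible alongside the old one.
        assert registry['site_stats']['foo_24hr']() == 42
        assert registry['existing'] == 'value'
    # On exit the dict is restored to exactly its original contents.
    assert registry == {'existing': 'value'}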

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_rest_api_tickets.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_rest_api_tickets.py b/Allura/allura/tests/functional/test_rest_api_tickets.py
index 145e143..8185d5d 100644
--- a/Allura/allura/tests/functional/test_rest_api_tickets.py
+++ b/Allura/allura/tests/functional/test_rest_api_tickets.py
@@ -30,8 +30,10 @@ class TestApiTicket(TestRestApiBase):
         if not expire:
             expire = timedelta(days=1)
         test_admin = M.User.query.get(username='test-admin')
-        api_ticket = M.ApiTicket(user_id=test_admin._id, capabilities={'import': ['Projects','test']},
-                                 expires=datetime.utcnow() + expire)
+        api_ticket = M.ApiTicket(
+            user_id=test_admin._id, capabilities={
+                'import': ['Projects', 'test']},
+            expires=datetime.utcnow() + expire)
         session(api_ticket).flush()
         self.set_api_token(api_ticket)
 
@@ -47,7 +49,8 @@ class TestApiTicket(TestRestApiBase):
 
     def test_bad_timestamp(self):
         self.set_api_ticket()
-        r = self.api_post('/rest/p/test/wiki/', api_timestamp=(datetime.utcnow() + timedelta(days=1)).isoformat())
+        r = self.api_post('/rest/p/test/wiki/',
+                          api_timestamp=(datetime.utcnow() + timedelta(days=1)).isoformat())
         assert r.status_int == 403
 
     def test_bad_path(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_root.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_root.py b/Allura/allura/tests/functional/test_root.py
index 918377f..8c7441e 100644
--- a/Allura/allura/tests/functional/test_root.py
+++ b/Allura/allura/tests/functional/test_root.py
@@ -53,10 +53,11 @@ class TestRootController(TestController):
 
     def test_index(self):
         response = self.app.get('/')
-        assert_equal(response.html.find('h2',{'class':'dark title'}).contents[0].strip(), 'All Neighborhoods')
-        nbhds = response.html.findAll('td',{'class':'nbhd'})
+        assert_equal(response.html.find('h2', {'class': 'dark title'}).contents[
+                     0].strip(), 'All Neighborhoods')
+        nbhds = response.html.findAll('td', {'class': 'nbhd'})
         assert nbhds[0].find('a').get('href') == '/adobe/'
-        cat_links = response.html.find('div',{'id':'sidebar'}).findAll('li')
+        cat_links = response.html.find('div', {'id': 'sidebar'}).findAll('li')
         assert len(cat_links) == 4
         assert cat_links[0].find('a').get('href') == '/browse/clustering'
         assert cat_links[0].find('a').find('span').string == 'Clustering'
@@ -68,31 +69,40 @@ class TestRootController(TestController):
 
         response = self.app.get('/')
         # inject it into the sidebar data
-        content = str(response.html.find('div',{'id':'content_base'}))
+        content = str(response.html.find('div', {'id': 'content_base'}))
         assert '<script>' not in content
         assert '&lt;script&gt;' in content
 
     def test_strange_accept_headers(self):
         hdrs = [
             'text/plain;text/html;text/*',
-            'text/html,application/xhtml+xml,application/xml;q=0.9;text/plain;q=0.8,image/png,*/*;q=0.5' ]
+            'text/html,application/xhtml+xml,application/xml;q=0.9;text/plain;q=0.8,image/png,*/*;q=0.5']
         for hdr in hdrs:
-            # malformed headers used to return 500, just make sure they don't now
+            # malformed headers used to return 500, just make sure they don't
+            # now
             self.app.get('/', headers=dict(Accept=hdr), validate_skip=True)
 
     def test_project_browse(self):
-        com_cat = M.ProjectCategory.query.find(dict(label='Communications')).first()
+        com_cat = M.ProjectCategory.query.find(
+            dict(label='Communications')).first()
         fax_cat = M.ProjectCategory.query.find(dict(label='Fax')).first()
-        M.Project.query.find(dict(shortname='adobe-1')).first().category_id = com_cat._id
+        M.Project.query.find(dict(shortname='adobe-1')
+                             ).first().category_id = com_cat._id
         response = self.app.get('/browse')
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-1/'})) == 1
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-2/'})) == 1
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 1
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 1
         response = self.app.get('/browse/communications')
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-1/'})) == 1
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-2/'})) == 0
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 1
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 0
         response = self.app.get('/browse/communications/fax')
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-1/'})) == 0
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-2/'})) == 0
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 0
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 0
 
     def test_neighborhood_home(self):
         # Install home app
@@ -102,45 +112,57 @@ class TestRootController(TestController):
             p.install_app('home', 'home', 'Home', ordinal=0)
 
         response = self.app.get('/adobe/')
-        projects = response.html.findAll('div',{'class':'border card'})
+        projects = response.html.findAll('div', {'class': 'border card'})
         assert len(projects) == 2
-        cat_links = response.html.find('div',{'id':'sidebar'}).findAll('ul')[1].findAll('li')
+        cat_links = response.html.find(
+            'div', {'id': 'sidebar'}).findAll('ul')[1].findAll('li')
         assert len(cat_links) == 3, cat_links
         assert cat_links[0].find('a').get('href') == '/adobe/browse/clustering'
         assert cat_links[0].find('a').find('span').string == 'Clustering'
 
     def test_neighborhood_project_browse(self):
-        com_cat = M.ProjectCategory.query.find(dict(label='Communications')).first()
+        com_cat = M.ProjectCategory.query.find(
+            dict(label='Communications')).first()
         fax_cat = M.ProjectCategory.query.find(dict(label='Fax')).first()
-        M.Project.query.find(dict(shortname='adobe-1')).first().category_id = com_cat._id
-        M.Project.query.find(dict(shortname='adobe-2')).first().category_id = fax_cat._id
+        M.Project.query.find(dict(shortname='adobe-1')
+                             ).first().category_id = com_cat._id
+        M.Project.query.find(dict(shortname='adobe-2')
+                             ).first().category_id = fax_cat._id
         response = self.app.get('/adobe/browse')
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-1/'})) == 1
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-2/'})) == 1
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 1
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 1
         response = self.app.get('/adobe/browse/communications')
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-1/'})) == 1
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-2/'})) == 1
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 1
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 1
         response = self.app.get('/adobe/browse/communications/fax')
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-1/'})) == 0
-        assert len(response.html.findAll('a',{'href':'/adobe/adobe-2/'})) == 1
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-1/'})) == 0
+        assert len(
+            response.html.findAll('a', {'href': '/adobe/adobe-2/'})) == 1
 
     @td.with_wiki
     def test_markdown_to_html(self):
         n = M.Neighborhood.query.get(name='Projects')
-        r = self.app.get('/nf/markdown_to_html?markdown=*aaa*bb[wiki:Home]&project=test&app=bugs&neighborhood=%s' % n._id, validate_chunk=True)
+        r = self.app.get(
+            '/nf/markdown_to_html?markdown=*aaa*bb[wiki:Home]&project=test&app=bugs&neighborhood=%s' % n._id, validate_chunk=True)
         assert '<p><em>aaa</em>bb<a class="alink" href="/p/test/wiki/Home/">[wiki:Home]</a></p>' in r, r
 
     def test_slash_redirect(self):
-        r = self.app.get('/p',status=301)
-        r = self.app.get('/p/',status=302)
+        r = self.app.get('/p', status=301)
+        r = self.app.get('/p/', status=302)
 
     @skipif(module_not_available('newrelic'))
     def test_newrelic_set_transaction_name(self):
         from allura.controllers.project import NeighborhoodController
         with mock.patch('newrelic.agent.callable_name') as callable_name,\
-             mock.patch('newrelic.agent.set_transaction_name') as set_transaction_name:
+                mock.patch('newrelic.agent.set_transaction_name') as set_transaction_name:
             callable_name.return_value = 'foo'
             r = self.app.get('/p/')
             arg = callable_name.call_args[0][0]
-            assert_equal(arg.undecorated, NeighborhoodController.index.undecorated)
+            assert_equal(arg.undecorated,
+                         NeighborhoodController.index.undecorated)
             set_transaction_name.assert_called_with('foo')
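
The test_root.py assertions above walk the rendered page through r.html, the response body parsed by WebTest into a BeautifulSoup document. A minimal sketch of the same find/findAll navigation against a hand-written snippet (BeautifulSoup assumed; the markup is illustrative only):

    from BeautifulSoup import BeautifulSoup  # bs4's BeautifulSoup behaves the same here

    html = '''<div id="sidebar"><ul>
                <li><a href="/browse/clustering"><span>Clustering</span></a></li>
              </ul></div>'''
    soup = BeautifulSoup(html)
    cat_links = soup.find('div', {'id': 'sidebar'}).findAll('li')
    assert cat_links[0].find('a').get('href') == '/browse/clustering'
    assert cat_links[0].find('a').find('span').string == 'Clustering'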

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_search.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_search.py b/Allura/allura/tests/functional/test_search.py
index f63d4c2..1081f25 100644
--- a/Allura/allura/tests/functional/test_search.py
+++ b/Allura/allura/tests/functional/test_search.py
@@ -31,4 +31,3 @@ class TestSearch(TestController):
     def test_project_search_controller(self):
         r = self.app.get('/search/')
         r = self.app.get('/search/', params=dict(q='Root'))
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_site_admin.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_site_admin.py b/Allura/allura/tests/functional/test_site_admin.py
index 0f20247..b6da963 100644
--- a/Allura/allura/tests/functional/test_site_admin.py
+++ b/Allura/allura/tests/functional/test_site_admin.py
@@ -24,28 +24,30 @@ from allura import model as M
 from allura.tests import TestController
 from allura.lib.decorators import task
 
+
 class TestSiteAdmin(TestController):
 
     def test_access(self):
         r = self.app.get('/nf/admin/', extra_environ=dict(
-                username='test-user'), status=403)
+            username='test-user'), status=403)
 
         r = self.app.get('/nf/admin/', extra_environ=dict(
-                username='*anonymous'), status=302)
+            username='*anonymous'), status=302)
         r = r.follow()
         assert 'Login' in r
 
     def test_home(self):
         r = self.app.get('/nf/admin/', extra_environ=dict(
-                username='root'))
-        assert 'Forge Site Admin' in r.html.find('h2',{'class':'dark title'}).contents[0]
+            username='root'))
+        assert 'Forge Site Admin' in r.html.find(
+            'h2', {'class': 'dark title'}).contents[0]
         stats_table = r.html.find('table')
         cells = stats_table.findAll('td')
         assert cells[0].contents[0] == 'Adobe', cells[0].contents[0]
 
     def test_tickets_access(self):
         r = self.app.get('/nf/admin/api_tickets', extra_environ=dict(
-                username='test-user'), status=403)
+            username='test-user'), status=403)
 
     def test_new_projects_access(self):
         self.app.get('/nf/admin/new_projects', extra_environ=dict(
@@ -56,7 +58,7 @@ class TestSiteAdmin(TestController):
 
     def test_new_projects(self):
         r = self.app.get('/nf/admin/new_projects', extra_environ=dict(
-                username='root'))
+            username='root'))
         headers = r.html.find('table').findAll('th')
         assert headers[1].contents[0] == 'Created'
         assert headers[2].contents[0] == 'Shortname'
@@ -69,18 +71,18 @@ class TestSiteAdmin(TestController):
     def test_new_projects_deleted_projects(self):
         '''Deleted projects should not be visible here'''
         r = self.app.get('/nf/admin/new_projects', extra_environ=dict(
-                username='root'))
+            username='root'))
         count = len(r.html.find('table').findAll('tr'))
         p = M.Project.query.get(shortname='test')
         p.deleted = True
         ThreadLocalORMSession.flush_all()
         r = self.app.get('/nf/admin/new_projects', extra_environ=dict(
-                username='root'))
+            username='root'))
         assert_equal(len(r.html.find('table').findAll('tr')), count - 1)
 
     def test_new_projects_daterange_filtering(self):
         r = self.app.get('/nf/admin/new_projects', extra_environ=dict(
-                username='root'))
+            username='root'))
         count = len(r.html.find('table').findAll('tr'))
         assert_equal(count, 7)
 
@@ -105,7 +107,8 @@ class TestSiteAdmin(TestController):
         assert 'value="p"' in r
 
     def test_task_list(self):
-        r = self.app.get('/nf/admin/task_manager', extra_environ=dict(username='*anonymous'), status=302)
+        r = self.app.get('/nf/admin/task_manager',
+                         extra_environ=dict(username='*anonymous'), status=302)
         import math
         task = M.MonQTask.post(math.ceil, (12.5,))
         r = self.app.get('/nf/admin/task_manager?page_num=1')
@@ -115,7 +118,8 @@ class TestSiteAdmin(TestController):
         import math
         task = M.MonQTask.post(math.ceil, (12.5,))
         url = '/nf/admin/task_manager/view/%s' % task._id
-        r = self.app.get(url, extra_environ=dict(username='*anonymous'), status=302)
+        r = self.app.get(
+            url, extra_environ=dict(username='*anonymous'), status=302)
         r = self.app.get(url)
         assert 'math.ceil' in r, r
 
@@ -129,15 +133,15 @@ class TestSiteAdmin(TestController):
         user = M.User.by_username('root')
 
         task_args = dict(
-                args=['foo'],
-                kwargs=dict(bar='baz'))
+            args=['foo'],
+            kwargs=dict(bar='baz'))
 
         r = self.app.post('/nf/admin/task_manager/create', params=dict(
             task='allura.tests.functional.test_site_admin.test_task',
             task_args=json.dumps(task_args),
             user='root',
             path='/p/test/admin',
-            ), status=302)
+        ), status=302)
         task = M.MonQTask.query.find({}).sort('_id', -1).next()
         assert str(task._id) in r.location
         assert task.context['project_id'] == project._id

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_static.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_static.py b/Allura/allura/tests/functional/test_static.py
index 11e9a8f..49e9295 100644
--- a/Allura/allura/tests/functional/test_static.py
+++ b/Allura/allura/tests/functional/test_static.py
@@ -17,6 +17,7 @@
 
 from allura.tests import TestController
 
+
 class TestStatic(TestController):
 
     def test_static_controller(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_user_profile.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_user_profile.py b/Allura/allura/tests/functional/test_user_profile.py
index 25d6430..2706776 100644
--- a/Allura/allura/tests/functional/test_user_profile.py
+++ b/Allura/allura/tests/functional/test_user_profile.py
@@ -24,6 +24,7 @@ from allura.model import Project, User
 from allura.tests import decorators as td
 from allura.tests import TestController
 
+
 class TestUserProfile(TestController):
 
     @td.with_user_project('test-admin')
@@ -41,7 +42,7 @@ class TestUserProfile(TestController):
         response = self.app.get('/u/test-admin/profile/')
         assert 'Email Addresses' in response
         self.app.get('/u/test-user', extra_environ=dict(
-                username='test-user'))
+            username='test-user'))
         response = self.app.get('/u/test-user/profile/')
         assert 'Email Addresses' not in response
 
@@ -71,7 +72,8 @@ class TestUserProfile(TestController):
         gen_message_id.return_value = 'id'
         test_user = User.by_username('test-user')
         test_user.set_pref('email_address', 'test-user@example.com')
-        response = self.app.get('/u/test-user/profile/send_message', status=200)
+        response = self.app.get(
+            '/u/test-user/profile/send_message', status=200)
         assert '<b>From:</b> &#34;Test Admin&#34; &lt;test-admin@users.localhost&gt;' in response
         self.app.post('/u/test-user/profile/send_user_message',
                       params={'subject': 'test subject',
@@ -101,45 +103,52 @@ class TestUserProfile(TestController):
             subject=u'test subject')
 
         check.return_value = False
-        response = self.app.get('/u/test-user/profile/send_message', status=200)
+        response = self.app.get(
+            '/u/test-user/profile/send_message', status=200)
         assert 'Sorry, messaging is rate-limited' in response
 
     @td.with_user_project('test-user')
     def test_send_message_for_anonymous(self):
         r = self.app.get('/u/test-user/profile/send_message',
-                     extra_environ={'username': '*anonymous'},
-                     status=302)
-        assert 'You must be logged in to send user messages.' in self.webflash(r)
+                         extra_environ={'username': '*anonymous'},
+                         status=302)
+        assert 'You must be logged in to send user messages.' in self.webflash(
+            r)
 
         r = self.app.post('/u/test-user/profile/send_user_message',
-                      params={'subject': 'test subject',
-                              'message': 'test message',
-                              'cc': 'on'},
-                      extra_environ={'username': '*anonymous'},
-                      status=302)
-        assert 'You must be logged in to send user messages.' in self.webflash(r)
+                          params={'subject': 'test subject',
+                                  'message': 'test message',
+                                  'cc': 'on'},
+                          extra_environ={'username': '*anonymous'},
+                          status=302)
+        assert 'You must be logged in to send user messages.' in self.webflash(
+            r)
 
     @td.with_user_project('test-user')
     def test_link_to_send_message_form(self):
-        User.by_username('test-admin').set_pref('email_address', 'admin@example.com')
-        User.by_username('test-user').set_pref('email_address', 'user@example.com')
+        User.by_username('test-admin').set_pref('email_address',
+                                                'admin@example.com')
+        User.by_username('test-user').set_pref('email_address',
+                                               'user@example.com')
         r = self.app.get('/u/test-user/profile',
                          status=200)
         assert '<a href="send_message">Send me a message</a>' in r
 
         r = self.app.get('/u/test-user/profile',
-                     extra_environ={'username': '*anonymous'},
-                     status=200)
+                         extra_environ={'username': '*anonymous'},
+                         status=200)
 
         assert '<a href="send_message">Send me a message</a>' not in r
 
     @td.with_user_project('test-user')
     def test_disable_user_messages(self):
-        User.by_username('test-admin').set_pref('email_address', 'admin@example.com')
+        User.by_username('test-admin').set_pref('email_address',
+                                                'admin@example.com')
         test_user = User.by_username('test-user')
         test_user.set_pref('email_address', 'user@example.com')
         test_user.set_pref('disable_user_messages', True)
         r = self.app.get('/u/test-user/profile')
         assert '<a href="send_message">Send me a message</a>' not in r
         r = self.app.get('/u/test-user/profile/send_message', status=302)
-        assert 'This user has disabled direct email messages' in self.webflash(r)
+        assert 'This user has disabled direct email messages' in self.webflash(
+            r)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_artifact.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_artifact.py b/Allura/allura/tests/model/test_artifact.py
index 6609a3c..6b2ec66 100644
--- a/Allura/allura/tests/model/test_artifact.py
+++ b/Allura/allura/tests/model/test_artifact.py
@@ -41,22 +41,29 @@ from allura.websetup.schema import REGISTRY
 from alluratest.controller import setup_basic_test, setup_unit_test
 from forgewiki import model as WM
 
+
 class Checkmessage(M.Message):
+
     class __mongometa__:
-        name='checkmessage'
+        name = 'checkmessage'
+
     def url(self):
         return ''
+
     def __init__(self, **kw):
         super(Checkmessage, self).__init__(**kw)
         if self.slug is not None and self.full_slug is None:
-            self.full_slug = datetime.utcnow().strftime('%Y%m%d%H%M%S') + ':' + self.slug
+            self.full_slug = datetime.utcnow().strftime(
+                '%Y%m%d%H%M%S') + ':' + self.slug
 Mapper.compile_all()
 
+
 def setUp():
     setup_basic_test()
     setup_unit_test()
     setup_with_tools()
 
+
 @td.with_wiki
 def setup_with_tools():
     h.set_context('test', 'wiki', neighborhood='Projects')
@@ -68,9 +75,11 @@ def setup_with_tools():
     Checkmessage.project = c.project
     Checkmessage.app_config = c.app.config
 
+
 def tearDown():
     ThreadLocalORMSession.close_all()
 
+
 @with_setup(setUp, tearDown)
 def test_artifact():
     pg = WM.Page(title='TestPage1')
@@ -103,13 +112,14 @@ def test_artifact():
     assert 'TestPage' in pg.shorthand_id()
     assert pg.link_text() == pg.shorthand_id()
 
+
 @with_setup(setUp, tearDown)
 def test_artifactlink():
     pg = WM.Page(title='TestPage2')
     q = M.Shortlink.query.find(dict(
-            project_id=c.project._id,
-            app_config_id=c.app.config._id,
-            link=pg.shorthand_id()))
+        project_id=c.project._id,
+        app_config_id=c.app.config._id,
+        link=pg.shorthand_id()))
     assert q.count() == 0
     ThreadLocalORMSession.flush_all()
     M.MonQTask.run_ready()
@@ -130,19 +140,26 @@ def test_artifactlink():
     ThreadLocalORMSession.flush_all()
     assert q.count() == 0
 
+
 @with_setup(setUp, tearDown)
 def test_gen_messageid():
-    assert re.match(r'[0-9a-zA-Z]*.wiki@test.p.sourceforge.net', h.gen_message_id())
+    assert re.match(r'[0-9a-zA-Z]*.wiki@test.p.sourceforge.net',
+                    h.gen_message_id())
+
 
 @with_setup(setUp, tearDown)
 def test_gen_messageid_with_id_set():
     oid = ObjectId()
-    assert re.match(r'%s.wiki@test.p.sourceforge.net' % str(oid), h.gen_message_id(oid))
+    assert re.match(r'%s.wiki@test.p.sourceforge.net' %
+                    str(oid), h.gen_message_id(oid))
+
 
 @with_setup(setUp, tearDown)
 def test_artifact_messageid():
     p = WM.Page(title='T')
-    assert re.match(r'%s.wiki@test.p.sourceforge.net' % str(p._id), p.message_id())
+    assert re.match(r'%s.wiki@test.p.sourceforge.net' %
+                    str(p._id), p.message_id())
+
 
 @with_setup(setUp, tearDown)
 def test_versioning():
@@ -177,6 +194,6 @@ def test_versioning():
 def test_messages_unknown_lookup():
     from bson import ObjectId
     m = Checkmessage()
-    m.author_id = ObjectId() # something new
+    m.author_id = ObjectId()  # something new
     assert type(m.author()) == M.User, type(m.author())
     assert m.author() == M.User.anonymous()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_auth.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_auth.py b/Allura/allura/tests/model/test_auth.py
index 98542e8..5dadb07 100644
--- a/Allura/allura/tests/model/test_auth.py
+++ b/Allura/allura/tests/model/test_auth.py
@@ -41,16 +41,20 @@ def setUp():
     ThreadLocalORMSession.close_all()
     setup_global_objects()
 
+
 @with_setup(setUp)
 def test_password_encoder():
     # Verify salt
-    ep = plugin.LocalAuthenticationProvider(Request.blank('/'))._encode_password
+    ep = plugin.LocalAuthenticationProvider(
+        Request.blank('/'))._encode_password
     assert ep('test_pass') != ep('test_pass')
     assert ep('test_pass', '0000') == ep('test_pass', '0000')
 
+
 @with_setup(setUp)
 def test_email_address():
-    addr = M.EmailAddress(_id='test_admin@sf.net', claimed_by_user_id=c.user._id)
+    addr = M.EmailAddress(_id='test_admin@sf.net',
+                          claimed_by_user_id=c.user._id)
     ThreadLocalORMSession.flush_all()
     assert addr.claimed_by_user() == c.user
     addr2 = M.EmailAddress.upsert('test@sf.net')
@@ -66,13 +70,15 @@ def test_email_address():
     c.user.claim_address('test@SF.NET')
     assert 'test@sf.net' in c.user.email_addresses
 
+
 @with_setup(setUp)
 def test_openid():
     oid = M.OpenId.upsert('http://google.com/accounts/1', 'My Google OID')
     oid.claimed_by_user_id = c.user._id
     ThreadLocalORMSession.flush_all()
     assert oid.claimed_by_user() is c.user
-    assert M.OpenId.upsert('http://google.com/accounts/1', 'My Google OID') is oid
+    assert M.OpenId.upsert(
+        'http://google.com/accounts/1', 'My Google OID') is oid
     ThreadLocalORMSession.flush_all()
     assert oid is c.user.openid_object(oid._id)
     c.user.claim_openid('http://google.com/accounts/2')
@@ -80,18 +86,21 @@ def test_openid():
     assert oid2._id in c.user.open_ids
     ThreadLocalORMSession.flush_all()
 
+
 @td.with_user_project('test-admin')
 @with_setup(setUp)
 def test_user():
     assert c.user.url() .endswith('/u/test-admin/')
     assert c.user.script_name .endswith('/u/test-admin/')
-    assert_equal(set(p.shortname for p in c.user.my_projects()), set(['test', 'test2', 'u/test-admin', 'adobe-1', '--init--']))
+    assert_equal(set(p.shortname for p in c.user.my_projects()),
+                 set(['test', 'test2', 'u/test-admin', 'adobe-1', '--init--']))
     # delete one of the projects and make sure it won't appear in my_projects()
     p = M.Project.query.get(shortname='test2')
     p.deleted = True
-    assert_equal(set(p.shortname for p in c.user.my_projects()), set(['test', 'u/test-admin', 'adobe-1', '--init--']))
+    assert_equal(set(p.shortname for p in c.user.my_projects()),
+                 set(['test', 'u/test-admin', 'adobe-1', '--init--']))
     u = M.User.register(dict(
-            username='nosetest-user'))
+        username='nosetest-user'))
     ThreadLocalORMSession.flush_all()
     assert_equal(u.private_project().shortname, 'u/nosetest-user')
     roles = g.credentials.user_roles(
@@ -105,6 +114,7 @@ def test_user():
     assert provider._validate_password(u, 'foobar')
     assert not provider._validate_password(u, 'foo')
 
+
 @with_setup(setUp)
 def test_user_project_creates_on_demand():
     u = M.User.register(dict(username='foobar123'), make_project=False)
@@ -113,6 +123,7 @@ def test_user_project_creates_on_demand():
     assert u.private_project()
     assert M.Project.query.get(shortname='u/foobar123')
 
+
 @with_setup(setUp)
 def test_user_project_already_deleted_creates_on_demand():
     u = M.User.register(dict(username='foobar123'), make_project=True)
@@ -124,13 +135,16 @@ def test_user_project_already_deleted_creates_on_demand():
     ThreadLocalORMSession.flush_all()
     assert M.Project.query.get(shortname='u/foobar123', deleted=False)
 
+
 @with_setup(setUp)
 def test_user_project_does_not_create_on_demand_for_disabled_user():
-    u = M.User.register(dict(username='foobar123', disabled=True), make_project=False)
+    u = M.User.register(
+        dict(username='foobar123', disabled=True), make_project=False)
     ThreadLocalORMSession.flush_all()
     assert not u.private_project()
     assert not M.Project.query.get(shortname='u/foobar123')
 
+
 @with_setup(setUp)
 def test_user_project_does_not_create_on_demand_for_anonymous_user():
     u = M.User.anonymous()
@@ -139,6 +153,7 @@ def test_user_project_does_not_create_on_demand_for_anonymous_user():
     assert not M.Project.query.get(shortname='u/anonymous')
     assert not M.Project.query.get(shortname='u/*anonymous')
 
+
 @with_setup(setUp)
 def test_user_project_does_not_create_on_demand_for_openid_user():
     u = M.User.register({'username': ''}, make_project=False)
@@ -148,6 +163,7 @@ def test_user_project_does_not_create_on_demand_for_openid_user():
     assert not M.Project.query.get(shortname='u/anonymous')
     assert not M.Project.query.get(shortname='u/*anonymous')
 
+
 @with_setup(setUp)
 def test_project_role():
     role = M.ProjectRole(project_id=c.project._id, name='test_role')
@@ -162,12 +178,13 @@ def test_project_role():
         pr.special
         assert pr.user in (c.user, None, M.User.anonymous())
 
+
 @with_setup(setUp)
 def test_default_project_roles():
     roles = dict(
         (pr.name, pr)
         for pr in M.ProjectRole.query.find(dict(
-                project_id=c.project._id)).all()
+            project_id=c.project._id)).all()
         if pr.name)
     assert 'Admin' in roles.keys(), roles.keys()
     assert 'Developer' in roles.keys(), roles.keys()
@@ -180,6 +197,7 @@ def test_default_project_roles():
     assert len(roles) == M.ProjectRole.query.find(dict(
         project_id=c.project._id)).count() - 1
 
+
 @with_setup(setUp)
 def test_dup_api_token():
     from ming.orm import session
@@ -193,7 +211,9 @@ def test_dup_api_token():
         assert False, "Entry with duplicate unique key was inserted"
     except DuplicateKeyError:
         pass
-    assert len(M.ApiToken.query.find().all()) == 1, "Duplicate entries with unique key found"
+    assert len(M.ApiToken.query.find().all()
+               ) == 1, "Duplicate entries with unique key found"
+
 
 @with_setup(setUp)
 def test_openid_claimed_by_user():
@@ -203,9 +223,11 @@ def test_openid_claimed_by_user():
     ThreadLocalORMSession.flush_all()
     assert oid.claimed_by_user() is None
 
+
 @with_setup(setUp)
 def test_email_address_claimed_by_user():
-    addr = M.EmailAddress(_id='test_admin@sf.net', claimed_by_user_id=c.user._id)
+    addr = M.EmailAddress(_id='test_admin@sf.net',
+                          claimed_by_user_id=c.user._id)
     c.user.disabled = True
     ThreadLocalORMSession.flush_all()
     assert addr.claimed_by_user() is None
@@ -214,8 +236,10 @@ def test_email_address_claimed_by_user():
 @td.with_user_project('test-admin')
 @with_setup(setUp)
 def test_user_projects_by_role():
-    assert_equal(set(p.shortname for p in c.user.my_projects()), set(['test', 'test2', 'u/test-admin', 'adobe-1', '--init--']))
-    assert_equal(set(p.shortname for p in c.user.my_projects('Admin')), set(['test', 'test2', 'u/test-admin', 'adobe-1', '--init--']))
+    assert_equal(set(p.shortname for p in c.user.my_projects()),
+                 set(['test', 'test2', 'u/test-admin', 'adobe-1', '--init--']))
+    assert_equal(set(p.shortname for p in c.user.my_projects('Admin')),
+                 set(['test', 'test2', 'u/test-admin', 'adobe-1', '--init--']))
     # Remove admin access from c.user to test2 project
     project = M.Project.query.get(shortname='test2')
     admin_role = M.ProjectRole.by_name('Admin', project)
@@ -225,8 +249,11 @@ def test_user_projects_by_role():
     user_role.roles.append(developer_role._id)
     ThreadLocalORMSession.flush_all()
     g.credentials.clear()
-    assert_equal(set(p.shortname for p in c.user.my_projects()), set(['test', 'test2', 'u/test-admin', 'adobe-1', '--init--']))
-    assert_equal(set(p.shortname for p in c.user.my_projects('Admin')), set(['test', 'u/test-admin', 'adobe-1', '--init--']))
+    assert_equal(set(p.shortname for p in c.user.my_projects()),
+                 set(['test', 'test2', 'u/test-admin', 'adobe-1', '--init--']))
+    assert_equal(set(p.shortname for p in c.user.my_projects('Admin')),
+                 set(['test', 'u/test-admin', 'adobe-1', '--init--']))
+
 
 @patch.object(g, 'user_message_max_messages', 3)
 def test_check_sent_user_message_times():
@@ -238,5 +265,5 @@ def test_check_sent_user_message_times():
     assert user1.can_send_user_message()
     assert_equal(len(user1.sent_user_message_times), 2)
     user1.sent_user_message_times.append(
-            datetime.utcnow() - timedelta(minutes=15))
+        datetime.utcnow() - timedelta(minutes=15))
     assert not user1.can_send_user_message()

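The whitespace-only hunks above are typical pycodestyle E12x fixes: continuation
lines realigned under the opening bracket, and long calls wrapped. The commit
does not name the tool that produced them, so the following is only a minimal
sketch, assuming the autopep8 package is installed, of how the same kind of
realignment can be generated programmatically:

    # Minimal sketch; autopep8 is an assumption, not a tool named by the
    # commit. It realigns under-indented continuation lines (E127/E128)
    # in the same way as the hunks above.
    import autopep8

    source = (
        "r = self.app.get('/u/test-user/profile/send_message',\n"
        "             extra_environ={'username': '*anonymous'},\n"
        "             status=302)\n"
    )

    # fix_code() returns the reformatted source as a string.
    print(autopep8.fix_code(source, options={'select': ['E12']}))
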

[34/36] git commit: [#6484] ticket:492 Move wiki_from_trac script to tracwikiimporter

Posted by jo...@apache.org.
[#6484] ticket:492 Move wiki_from_trac script to tracwikiimporter


Project: http://git-wip-us.apache.org/repos/asf/incubator-allura/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-allura/commit/2050da06
Tree: http://git-wip-us.apache.org/repos/asf/incubator-allura/tree/2050da06
Diff: http://git-wip-us.apache.org/repos/asf/incubator-allura/diff/2050da06

Branch: refs/heads/cj/6484
Commit: 2050da061d28644a56c1ed3f005d46cb099349d9
Parents: 4f9f216
Author: Igor Bondarenko <je...@gmail.com>
Authored: Thu Jan 2 10:44:27 2014 +0200
Committer: Cory Johns <cj...@slashdotmedia.com>
Committed: Fri Jan 10 18:57:16 2014 +0000

----------------------------------------------------------------------
 ForgeWiki/forgewiki/scripts/__init__.py         |  16 --
 .../scripts/wiki_from_trac/__init__.py          |  18 --
 .../scripts/wiki_from_trac/extractors.py        | 244 -------------------
 .../forgewiki/scripts/wiki_from_trac/loaders.py |  74 ------
 .../scripts/wiki_from_trac/wiki_from_trac.py    |  82 -------
 scripts/allura_import.py                        |   2 +-
 scripts/wiki-export.py                          |   2 +-
 7 files changed, 2 insertions(+), 436 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/2050da06/ForgeWiki/forgewiki/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/__init__.py b/ForgeWiki/forgewiki/scripts/__init__.py
deleted file mode 100644
index 144e298..0000000
--- a/ForgeWiki/forgewiki/scripts/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/2050da06/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py b/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py
deleted file mode 100644
index 625362c..0000000
--- a/ForgeWiki/forgewiki/scripts/wiki_from_trac/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-from .wiki_from_trac import WikiFromTrac

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/2050da06/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py b/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py
deleted file mode 100644
index 7f146e6..0000000
--- a/ForgeWiki/forgewiki/scripts/wiki_from_trac/extractors.py
+++ /dev/null
@@ -1,244 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import logging
-import re
-import sys
-import json
-import traceback
-from urllib import quote, unquote
-from urlparse import urljoin, urlsplit
-
-try:
-    from forgeimporters.base import ProjectExtractor
-    urlopen = ProjectExtractor.urlopen
-except ImportError:
-    try:
-        from allura.lib.helpers import urlopen
-    except ImportError:
-        from urllib2 import urlopen
-
-try:
-    # Ignore this import if the html2text package is not installed
-    import html2text
-except ImportError:
-    pass
-
-from BeautifulSoup import BeautifulSoup
-
-log = logging.getLogger(__name__)
-
-
-class WikiExporter(object):
-
-    PAGE_LIST_URL = 'wiki/TitleIndex'
-    PAGE_URL = 'wiki/%s'
-    CONTENT_DIV_ATTRS = {'class': 'wikipage searchable'}
-    EXCLUDE_PAGES = [
-        'CamelCase',
-        'InterMapTxt',
-        'InterTrac',
-        'InterWiki',
-        'PageTemplates',
-        'SandBox',
-        'TitleIndex',
-        'TracAccessibility',
-        'TracAdmin',
-        'TracBackup',
-        'TracBrowser',
-        'TracChangeset',
-        'TracEnvironment',
-        'TracFineGrainedPermissions',
-        'TracGuide',
-        'TracImport',
-        'TracIni',
-        'TracInterfaceCustomization',
-        'TracLinks',
-        'TracLogging',
-        'TracNavigation',
-        'TracNotification',
-        'TracPermissions',
-        'TracPlugins',
-        'TracQuery',
-        'TracReports',
-        'TracRevisionLog',
-        'TracRoadmap',
-        'TracRss',
-        'TracSearch',
-        'TracSupport',
-        'TracSyntaxColoring',
-        'TracTickets',
-        'TracTicketsCustomFields',
-        'TracTimeline',
-        'TracUnicode',
-        'TracWiki',
-        'TracWorkflow',
-        'WikiDeletePage',
-        'WikiFormatting',
-        'WikiHtml',
-        'WikiMacros',
-        'WikiNewPage',
-        'WikiPageNames',
-        'WikiProcessors',
-        'WikiRestructuredText',
-        'WikiRestructuredTextLinks',
-        'RecentChanges',
-    ]
-    RENAME_PAGES = {
-        'WikiStart': 'Home',  # Change the start page name to Home
-        'Home': 'WikiStart',  # Rename the Home page to WikiStart
-    }
-
-    def __init__(self, base_url, options):
-        self.base_url = base_url
-        self.options = options
-
-    def export(self, out):
-        pages = []
-        for title in self.page_list():
-            try:
-                pages.append(self.get_page(title))
-            except:
-                self.log('Cannot fetch page %s. Skipping' % title)
-                self.log(traceback.format_exc())
-                continue
-        out.write(json.dumps(pages, indent=2, sort_keys=True))
-        out.write('\n')
-
-    def log(self, msg):
-        log.info(msg)
-        if self.options.verbose:
-            print >>sys.stderr, msg
-
-    def url(self, suburl, type=None):
-        url = urljoin(self.base_url, suburl)
-        if type is None:
-            return url
-        glue = '&' if '?' in suburl else '?'
-        return url + glue + 'format=' + type
-
-    def fetch(self, url):
-        return urlopen(url)
-
-    def page_list(self):
-        url = urljoin(self.base_url, self.PAGE_LIST_URL)
-        self.log('Fetching list of pages from %s' % url)
-        r = self.fetch(url)
-        html = BeautifulSoup(r)
-        pages = html.find('div', attrs=self.CONTENT_DIV_ATTRS) \
-                    .find('ul').findAll('li')
-        pages = [page.find('a').text
-                 for page in pages
-                 if page.find('a')
-                 and page.find('a').text not in self.EXCLUDE_PAGES]
-        # Remove duplicate entries by converting page list to a set.
-        # As we're going to fetch all listed pages,
-        # it's safe to destroy the original order of pages.
-        return set(pages)
-
-    def get_page(self, title):
-        title = quote(title)
-        convert_method = '_get_page_' + self.options.converter
-        content = getattr(self, convert_method)(title)
-        page = {
-            'title': self.convert_title(title),
-            'text': self.convert_content(content),
-            'labels': '',
-        }
-        return page
-
-    def _get_page_html2text(self, title):
-        url = self.url(self.PAGE_URL % title)
-        self.log('Fetching page %s' % url)
-        r = self.fetch(url)
-        html = BeautifulSoup(r)
-        return html.find('div', attrs=self.CONTENT_DIV_ATTRS)
-
-    def _get_page_regex(self, title):
-        url = self.url(self.PAGE_URL % title, 'txt')
-        self.log('Fetching page %s' % url)
-        r = self.fetch(url)
-        return r
-
-    def convert_title(self, title):
-        title = self.RENAME_PAGES.get(title, title)
-        title = title.replace('/', '-')  # Handle subpages
-        title = title.rstrip('?')  # Links to non-existent pages ends with '?'
-        return title
-
-    def convert_content(self, content):
-        convert_method = '_convert_content_' + self.options.converter
-        return getattr(self, convert_method)(content)
-
-    def _convert_wiki_toc_to_markdown(self, content):
-        """
-        Removes contents of div.wiki-toc elements and replaces them with
-        the '[TOC]' markdown macro.
-        """
-        for toc in content('div', attrs={'class': 'wiki-toc'}):
-            toc.string = '[TOC]'
-        return content
-
-    def _convert_content_html2text(self, content):
-        html2text.BODY_WIDTH = 0  # Don't wrap lines
-        content = self._convert_wiki_toc_to_markdown(content)
-        content = html2text.html2text(unicode(content))
-        # Convert internal links
-        internal_url = urlsplit(self.base_url).path + 'wiki/'
-        internal_link_re = r'\[([^]]+)\]\(%s([^)]*)\)' % internal_url
-        internal_link = re.compile(internal_link_re, re.UNICODE)
-
-        def sub(match):
-            caption = match.group(1)
-            page = self.convert_title(match.group(2))
-            if caption == page:
-                link = '[%s]' % unquote(page)
-            else:
-                link = '[%s](%s)' % (caption, page)
-            return link
-        return internal_link.sub(sub, content)
-
-    def _convert_content_regex(self, text):
-        # https://gist.github.com/sgk/1286682
-        text = re.sub('\r\n', '\n', text)
-        text = re.sub(r'{{{(.*?)}}}', r'`\1`', text)
-
-        def indent4(m):
-            return '\n    ' + m.group(1).replace('\n', '\n    ')
-
-        text = re.sub(r'(?sm){{{\n(.*?)\n}}}', indent4, text)
-        text = re.sub(r'(?m)^====\s+(.*?)\s+====$', r'#### \1', text)
-        text = re.sub(r'(?m)^===\s+(.*?)\s+===$', r'### \1', text)
-        text = re.sub(r'(?m)^==\s+(.*?)\s+==$', r'## \1', text)
-        text = re.sub(r'(?m)^=\s+(.*?)\s+=$', r'# \1', text)
-        text = re.sub(r'^       * ', r'****', text)
-        text = re.sub(r'^     * ', r'***', text)
-        text = re.sub(r'^   * ', r'**', text)
-        text = re.sub(r'^ * ', r'*', text)
-        text = re.sub(r'^ \d+. ', r'1.', text)
-        a = []
-        for line in text.split('\n'):
-            if not line.startswith('    '):
-                line = re.sub(
-                    r'\[(https?://[^\s\[\]]+)\s([^\[\]]+)\]', r'[\2](\1)', line)
-                line = re.sub(r'\[(wiki:[^\s\[\]]+)\s([^\[\]]+)\]',
-                              r'[\2](/\1/)', line)
-                line = re.sub(r'\!(([A-Z][a-z0-9]+){2,})', r'\1', line)
-                line = re.sub(r'\'\'\'(.*?)\'\'\'', r'*\1*', line)
-                line = re.sub(r'\'\'(.*?)\'\'', r'_\1_', line)
-            a.append(line)
-        return '\n'.join(a)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/2050da06/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py b/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py
deleted file mode 100644
index 45d056c..0000000
--- a/ForgeWiki/forgewiki/scripts/wiki_from_trac/loaders.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import json
-from optparse import OptionParser
-
-from allura.lib.import_api import AlluraImportApiClient
-
-
-def load_data(doc_file_name=None, optparser=None, options=None):
-    import_options = {}
-    for s in options.import_opts:
-        k, v = s.split('=', 1)
-        if v == 'false':
-            v = False
-        import_options[k] = v
-
-    user_map = {}
-    if options.user_map_file:
-        f = open(options.user_map_file)
-        try:
-            user_map = json.load(f)
-            if type(user_map) is not type({}):
-                raise ValueError
-            for k, v in user_map.iteritems():
-                print k, v
-                if not isinstance(k, basestring) or not isinstance(v, basestring):
-                    raise ValueError
-        except ValueError:
-            optparser.error(
-                '--user-map should specify JSON file with format {"original_user": "sf_user", ...}')
-        finally:
-            f.close()
-
-    import_options['user_map'] = user_map
-
-    cli = AlluraImportApiClient(
-        options.base_url, options.api_key, options.secret_key, options.verbose)
-    doc_txt = open(doc_file_name).read()
-
-    if options.wiki:
-        import_wiki(cli, options.project, options.wiki, options, doc_txt)
-
-
-def import_wiki(cli, project, tool, options, doc_txt):
-    url = '/rest/p/' + project + '/' + tool
-    doc = json.loads(doc_txt)
-    if 'wiki' in doc and 'default' in doc['wiki'] and 'artifacts' in doc['wiki']['default']:
-        pages = doc['trackers']['default']['artifacts']
-    else:
-        pages = doc
-    if options.verbose:
-        print "Processing %d pages" % len(pages)
-    for page in pages:
-        title = page.pop('title').encode('utf-8')
-        page['text'] = page['text'].encode('utf-8')
-        page['labels'] = page['labels'].encode('utf-8')
-        r = cli.call(url + '/' + title, **page)
-        assert r == {}
-        print 'Imported wiki page %s' % title

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/2050da06/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py b/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py
deleted file mode 100644
index afc6f41..0000000
--- a/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import argparse
-import logging
-from tempfile import NamedTemporaryFile
-from tg.decorators import cached_property
-
-from forgewiki.scripts.wiki_from_trac.extractors import WikiExporter
-from forgewiki.scripts.wiki_from_trac.loaders import load_data
-
-from allura.scripts import ScriptTask
-
-
-log = logging.getLogger(__name__)
-
-
-class WikiFromTrac(ScriptTask):
-
-    """Import Trac Wiki to Allura Wiki"""
-    @classmethod
-    def parser(cls):
-        parser = argparse.ArgumentParser(description='Import wiki from'
-                                         'Trac to allura wiki')
-
-        parser.add_argument('trac_url', type=str, help='Trac URL')
-        parser.add_argument('-a', '--api-ticket',
-                            dest='api_key', help='API ticket')
-        parser.add_argument('-s', '--secret-key',
-                            dest='secret_key', help='Secret key')
-        parser.add_argument('-p', '--project', dest='project',
-                            help='Project to import to')
-        parser.add_argument('-t', '--tracker', dest='tracker',
-                            help='Tracker to import to')
-        parser.add_argument('-f', '--forum', dest='forum',
-                            help='Forum tool to import to')
-        parser.add_argument('-w', '--wiki', dest='wiki',
-                            help='Wiki tool to import to')
-        parser.add_argument('-u', '--base-url', dest='base_url',
-                            default='https://sourceforge.net', help='Base Allura (%(default)s for default)')
-        parser.add_argument('-o', dest='import_opts',
-                            default=[], action='append', help='Specify import option(s)', metavar='opt=val')
-        parser.add_argument('--user-map', dest='user_map_file',
-                            help='Map original users to SF.net users', metavar='JSON_FILE')
-        parser.add_argument('--validate', dest='validate',
-                            action='store_true', help='Validate import data')
-        parser.add_argument('-v', '--verbose', dest='verbose',
-                            action='store_true', help='Verbose operation')
-        parser.add_argument('-c', '--continue', dest='cont',
-                            action='store_true', help='Continue import into existing tracker')
-        parser.add_argument('-C', '--converter', dest='converter',
-                            default='html2text',
-                            help='Converter to use on wiki text. '
-                                 'Available options: '
-                                 'html2text (default) or regex')
-
-        return parser
-
-    @classmethod
-    def execute(cls, options):
-        with NamedTemporaryFile() as f:
-            WikiExporter(options.trac_url, options).export(f)
-            f.flush()
-            load_data(f.name, cls.parser(), options)
-
-
-if __name__ == '__main__':
-    WikiFromTrac.main()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/2050da06/scripts/allura_import.py
----------------------------------------------------------------------
diff --git a/scripts/allura_import.py b/scripts/allura_import.py
index 0f2f715..56bc5d5 100644
--- a/scripts/allura_import.py
+++ b/scripts/allura_import.py
@@ -20,7 +20,7 @@ from optparse import OptionParser
 
 from allura.lib.import_api import AlluraImportApiClient
 from forgetracker.scripts.import_tracker import import_tracker
-from forgewiki.scripts.wiki_from_trac.loaders import import_wiki
+from tracwikiimporter.scripts.wiki_from_trac.loaders import import_wiki
 
 
 def main():

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/2050da06/scripts/wiki-export.py
----------------------------------------------------------------------
diff --git a/scripts/wiki-export.py b/scripts/wiki-export.py
index e096949..3c51ed2 100755
--- a/scripts/wiki-export.py
+++ b/scripts/wiki-export.py
@@ -22,7 +22,7 @@ import json
 import sys
 from optparse import OptionParser
 
-from forgewiki.scripts.wiki_from_trac.extractors import WikiExporter
+from tracwikiimporter.scripts.wiki_from_trac.extractors import WikiExporter
 
 
 def parse_options():

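For scripts outside this repository that still import these helpers from
ForgeWiki, the corresponding change is a one-line import update, as the two
hunks above show (a minimal sketch, assuming the tracwikiimporter package is
installed and keeps the same module layout):

    # Old locations (removed from ForgeWiki by this commit):
    #   from forgewiki.scripts.wiki_from_trac.loaders import import_wiki
    #   from forgewiki.scripts.wiki_from_trac.extractors import WikiExporter
    # New locations, matching the updated allura_import.py and wiki-export.py:
    from tracwikiimporter.scripts.wiki_from_trac.loaders import import_wiki
    from tracwikiimporter.scripts.wiki_from_trac.extractors import WikiExporter
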

[13/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/model/forum.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/model/forum.py b/ForgeDiscussion/forgediscussion/model/forum.py
index c13bb57..1d74375 100644
--- a/ForgeDiscussion/forgediscussion/model/forum.py
+++ b/ForgeDiscussion/forgediscussion/model/forum.py
@@ -34,9 +34,11 @@ config = utils.ConfigProxy(
 
 log = logging.getLogger(__name__)
 
+
 class Forum(M.Discussion):
+
     class __mongometa__:
-        name='forum'
+        name = 'forum'
     type_s = 'Discussion'
 
     parent_id = FieldProperty(schema.ObjectId, if_missing=None)
@@ -74,14 +76,15 @@ class Forum(M.Discussion):
 
     @property
     def email_address(self):
-        domain = '.'.join(reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
+        domain = '.'.join(
+            reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
         return '%s@%s%s' % (self.shortname.replace('/', '.'), domain, config.common_suffix)
 
     @LazyProperty
     def announcements(self):
         return self.thread_class().query.find(dict(
-                app_config_id=self.app_config_id,
-                flags='Announcement')).all()
+            app_config_id=self.app_config_id,
+            flags='Announcement')).all()
 
     def breadcrumbs(self):
         if self.parent:
@@ -113,12 +116,14 @@ class Forum(M.Discussion):
             subject = data['headers'].get('Subject', subject)
         if parent_id is not None:
             parent = self.post_class().query.get(_id=parent_id)
-            if parent: return parent.thread, parent_id
+            if parent:
+                return parent.thread, parent_id
         if message_id:
             post = self.post_class().query.get(_id=message_id)
-            if post: return post.thread, None
+            if post:
+                return post.thread, None
         # Otherwise it's a new thread
-        return self.thread_class()(discussion_id=self._id,subject=subject), None
+        return self.thread_class()(discussion_id=self._id, subject=subject), None
 
     @property
     def discussion_thread(self):
@@ -138,12 +143,15 @@ class Forum(M.Discussion):
                     self.app.config.options.mount_point)))
         return super(Forum, self).get_mail_footer(notification, toaddr)
 
+
 class ForumFile(M.File):
-    forum_id=FieldProperty(schema.ObjectId)
+    forum_id = FieldProperty(schema.ObjectId)
+
 
 class ForumThread(M.Thread):
+
     class __mongometa__:
-        name='forum_thread'
+        name = 'forum_thread'
         indexes = [
             'flags',
             'discussion_id',
@@ -178,7 +186,8 @@ class ForumThread(M.Thread):
         return self
 
     def post(self, subject, text, message_id=None, parent_id=None, **kw):
-        post = super(ForumThread, self).post(text, message_id=message_id, parent_id=parent_id, **kw)
+        post = super(ForumThread, self).post(
+            text, message_id=message_id, parent_id=parent_id, **kw)
         if not self.first_post_id:
             self.first_post_id = post._id
             self.num_replies = 1
@@ -188,22 +197,25 @@ class ForumThread(M.Thread):
     def set_forum(self, new_forum):
         self.post_class().query.update(
             dict(discussion_id=self.discussion_id, thread_id=self._id),
-            {'$set':dict(discussion_id=new_forum._id)}, multi=True)
+            {'$set': dict(discussion_id=new_forum._id)}, multi=True)
         self.attachment_class().query.update(
-            {'discussion_id':self.discussion_id, 'thread_id':self._id},
-            {'$set':dict(discussion_id=new_forum._id)})
+            {'discussion_id': self.discussion_id, 'thread_id': self._id},
+            {'$set': dict(discussion_id=new_forum._id)})
         self.discussion_id = new_forum._id
 
 
 class ForumPostHistory(M.PostHistory):
+
     class __mongometa__:
-        name='post_history'
+        name = 'post_history'
 
     artifact_id = ForeignIdProperty('ForumPost')
 
+
 class ForumPost(M.Post):
+
     class __mongometa__:
-        name='forum_post'
+        name = 'forum_post'
         history_class = ForumPostHistory
         indexes = [
             'timestamp',  # for the posts_24hr site_stats query
@@ -252,32 +264,35 @@ class ForumPost(M.Post):
         # Set the thread ID on my replies and attachments
         old_slug = self.slug + '/', self.full_slug + '/'
         reply_re = re.compile(self.slug + '/.*')
-        self.slug, self.full_slug = self.make_slugs(parent=parent, timestamp=self.timestamp)
+        self.slug, self.full_slug = self.make_slugs(
+            parent=parent, timestamp=self.timestamp)
         placeholder.text = 'Discussion moved to [here](%s#post-%s)' % (
             thread.url(), self.slug)
         new_slug = self.slug + '/', self.full_slug + '/'
-        self.discussion_id=thread.discussion_id
-        self.thread_id=thread._id
-        self.parent_id=new_parent_id
+        self.discussion_id = thread.discussion_id
+        self.thread_id = thread._id
+        self.parent_id = new_parent_id
         self.text = 'Discussion moved from [here](%s#post-%s)\n\n%s' % (
             placeholder.thread.url(), placeholder.slug, self.text)
         reply_tree = self.query.find(dict(slug=reply_re)).all()
         for post in reply_tree:
             post.slug = new_slug[0] + post.slug[len(old_slug[0]):]
             post.full_slug = new_slug[1] + post.slug[len(old_slug[1]):]
-            post.discussion_id=self.discussion_id
-            post.thread_id=self.thread_id
-        for post in [ self ] + reply_tree:
+            post.discussion_id = self.discussion_id
+            post.thread_id = self.thread_id
+        for post in [self] + reply_tree:
             for att in post.attachments:
-                att.discussion_id=self.discussion_id
-                att.thread_id=self.thread_id
+                att.discussion_id = self.discussion_id
+                att.thread_id = self.thread_id
+
 
 class ForumAttachment(M.DiscussionAttachment):
-    DiscussionClass=Forum
-    ThreadClass=ForumThread
-    PostClass=ForumPost
+    DiscussionClass = Forum
+    ThreadClass = ForumThread
+    PostClass = ForumPost
+
     class __mongometa__:
-        polymorphic_identity='ForumAttachment'
-    attachment_type=FieldProperty(str, if_missing='ForumAttachment')
+        polymorphic_identity = 'ForumAttachment'
+    attachment_type = FieldProperty(str, if_missing='ForumAttachment')
 
 Mapper.compile_all()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/tasks.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/tasks.py b/ForgeDiscussion/forgediscussion/tasks.py
index 3384ca2..9566818 100644
--- a/ForgeDiscussion/forgediscussion/tasks.py
+++ b/ForgeDiscussion/forgediscussion/tasks.py
@@ -22,6 +22,7 @@ from allura.lib.decorators import task
 
 log = logging.getLogger(__name__)
 
+
 @task
 def calc_forum_stats(shortname):
     from forgediscussion import model as DM
@@ -32,6 +33,7 @@ def calc_forum_stats(shortname):
         return
     forum.update_stats()
 
+
 @task
 def calc_thread_stats(thread_id):
     from forgediscussion import model as DM

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/tests/functional/test_forum.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/tests/functional/test_forum.py b/ForgeDiscussion/forgediscussion/tests/functional/test_forum.py
index 7980bee..e3b5682 100644
--- a/ForgeDiscussion/forgediscussion/tests/functional/test_forum.py
+++ b/ForgeDiscussion/forgediscussion/tests/functional/test_forum.py
@@ -38,6 +38,7 @@ from forgediscussion import model as FM
 
 log = logging.getLogger(__name__)
 
+
 class TestForumEmail(TestController):
 
     def setUp(self):
@@ -50,7 +51,7 @@ class TestForumEmail(TestController):
         r.forms[1].submit()
         r = self.app.get('/admin/discussion/forums')
         assert 'testforum' in r
-        self.email_address=c.user.email_addresses[0]
+        self.email_address = c.user.email_addresses[0]
         h.set_context('test', 'discussion', neighborhood='Projects')
         self.forum = FM.Forum.query.get(shortname='testforum')
 
@@ -58,7 +59,7 @@ class TestForumEmail(TestController):
         msg = MIMEText('This is a test message')
         self._post_email(
             self.email_address,
-            [ self.forum.email_address ],
+            [self.forum.email_address],
             'Test Simple Thread',
             msg)
         r = self.app.get('/p/test/discussion/testforum/')
@@ -69,10 +70,10 @@ class TestForumEmail(TestController):
             'alternative',
             _subparts=[
                 MIMEText('This is a test message'),
-                MIMEText('This is a <em>test</em> message', 'html') ])
+                MIMEText('This is a <em>test</em> message', 'html')])
         self._post_email(
             self.email_address,
-            [ self.forum.email_address ],
+            [self.forum.email_address],
             'Test Simple Thread',
             msg)
         r = self.app.get('/p/test/discussion/testforum/')
@@ -90,16 +91,17 @@ class TestForumEmail(TestController):
                     _subparts=[
                         MIMEText('This is a test message'),
                         MIMEText('This is a <em>test</em> message', 'html')
-                        ])
-                ])
+                    ])
+            ])
         with open(pkg_resources.resource_filename(
                 'forgediscussion', 'tests/data/python-logo.png'), 'rb') as fp:
             img = MIMEImage(fp.read())
-            img.add_header('Content-Disposition', 'attachment', filename='python-logo.png')
+            img.add_header('Content-Disposition', 'attachment',
+                           filename='python-logo.png')
             msg.attach(img)
         self._post_email(
             self.email_address,
-            [ self.forum.email_address ],
+            [self.forum.email_address],
             'Test Simple Thread',
             msg)
         r = self.app.get('/p/test/discussion/testforum/')
@@ -123,6 +125,7 @@ class TestForumEmail(TestController):
             data=msg.as_string())
         M.artifact_orm_session.flush()
 
+
 class TestForumAsync(TestController):
 
     def setUp(self):
@@ -144,7 +147,8 @@ class TestForumAsync(TestController):
 
     def test_has_access(self):
         assert False == c.app.has_access(M.User.anonymous(), 'testforum')
-        assert True == c.app.has_access(M.User.query.get(username='root'), 'testforum')
+        assert True == c.app.has_access(
+            M.User.query.get(username='root'), 'testforum')
 
     def test_post(self):
         self._post('testforum', 'Test Thread', 'Nothing here')
@@ -159,15 +163,17 @@ class TestForumAsync(TestController):
         posts = FM.ForumPost.query.find()
         assert_equal(posts.count(), 1)
         assert_equal(FM.ForumThread.query.get().num_replies, 1)
-        assert_equal(FM.ForumThread.query.get().first_post_id, 'test_reply@sf.net')
+        assert_equal(FM.ForumThread.query.get()
+                     .first_post_id, 'test_reply@sf.net')
 
         post = posts.first()
         self._post('testforum', 'Test Reply', 'Nothing here, either',
-                   message_id=post.thread.url()+post._id,
-                   in_reply_to=[ 'test_reply@sf.net' ])
+                   message_id=post.thread.url() + post._id,
+                   in_reply_to=['test_reply@sf.net'])
         assert_equal(FM.ForumThread.query.find().count(), 1)
         assert_equal(FM.ForumPost.query.find().count(), 2)
-        assert_equal(FM.ForumThread.query.get().first_post_id, 'test_reply@sf.net')
+        assert_equal(FM.ForumThread.query.get()
+                     .first_post_id, 'test_reply@sf.net')
 
     def test_attach(self):
         self._post('testforum', 'Attachment Thread', 'This is a text file',
@@ -189,7 +195,8 @@ class TestForumAsync(TestController):
         self.app.get('/discussion/testforum/thread/foobar/', status=404)
 
     def test_posts(self):
-        c.user = M.User.by_username('test-admin') # not sure why this fails when set to root (to match self.user_id)
+        # not sure why this fails when set to root (to match self.user_id)
+        c.user = M.User.by_username('test-admin')
         self._post('testforum', 'Test', 'test')
         thd = FM.ForumThread.query.find().first()
         thd_url = str('/discussion/testforum/thread/%s/' % thd._id)
@@ -197,28 +204,30 @@ class TestForumAsync(TestController):
         p = FM.ForumPost.query.find().first()
         url = str('/discussion/testforum/thread/%s/%s/' % (thd._id, p.slug))
         r = self.app.get(url)
-        f = r.html.find('form',{'action': '/p/test' + url})
+        f = r.html.find('form', {'action': '/p/test' + url})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params['subject'] = 'New Subject'
         params['text'] = 'Asdf'
         r = self.app.post(url, params=params)
         assert 'Asdf' in self.app.get(url)
         r = self.app.get(url, params=dict(version='1'))
-        post_form = r.html.find('form',{'action':'/p/test' + url + 'reply'})
+        post_form = r.html.find('form', {'action': '/p/test' + url + 'reply'})
         params = dict()
         inputs = post_form.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[post_form.find('textarea')['name']] = 'text'
         r = self.app.post(url + 'reply', params=params)
         self._post('testforum', 'Test Reply', 'Nothing here, either',
                    message_id='test_posts@sf.net',
-                   in_reply_to=[ p._id ])
+                   in_reply_to=[p._id])
         reply = FM.ForumPost.query.get(_id='test_posts@sf.net')
         r = self.app.get(thd_url + reply.slug + '/')
         # Check attachments
@@ -228,8 +237,9 @@ class TestForumAsync(TestController):
                           upload_files=[('file_info', 'test.asdfasdtxt',
                                          'This is a textfile')])
         r = self.app.post(url + 'attach',
-                          upload_files=[('file_info', 'test1.txt','This is a textfile'),
-                                        ('file_info', 'test2.txt','This is a textfile')])
+                          upload_files=[(
+                              'file_info', 'test1.txt', 'This is a textfile'),
+                              ('file_info', 'test2.txt', 'This is a textfile')])
         r = self.app.get(url)
         assert "test1.txt" in r
         assert "test2.txt" in r
@@ -242,8 +252,8 @@ class TestForumAsync(TestController):
                           params=dict(subject='New Thread', delete='', promote='on'))
         # Find new location
         r = self.app.get(url)
-        link = [ a for a in r.html.findAll('a')
-                 if a.renderContents() == 'here' ]
+        link = [a for a in r.html.findAll('a')
+                if a.renderContents() == 'here']
         url, slug = str(link[0]['href']).split('#')
         slug = slug.split('-')[-1]
         reply_slug = slug + str(reply.slug[4:])
@@ -265,6 +275,7 @@ class TestForumAsync(TestController):
                      message_id=message_id))
         M.artifact_orm_session.flush()
 
+
 class TestForum(TestController):
 
     def setUp(self):
@@ -299,7 +310,8 @@ class TestForum(TestController):
         r = self.app.get('/admin/discussion/forums')
         r.forms[1]['add_forum.shortname'] = 'tester'
         r.forms[1]['add_forum.name'] = 'Tester'
-        r.forms[1]['add_forum.description'] = '<a href="http://cnn.com">This is CNN</a>'
+        r.forms[1][
+            'add_forum.description'] = '<a href="http://cnn.com">This is CNN</a>'
         r.forms[1].submit()
         r = self.app.get('/discussion/')
         assert_equal(len(r.html.findAll('a', rel='nofollow')), 1)
@@ -315,13 +327,13 @@ class TestForum(TestController):
 
     def test_forum_subscribe(self):
         r = self.app.post('/discussion/subscribe', params={
-                'forum-0.shortname':'testforum',
-                'forum-0.subscribed':'on',
-                })
+            'forum-0.shortname': 'testforum',
+            'forum-0.subscribed': 'on',
+        })
         r = self.app.post('/discussion/subscribe', params={
-                'forum-0.shortname':'testforum',
-                'forum-0.subscribed':'',
-                })
+            'forum-0.shortname': 'testforum',
+            'forum-0.subscribed': '',
+        })
 
     def test_forum_index(self):
         r = self.app.get('/discussion/testforum/')
@@ -338,19 +350,24 @@ class TestForum(TestController):
 
         def _post_pending():
             r = self.app.get('/discussion/create_topic/')
-            f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+            f = r.html.find(
+                'form', {'action': '/p/test/discussion/save_new_topic'})
             params = dict()
             inputs = f.findAll('input')
             for field in inputs:
                 if field.has_key('name'):
-                    params[field['name']] = field.has_key('value') and field['value'] or ''
-            params[f.find('textarea')['name']] = '1st post in Zero Posts thread'
+                    params[field['name']] = field.has_key(
+                        'value') and field['value'] or ''
+            params[f.find('textarea')['name']
+                   ] = '1st post in Zero Posts thread'
             params[f.find('select')['name']] = 'testforum'
-            params[f.find('input',{'style':'width: 90%'})['name']] = 'Test Zero Posts'
+            params[f.find('input', {'style': 'width: 90%'})
+                   ['name']] = 'Test Zero Posts'
             r = self.app.post('/discussion/save_new_topic', params=params,
                               extra_environ=dict(username='*anonymous'),
                               status=302)
-            assert r.location.startswith('http://localhost/p/test/discussion/testforum/thread/'), r.location
+            assert r.location.startswith(
+                'http://localhost/p/test/discussion/testforum/thread/'), r.location
 
         def _check():
             r = self.app.get('/discussion/')
@@ -363,9 +380,9 @@ class TestForum(TestController):
         r = self.app.get('/discussion/testforum/moderate?status=pending')
         post_id = r.html.find('input', {'name': 'post-0._id'})['value']
         r = self.app.post('/discussion/testforum/moderate/save_moderation', params={
-                'post-0._id': post_id,
-                'post-0.checked': 'on',
-                'spam': 'Spam Marked'})
+            'post-0._id': post_id,
+            'post-0.checked': 'on',
+            'spam': 'Spam Marked'})
         _check()
 
         # test posts deleted
@@ -374,22 +391,25 @@ class TestForum(TestController):
         post_id = r.html.find('input', {'name': 'post-0._id'})['value']
         r = self.app.post('/discussion/testforum/moderate/save_'
                           'moderation', params={
-                'post-0._id': post_id,
-                'post-0.checked': 'on',
-                'delete': 'Delete Marked'})
+                              'post-0._id': post_id,
+                              'post-0.checked': 'on',
+                              'delete': 'Delete Marked'})
         _check()
 
     def test_posting(self):
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'This is a *test thread*'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'Test Thread'
+        params[f.find('input', {'style': 'width: 90%'})
+               ['name']] = 'Test Thread'
         r = self.app.post('/discussion/save_new_topic', params=params)
         r = self.app.get('/admin/discussion/forums')
         assert 'Message posted' in r
@@ -404,17 +424,21 @@ class TestForum(TestController):
 
     def test_notifications_escaping(self):
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form', {'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'Post text'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input', {'style':'width: 90%'})['name']] = "this is <h2> o'clock"
+        params[f.find('input', {'style': 'width: 90%'})
+               ['name']] = "this is <h2> o'clock"
         r = self.app.post('/discussion/save_new_topic', params=params)
-        n = M.Notification.query.find(dict(subject="[test:discussion] this is <h2> o'clock")).first()
+        n = M.Notification.query.find(
+            dict(subject="[test:discussion] this is <h2> o'clock")).first()
         assert_in('---\n\n[this is &lt;h2&gt; o&#39;clock]', n.text)
 
     def _set_anon_allowed(self):
@@ -424,32 +448,36 @@ class TestForum(TestController):
         opt_auth = select.find(text='*authenticated').parent
         opt_admin = select.find(text='Admin').parent
         r = self.app.post('/admin/discussion/update', params={
-                'card-0.value': opt_admin['value'],
-                'card-0.id': 'admin',
-                'card-4.id': 'read',
-                'card-4.value': opt_anon['value'],
-                'card-3.value': opt_auth['value'],
-                'card-3.new': opt_anon['value'],
-                'card-3.id': 'post'})
+            'card-0.value': opt_admin['value'],
+            'card-0.id': 'admin',
+            'card-4.id': 'read',
+            'card-4.value': opt_anon['value'],
+            'card-3.value': opt_auth['value'],
+            'card-3.new': opt_anon['value'],
+            'card-3.id': 'post'})
 
     @mock.patch('allura.model.discuss.g.spam_checker')
     def test_anonymous_post(self, spam_checker):
         spam_checker.check.return_value = True
         self._set_anon_allowed()
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'Post content'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'Test Thread'
+        params[f.find('input', {'style': 'width: 90%'})
+               ['name']] = 'Test Thread'
         thread = self.app.post('/discussion/save_new_topic', params=params,
                                extra_environ=dict(username='*anonymous')).follow()
 
-        r = self.app.get(thread.request.url, extra_environ=dict(username='*anonymous'))
+        r = self.app.get(thread.request.url,
+                         extra_environ=dict(username='*anonymous'))
         assert 'Post awaiting moderation' in r
         assert 'name="delete"' not in r
         assert 'name="approve"' not in r
@@ -458,98 +486,121 @@ class TestForum(TestController):
         r = self.app.get(thread.request.url)
         assert '<div class="display_post moderate">' in r
         assert '<a href="" class="reply_post btn" style="display:none">' in r
-        assert r.html.find('a',{'class': 'little_link shortlink', 'style': 'display:none'}) is not None
+        assert r.html.find(
+            'a', {'class': 'little_link shortlink', 'style': 'display:none'}) is not None
         assert 'name="delete"' in r
         assert 'name="approve"' in r
         assert 'name="spam"' in r
         assert 'Post content' in r
         r = self.app.get('/discussion/testforum/moderate/')
         post = FM.ForumPost.query.get(text='Post content')
-        link = '<a href="%s">[%s]</a>' % (post.thread.url() + '?limit=25#' + post.slug, post.shorthand_id())
+        link = '<a href="%s">[%s]</a>' % (post.thread.url()
+                                          + '?limit=25#' + post.slug, post.shorthand_id())
         assert link in r, link
 
     def test_thread(self):
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'aaa'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'AAA'
-        thread = self.app.post('/discussion/save_new_topic', params=params).follow()
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'AAA'
+        thread = self.app.post(
+            '/discussion/save_new_topic', params=params).follow()
         url = thread.request.url
 
         # test reply to post
-        f = thread.html.find('div',{'class':'row reply_post_form'}).find('form')
+        f = thread.html.find(
+            'div', {'class': 'row reply_post_form'}).find('form')
         rep_url = f.get('action')
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'bbb'
         thread = self.app.post(str(rep_url), params=params)
         thread = self.app.get(url)
         # beautiful soup is getting some unicode error here - test without it
-        assert thread.html.findAll('div',{'class':'display_post'})[0].find('p').string == 'aaa'
-        assert thread.html.findAll('div',{'class':'display_post'})[1].find('p').string == 'bbb'
-        assert thread.response.body.count('<div class="promote_to_thread_form') == 1
-        assert thread.response.body.count('<div class="row reply_post_form') == 2
+        assert thread.html.findAll(
+            'div', {'class': 'display_post'})[0].find('p').string == 'aaa'
+        assert thread.html.findAll(
+            'div', {'class': 'display_post'})[1].find('p').string == 'bbb'
+        assert thread.response.body.count(
+            '<div class="promote_to_thread_form') == 1
+        assert thread.response.body.count(
+            '<div class="row reply_post_form') == 2
         assert thread.response.body.count('<div class="edit_post_form') == 2
 
         # test edit post
         thread_url = thread.request.url
         r = thread
-        reply_form = r.html.find('div',{'class':'edit_post_form reply'}).find('form')
+        reply_form = r.html.find(
+            'div', {'class': 'edit_post_form reply'}).find('form')
         post_link = str(reply_form['action'])
         params = dict()
         inputs = reply_form.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[reply_form.find('textarea')['name']] = 'zzz'
         self.app.post(post_link, params)
         r = self.app.get(thread_url)
-        assert 'zzz' in str(r.html.find('div',{'class':'display_post'}))
-        assert 'Last edit: Test Admin less than 1 minute ago' in str(r.html.find('div',{'class':'display_post'}))
+        assert 'zzz' in str(r.html.find('div', {'class': 'display_post'}))
+        assert 'Last edit: Test Admin less than 1 minute ago' in str(
+            r.html.find('div', {'class': 'display_post'}))
 
     def test_subscription_controls(self):
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'Post text'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'Post subject'
-        thread = self.app.post('/discussion/save_new_topic', params=params).follow()
-        assert M.Notification.query.find(dict(subject='[test:discussion] Post subject')).count() == 1
+        params[f.find('input', {'style': 'width: 90%'})
+               ['name']] = 'Post subject'
+        thread = self.app.post(
+            '/discussion/save_new_topic', params=params).follow()
+        assert M.Notification.query.find(
+            dict(subject='[test:discussion] Post subject')).count() == 1
         r = self.app.get('/discussion/testforum/')
-        f = r.html.find('form',{'class':'follow_form'})
+        f = r.html.find('form', {'class': 'follow_form'})
         subscribe_url = f.get('action')
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name') and 'subscription' not in field['name']:
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         self.app.post(str(subscribe_url), params=params)
         self.app.get('/discussion/general/subscribe_to_forum?subscribe=True')
         url = thread.request.url
-        f = thread.html.find('div',{'class':'row reply_post_form'}).find('form')
+        f = thread.html.find(
+            'div', {'class': 'row reply_post_form'}).find('form')
         rep_url = f.get('action')
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'Reply 2'
         thread_reply = self.app.post(str(rep_url), params=params)
-        assert M.Notification.query.find(dict(subject='[test:discussion] Re: Post subject')).count() == 1
+        assert M.Notification.query.find(
+            dict(subject='[test:discussion] Re: Post subject')).count() == 1
 
     def get_table_rows(self, response, closest_id):
         tbody = response.html.find('div', {'id': closest_id}).find('tbody')
@@ -565,22 +616,24 @@ class TestForum(TestController):
 
     def test_thread_announcement(self):
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'aaa aaa'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'AAAA'
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'AAAA'
         r = self.app.post('/discussion/save_new_topic', params=params).follow()
         url = r.request.url
         thread_id = url.rstrip('/').rsplit('/', 1)[-1]
         thread = FM.ForumThread.query.get(_id=thread_id)
         r = self.app.post(url + 'moderate', params=dict(
-                flags='Announcement',
-                discussion='testforum'))
+            flags='Announcement',
+            discussion='testforum'))
         thread2 = FM.ForumThread.query.get(_id=thread_id)
         assert_equal(thread2.flags, ['Announcement'])
 
@@ -595,29 +648,33 @@ class TestForum(TestController):
 
     def test_thread_sticky(self):
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'aaa aaa'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'topic1'
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'topic1'
         r = self.app.post('/discussion/save_new_topic', params=params).follow()
         url1 = r.request.url
         tid1 = url1.rstrip('/').rsplit('/', 1)[-1]
 
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'aaa aaa'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'topic2'
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'topic2'
         r = self.app.post('/discussion/save_new_topic', params=params).follow()
         url2 = r.request.url
         tid2 = url2.rstrip('/').rsplit('/', 1)[-1]
@@ -631,8 +688,8 @@ class TestForum(TestController):
 
         # Make oldest thread Sticky
         r = self.app.post(url1 + 'moderate', params=dict(
-                flags='Sticky',
-                discussion='testforum'))
+            flags='Sticky',
+            discussion='testforum'))
         thread1 = FM.ForumThread.query.get(_id=tid1)
         assert_equal(thread1.flags, ['Sticky'])
 
@@ -645,8 +702,8 @@ class TestForum(TestController):
 
         # Reset Sticky flag
         r = self.app.post(url1 + 'moderate', params=dict(
-                flags='',
-                discussion='testforum'))
+            flags='',
+            discussion='testforum'))
         thread1 = FM.ForumThread.query.get(_id=tid1)
         assert_equal(thread1.flags, [])
 
@@ -662,81 +719,94 @@ class TestForum(TestController):
     def test_move_thread(self):
         # make the topic
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'aaa aaa'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'topic1'
-        thread = self.app.post('/discussion/save_new_topic', params=params).follow()
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'topic1'
+        thread = self.app.post(
+            '/discussion/save_new_topic', params=params).follow()
         url = thread.request.url
         # make a reply
-        f = thread.html.find('div',{'class':'row reply_post_form'}).find('form')
+        f = thread.html.find(
+            'div', {'class': 'row reply_post_form'}).find('form')
         rep_url = f.get('action')
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'bbb'
         thread = self.app.post(str(rep_url), params=params)
         thread = self.app.get(url)
         # make sure the posts are in the original thread
-        posts = thread.html.find('div',{'id':'comment'}).findAll('div',{'class':'discussion-post'})
+        posts = thread.html.find('div', {'id': 'comment'}).findAll(
+            'div', {'class': 'discussion-post'})
         assert_equal(len(posts), 2)
         # move the thread
         r = self.app.post(url + 'moderate', params=dict(
-                flags='',
-                discussion='general')).follow()
+            flags='',
+            discussion='general')).follow()
         # make sure all the posts got moved
-        posts = r.html.find('div',{'id':'comment'}).findAll('div',{'class':'discussion-post'})
+        posts = r.html.find('div', {'id': 'comment'}).findAll(
+            'div', {'class': 'discussion-post'})
         assert_equal(len(posts), 2)
 
     def test_sidebar_menu(self):
         r = self.app.get('/discussion/')
-        sidebarmenu = str(r.html.find('div',{'id':'sidebar'}))
+        sidebarmenu = str(r.html.find('div', {'id': 'sidebar'}))
         assert '<a href="/p/test/discussion/create_topic/"><b data-icon="+" class="ico ico-plus"></b> <span>Create Topic</span></a>' in sidebarmenu
         assert '<a href="/p/test/discussion/new_forum"><b data-icon="q" class="ico ico-conversation"></b> <span>Add Forum</span></a>' in sidebarmenu
         assert '<h3 class="">Help</h3>' in sidebarmenu
         assert '<a href="/p/test/discussion/markdown_syntax"><span>Formatting Help</span></a>' in sidebarmenu
         assert '<a href="flag_as_spam" class="sidebar_thread_spam"><b data-icon="^" class="ico ico-flag"></b> <span>Mark as Spam</span></a>' not in sidebarmenu
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'aaa'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'AAA'
-        thread = self.app.post('/discussion/save_new_topic', params=params).follow()
-        thread_sidebarmenu = str(thread.html.find('div',{'id':'sidebar'}))
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'AAA'
+        thread = self.app.post(
+            '/discussion/save_new_topic', params=params).follow()
+        thread_sidebarmenu = str(thread.html.find('div', {'id': 'sidebar'}))
         assert '<a href="flag_as_spam" class="sidebar_thread_spam"><b data-icon="^" class="ico ico-flag"></b> <span>Mark as Spam</span></a>' in thread_sidebarmenu
 
     def test_sidebar_menu_anon(self):
         r = self.app.get('/discussion/')
-        sidebarmenu = str(r.html.find('div',{'id':'sidebar'}))
+        sidebarmenu = str(r.html.find('div', {'id': 'sidebar'}))
         assert '<a href="/p/test/discussion/create_topic/"><b data-icon="+" class="ico ico-plus"></b> <span>Create Topic</span></a>' in sidebarmenu
         assert '<a href="/p/test/discussion/new_forum"><b data-icon="q" class="ico ico-conversation"></b> <span>Add Forum</span></a>' in sidebarmenu
         assert '<h3 class="">Help</h3>' in sidebarmenu
         assert '<a href="/p/test/discussion/markdown_syntax"><span>Formatting Help</span></a>' in sidebarmenu
         assert '<a href="flag_as_spam" class="sidebar_thread_spam"><b data-icon="^" class="ico ico-flag"></b> <span>Mark as Spam</span></a>' not in sidebarmenu
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'aaa'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'AAA'
-        thread = self.app.post('/discussion/save_new_topic', params=params).follow(extra_environ=dict(username='*anonymous'))
-        thread_sidebarmenu = str(thread.html.find('div',{'id':'sidebar'}))
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'AAA'
+        thread = self.app.post('/discussion/save_new_topic',
+                               params=params).follow(extra_environ=dict(username='*anonymous'))
+        thread_sidebarmenu = str(thread.html.find('div', {'id': 'sidebar'}))
         assert '<a href="flag_as_spam" class="sidebar_thread_spam"><b data-icon="^" class="ico ico-flag"></b> <span>Mark as Spam</span></a>' not in thread_sidebarmenu
 
     def test_feed(self):
@@ -760,7 +830,8 @@ class TestForum(TestController):
         r.forms[1].submit()
         r = self.app.get('/admin/discussion/forums')
         assert u'téstforum'.encode('utf-8') in r
-        r = self.app.get(u'/p/test/discussion/create_topic/téstforum/'.encode('utf-8'))
+        r = self.app.get(
+            u'/p/test/discussion/create_topic/téstforum/'.encode('utf-8'))
         assert u'<option value="téstforum" selected>Tést Forum</option>' in r
 
 
@@ -769,7 +840,8 @@ class TestForumStats(TestController):
     def test_stats(self):
         self.app.get('/discussion/stats', status=200)
 
-    @mock.patch('ming.session.Session.aggregate')  # mim doesn't support aggregate
+    # mim doesn't support aggregate
+    @mock.patch('ming.session.Session.aggregate')
     def test_stats_data(self, aggregate):
         # partial data, some days are implicit 0
         aggregate.return_value = {'result': [
@@ -778,21 +850,22 @@ class TestForumStats(TestController):
                 "month": 1,
                 "day": 2},
              "posts": 3
-            },
+             },
             {"_id": {
                 "year": 2013,
                 "month": 1,
                 "day": 3},
              "posts": 5
-            },
+             },
             {"_id": {
                 "year": 2013,
                 "month": 1,
                 "day": 5},
              "posts": 2
-            },
+             },
         ]}
-        r = self.app.get('/discussion/stats_data?begin=2013-01-01&end=2013-01-06')
+        r = self.app.get(
+            '/discussion/stats_data?begin=2013-01-01&end=2013-01-06')
         assert_equal(r.json, {
             'begin': '2013-01-01 00:00:00',
             'end': '2013-01-06 00:00:00',

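Nearly every test above scrapes the create-topic form the same way: find the form, keep the inputs that have a name, default missing values to '', then fill in the textarea, select and subject fields. The reformatted lines still rely on the old dict-style has_key() calls and the and/or fallback idiom. Below is a minimal sketch of the same loop written with Tag.get(); the helper name and its placement are hypothetical and not part of this commit:

    def new_topic_params(response, text, forum, subject):
        # Locate the create-topic form exactly as the tests above do.
        f = response.html.find(
            'form', {'action': '/p/test/discussion/save_new_topic'})
        params = {}
        for field in f.findAll('input'):
            name = field.get('name')
            if name:
                # Same result as: field.has_key('value') and field['value'] or ''
                params[name] = field.get('value', '')
        params[f.find('textarea')['name']] = text
        params[f.find('select')['name']] = forum
        params[f.find('input', {'style': 'width: 90%'})['name']] = subject
        return params

A test could then post with params=new_topic_params(r, 'aaa', 'testforum', 'AAA') instead of repeating the loop.
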
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/tests/functional/test_forum_admin.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/tests/functional/test_forum_admin.py b/ForgeDiscussion/forgediscussion/tests/functional/test_forum_admin.py
index 8a92969..2f28cbc 100644
--- a/ForgeDiscussion/forgediscussion/tests/functional/test_forum_admin.py
+++ b/ForgeDiscussion/forgediscussion/tests/functional/test_forum_admin.py
@@ -45,12 +45,12 @@ class TestForumAdmin(TestController):
         h.set_context('test', 'Forum', neighborhood='Projects')
         frm = FM.Forum.query.get(shortname='testforum')
         r = self.app.post('/admin/discussion/update_forums',
-                          params={'forum-0.delete':'',
-                                  'forum-0.id':str(frm._id),
-                                  'forum-0.name':'New Test Forum',
-                                  'forum-0.shortname':'NewTestForum',
-                                  'forum-0.description':'My desc',
-                                  'forum-0.monitoring_email':''})
+                          params={'forum-0.delete': '',
+                                  'forum-0.id': str(frm._id),
+                                  'forum-0.name': 'New Test Forum',
+                                  'forum-0.shortname': 'NewTestForum',
+                                  'forum-0.description': 'My desc',
+                                  'forum-0.monitoring_email': ''})
         r = self.app.get('/admin/discussion/forums')
         assert 'New Test Forum' in r
         assert 'My desc' in r
@@ -101,11 +101,11 @@ class TestForumAdmin(TestController):
         h.set_context('test', 'Forum', neighborhood='Projects')
         forum_a = FM.Forum.query.get(shortname='a')
         self.app.post('/admin/discussion/update_forums',
-                        params={'forum-0.delete':'on',
-                                'forum-0.id':str(forum_a._id),
-                                'forum-0.name':'Forum A',
-                                'forum-0.description':''
-                               })
+                      params={'forum-0.delete': 'on',
+                              'forum-0.id': str(forum_a._id),
+                              'forum-0.name': 'Forum A',
+                              'forum-0.description': ''
+                              })
         # Now we have two forums: 'a', and 'b'.  'a' is deleted.
         # Let's try to create new forums with these names.
         r = self.app.get('/admin/discussion/forums')
@@ -121,7 +121,8 @@ class TestForumAdmin(TestController):
 
     def test_forum_icon(self):
         file_name = 'neo-icon-set-454545-256x350.png'
-        file_path = os.path.join(allura.__path__[0],'nf','allura','images',file_name)
+        file_path = os.path.join(
+            allura.__path__[0], 'nf', 'allura', 'images', file_name)
         file_data = file(file_path).read()
         upload = ('add_forum.icon', file_name, file_data)
 
@@ -129,16 +130,16 @@ class TestForumAdmin(TestController):
         r = self.app.get('/admin/discussion/forums')
         app_id = r.forms[1]['add_forum.app_id'].value
         r = self.app.post('/admin/discussion/add_forum',
-                          params={'add_forum.shortname':'testforum',
-                                  'add_forum.app_id':app_id,
-                                  'add_forum.name':'Test Forum',
-                                  'add_forum.description':'',
-                                  'add_forum.parent':'',
+                          params={'add_forum.shortname': 'testforum',
+                                  'add_forum.app_id': app_id,
+                                  'add_forum.name': 'Test Forum',
+                                  'add_forum.description': '',
+                                  'add_forum.parent': '',
                                   },
                           upload_files=[upload]),
         r = self.app.get('/discussion/testforum/icon')
         image = PIL.Image.open(StringIO(r.body))
-        assert image.size == (48,48)
+        assert image.size == (48, 48)
 
     def test_delete_undelete(self):
         r = self.app.get('/admin/discussion/forums')
@@ -147,24 +148,24 @@ class TestForumAdmin(TestController):
         r.forms[1]['add_forum.name'] = 'Test Forum'
         r = r.forms[1].submit()
         r = self.app.get('/admin/discussion/forums')
-        assert len(r.html.findAll('input',{'value':'Delete'})) == 2
+        assert len(r.html.findAll('input', {'value': 'Delete'})) == 2
         h.set_context('test', 'Forum', neighborhood='Projects')
         frm = FM.Forum.query.get(shortname='testforum')
 
         r = self.app.post('/admin/discussion/update_forums',
-                          params={'forum-0.delete':'on',
-                                  'forum-0.id':str(frm._id),
-                                  'forum-0.name':'New Test Forum',
-                                  'forum-0.description':'My desc'})
+                          params={'forum-0.delete': 'on',
+                                  'forum-0.id': str(frm._id),
+                                  'forum-0.name': 'New Test Forum',
+                                  'forum-0.description': 'My desc'})
         r = self.app.get('/admin/discussion/forums')
-        assert len(r.html.findAll('input',{'value':'Delete'})) == 1
+        assert len(r.html.findAll('input', {'value': 'Delete'})) == 1
         r = self.app.post('/admin/discussion/update_forums',
-                          params={'forum-0.undelete':'on',
-                                  'forum-0.id':str(frm._id),
-                                  'forum-0.name':'New Test Forum',
-                                  'forum-0.description':'My desc'})
+                          params={'forum-0.undelete': 'on',
+                                  'forum-0.id': str(frm._id),
+                                  'forum-0.name': 'New Test Forum',
+                                  'forum-0.description': 'My desc'})
         r = self.app.get('/admin/discussion/forums')
-        assert len(r.html.findAll('input',{'value':'Delete'})) == 2
+        assert len(r.html.findAll('input', {'value': 'Delete'})) == 2
 
     def test_members_only(self):
         # make a forum anyone can see
@@ -174,47 +175,56 @@ class TestForumAdmin(TestController):
         r.forms[1].submit()
         # forum can be viewed by member and non-member
         self.app.get('/discussion/secret')
-        self.app.get('/discussion/secret',extra_environ=dict(username='test-user'))
-        # make a post in the forum and confirm it is also viewable by member and non-member
+        self.app.get('/discussion/secret',
+                     extra_environ=dict(username='test-user'))
+        # make a post in the forum and confirm it is also viewable by member
+        # and non-member
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'secret text'
         params[f.find('select')['name']] = 'secret'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'secret topic'
+        params[f.find('input', {'style': 'width: 90%'})
+               ['name']] = 'secret topic'
         r = self.app.post('/discussion/save_new_topic', params=params).follow()
-        thread_url= r.request.url
+        thread_url = r.request.url
         self.app.get(thread_url)
-        self.app.get(thread_url,extra_environ=dict(username='test-user'))
+        self.app.get(thread_url, extra_environ=dict(username='test-user'))
         # link shows up in app for member and non-member
         r = self.app.get('/discussion/')
         assert '/secret/' in r
-        r = self.app.get('/discussion/',extra_environ=dict(username='test-user'))
+        r = self.app.get('/discussion/',
+                         extra_environ=dict(username='test-user'))
         assert '/secret/' in r
         # make the forum member only viewable
         secret = FM.Forum.query.get(shortname='secret')
         self.app.post('/admin/discussion/update_forums',
-                        params={'forum-0.members_only':'on',
-                                'forum-0.id':str(secret._id),
-                                'forum-0.name':'Secret',
-                                'forum-0.shortname':'secret',
-                                'forum-0.description':'',
-                                'forum-0.monitoring_email':''
-                               })
+                      params={'forum-0.members_only': 'on',
+                              'forum-0.id': str(secret._id),
+                              'forum-0.name': 'Secret',
+                              'forum-0.shortname': 'secret',
+                              'forum-0.description': '',
+                              'forum-0.monitoring_email': ''
+                              })
         # member can see the forum, but non-member gets 403
         self.app.get('/discussion/secret')
-        self.app.get('/discussion/secret',extra_environ=dict(username='test-user'), status=403)
+        self.app.get('/discussion/secret',
+                     extra_environ=dict(username='test-user'), status=403)
         # member can see a thread in the forum, but non-member gets 403
         self.app.get(thread_url)
-        self.app.get(thread_url,extra_environ=dict(username='test-user'), status=403)
+        self.app.get(thread_url,
+                     extra_environ=dict(username='test-user'), status=403)
         # link shows up in app for member but not non-member
         r = self.app.get('/discussion/')
         assert '/secret/' in r
-        r = self.app.get('/discussion/',extra_environ=dict(username='test-user'))
+        r = self.app.get('/discussion/',
+                         extra_environ=dict(username='test-user'))
         assert '/secret/' not in r
 
     def test_anon_posts(self):
@@ -225,38 +235,43 @@ class TestForumAdmin(TestController):
         r.forms[1].submit()
         # try to post in the forum and get a 403
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'post text'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'post topic'
-        r = self.app.post('/discussion/save_new_topic', params=params, extra_environ=dict(username='*anonymous'))
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'post topic'
+        r = self.app.post('/discussion/save_new_topic',
+                          params=params, extra_environ=dict(username='*anonymous'))
         assert r.location == 'http://localhost/auth/'
         # allow anon posts in the forum
         testforum = FM.Forum.query.get(shortname='testforum')
         self.app.post('/admin/discussion/update_forums',
-                        params={'forum-0.anon_posts':'on',
-                                'forum-0.id':str(testforum._id),
-                                'forum-0.name':'Test Forum',
-                                'forum-0.shortname':'testforum',
-                                'forum-0.description':'',
-                                'forum-0.monitoring_email':''
-                               })
+                      params={'forum-0.anon_posts': 'on',
+                              'forum-0.id': str(testforum._id),
+                              'forum-0.name': 'Test Forum',
+                              'forum-0.shortname': 'testforum',
+                              'forum-0.description': '',
+                              'forum-0.monitoring_email': ''
+                              })
         # successfully post to the forum
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'post text'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'post topic'
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'post topic'
         r = self.app.post('/discussion/save_new_topic', params=params)
         assert 'http://localhost/p/test/discussion/testforum/thread/' in r.location
 
@@ -267,26 +282,31 @@ class TestForumAdmin(TestController):
         r.forms[1].submit()
         testforum = FM.Forum.query.get(shortname='testforum')
         self.app.post('/admin/discussion/update_forums',
-                        params={'forum-0.anon_posts':'on',
-                                'forum-0.id':str(testforum._id),
-                                'forum-0.name':'Test Forum',
-                                'forum-0.shortname':'testforum',
-                                'forum-0.description':'',
-                                'forum-0.monitoring_email':'email@monitoring.com'
-                               })
+                      params={'forum-0.anon_posts': 'on',
+                              'forum-0.id': str(testforum._id),
+                              'forum-0.name': 'Test Forum',
+                              'forum-0.shortname': 'testforum',
+                              'forum-0.description': '',
+                              'forum-0.monitoring_email': 'email@monitoring.com'
+                              })
 
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form',{'action':'/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'post text'
         params[f.find('select')['name']] = 'testforum'
-        params[f.find('input',{'style':'width: 90%'})['name']] = 'post topic'
+        params[f.find('input', {'style': 'width: 90%'})['name']] = 'post topic'
         r = self.app.post('/discussion/save_new_topic', params=params)
         M.MonQTask.run_ready()
-        email_tasks = M.MonQTask.query.find(dict(task_name='allura.tasks.mail_tasks.sendsimplemail')).all()
-        assert 'Sent from sourceforge.net because email@monitoring.com is subscribed to http://localhost/p/test/discussion/testforum/' in email_tasks[0].kwargs['text'],email_tasks[0].kwargs['text']
-        assert 'a project admin can change settings at http://localhost/p/test/admin/discussion/forums' in email_tasks[0].kwargs['text']
+        email_tasks = M.MonQTask.query.find(
+            dict(task_name='allura.tasks.mail_tasks.sendsimplemail')).all()
+        assert 'Sent from sourceforge.net because email@monitoring.com is subscribed to http://localhost/p/test/discussion/testforum/' in email_tasks[
+            0].kwargs['text'], email_tasks[0].kwargs['text']
+        assert 'a project admin can change settings at http://localhost/p/test/admin/discussion/forums' in email_tasks[
+            0].kwargs['text']

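The admin tests above post the same forum-0.* fields to /admin/discussion/update_forums over and over, varying only flags such as delete, undelete, members_only or anon_posts. A sketch of a hypothetical params builder covering the fields exercised here (not something this changeset introduces):

    def update_forum_params(forum, index=0, **extra):
        # forum-N.* fields as posted to /admin/discussion/update_forums above;
        # extra carries flags such as delete='on' or members_only='on'.
        prefix = 'forum-%d.' % index
        params = {
            prefix + 'id': str(forum._id),
            prefix + 'name': forum.name,
            prefix + 'shortname': forum.shortname,
            prefix + 'description': forum.description or '',
            prefix + 'monitoring_email': forum.monitoring_email or '',
        }
        for key, value in extra.items():
            params[prefix + key] = value
        return params

For example, the delete step would become self.app.post('/admin/discussion/update_forums', params=update_forum_params(frm, delete='on')).
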
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/tests/functional/test_import.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/tests/functional/test_import.py b/ForgeDiscussion/forgediscussion/tests/functional/test_import.py
index 76d7896..b641b2d 100644
--- a/ForgeDiscussion/forgediscussion/tests/functional/test_import.py
+++ b/ForgeDiscussion/forgediscussion/tests/functional/test_import.py
@@ -27,7 +27,7 @@ from allura import model as M
 from alluratest.controller import TestController, TestRestApiBase
 
 
-class TestImportController(TestRestApiBase):#TestController):
+class TestImportController(TestRestApiBase):  # TestController):
 
     def setUp(self):
         super(TestImportController, self).setUp()
@@ -38,27 +38,28 @@ class TestImportController(TestRestApiBase):#TestController):
     def test_no_capability(self):
         self.set_api_ticket({'import2': ['Projects', 'test']})
         resp = self.api_post('/rest/p/test/discussion/perform_import',
-            doc=self.json_text)
+                             doc=self.json_text)
         assert resp.status_int == 403
 
         self.set_api_ticket({'import': ['Projects', 'test2']})
         resp = self.api_post('/rest/p/test/discussion/perform_import',
-            doc=self.json_text)
+                             doc=self.json_text)
         assert resp.status_int == 403
 
         self.set_api_ticket({'import': ['Projects', 'test']})
         resp = self.api_post('/rest/p/test/discussion/perform_import',
-            doc=self.json_text)
+                             doc=self.json_text)
         assert resp.status_int == 200
 
     def test_validate_import(self):
         r = self.api_post('/rest/p/test/discussion/validate_import',
-            doc=self.json_text)
+                          doc=self.json_text)
         assert not r.json['errors']
 
     def test_import_anon(self):
-        api_ticket = M.ApiTicket(user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
-                                 expires=datetime.utcnow() + timedelta(days=1))
+        api_ticket = M.ApiTicket(
+            user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
+            expires=datetime.utcnow() + timedelta(days=1))
         ming.orm.session(api_ticket).flush()
         self.set_api_token(api_ticket)
 
@@ -69,15 +70,17 @@ class TestImportController(TestRestApiBase):#TestController):
         assert 'Open Discussion' in str(r)
         assert 'Welcome to Open Discussion' in str(r)
         for link in r.html.findAll('a'):
-            if 'Welcome to Open Discussion' in str(link): break
+            if 'Welcome to Open Discussion' in str(link):
+                break
         r = self.app.get(link.get('href'))
         assert '2009-11-19' in str(r)
         assert 'Welcome to Open Discussion' in str(r)
         assert 'Anonymous' in str(r)
 
     def test_import_map(self):
-        api_ticket = M.ApiTicket(user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
-                                 expires=datetime.utcnow() + timedelta(days=1))
+        api_ticket = M.ApiTicket(
+            user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
+            expires=datetime.utcnow() + timedelta(days=1))
         ming.orm.session(api_ticket).flush()
         self.set_api_token(api_ticket)
 
@@ -89,7 +92,8 @@ class TestImportController(TestRestApiBase):#TestController):
         assert 'Open Discussion' in str(r)
         assert 'Welcome to Open Discussion' in str(r)
         for link in r.html.findAll('a'):
-            if 'Welcome to Open Discussion' in str(link): break
+            if 'Welcome to Open Discussion' in str(link):
+                break
         r = self.app.get(link.get('href'))
         assert '2009-11-19' in str(r)
         assert 'Welcome to Open Discussion' in str(r)
@@ -97,8 +101,9 @@ class TestImportController(TestRestApiBase):#TestController):
         assert 'Anonymous' not in str(r)
 
     def test_import_create(self):
-        api_ticket = M.ApiTicket(user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
-                                 expires=datetime.utcnow() + timedelta(days=1))
+        api_ticket = M.ApiTicket(
+            user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
+            expires=datetime.utcnow() + timedelta(days=1))
         ming.orm.session(api_ticket).flush()
         self.set_api_token(api_ticket)
 
@@ -109,7 +114,8 @@ class TestImportController(TestRestApiBase):#TestController):
         assert 'Open Discussion' in str(r)
         assert 'Welcome to Open Discussion' in str(r)
         for link in r.html.findAll('a'):
-            if 'Welcome to Open Discussion' in str(link): break
+            if 'Welcome to Open Discussion' in str(link):
+                break
         r = self.app.get(link.get('href'))
         assert '2009-11-19' in str(r)
         assert 'Welcome to Open Discussion' in str(r)
@@ -131,8 +137,11 @@ class TestImportController(TestRestApiBase):#TestController):
         assert_equal(from_api['description'], org['description'])
         assert_equal(from_api['summary'], org['summary'])
         assert_equal(from_api['ticket_num'], org['id'])
-        assert_equal(from_api['created_date'], self.time_normalize(org['date']))
-        assert_equal(from_api['mod_date'], self.time_normalize(org['date_updated']))
-        assert_equal(from_api['custom_fields']['_resolution'], org['resolution'])
+        assert_equal(from_api['created_date'],
+                     self.time_normalize(org['date']))
+        assert_equal(from_api['mod_date'],
+                     self.time_normalize(org['date_updated']))
+        assert_equal(from_api['custom_fields']
+                     ['_resolution'], org['resolution'])
         assert_equal(from_api['custom_fields']['_cc'], org['cc'])
         assert_equal(from_api['custom_fields']['_private'], org['private'])

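Each import test above mints an M.ApiTicket inline with an 'import' capability and a one-day expiry before calling the REST import endpoints. The same setup as a small helper is sketched below; the helper is an assumption, not something this patch adds:

    from datetime import datetime, timedelta

    import ming.orm
    from allura import model as M

    def make_import_ticket(user, project='test', days=1):
        # Same ticket shape the tests construct before perform_import.
        ticket = M.ApiTicket(
            user_id=user._id,
            capabilities={'import': ['Projects', project]},
            expires=datetime.utcnow() + timedelta(days=days))
        ming.orm.session(ticket).flush()
        return ticket

With that, the repeated setup block reduces to self.set_api_token(make_import_ticket(c.user)).
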
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/tests/functional/test_rest.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/tests/functional/test_rest.py b/ForgeDiscussion/forgediscussion/tests/functional/test_rest.py
index d78a7fb..dd02429 100644
--- a/ForgeDiscussion/forgediscussion/tests/functional/test_rest.py
+++ b/ForgeDiscussion/forgediscussion/tests/functional/test_rest.py
@@ -49,12 +49,14 @@ class TestDiscussionApiBase(TestRestApiBase):
 
     def create_topic(self, forum, subject, text):
         r = self.app.get('/discussion/create_topic/')
-        f = r.html.find('form', {'action': '/p/test/discussion/save_new_topic'})
+        f = r.html.find(
+            'form', {'action': '/p/test/discussion/save_new_topic'})
         params = dict()
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = text
         params[f.find('select')['name']] = forum
         params[f.find('input', {'style': 'width: 90%'})['name']] = subject
@@ -69,23 +71,27 @@ class TestRootRestController(TestDiscussionApiBase):
         assert_equal(len(forums), 2)
         forums = sorted(forums, key=lambda x: x['name'])
         assert_equal(forums[0]['name'], 'General Discussion')
-        assert_equal(forums[0]['description'], 'Forum about anything you want to talk about.')
+        assert_equal(
+            forums[0]['description'], 'Forum about anything you want to talk about.')
         assert_equal(forums[0]['num_topics'], 2)
-        assert_equal(forums[0]['url'], 'http://localhost/rest/p/test/discussion/general/')
+        assert_equal(
+            forums[0]['url'], 'http://localhost/rest/p/test/discussion/general/')
         assert_equal(forums[0]['last_post']['subject'], 'Hi guys')
         assert_equal(forums[0]['last_post']['author'], 'test-admin')
         assert_equal(forums[0]['last_post']['text'], 'Hi boys and girls')
         assert_equal(forums[1]['name'], u'Say Héllo')
         assert_equal(forums[1]['description'], u'Say héllo here')
         assert_equal(forums[1]['num_topics'], 0)
-        assert_equal(forums[1]['url'], 'http://localhost/rest/p/test/discussion/h%C3%A9llo/')
+        assert_equal(
+            forums[1]['url'], 'http://localhost/rest/p/test/discussion/h%C3%A9llo/')
         assert_equal(forums[1]['last_post'], None)
 
     def test_forum(self):
         forum = self.api_get('/rest/p/test/discussion/general/')
         forum = forum.json['forum']
         assert_equal(forum['name'], 'General Discussion')
-        assert_equal(forum['description'], 'Forum about anything you want to talk about.')
+        assert_equal(
+            forum['description'], 'Forum about anything you want to talk about.')
         topics = forum['topics']
         assert_equal(len(topics), 2)
         assert_equal(topics[0]['subject'], 'Hi guys')
@@ -126,7 +132,8 @@ class TestRootRestController(TestDiscussionApiBase):
         forum = self.api_get('/rest/p/test/discussion/general/')
         forum = forum.json['forum']
         assert_equal(forum['name'], 'General Discussion')
-        assert_equal(forum['description'], 'Forum about anything you want to talk about.')
+        assert_equal(
+            forum['description'], 'Forum about anything you want to talk about.')
         topics = forum['topics']
         topic = self.api_get(topics[0]['url'][len('http://localhost'):])
         topic = topic.json['topic']

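Read together, the assertions in test_forums and test_forum above pin down this shape for a single forum entry in the REST response (only the keys the tests actually check; nothing else is implied):

    expected_general = {
        'name': 'General Discussion',
        'description': 'Forum about anything you want to talk about.',
        'num_topics': 2,
        'url': 'http://localhost/rest/p/test/discussion/general/',
        'last_post': {
            'subject': 'Hi guys',
            'author': 'test-admin',
            'text': 'Hi boys and girls',
        },
    }
    # The second forum (u'Say Héllo') is asserted with num_topics 0 and
    # last_post None.
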
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/tests/test_app.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/tests/test_app.py b/ForgeDiscussion/forgediscussion/tests/test_app.py
index 3666821..76f2071 100644
--- a/ForgeDiscussion/forgediscussion/tests/test_app.py
+++ b/ForgeDiscussion/forgediscussion/tests/test_app.py
@@ -45,14 +45,18 @@ class TestBulkExport(TestDiscussionApiBase):
         forums = sorted(discussion['forums'], key=lambda x: x['name'])
 
         assert_equal(forums[0]['shortname'], u'general')
-        assert_equal(forums[0]['description'], u'Forum about anything you want to talk about.')
+        assert_equal(
+            forums[0]['description'], u'Forum about anything you want to talk about.')
         assert_equal(forums[0]['name'], u'General Discussion')
         forums[0]['threads'] = sorted(forums[0]['threads'],
-            key=lambda x: x['posts'][0]['subject'])
-        assert_equal(forums[0]['threads'][0]['posts'][0]['text'], u'Hi boys and girls')
-        assert_equal(forums[0]['threads'][0]['posts'][0]['subject'], u'Hi guys')
+                                      key=lambda x: x['posts'][0]['subject'])
+        assert_equal(
+            forums[0]['threads'][0]['posts'][0]['text'], u'Hi boys and girls')
+        assert_equal(
+            forums[0]['threads'][0]['posts'][0]['subject'], u'Hi guys')
         assert_equal(forums[0]['threads'][1]['posts'][0]['text'], u'1st post')
-        assert_equal(forums[0]['threads'][1]['posts'][0]['subject'], u"Let's talk")
+        assert_equal(
+            forums[0]['threads'][1]['posts'][0]['subject'], u"Let's talk")
         assert_equal(forums[1]['shortname'], u'héllo')
         assert_equal(forums[1]['description'], u'Say héllo here')
         assert_equal(forums[1]['name'], u'Say Héllo')

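The bulk-export assertions above imply roughly this JSON layout for the discussion tool, after sorting forums by name and threads by first-post subject (asserted keys only):

    discussion = {
        'forums': [
            {'shortname': u'general',
             'name': u'General Discussion',
             'description': u'Forum about anything you want to talk about.',
             'threads': [
                 {'posts': [{'subject': u'Hi guys',
                             'text': u'Hi boys and girls'}]},
                 {'posts': [{'subject': u"Let's talk",
                             'text': u'1st post'}]},
             ]},
            {'shortname': u'héllo',
             'name': u'Say Héllo',
             'description': u'Say héllo here'},
        ],
    }
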
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/tests/test_forum_roles.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/tests/test_forum_roles.py b/ForgeDiscussion/forgediscussion/tests/test_forum_roles.py
index 18b1f26..44d6989 100644
--- a/ForgeDiscussion/forgediscussion/tests/test_forum_roles.py
+++ b/ForgeDiscussion/forgediscussion/tests/test_forum_roles.py
@@ -22,15 +22,18 @@ from allura import model as M
 from allura.lib import security
 from allura.tests import decorators as td
 
+
 def setUp():
     setup_basic_test()
     setup_global_objects()
 
+
 @td.with_discussion
 def test_role_assignments():
     admin = M.User.by_username('test-admin')
     user = M.User.by_username('test-user')
     anon = M.User.anonymous()
+
     def check_access(perm):
         pred = security.has_access(c.app, perm)
         return pred(user=admin), pred(user=user), pred(user=anon)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/utils.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/utils.py b/ForgeDiscussion/forgediscussion/utils.py
index 3d88d9a..d9128d9 100644
--- a/ForgeDiscussion/forgediscussion/utils.py
+++ b/ForgeDiscussion/forgediscussion/utils.py
@@ -23,34 +23,38 @@ from allura.lib import helpers as h
 from allura.model import ProjectRole, ACE, ALL_PERMISSIONS, DENY_ALL
 from forgediscussion import model as DM
 
+
 def save_forum_icon(forum, icon):
-    if forum.icon: forum.icon.delete()
+    if forum.icon:
+        forum.icon.delete()
     DM.ForumFile.save_image(
         icon.filename, icon.file, content_type=icon.type,
         square=True, thumbnail_size=(48, 48),
         thumbnail_meta=dict(forum_id=forum._id))
 
+
 def create_forum(app, new_forum):
     if 'parent' in new_forum and new_forum['parent']:
         parent_id = ObjectId(str(new_forum['parent']))
         shortname = (DM.Forum.query.get(_id=parent_id).shortname + '/'
-                        + new_forum['shortname'])
+                     + new_forum['shortname'])
     else:
-        parent_id=None
+        parent_id = None
         shortname = new_forum['shortname']
-    description = new_forum.get('description','')
+    description = new_forum.get('description', '')
 
     f = DM.Forum(app_config_id=app.config._id,
-                    parent_id=parent_id,
-                    name=h.really_unicode(new_forum['name']),
-                    shortname=h.really_unicode(shortname),
-                    description=h.really_unicode(description),
-                    members_only=new_forum.get('members_only', False),
-                    anon_posts=new_forum.get('anon_posts', False),
-                    monitoring_email=new_forum.get('monitoring_email', None),
-                    )
+                 parent_id=parent_id,
+                 name=h.really_unicode(new_forum['name']),
+                 shortname=h.really_unicode(shortname),
+                 description=h.really_unicode(description),
+                 members_only=new_forum.get('members_only', False),
+                 anon_posts=new_forum.get('anon_posts', False),
+                 monitoring_email=new_forum.get('monitoring_email', None),
+                 )
     if f.members_only and f.anon_posts:
-        flash('You cannot have anonymous posts in a members only forum.', 'warning')
+        flash('You cannot have anonymous posts in a members only forum.',
+              'warning')
         f.anon_posts = False
     if f.members_only:
         role_developer = ProjectRole.by_name('Developer')._id
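
For context on the create_forum() hunk above: a child forum gets its parent's shortname prefixed to its own. A minimal illustration with made-up values (the names and ids below are hypothetical, not part of the diff):

    # Hypothetical values, tracing the shortname logic in create_forum() above.
    parent_shortname = 'general'        # DM.Forum.query.get(_id=parent_id).shortname
    new_forum = {'shortname': 'help', 'parent': '52a...'}
    shortname = parent_shortname + '/' + new_forum['shortname']  # -> 'general/help'
    # Without a parent, shortname stays new_forum['shortname'], i.e. 'help'.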

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/widgets/admin.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/widgets/admin.py b/ForgeDiscussion/forgediscussion/widgets/admin.py
index 9f1c449..bb954b1 100644
--- a/ForgeDiscussion/forgediscussion/widgets/admin.py
+++ b/ForgeDiscussion/forgediscussion/widgets/admin.py
@@ -29,10 +29,11 @@ from allura.lib.widgets import form_fields as ffw
 from allura.lib import helpers as h
 from forgediscussion import model as DM
 
+
 class OptionsAdmin(ff.AdminForm):
-    defaults=dict(
+    defaults = dict(
         ff.ForgeForm.defaults,
-        submit_text = 'Save')
+        submit_text='Save')
 
     @property
     def fields(self):
@@ -41,48 +42,57 @@ class OptionsAdmin(ff.AdminForm):
                 name='PostingPolicy',
                 label='Posting Policy',
                 options=[
-                    ew.Option(py_value='ApproveOnceModerated', label='Approve Once Moderated'),
+                    ew.Option(py_value='ApproveOnceModerated',
+                              label='Approve Once Moderated'),
                     ew.Option(py_value='ApproveAll', label='Approve All')])
         ]
         return fields
 
+
 class AddForum(ff.AdminForm):
     template = 'jinja:forgediscussion:templates/discussion_widgets/add_forum.html'
-    defaults=dict(
+    defaults = dict(
         ff.ForgeForm.defaults,
         name="add_forum",
         value=None,
         app=None,
-        submit_text = 'Save')
+        submit_text='Save')
 
     @property
     def fields(self):
         fields = [
             ew.HiddenField(name='app_id', label='App'),
-            ew.TextField(name='name', label='Name', validator=fev.UnicodeString()),
+            ew.TextField(name='name', label='Name',
+                         validator=fev.UnicodeString()),
             ew.TextField(name='shortname', label='Short Name',
                          validator=All(
-                                 fev.Regex(ur"^[^\s\/\.]*$", not_empty=True, messages={
-                                    'invalid':'Shortname cannot contain space . or /',
-                                    'empty':'You must create a short name for the forum.'}),
-                                 UniqueForumShortnameValidator())),
+                             fev.Regex(ur"^[^\s\/\.]*$", not_empty=True, messages={
+                                 'invalid': 'Shortname cannot contain space . or /',
+                                 'empty': 'You must create a short name for the forum.'}),
+                             UniqueForumShortnameValidator())),
             ew.TextField(name='parent', label='Parent Forum'),
-            ew.TextField(name='description', label='Description',validator=fev.UnicodeString()),
-            ew.TextField(name='monitoring_email', label='Monitoring Email',validator=fev.Email()),
+            ew.TextField(name='description', label='Description',
+                         validator=fev.UnicodeString()),
+            ew.TextField(name='monitoring_email',
+                         label='Monitoring Email', validator=fev.Email()),
             ffw.FileChooser(name='icon', label='Icon'),
             ew.Checkbox(name="members_only", label="Developer Only"),
             ew.Checkbox(name="anon_posts", label="Allow Anonymous Posts")
         ]
         return fields
 
+
 class AddForumShort(AddForum):
     template = 'jinja:forgediscussion:templates/discussion_widgets/add_forum_short.html'
 
+
 class UniqueForumShortnameValidator(fev.FancyValidator):
 
     def _to_python(self, value, state):
-        forums = DM.Forum.query.find(dict(app_config_id=ObjectId(state.full_dict['app_id']))).all()
+        forums = DM.Forum.query.find(
+            dict(app_config_id=ObjectId(state.full_dict['app_id']))).all()
         value = h.really_unicode(value.lower() or '')
-        if value in [ f.shortname for f in forums ]:
-            raise formencode.Invalid('A forum already exists with that short name, please choose another.', value, state)
+        if value in [f.shortname for f in forums]:
+            raise formencode.Invalid(
+                'A forum already exists with that short name, please choose another.', value, state)
         return value


[22/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/repo_refresh.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/repo_refresh.py b/Allura/allura/model/repo_refresh.py
index 2a8af51..732d6a5 100644
--- a/Allura/allura/model/repo_refresh.py
+++ b/Allura/allura/model/repo_refresh.py
@@ -39,7 +39,8 @@ from allura.model.auth import User
 
 log = logging.getLogger(__name__)
 
-QSIZE=100
+QSIZE = 100
+
 
 def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
     all_commit_ids = commit_ids = list(repo.all_commit_ids())
@@ -63,8 +64,8 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
     seen = set()
     for i, oid in enumerate(commit_ids):
         repo.refresh_commit_info(oid, seen, not all_commits)
-        if (i+1) % 100 == 0:
-            log.info('Refresh commit info %d: %s', (i+1), oid)
+        if (i + 1) % 100 == 0:
+            log.info('Refresh commit info %d: %s', (i + 1), oid)
 
     refresh_commit_repos(all_commit_ids, repo)
 
@@ -72,15 +73,17 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
     for i, oid in enumerate(commit_ids):
         ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
         refresh_children(ci)
-        if (i+1) % 100 == 0:
-            log.info('Refresh child info %d for parents of %s', (i+1), ci._id)
+        if (i + 1) % 100 == 0:
+            log.info('Refresh child info %d for parents of %s',
+                     (i + 1), ci._id)
 
     if repo._refresh_precompute:
         # Refresh commit runs
         commit_run_ids = commit_ids
         # Check if the CommitRuns for the repo are in a good state by checking for
         # a CommitRunDoc that contains the last known commit. If there isn't one,
-        # the CommitRuns for this repo are in a bad state - rebuild them entirely.
+        # the CommitRuns for this repo are in a bad state - rebuild them
+        # entirely.
         if commit_run_ids != all_commit_ids:
             last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
             log.info('Last known commit id: %s', last_commit)
@@ -101,8 +104,8 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
         for i, oid in enumerate(commit_ids):
             ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
             cache = refresh_commit_trees(ci, cache)
-            if (i+1) % 100 == 0:
-                log.info('Refresh commit trees %d: %s', (i+1), ci._id)
+            if (i + 1) % 100 == 0:
+                log.info('Refresh commit trees %d: %s', (i + 1), ci._id)
 
     # Compute diffs
     cache = {}
@@ -115,8 +118,8 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
             ci = mapper(Commit).create(cid, dict(instrument=False))
             ci.set_context(repo)
             compute_diffs(repo._id, cache, ci)
-            if (i+1) % 100 == 0:
-                log.info('Compute diffs %d: %s', (i+1), ci._id)
+            if (i + 1) % 100 == 0:
+                log.info('Compute diffs %d: %s', (i + 1), ci._id)
 
     if repo._refresh_precompute:
         model_cache = ModelCache()
@@ -126,8 +129,8 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
             ci.set_context(repo)
             compute_lcds(ci, model_cache, lcid_cache)
             ThreadLocalORMSession.flush_all()
-            if (i+1) % 100 == 0:
-                log.info('Compute last commit info %d: %s', (i+1), ci._id)
+            if (i + 1) % 100 == 0:
+                log.info('Compute last commit info %d: %s', (i + 1), ci._id)
 
     if not all_commits and not new_clone:
         for commit in commit_ids:
@@ -138,7 +141,7 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
             if user is not None:
                 g.statsUpdater.newCommit(new, repo.app_config.project, user)
                 g.director.create_activity(user, 'committed', new,
-                        related_nodes=[repo.app_config.project])
+                                           related_nodes=[repo.app_config.project])
 
     log.info('Refresh complete for %s', repo.full_fs_path)
     g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)
@@ -147,56 +150,60 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
     if notify:
         send_notifications(repo, commit_ids)
 
+
 def refresh_commit_trees(ci, cache):
     '''Refresh the list of trees included within a commit'''
-    if ci.tree_id is None: return cache
+    if ci.tree_id is None:
+        return cache
     trees_doc = TreesDoc(dict(
-            _id=ci._id,
-            tree_ids = list(trees(ci.tree_id, cache))))
+        _id=ci._id,
+        tree_ids=list(trees(ci.tree_id, cache))))
     trees_doc.m.save(safe=False)
     new_cache = dict(
         (oid, cache[oid])
         for oid in trees_doc.tree_ids)
     return new_cache
 
+
 def refresh_commit_repos(all_commit_ids, repo):
     '''Refresh the list of repositories within which a set of commits are
     contained'''
     for oids in utils.chunked_iter(all_commit_ids, QSIZE):
         for ci in CommitDoc.m.find(dict(
-                _id={'$in':list(oids)},
+                _id={'$in': list(oids)},
                 repo_ids={'$ne': repo._id})):
             oid = ci._id
             ci.repo_ids.append(repo._id)
             index_id = 'allura.model.repo.Commit#' + oid
             ref = ArtifactReferenceDoc(dict(
-                    _id=index_id,
-                    artifact_reference=dict(
-                        cls=bson.Binary(dumps(Commit)),
-                        project_id=repo.app.config.project_id,
-                    app_config_id=repo.app.config._id,
-                        artifact_id=oid),
-                    references=[]))
-            link0 = ShortlinkDoc(dict(
-                    _id=bson.ObjectId(),
-                    ref_id=index_id,
+                _id=index_id,
+                artifact_reference=dict(
+                    cls=bson.Binary(dumps(Commit)),
                     project_id=repo.app.config.project_id,
                     app_config_id=repo.app.config._id,
-                    link=repo.shorthand_for_commit(oid)[1:-1],
-                    url=repo.url_for_commit(oid)))
+                    artifact_id=oid),
+                references=[]))
+            link0 = ShortlinkDoc(dict(
+                _id=bson.ObjectId(),
+                ref_id=index_id,
+                project_id=repo.app.config.project_id,
+                app_config_id=repo.app.config._id,
+                link=repo.shorthand_for_commit(oid)[1:-1],
+                url=repo.url_for_commit(oid)))
             # Always create a link for the full commit ID
             link1 = ShortlinkDoc(dict(
-                    _id=bson.ObjectId(),
-                    ref_id=index_id,
-                    project_id=repo.app.config.project_id,
-                    app_config_id=repo.app.config._id,
-                    link=oid,
-                    url=repo.url_for_commit(oid)))
+                _id=bson.ObjectId(),
+                ref_id=index_id,
+                project_id=repo.app.config.project_id,
+                app_config_id=repo.app.config._id,
+                link=oid,
+                url=repo.url_for_commit(oid)))
             ci.m.save(safe=False, validate=False)
             ref.m.save(safe=False, validate=False)
             link0.m.save(safe=False, validate=False)
             link1.m.save(safe=False, validate=False)
 
+
 def refresh_children(ci):
     '''Refresh the list of children of the given commit'''
     CommitDoc.m.update_partial(
@@ -204,12 +211,14 @@ def refresh_children(ci):
         {'$addToSet': dict(child_ids=ci._id)},
         multi=True)
 
+
 class CommitRunBuilder(object):
+
     '''Class used to build up linear runs of single-parent commits'''
 
     def __init__(self, commit_ids):
         self.commit_ids = commit_ids
-        self.run_index = {} # by commit ID
+        self.run_index = {}  # by commit ID
         self.runs = {}          # by run ID
         self.reasons = {}    # reasons to stop merging runs
 
@@ -217,14 +226,15 @@ class CommitRunBuilder(object):
         '''Build up the runs'''
         for oids in utils.chunked_iter(self.commit_ids, QSIZE):
             oids = list(oids)
-            for ci in CommitDoc.m.find(dict(_id={'$in':oids})):
-                if ci._id in self.run_index: continue
+            for ci in CommitDoc.m.find(dict(_id={'$in': oids})):
+                if ci._id in self.run_index:
+                    continue
                 self.run_index[ci._id] = ci._id
                 self.runs[ci._id] = CommitRunDoc(dict(
-                        _id=ci._id,
-                        parent_commit_ids=ci.parent_ids,
-                        commit_ids=[ci._id],
-                        commit_times=[ci.authored['date']]))
+                    _id=ci._id,
+                    parent_commit_ids=ci.parent_ids,
+                    commit_ids=[ci._id],
+                    commit_times=[ci.authored['date']]))
             self.merge_runs()
         log.info('%d runs', len(self.runs))
         for rid, run in sorted(self.runs.items()):
@@ -246,11 +256,12 @@ class CommitRunBuilder(object):
         runs = runs.values()
         while runs:
             run = runs.pop()
-            if run._id in seen_run_ids: continue
+            if run._id in seen_run_ids:
+                continue
             seen_run_ids.add(run._id)
             yield run
             for run in CommitRunDoc.m.find(
-                dict(commit_ids={'$in':run.parent_commit_ids})):
+                    dict(commit_ids={'$in': run.parent_commit_ids})):
                 runs.append(run)
 
     def cleanup(self):
@@ -260,9 +271,11 @@ class CommitRunBuilder(object):
             for run in self._all_runs())
         for rid, run in runs.items():
             p_cis = run['parent_commit_ids']
-            if len(p_cis) != 1: continue
+            if len(p_cis) != 1:
+                continue
             parent_run = runs.get(p_cis[0], None)
-            if parent_run is None: continue
+            if parent_run is None:
+                continue
             run['commit_ids'] += parent_run['commit_ids']
             run['commit_times'] += parent_run['commit_times']
             run['parent_commit_ids'] = parent_run['parent_commit_ids']
@@ -272,13 +285,14 @@ class CommitRunBuilder(object):
         for run1 in runs.values():
             # if run1 is a subset of another run, delete it
             if CommitRunDoc.m.find(dict(commit_ids={'$all': run1.commit_ids},
-                    _id={'$ne': run1._id})).count():
+                                        _id={'$ne': run1._id})).count():
                 log.info('... delete %r (subset of another run)', run1)
                 run1.m.delete()
                 continue
             for run2 in CommitRunDoc.m.find(dict(
                     commit_ids=run1.commit_ids[0])):
-                if run1._id == run2._id: continue
+                if run1._id == run2._id:
+                    continue
                 log.info('... delete %r (part of %r)', run2, run1)
                 run2.m.delete()
 
@@ -287,7 +301,8 @@ class CommitRunBuilder(object):
         while True:
             for run_id, run in self.runs.iteritems():
                 if len(run.parent_commit_ids) != 1:
-                    self.reasons[run_id] = '%d parents' % len(run.parent_commit_ids)
+                    self.reasons[run_id] = '%d parents' % len(
+                        run.parent_commit_ids)
                     continue
                 p_oid = run.parent_commit_ids[0]
                 p_run_id = self.run_index.get(p_oid)
@@ -299,7 +314,8 @@ class CommitRunBuilder(object):
                     self.reasons[run_id] = 'parent run not found'
                     continue
                 if p_run.commit_ids[0] != p_oid:
-                    self.reasons[run_id] = 'parent does not start with parent commit'
+                    self.reasons[
+                        run_id] = 'parent does not start with parent commit'
                     continue
                 run.commit_ids += p_run.commit_ids
                 run.commit_times += p_run.commit_times
@@ -311,44 +327,48 @@ class CommitRunBuilder(object):
                 break
             del self.runs[p_run_id]
 
+
 def trees(id, cache):
     '''Recursively generate the list of trees contained within a given tree ID'''
     yield id
     entries = cache.get(id, None)
     if entries is None:
         t = TreeDoc.m.get(_id=id)
-        entries = [ o.id for o in t.tree_ids ]
+        entries = [o.id for o in t.tree_ids]
         cache[id] = entries
     for i in entries:
         for x in trees(i, cache):
             yield x
 
+
 def unknown_commit_ids(all_commit_ids):
     '''filter out all commit ids that have already been cached'''
     result = []
     for chunk in utils.chunked_iter(all_commit_ids, QSIZE):
         chunk = list(chunk)
-        q = CommitDoc.m.find(dict(_id={'$in':chunk}))
+        q = CommitDoc.m.find(dict(_id={'$in': chunk}))
         known_commit_ids = set(ci._id for ci in q)
-        result += [ oid for oid in chunk if oid not in known_commit_ids ]
+        result += [oid for oid in chunk if oid not in known_commit_ids]
     return result
 
+
 def compute_diffs(repo_id, tree_cache, rhs_ci):
     '''compute simple differences between a commit and its first parent'''
-    if rhs_ci.tree_id is None: return tree_cache
+    if rhs_ci.tree_id is None:
+        return tree_cache
 
     def _update_cache(lhs_tree_ids, rhs_tree_ids):
         # crazy cache logic that I'm not certain I understand
         new_tree_ids = [
             tid for tid in chain(lhs_tree_ids, rhs_tree_ids)
-            if tid not in tree_cache ]
+            if tid not in tree_cache]
         tree_index = dict(
-            (t._id, t) for t in TreeDoc.m.find(dict(_id={'$in': new_tree_ids}),validate=False))
+            (t._id, t) for t in TreeDoc.m.find(dict(_id={'$in': new_tree_ids}), validate=False))
         tree_index.update(tree_cache)
         rhs_tree_ids_set = set(rhs_tree_ids)
         tree_cache.clear()
         tree_cache.update(
-            (id, t) for id,t in tree_index.iteritems() if id in rhs_tree_ids_set)
+            (id, t) for id, t in tree_index.iteritems() if id in rhs_tree_ids_set)
         return tree_index
 
     empty_tree = Object(_id=None, tree_ids=[], blob_ids=[], other_ids=[])
@@ -356,20 +376,24 @@ def compute_diffs(repo_id, tree_cache, rhs_ci):
     differences = []
     rhs_treesdoc = TreesDoc.m.get(_id=rhs_ci._id)
     if not rhs_treesdoc:
-        # FIXME: These sometimes don't exist for unknown reasons; they should be auto-gen'ed
+        # FIXME: These sometimes don't exist for unknown reasons; they should
+        # be auto-gen'ed
         log.error('Missing TreesDoc: %s', rhs_ci)
         return tree_cache
     for lhs_cid in rhs_ci.parent_ids:
         lhs_ci = CommitDoc.m.get(_id=lhs_cid)
         if lhs_ci is None:
-            log.error('Commit ID referenced as parent but not found: %s parent of %s', lhs_cid, rhs_ci)
+            log.error(
+                'Commit ID referenced as parent but not found: %s parent of %s', lhs_cid, rhs_ci)
             continue
         lhs_treesdoc = TreesDoc.m.get(_id=lhs_cid)
         if not lhs_treesdoc:
-            # FIXME: These sometimes don't exist for unknown reasons; they should be auto-gen'ed
+            # FIXME: These sometimes don't exist for unknown reasons; they
+            # should be auto-gen'ed
             log.error('Missing TreesDoc: %s', rhs_ci)
             continue
-        tree_index = _update_cache(lhs_treesdoc.tree_ids, rhs_treesdoc.tree_ids)
+        tree_index = _update_cache(
+            lhs_treesdoc.tree_ids, rhs_treesdoc.tree_ids)
         rhs_tree = tree_index[rhs_ci.tree_id]
         lhs_tree = tree_index.get(lhs_ci.tree_id, empty_tree)
         for name, lhs_id, rhs_id in _diff_trees(lhs_tree, rhs_tree, tree_index):
@@ -384,11 +408,12 @@ def compute_diffs(repo_id, tree_cache, rhs_ci):
                 dict(name=name, lhs_id=lhs_id, rhs_id=rhs_id))
     # Build the diffinfo
     di = DiffInfoDoc(dict(
-            _id=rhs_ci._id,
-            differences=differences))
+        _id=rhs_ci._id,
+        differences=differences))
     di.m.save()
     return tree_cache
 
+
 def send_notifications(repo, commit_ids):
     '''Create appropriate notification and feed objects for a refresh'''
     from allura.model import Feed, Notification
@@ -398,7 +423,7 @@ def send_notifications(repo, commit_ids):
         chunk = list(oids)
         index = dict(
             (doc._id, doc)
-            for doc in Commit.query.find(dict(_id={'$in':chunk})))
+            for doc in Commit.query.find(dict(_id={'$in': chunk})))
         for oid in chunk:
             ci = index[oid]
             href = repo.url_for_commit(oid)
@@ -414,13 +439,13 @@ def send_notifications(repo, commit_ids):
                 unique_id=href)
             branches = repo.symbolics_for_commit(ci)[0]
             commit_msgs.append('%s: %s by %s %s%s' % (
-                    ",".join(b for b in branches),
-                    summary, ci.authored.name, base_url, ci.url()))
+                ",".join(b for b in branches),
+                summary, ci.authored.name, base_url, ci.url()))
     if commit_msgs:
         if len(commit_msgs) > 1:
             subject = '%d new commits to %s %s' % (
                 len(commit_msgs), repo.app.project.name, repo.app.config.options.mount_label)
-            text='\n\n'.join(commit_msgs)
+            text = '\n\n'.join(commit_msgs)
         else:
             subject = '{0} - {1}: {2}'.format(
                 repo.shorthand_for_commit(ci._id),
@@ -428,10 +453,10 @@ def send_notifications(repo, commit_ids):
                 summary)
             branches = repo.symbolics_for_commit(ci)[0]
             text_branches = ('%s: ' % ",".join(b for b in branches)
-                    if branches else '')
+                             if branches else '')
             text = "%s%s %s%s" % (text_branches,
-                               ci.message,
-                               base_url, ci.url())
+                                  ci.message,
+                                  base_url, ci.url())
 
         Notification.post(
             artifact=repo,
@@ -448,24 +473,29 @@ def _title(message):
 
 
 def _summarize(message):
-    if not message: return ''
+    if not message:
+        return ''
     summary = []
     for line in message.splitlines():
         line = line.rstrip()
-        if line: summary.append(line)
-        else: break
+        if line:
+            summary.append(line)
+        else:
+            break
     return ' '.join(summary)
 
+
 def _diff_trees(lhs, rhs, index, *path):
     def _fq(name):
         return '/'.join(reversed(
-                (name,) + path))
+            (name,) + path))
     # Diff the trees (and keep deterministic order)
     rhs_tree_ids = OrderedDict(
         (o.name, o.id)
         for o in rhs.tree_ids)
     for o in lhs.tree_ids:
-        rhs_id = rhs_tree_ids.pop(o.name, None)  # remove so won't be picked up as added, below
+        # remove so won't be picked up as added, below
+        rhs_id = rhs_tree_ids.pop(o.name, None)
         if rhs_id == o.id:  # no change
             continue
         elif rhs_id is None:  # removed
@@ -487,7 +517,7 @@ def _diff_trees(lhs, rhs, index, *path):
     for o in lhs.blob_ids:
         rhs_id = rhs_blob_ids.pop(o.name, None)
         if rhs_id == o.id:
-            continue # no change
+            continue  # no change
         elif rhs_id is None:
             yield (_fq(o.name), o.id, None)
         else:
@@ -495,11 +525,13 @@ def _diff_trees(lhs, rhs, index, *path):
     for name, id in rhs_blob_ids.items():
         yield (_fq(name), None, id)
 
+
 def get_commit_info(commit):
     if not isinstance(commit, Commit):
         commit = mapper(Commit).create(commit, dict(instrument=False))
     sess = session(commit)
-    if sess: sess.expunge(commit)
+    if sess:
+        sess.expunge(commit)
     return dict(
         id=commit._id,
         author=commit.authored.name,
@@ -508,7 +540,8 @@ def get_commit_info(commit):
         author_url=commit.author_url,
         shortlink=commit.shorthand_id(),
         summary=commit.summary
-        )
+    )
+
 
 def last_known_commit_id(all_commit_ids, new_commit_ids):
     """
@@ -520,8 +553,10 @@ def last_known_commit_id(all_commit_ids, new_commit_ids):
         new_commit_ids: Commit ids that are not yet cached in mongo, sorted
                         oldest to newest.
     """
-    if not all_commit_ids: return None
-    if not new_commit_ids: return all_commit_ids[-1]
+    if not all_commit_ids:
+        return None
+    if not new_commit_ids:
+        return all_commit_ids[-1]
     return all_commit_ids[all_commit_ids.index(new_commit_ids[0]) - 1]
 
 
@@ -540,6 +575,7 @@ def compute_lcds(commit, model_cache, lcid_cache):
         for changed_path in tree.commit.changed_paths:
             lcid_cache[changed_path] = tree.commit._id
 
+
 def _compute_lcds(tree, cache):
     path = tree.path().strip('/')
     if path not in tree.commit.changed_paths:
@@ -550,6 +586,7 @@ def _compute_lcds(tree, cache):
         sub_tree = _pull_tree(cache, x.id, tree, x.name)
         _compute_lcds(sub_tree, cache)
 
+
 def _pull_tree(cache, tree_id, *context):
     '''
     Since the Tree instances stick around in our cache,
@@ -559,15 +596,16 @@ def _pull_tree(cache, tree_id, *context):
     '''
     cache_tree = cache.get(Tree, dict(_id=tree_id))
     new_tree = Tree(
-            _id=cache_tree._id,
-            tree_ids=cache_tree.tree_ids,
-            blob_ids=cache_tree.blob_ids,
-            other_ids=cache_tree.other_ids,
-        )
+        _id=cache_tree._id,
+        tree_ids=cache_tree.tree_ids,
+        blob_ids=cache_tree.blob_ids,
+        other_ids=cache_tree.other_ids,
+    )
     session(new_tree).expunge(new_tree)
     new_tree.set_context(*context)
     return new_tree
 
+
 def _update_tree_cache(tree_ids, cache):
     current_ids = set(tree_ids)
     cached_ids = set(cache.instance_ids(Tree))
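
As a quick worked example of last_known_commit_id() from this file's diff (the commit ids below are invented; both lists are sorted oldest to newest, as the docstring requires):

    all_commit_ids = ['a1', 'b2', 'c3', 'd4']   # every commit id in the repo
    new_commit_ids = ['c3', 'd4']               # ids not yet cached in mongo
    # The first new commit sits at index 2, so the last known (cached) commit is
    # all_commit_ids[1], i.e. 'b2'.  With no new commits the result would be 'd4';
    # for an empty repo it would be None.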

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/repository.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/repository.py b/Allura/allura/model/repository.py
index 950d3bc..aaa97cc 100644
--- a/Allura/allura/model/repository.py
+++ b/Allura/allura/model/repository.py
@@ -65,26 +65,28 @@ config = utils.ConfigProxy(
     common_prefix='forgemail.url')
 
 README_RE = re.compile('^README(\.[^.]*)?$', re.IGNORECASE)
-VIEWABLE_EXTENSIONS = ['.php','.py','.js','.java','.html','.htm','.yaml','.sh',
-    '.rb','.phtml','.txt','.bat','.ps1','.xhtml','.css','.cfm','.jsp','.jspx',
-    '.pl','.php4','.php3','.rhtml','.svg','.markdown','.json','.ini','.tcl','.vbs','.xsl']
+VIEWABLE_EXTENSIONS = [
+    '.php', '.py', '.js', '.java', '.html', '.htm', '.yaml', '.sh',
+    '.rb', '.phtml', '.txt', '.bat', '.ps1', '.xhtml', '.css', '.cfm', '.jsp', '.jspx',
+    '.pl', '.php4', '.php3', '.rhtml', '.svg', '.markdown', '.json', '.ini', '.tcl', '.vbs', '.xsl']
+
 
 class RepositoryImplementation(object):
 
     # Repository-specific code
-    def init(self): # pragma no cover
+    def init(self):  # pragma no cover
         raise NotImplementedError, 'init'
 
-    def clone_from(self, source_url): # pragma no cover
+    def clone_from(self, source_url):  # pragma no cover
         raise NotImplementedError, 'clone_from'
 
-    def commit(self, revision): # pragma no cover
+    def commit(self, revision):  # pragma no cover
         raise NotImplementedError, 'commit'
 
-    def all_commit_ids(self): # pragma no cover
+    def all_commit_ids(self):  # pragma no cover
         raise NotImplementedError, 'all_commit_ids'
 
-    def new_commits(self, all_commits=False): # pragma no cover
+    def new_commits(self, all_commits=False):  # pragma no cover
         '''Return a list of native commits in topological order (heads first).
 
         "commit" is a repo-native object, NOT a Commit object.
@@ -92,21 +94,22 @@ class RepositoryImplementation(object):
         '''
         raise NotImplementedError, 'new_commits'
 
-    def commit_parents(self, commit): # pragma no cover
+    def commit_parents(self, commit):  # pragma no cover
         '''Return a list of native commits for the parents of the given (native)
         commit'''
         raise NotImplementedError, 'commit_parents'
 
-    def refresh_commit_info(self, oid, lazy=True): # pragma no cover
+    def refresh_commit_info(self, oid, lazy=True):  # pragma no cover
         '''Refresh the data in the commit with id oid'''
         raise NotImplementedError, 'refresh_commit_info'
 
-    def _setup_hooks(self, source_path=None): # pragma no cover
+    def _setup_hooks(self, source_path=None):  # pragma no cover
         '''Install a hook in the repository that will ping the refresh url for
         the repo.  Optionally provide a path from which to copy existing hooks.'''
         raise NotImplementedError, '_setup_hooks'
 
-    def log(self, revs=None, path=None, exclude=None, id_only=True, **kw): # pragma no cover
+    # pragma no cover
+    def log(self, revs=None, path=None, exclude=None, id_only=True, **kw):
         """
         Returns a generator that returns information about commits reachable
         by revs.
@@ -128,11 +131,11 @@ class RepositoryImplementation(object):
         """
         raise NotImplementedError, 'log'
 
-    def compute_tree_new(self, commit, path='/'): # pragma no cover
+    def compute_tree_new(self, commit, path='/'):  # pragma no cover
         '''Used in hg and svn to compute a git-like-tree lazily with the new models'''
         raise NotImplementedError, 'compute_tree'
 
-    def open_blob(self, blob): # pragma no cover
+    def open_blob(self, blob):  # pragma no cover
         '''Return a file-like object that contains the contents of the blob'''
         raise NotImplementedError, 'open_blob'
 
@@ -168,7 +171,8 @@ class RepositoryImplementation(object):
             object_id = commit._id
 
         if '/' in object_id:
-            object_id = os.path.join(object_id, self._repo.app.END_OF_REF_ESCAPE)
+            object_id = os.path.join(
+                object_id, self._repo.app.END_OF_REF_ESCAPE)
 
         return os.path.join(self._repo.url(), url_type, object_id) + '/'
 
@@ -178,7 +182,8 @@ class RepositoryImplementation(object):
         If create_repo_dir is True, also ensure that the directory
         of the repo itself exists.
         '''
-        if not self._repo.fs_path.endswith('/'): self._repo.fs_path += '/'
+        if not self._repo.fs_path.endswith('/'):
+            self._repo.fs_path += '/'
         fullname = self._repo.fs_path + self._repo.name
         # make the base dir for repo, regardless
         if not os.path.exists(self._repo.fs_path):
@@ -188,10 +193,11 @@ class RepositoryImplementation(object):
         return fullname
 
     def _setup_special_files(self, source_path=None):
-        magic_file = os.path.join(self._repo.fs_path, self._repo.name, '.SOURCEFORGE-REPOSITORY')
+        magic_file = os.path.join(
+            self._repo.fs_path, self._repo.name, '.SOURCEFORGE-REPOSITORY')
         with open(magic_file, 'w') as f:
             f.write(self._repo.repo_id)
-        os.chmod(magic_file, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
+        os.chmod(magic_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
         self._setup_hooks(source_path)
 
     @property
@@ -232,17 +238,20 @@ class RepositoryImplementation(object):
         lcd_chunk_size = asint(tg.config.get('lcd_thread_chunk_size', 10))
         num_threads = 0
         for s in range(0, len(paths), lcd_chunk_size):
-            chunks.put(paths[s:s+lcd_chunk_size])
+            chunks.put(paths[s:s + lcd_chunk_size])
             num_threads += 1
+
         def get_ids():
             paths = set(chunks.get())
             try:
                 commit_id = commit._id
                 while paths and commit_id:
                     if time() - start_time >= timeout:
-                        log.error('last_commit_ids timeout for %s on %s', commit._id, ', '.join(paths))
+                        log.error('last_commit_ids timeout for %s on %s',
+                                  commit._id, ', '.join(paths))
                         break
-                    commit_id, changes = self._get_last_commit(commit._id, paths)
+                    commit_id, changes = self._get_last_commit(
+                        commit._id, paths)
                     if commit_id is None:
                         break
                     changed = prefix_paths_union(paths, changes)
@@ -288,27 +297,29 @@ class RepositoryImplementation(object):
         """
         raise NotImplemented('get_changes')
 
+
 class Repository(Artifact, ActivityObject):
-    BATCH_SIZE=100
+    BATCH_SIZE = 100
+
     class __mongometa__:
-        name='generic-repository'
+        name = 'generic-repository'
         indexes = ['upstream_repo.name']
     _impl = None
-    repo_id='repo'
-    type_s='Repository'
+    repo_id = 'repo'
+    type_s = 'Repository'
     _refresh_precompute = True
 
-    name=FieldProperty(str)
-    tool=FieldProperty(str)
-    fs_path=FieldProperty(str)
-    url_path=FieldProperty(str)
-    status=FieldProperty(str)
-    email_address=''
-    additional_viewable_extensions=FieldProperty(str)
+    name = FieldProperty(str)
+    tool = FieldProperty(str)
+    fs_path = FieldProperty(str)
+    url_path = FieldProperty(str)
+    status = FieldProperty(str)
+    email_address = ''
+    additional_viewable_extensions = FieldProperty(str)
     heads = FieldProperty(S.Deprecated)
     branches = FieldProperty(S.Deprecated)
     repo_tags = FieldProperty(S.Deprecated)
-    upstream_repo = FieldProperty(dict(name=str,url=str))
+    upstream_repo = FieldProperty(dict(name=str, url=str))
     default_branch_name = FieldProperty(str)
 
     def __init__(self, **kw):
@@ -358,7 +369,8 @@ class Repository(Artifact, ActivityObject):
         return urljoin(tg.config.get('scm.repos.tarball.url_prefix', '/'), r)
 
     def get_tarball_status(self, revision, path=None):
-        pathname = os.path.join(self.tarball_path, self.tarball_filename(revision, path))
+        pathname = os.path.join(
+            self.tarball_path, self.tarball_filename(revision, path))
         filename = '%s%s' % (pathname, '.zip')
         if os.path.isfile(filename):
             return 'complete'
@@ -368,12 +380,11 @@ class Repository(Artifact, ActivityObject):
             'task_name': 'allura.tasks.repo_tasks.tarball',
             'args': [revision, path or ''],
             'state': {'$in': ['busy', 'ready']},
-            })
+        })
 
         return task.state if task else None
 
-
-    def __repr__(self): # pragma no cover
+    def __repr__(self):  # pragma no cover
         return '<%s %s>' % (
             self.__class__.__name__,
             self.full_fs_path)
@@ -381,32 +392,46 @@ class Repository(Artifact, ActivityObject):
     # Proxy to _impl
     def init(self):
         return self._impl.init()
+
     def commit(self, rev):
         return self._impl.commit(rev)
+
     def all_commit_ids(self):
         return self._impl.all_commit_ids()
+
     def refresh_commit_info(self, oid, seen, lazy=True):
         return self._impl.refresh_commit_info(oid, seen, lazy)
+
     def open_blob(self, blob):
         return self._impl.open_blob(blob)
+
     def blob_size(self, blob):
         return self._impl.blob_size(blob)
+
     def shorthand_for_commit(self, oid):
         return self._impl.shorthand_for_commit(oid)
+
     def symbolics_for_commit(self, commit):
         return self._impl.symbolics_for_commit(commit)
+
     def url_for_commit(self, commit, url_type='ci'):
         return self._impl.url_for_commit(commit, url_type)
+
     def compute_tree_new(self, commit, path='/'):
         return self._impl.compute_tree_new(commit, path)
+
     def last_commit_ids(self, commit, paths):
         return self._impl.last_commit_ids(commit, paths)
+
     def get_changes(self, commit_id):
         return self._impl.get_changes(commit_id)
+
     def is_empty(self):
         return self._impl.is_empty()
+
     def is_file(self, path, rev=None):
         return self._impl.is_file(path, rev)
+
     def get_heads(self):
         """
         Return list of heads for the repo.
@@ -416,6 +441,7 @@ class Repository(Artifact, ActivityObject):
         try to remove the deprecated fields and clean this up.
         """
         return self._impl.heads
+
     def get_branches(self):
         """
         Return list of branches for the repo.
@@ -425,6 +451,7 @@ class Repository(Artifact, ActivityObject):
         should try to remove the deprecated fields and clean this up.
         """
         return self._impl.branches
+
     def get_tags(self):
         """
         Return list of tags for the repo.
@@ -434,15 +461,18 @@ class Repository(Artifact, ActivityObject):
         should try to remove the deprecated fields and clean this up.
         """
         return self._impl.tags
+
     @property
     def head(self):
         return self._impl.head
+
     def set_default_branch(self, name):
         return self._impl.set_default_branch(name)
 
     def _log(self, rev, skip, limit):
         head = self.commit(rev)
-        if head is None: return
+        if head is None:
+            return
         for _id in self.commitlog([head._id], skip, limit):
             ci = head.query.get(_id=_id)
             ci.set_context(self)
@@ -491,7 +521,7 @@ class Repository(Artifact, ActivityObject):
             branch = self.app.default_branch_name
         try:
             return self.commit(branch)
-        except: # pragma no cover
+        except:  # pragma no cover
             log.exception('Cannot get latest commit for a branch', branch)
             return None
 
@@ -500,17 +530,18 @@ class Repository(Artifact, ActivityObject):
 
     def refresh_url(self):
         return '/'.join([
-                tg.config.get('base_url', 'http://localhost:8080').rstrip('/'),
-                'auth/refresh_repo',
-                self.url().lstrip('/'),
-            ])
+            tg.config.get('base_url', 'http://localhost:8080').rstrip('/'),
+            'auth/refresh_repo',
+            self.url().lstrip('/'),
+        ])
 
     def shorthand_id(self):
         return self.name
 
     @property
     def email_address(self):
-        domain = '.'.join(reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
+        domain = '.'.join(
+            reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
         return u'noreply@%s%s' % (domain, config.common_suffix)
 
     def index(self):
@@ -532,8 +563,9 @@ class Repository(Artifact, ActivityObject):
         '''Return a URL string suitable for copy/paste that describes _this_ repo,
            e.g., for use in a clone/checkout command
         '''
-        tpl = string.Template(tg.config.get('scm.host.%s.%s' % (category, self.tool)))
-        return tpl.substitute(dict(username=username, path=self.url_path+self.name))
+        tpl = string.Template(
+            tg.config.get('scm.host.%s.%s' % (category, self.tool)))
+        return tpl.substitute(dict(username=username, path=self.url_path + self.name))
 
     def clone_command(self, category, username=''):
         '''Return a string suitable for copy/paste that would clone this repo locally
@@ -544,20 +576,21 @@ class Repository(Artifact, ActivityObject):
         tpl = string.Template(tg.config.get('scm.clone.%s.%s' % (category, self.tool)) or
                               tg.config.get('scm.clone.%s' % self.tool))
         return tpl.substitute(dict(username=username,
-                                   source_url=self.clone_url(category, username),
+                                   source_url=self.clone_url(
+                                       category, username),
                                    dest_path=self.suggested_clone_dest_path()))
 
     def merge_requests_by_statuses(self, *statuses):
         return MergeRequest.query.find(dict(
-                app_config_id=self.app.config._id,
-                status={'$in':statuses})).sort(
+            app_config_id=self.app.config._id,
+            status={'$in': statuses})).sort(
             'request_number')
 
     @LazyProperty
     def _additional_viewable_extensions(self):
         ext_list = self.additional_viewable_extensions or ''
         ext_list = [ext.strip() for ext in ext_list.split(',') if ext]
-        ext_list += [ '.ini', '.gitignore', '.svnignore', 'README' ]
+        ext_list += ['.ini', '.gitignore', '.svnignore', 'README']
         return ext_list
 
     def guess_type(self, name):
@@ -586,16 +619,16 @@ class Repository(Artifact, ActivityObject):
             self.set_status('ready')
 
     def push_upstream_context(self):
-        project, rest=h.find_project(self.upstream_repo.name)
+        project, rest = h.find_project(self.upstream_repo.name)
         with h.push_context(project._id):
             app = project.app_instance(rest[0])
         return h.push_context(project._id, app_config_id=app.config._id)
 
     def pending_upstream_merges(self):
         q = {
-            'downstream.project_id':self.project_id,
-            'downstream.mount_point':self.app.config.options.mount_point,
-            'status':'open'}
+            'downstream.project_id': self.project_id,
+            'downstream.mount_point': self.app.config.options.mount_point,
+            'status': 'open'}
         with self.push_upstream_context():
             return MergeRequest.query.find(q).count()
 
@@ -634,26 +667,28 @@ class Repository(Artifact, ActivityObject):
         self.status = status
         session(self).flush(self)
 
+
 class MergeRequest(VersionedArtifact, ActivityObject):
-    statuses=['open', 'merged', 'rejected']
+    statuses = ['open', 'merged', 'rejected']
+
     class __mongometa__:
-        name='merge-request'
-        indexes=['commit_id']
-        unique_indexes=[('app_config_id', 'request_number')]
-    type_s='MergeRequest'
-
-    request_number=FieldProperty(int)
-    status=FieldProperty(str, if_missing='open')
-    downstream=FieldProperty(dict(
-            project_id=S.ObjectId,
-            mount_point=str,
-            commit_id=str))
-    source_branch=FieldProperty(str,if_missing='')
-    target_branch=FieldProperty(str)
-    creator_id=FieldProperty(S.ObjectId, if_missing=lambda:c.user._id)
-    created=FieldProperty(datetime, if_missing=datetime.utcnow)
-    summary=FieldProperty(str)
-    description=FieldProperty(str)
+        name = 'merge-request'
+        indexes = ['commit_id']
+        unique_indexes = [('app_config_id', 'request_number')]
+    type_s = 'MergeRequest'
+
+    request_number = FieldProperty(int)
+    status = FieldProperty(str, if_missing='open')
+    downstream = FieldProperty(dict(
+        project_id=S.ObjectId,
+        mount_point=str,
+        commit_id=str))
+    source_branch = FieldProperty(str, if_missing='')
+    target_branch = FieldProperty(str)
+    creator_id = FieldProperty(S.ObjectId, if_missing=lambda: c.user._id)
+    created = FieldProperty(datetime, if_missing=datetime.utcnow)
+    summary = FieldProperty(str)
+    description = FieldProperty(str)
 
     @property
     def activity_name(self):
@@ -701,13 +736,13 @@ class MergeRequest(VersionedArtifact, ActivityObject):
     @classmethod
     def upsert(cls, **kw):
         num = cls.query.find(dict(
-                app_config_id=c.app.config._id)).count()+1
+            app_config_id=c.app.config._id)).count() + 1
         while True:
             try:
                 r = cls(request_number=num, **kw)
                 session(r).flush(r)
                 return r
-            except pymongo.errors.DuplicateKeyError: # pragma no cover
+            except pymongo.errors.DuplicateKeyError:  # pragma no cover
                 session(r).expunge(r)
                 num += 1
 
@@ -725,6 +760,7 @@ class MergeRequest(VersionedArtifact, ActivityObject):
 
 
 class GitLikeTree(object):
+
     '''
     A tree node similar to that which is used in git
 
@@ -734,19 +770,22 @@ class GitLikeTree(object):
 
     def __init__(self):
         self.blobs = {}  # blobs[name] = oid
-        self.trees = defaultdict(GitLikeTree) #trees[name] = GitLikeTree()
+        self.trees = defaultdict(GitLikeTree)  # trees[name] = GitLikeTree()
         self._hex = None
 
     def get_tree(self, path):
-        if path.startswith('/'): path = path[1:]
-        if not path: return self
+        if path.startswith('/'):
+            path = path[1:]
+        if not path:
+            return self
         cur = self
         for part in path.split('/'):
             cur = cur.trees[part]
         return cur
 
     def get_blob(self, path):
-        if path.startswith('/'): path = path[1:]
+        if path.startswith('/'):
+            path = path[1:]
         path_parts = path.split('/')
         dirpath, last = path_parts[:-1], path_parts[-1]
         cur = self
@@ -755,7 +794,8 @@ class GitLikeTree(object):
         return cur.blobs[last]
 
     def set_blob(self, path, oid):
-        if path.startswith('/'): path = path[1:]
+        if path.startswith('/'):
+            path = path[1:]
         path_parts = path.split('/')
         dirpath, filename = path_parts[:-1], path_parts[-1]
         cur = self
@@ -774,9 +814,9 @@ class GitLikeTree(object):
     def __repr__(self):
         # this can't change, is used in hex() above
         lines = ['t %s %s' % (t.hex(), name)
-                  for name, t in self.trees.iteritems() ]
+                 for name, t in self.trees.iteritems()]
         lines += ['b %s %s' % (oid, name)
-                  for name, oid in self.blobs.iteritems() ]
+                  for name, oid in self.blobs.iteritems()]
         return h.really_unicode('\n'.join(sorted(lines))).encode('utf-8')
 
     def __unicode__(self):
@@ -784,14 +824,16 @@ class GitLikeTree(object):
 
     def pretty_tree(self, indent=0, recurse=True, show_id=True):
         '''For debugging, show a nice tree representation'''
-        lines = [' '*indent + 't %s %s' %
-                 (name, '\n'+t.unicode_full_tree(indent+2, show_id=show_id) if recurse else t.hex())
-                  for name, t in sorted(self.trees.iteritems()) ]
-        lines += [' '*indent + 'b %s %s' % (name, oid if show_id else '')
-                  for name, oid in sorted(self.blobs.iteritems()) ]
+        lines = [' ' * indent + 't %s %s' %
+                 (name, '\n' + t.unicode_full_tree(indent + 2, show_id=show_id)
+                  if recurse else t.hex())
+                 for name, t in sorted(self.trees.iteritems())]
+        lines += [' ' * indent + 'b %s %s' % (name, oid if show_id else '')
+                  for name, oid in sorted(self.blobs.iteritems())]
         output = h.really_unicode('\n'.join(lines)).encode('utf-8')
         return output
 
+
 def topological_sort(graph):
     '''Return the topological sort of a graph.
 
@@ -810,7 +852,8 @@ def topological_sort(graph):
         if not parents:
             graph.pop(nid)
             roots.append(nid)
-        for p_nid in parents: children[p_nid].append(nid)
+        for p_nid in parents:
+            children[p_nid].append(nid)
     # Topo sort
     while roots:
         n = roots.pop()
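
The topological_sort() hunks above show only fragments: the docstring opening, and the loop that collects parentless roots and builds a children map. A rough standalone sketch of that Kahn-style approach, assuming (as the visible loop suggests) that graph maps each node id to the set of its parent ids; this is an illustration, not the code from the commit:

    from collections import defaultdict

    def topo_sort(graph):
        # Work on a copy so the caller's mapping is not mutated.
        remaining = dict((nid, set(parents)) for nid, parents in graph.items())
        children = defaultdict(list)
        roots = []
        for nid, parents in list(remaining.items()):
            if not parents:                  # no parents -> ready to emit
                remaining.pop(nid)
                roots.append(nid)
            for p_nid in parents:
                children[p_nid].append(nid)
        order = []
        while roots:
            n = roots.pop()
            order.append(n)
            for child in children[n]:
                remaining[child].discard(n)  # this parent is handled now
                if not remaining[child]:
                    remaining.pop(child)
                    roots.append(child)
        return order  # parents before children; nodes left in `remaining` form cycles

    # topo_sort({'a': set(), 'b': {'a'}, 'c': {'a', 'b'}}) -> ['a', 'b', 'c']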

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/session.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/session.py b/Allura/allura/model/session.py
index 9a2c061..92f339d 100644
--- a/Allura/allura/model/session.py
+++ b/Allura/allura/model/session.py
@@ -28,6 +28,7 @@ from allura.tasks import index_tasks
 
 log = logging.getLogger(__name__)
 
+
 class ArtifactSessionExtension(SessionExtension):
 
     def __init__(self, session):
@@ -41,14 +42,14 @@ class ArtifactSessionExtension(SessionExtension):
             self.objects_added = list(self.session.uow.new)
             self.objects_modified = list(self.session.uow.dirty)
             self.objects_deleted = list(self.session.uow.deleted)
-        else: # pragma no cover
+        else:  # pragma no cover
             st = state(obj)
             if st.status == st.new:
-                self.objects_added = [ obj ]
+                self.objects_added = [obj]
             elif st.status == st.dirty:
-                self.objects_modified = [ obj ]
+                self.objects_modified = [obj]
             elif st.status == st.deleted:
-                self.objects_deleted = [ obj ]
+                self.objects_deleted = [obj]
 
     def after_flush(self, obj=None):
         "Update artifact references, and add/update this artifact to solr"
@@ -61,13 +62,14 @@ class ArtifactSessionExtension(SessionExtension):
             try:
                 arefs = [
                     ArtifactReference.from_artifact(obj)
-                    for obj in self.objects_added + self.objects_modified ]
+                    for obj in self.objects_added + self.objects_modified]
                 for obj in self.objects_added + self.objects_modified:
                     Shortlink.from_artifact(obj)
                 # Flush shortlinks
                 main_orm_session.flush()
             except Exception:
-                log.exception("Failed to update artifact references. Is this a borked project migration?")
+                log.exception(
+                    "Failed to update artifact references. Is this a borked project migration?")
             self.update_index(self.objects_deleted, arefs)
             for obj in self.objects_added:
                 g.zarkov_event('create', extra=obj.index_id())
@@ -88,7 +90,9 @@ class ArtifactSessionExtension(SessionExtension):
         if arefs:
             index_tasks.add_artifacts.post([aref._id for aref in arefs])
 
+
 class BatchIndexer(ArtifactSessionExtension):
+
     """
     Tracks needed search index operations over the life of a
     :class:`ming.odm.session.ThreadLocalODMSession` session, and performs them
@@ -116,7 +120,7 @@ class BatchIndexer(ArtifactSessionExtension):
         from .index import ArtifactReference
         del_index_ids = [obj.index_id() for obj in objects_deleted]
         deleted_aref_ids = [aref._id for aref in
-            ArtifactReference.query.find(dict(_id={'$in': del_index_ids}))]
+                            ArtifactReference.query.find(dict(_id={'$in': del_index_ids}))]
         cls = self.__class__
         cls.to_add -= set(deleted_aref_ids)
         cls.to_delete |= set(del_index_ids)
@@ -157,7 +161,8 @@ class BatchIndexer(ArtifactSessionExtension):
         try:
             task_func.post(chunk)
         except pymongo.errors.InvalidDocument as e:
-            # there are many types of InvalidDocument, only recurse if it's expected to help
+            # there are many types of InvalidDocument, only recurse if it's
+            # expected to help
             if str(e).startswith('BSON document too large'):
                 cls._post(task_func, chunk[:len(chunk) // 2])
                 cls._post(task_func, chunk[len(chunk) // 2:])
@@ -172,6 +177,7 @@ def substitute_extensions(session, extensions=None):
     :class:`ming.odm.session.ThreadLocalODMSession` session.
     """
     original_exts = session._kwargs.get('extensions', [])
+
     def _set_exts(exts):
         session.flush()
         session.close()
@@ -181,7 +187,6 @@ def substitute_extensions(session, extensions=None):
     _set_exts(original_exts)
 
 
-
 main_doc_session = Session.by_name('main')
 project_doc_session = Session.by_name('project')
 task_doc_session = Session.by_name('task')
@@ -190,7 +195,7 @@ project_orm_session = ThreadLocalORMSession(project_doc_session)
 task_orm_session = ThreadLocalORMSession(task_doc_session)
 artifact_orm_session = ThreadLocalORMSession(
     doc_session=project_doc_session,
-    extensions = [ ArtifactSessionExtension ])
+    extensions=[ArtifactSessionExtension])
 repository_orm_session = ThreadLocalORMSession(
     doc_session=main_doc_session,
-    extensions = [  ])
+    extensions=[])
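
The BatchIndexer._post() hunk earlier in this file's diff retries with smaller batches when the queued task document exceeds the BSON size limit. A standalone sketch of that split-and-retry idea, using a made-up PayloadTooLarge exception in place of pymongo's InvalidDocument (illustration only, not the project's code):

    class PayloadTooLarge(Exception):
        """Stand-in for pymongo's 'BSON document too large' InvalidDocument."""

    def post_in_chunks(post, ids):
        try:
            post(ids)                        # try the whole batch first
        except PayloadTooLarge:
            if len(ids) <= 1:
                raise                        # a single id that is still too large
            mid = len(ids) // 2              # halve the batch and retry each half
            post_in_chunks(post, ids[:mid])
            post_in_chunks(post, ids[mid:])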

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/stats.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/stats.py b/Allura/allura/model/stats.py
index 3946557..03423e5 100644
--- a/Allura/allura/model/stats.py
+++ b/Allura/allura/model/stats.py
@@ -34,33 +34,35 @@ import difflib
 from allura.model.session import main_orm_session
 from allura.lib import helpers as h
 
+
 class Stats(MappedClass):
+
     class __mongometa__:
-        name='basestats'
+        name = 'basestats'
         session = main_orm_session
-        unique_indexes = [ '_id']
+        unique_indexes = ['_id']
 
-    _id=FieldProperty(S.ObjectId)
+    _id = FieldProperty(S.ObjectId)
 
-    visible = FieldProperty(bool, if_missing = True)
+    visible = FieldProperty(bool, if_missing=True)
     registration_date = FieldProperty(datetime)
     general = FieldProperty([dict(
-        category = S.ObjectId,
-        messages = [dict(
-            messagetype = str,
-            created = int,
-            modified = int)],
-        tickets = dict(
-            solved = int,
-            assigned = int,
-            revoked = int,
-            totsolvingtime = int),
-        commits = [dict(
-            lines = int,
-            number = int,
-            language = S.ObjectId)])])
-
-    lastmonth=FieldProperty(dict(
+        category=S.ObjectId,
+        messages=[dict(
+            messagetype=str,
+            created=int,
+            modified=int)],
+        tickets=dict(
+            solved=int,
+            assigned=int,
+            revoked=int,
+            totsolvingtime=int),
+        commits=[dict(
+            lines=int,
+            number=int,
+            language=S.ObjectId)])])
+
+    lastmonth = FieldProperty(dict(
         messages=[dict(
             datetime=datetime,
             created=bool,
@@ -89,33 +91,33 @@ class Stats(MappedClass):
         The user may have registered before stats were collected,
         making calculations based on registration date unfair."""
         min_date = config.get('userstats.start_date', '0001-1-1')
-        return max(datetime.strptime(min_date,'%Y-%m-%d'), self.registration_date)
+        return max(datetime.strptime(min_date, '%Y-%m-%d'), self.registration_date)
 
     def getCodeContribution(self):
-        days=(datetime.today() - self.start_date).days
+        days = (datetime.today() - self.start_date).days
         if not days:
-            days=1
+            days = 1
         for val in self['general']:
             if val['category'] is None:
                 for commits in val['commits']:
                     if commits['language'] is None:
                         if days > 30:
-                            return round(float(commits.lines)/days*30, 2)
+                            return round(float(commits.lines) / days * 30, 2)
                         else:
                             return float(commits.lines)
         return 0
 
     def getDiscussionContribution(self):
-        days=(datetime.today() - self.start_date).days
+        days = (datetime.today() - self.start_date).days
         if not days:
-            days=1
+            days = 1
         for val in self['general']:
             if val['category'] is None:
                 for artifact in val['messages']:
                     if artifact['messagetype'] is None:
-                        tot = artifact.created+artifact.modified
+                        tot = artifact.created + artifact.modified
                         if days > 30:
-                            return round(float(tot)/days*30,2)
+                            return round(float(tot) / days * 30, 2)
                         else:
                             return float(tot)
         return 0
@@ -129,30 +131,30 @@ class Stats(MappedClass):
                 return round(float(tickets.solved) / tickets.assigned, 2)
         return 0
 
-    def getCommits(self, category = None):
-        i = getElementIndex(self.general, category = category)
+    def getCommits(self, category=None):
+        i = getElementIndex(self.general, category=category)
         if i is None:
             return dict(number=0, lines=0)
         cat = self.general[i]
-        j = getElementIndex(cat.commits, language = None)
+        j = getElementIndex(cat.commits, language=None)
         if j is None:
             return dict(number=0, lines=0)
         return dict(
             number=cat.commits[j]['number'],
             lines=cat.commits[j]['lines'])
 
-    def getArtifacts(self, category = None, art_type = None):
-        i = getElementIndex(self.general, category = category)
+    def getArtifacts(self, category=None, art_type=None):
+        i = getElementIndex(self.general, category=category)
         if i is None:
             return dict(created=0, modified=0)
         cat = self.general[i]
-        j = getElementIndex(cat.messages, messagetype = art_type)
+        j = getElementIndex(cat.messages, messagetype=art_type)
         if j is None:
             return dict(created=0, modified=0)
         return dict(created=cat.messages[j].created, modified=cat.messages[j].modified)
 
-    def getTickets(self, category = None):
-        i = getElementIndex(self.general, category = category)
+    def getTickets(self, category=None):
+        i = getElementIndex(self.general, category=category)
         if i is None:
             return dict(
                 assigned=0,
@@ -177,20 +179,20 @@ class Stats(MappedClass):
         by_cat = {}
         for entry in self.general:
             cat = entry.category
-            i = getElementIndex(entry.commits, language = None)
+            i = getElementIndex(entry.commits, language=None)
             if i is None:
                 n, lines = 0, 0
             else:
                 n, lines = entry.commits[i].number, entry.commits[i].lines
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             by_cat[cat] = dict(number=n, lines=lines)
         return by_cat
 
-    #For the moment, commit stats by language are not used, since each project
-    #can be linked to more than one programming language and we don't know how
-    #to which programming language should be credited a line of code modified
-    #within a project including two or more languages.
+    # For the moment, commit stats by language are not used, since each project
+    # can be linked to more than one programming language and we don't know
+    # to which programming language a modified line of code should be credited
+    # within a project that includes two or more languages.
     def getCommitsByLanguage(self):
         langlist = []
         by_lang = {}
@@ -207,7 +209,7 @@ class Stats(MappedClass):
         for entry in self.general:
             cat = entry.category
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             if detailed:
                 by_cat[cat] = entry.messages
             else:
@@ -219,13 +221,13 @@ class Stats(MappedClass):
         return by_cat
 
     def getArtifactsByType(self, category=None):
-        i = getElementIndex(self.general, category = category)
+        i = getElementIndex(self.general, category=category)
         if i is None:
             return {}
         entry = self.general[i].messages
         by_type = dict([(el.messagetype, dict(created=el.created,
                                               modified=el.modified))
-                         for el in entry])
+                        for el in entry])
         return by_type
 
     def getTicketsByCategory(self):
@@ -235,7 +237,7 @@ class Stats(MappedClass):
         for entry in self.general:
             cat = entry.category
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             a, s = entry.tickets.assigned, entry.tickets.solved
             r, time = entry.tickets.solved, entry.tickets.totsolvingtime
             if s:
@@ -249,7 +251,7 @@ class Stats(MappedClass):
                 averagesolvingtime=_convertTimeDiff(average))
         return by_cat
 
-    def getLastMonthCommits(self, category = None):
+    def getLastMonthCommits(self, category=None):
         self.checkOldArtifacts()
         lineslist = [el.lines for el in self.lastmonth.commits
                      if category in el.categories + [None]]
@@ -260,8 +262,8 @@ class Stats(MappedClass):
 
         self.checkOldArtifacts()
         seen = set()
-        catlist=[el.category for el in self.general
-                 if el.category not in seen and not seen.add(el.category)]
+        catlist = [el.category for el in self.general
+                   if el.category not in seen and not seen.add(el.category)]
 
         by_cat = {}
         for cat in catlist:
@@ -270,7 +272,7 @@ class Stats(MappedClass):
             n = len(lineslist)
             lines = sum(lineslist)
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             by_cat[cat] = dict(number=n, lines=lines)
         return by_cat
 
@@ -279,8 +281,8 @@ class Stats(MappedClass):
 
         self.checkOldArtifacts()
         seen = set()
-        langlist=[el.language for el in self.general
-                  if el.language not in seen and not seen.add(el.language)]
+        langlist = [el.language for el in self.general
+                    if el.language not in seen and not seen.add(el.language)]
 
         by_lang = {}
         for lang in langlist:
@@ -289,36 +291,36 @@ class Stats(MappedClass):
             n = len(lineslist)
             lines = sum(lineslist)
             if lang != None:
-                lang = TroveCategory.query.get(_id = lang)
+                lang = TroveCategory.query.get(_id=lang)
             by_lang[lang] = dict(number=n, lines=lines)
         return by_lang
 
-    def getLastMonthArtifacts(self, category = None, art_type = None):
+    def getLastMonthArtifacts(self, category=None, art_type=None):
         self.checkOldArtifacts()
         cre, mod = reduce(
             addtuple,
-            [(int(el.created),1-int(el.created))
+            [(int(el.created), 1 - int(el.created))
                 for el in self.lastmonth.messages
                 if (category is None or category in el.categories) and
                 (el.messagetype == art_type or art_type is None)],
-            (0,0))
+            (0, 0))
         return dict(created=cre, modified=mod)
 
-    def getLastMonthArtifactsByType(self, category = None):
+    def getLastMonthArtifactsByType(self, category=None):
         self.checkOldArtifacts()
         seen = set()
-        types=[el.messagetype for el in self.lastmonth.messages
-               if el.messagetype not in seen and not seen.add(el.messagetype)]
+        types = [el.messagetype for el in self.lastmonth.messages
+                 if el.messagetype not in seen and not seen.add(el.messagetype)]
 
         by_type = {}
         for t in types:
             cre, mod = reduce(
                 addtuple,
-                [(int(el.created),1-int(el.created))
+                [(int(el.created), 1 - int(el.created))
                  for el in self.lastmonth.messages
                  if el.messagetype == t and
-                 category in [None]+el.categories],
-                (0,0))
+                 category in [None] + el.categories],
+                (0, 0))
             by_type[t] = dict(created=cre, modified=mod)
         return by_type
 
@@ -327,22 +329,22 @@ class Stats(MappedClass):
 
         self.checkOldArtifacts()
         seen = set()
-        catlist=[el.category for el in self.general
-                 if el.category not in seen and not seen.add(el.category)]
+        catlist = [el.category for el in self.general
+                   if el.category not in seen and not seen.add(el.category)]
 
         by_cat = {}
         for cat in catlist:
             cre, mod = reduce(
                 addtuple,
-                [(int(el.created),1-int(el.created))
+                [(int(el.created), 1 - int(el.created))
                  for el in self.lastmonth.messages
-                 if cat in el.categories + [None]], (0,0))
+                 if cat in el.categories + [None]], (0, 0))
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             by_cat[cat] = dict(created=cre, modified=mod)
         return by_cat
 
-    def getLastMonthTickets(self, category = None):
+    def getLastMonthTickets(self, category=None):
         from allura.model.project import TroveCategory
 
         self.checkOldArtifacts()
@@ -355,8 +357,8 @@ class Stats(MappedClass):
             [(1, el.solvingtime)
              for el in self.lastmonth.solvedtickets
              if category in el.categories + [None]],
-            (0,0))
-        if category!=None:
+            (0, 0))
+        if category != None:
             category = TroveCategory.query.get(_id=category)
         if s > 0:
             time = time / s
@@ -373,8 +375,8 @@ class Stats(MappedClass):
 
         self.checkOldArtifacts()
         seen = set()
-        catlist=[el.category for el in self.general
-                 if el.category not in seen and not seen.add(el.category)]
+        catlist = [el.category for el in self.general
+                   if el.category not in seen and not seen.add(el.category)]
         by_cat = {}
         for cat in catlist:
             a = len([el for el in self.lastmonth.assignedtickets
@@ -383,9 +385,9 @@ class Stats(MappedClass):
                      if cat in el.categories + [None]])
             s, time = reduce(addtuple, [(1, el.solvingtime)
                                         for el in self.lastmonth.solvedtickets
-                                        if cat in el.categories+[None]],(0,0))
+                                        if cat in el.categories + [None]], (0, 0))
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             if s > 0:
                 time = time / s
             else:
@@ -436,8 +438,8 @@ class Stats(MappedClass):
 
     def addClosedTicket(self, open_datetime, close_datetime, project):
         topics = [t for t in project.trove_topic if t]
-        s_time=int((close_datetime-open_datetime).total_seconds())
-        self._updateTicketsStats(topics, 'solved', s_time = s_time)
+        s_time = int((close_datetime - open_datetime).total_seconds())
+        self._updateTicketsStats(topics, 'solved', s_time=s_time)
         self.lastmonth.solvedtickets.append(dict(
             datetime=close_datetime,
             categories=topics,
@@ -445,7 +447,7 @@ class Stats(MappedClass):
         self.checkOldArtifacts()
 
     def addCommit(self, newcommit, commit_datetime, project):
-        def _computeLines(newblob, oldblob = None):
+        def _computeLines(newblob, oldblob=None):
             if oldblob:
                 listold = list(oldblob)
             else:
@@ -462,7 +464,8 @@ class Stats(MappedClass):
                     listold, listnew,
                     ('old' + oldblob.path()).encode('utf-8'),
                     ('new' + newblob.path()).encode('utf-8'))
-                lines = len([l for l in diff if len(l) > 0 and l[0] == '+'])-1
+                lines = len(
+                    [l for l in diff if len(l) > 0 and l[0] == '+']) - 1
             else:
                 lines = 0
             return lines
@@ -506,16 +509,16 @@ class Stats(MappedClass):
             for changed in d.changed:
                 newblob = newcommit.tree.get_blob_by_path(changed)
                 oldblob = oldcommit.tree.get_blob_by_path(changed)
-                totlines+=_computeLines(newblob, oldblob)
+                totlines += _computeLines(newblob, oldblob)
 
             for copied in d.copied:
                 newblob = newcommit.tree.get_blob_by_path(copied['new'])
                 oldblob = oldcommit.tree.get_blob_by_path(copied['old'])
-                totlines+=_computeLines(newblob, oldblob)
+                totlines += _computeLines(newblob, oldblob)
 
             for added in d.added:
                 newblob = newcommit.tree.get_blob_by_path(added)
-                totlines+=_computeLines(newblob)
+                totlines += _computeLines(newblob)
 
         _addCommitData(self, topics, languages, totlines)
 
@@ -533,7 +536,7 @@ class Stats(MappedClass):
         lt = [None] + topics
         for mtype in [None, art_type]:
             for t in lt:
-                i = getElementIndex(self.general, category = t)
+                i = getElementIndex(self.general, category=t)
                 if i is None:
                     msg = dict(
                         category=t,
@@ -545,7 +548,7 @@ class Stats(MappedClass):
                             totsolvingtime=0),
                         messages=[])
                     self.general.append(msg)
-                    i = getElementIndex(self.general, category = t)
+                    i = getElementIndex(self.general, category=t)
                 j = getElementIndex(
                     self.general[i]['messages'], messagetype=mtype)
                 if j is None:
@@ -562,12 +565,12 @@ class Stats(MappedClass):
             messagetype=art_type))
         self.checkOldArtifacts()
 
-    def _updateTicketsStats(self, topics, action, s_time = None):
+    def _updateTicketsStats(self, topics, action, s_time=None):
         if action not in ['solved', 'assigned', 'revoked']:
             return
         lt = topics + [None]
         for t in lt:
-            i = getElementIndex(self.general, category = t)
+            i = getElementIndex(self.general, category=t)
             if i is None:
                 stats = dict(
                     category=t,
@@ -579,10 +582,11 @@ class Stats(MappedClass):
                         totsolvingtime=0),
                     messages=[])
                 self.general.append(stats)
-                i = getElementIndex(self.general, category = t)
+                i = getElementIndex(self.general, category=t)
             self.general[i]['tickets'][action] += 1
             if action == 'solved':
-                self.general[i]['tickets']['totsolvingtime']+=s_time
+                self.general[i]['tickets']['totsolvingtime'] += s_time
+
 
 def getElementIndex(el_list, **kw):
     for i in range(len(el_list)):
@@ -593,15 +597,17 @@ def getElementIndex(el_list, **kw):
             return i
     return None
 
+
 def addtuple(l1, l2):
     a, b = l1
     x, y = l2
-    return (a+x, b+y)
+    return (a + x, b + y)
+
 
 def _convertTimeDiff(int_seconds):
     if int_seconds is None:
         return None
-    diff = timedelta(seconds = int_seconds)
+    diff = timedelta(seconds=int_seconds)
     days, seconds = diff.days, diff.seconds
     hours = seconds / 3600
     seconds = seconds % 3600

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/timeline.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/timeline.py b/Allura/allura/model/timeline.py
index 8f8ac41..3487b9c 100644
--- a/Allura/allura/model/timeline.py
+++ b/Allura/allura/model/timeline.py
@@ -30,15 +30,17 @@ log = logging.getLogger(__name__)
 
 
 class Director(ActivityDirector):
+
     """Overrides the default ActivityDirector to kick off background
     timeline aggregations after an activity is created.
 
     """
+
     def create_activity(self, actor, verb, obj, target=None,
-            related_nodes=None):
+                        related_nodes=None):
         from allura.model.project import Project
         super(Director, self).create_activity(actor, verb, obj,
-                target=target, related_nodes=related_nodes)
+                                              target=target, related_nodes=related_nodes)
         # aggregate actor and follower's timelines
         create_timelines.post(actor.node_id)
         # aggregate project and follower's timelines
@@ -52,12 +54,14 @@ class Aggregator(BaseAggregator):
 
 
 class ActivityNode(NodeBase):
+
     @property
     def node_id(self):
         return "%s:%s" % (self.__class__.__name__, self._id)
 
 
 class ActivityObject(ActivityObjectBase):
+
     @property
     def activity_name(self):
         """Override this for each Artifact type."""
@@ -94,9 +98,11 @@ def perm_check(user):
         otherwise return False.
         """
         extras_dict = activity.obj.activity_extras
-        if not extras_dict: return True
+        if not extras_dict:
+            return True
         allura_id = extras_dict.get('allura_id')
-        if not allura_id: return True
+        if not allura_id:
+            return True
         classname, _id = allura_id.split(':', 1)
         cls = Mapper.by_classname(classname).mapped_class
         try:

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/types.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/types.py b/Allura/allura/model/types.py
index ab7d341..12df733 100644
--- a/Allura/allura/model/types.py
+++ b/Allura/allura/model/types.py
@@ -20,7 +20,9 @@ from ming import schema as S
 
 EVERYONE, ALL_PERMISSIONS = None, '*'
 
+
 class MarkdownCache(S.Object):
+
     def __init__(self, **kw):
         super(MarkdownCache, self).__init__(
             fields=dict(
@@ -29,14 +31,17 @@ class MarkdownCache(S.Object):
                 render_time=S.Float()),
             **kw)
 
+
 class ACE(S.Object):
+
     '''ACE - access control entry'''
     ALLOW, DENY = 'ALLOW', 'DENY'
+
     def __init__(self, permissions, **kwargs):
         if permissions is None:
-            permission=S.String()
+            permission = S.String()
         else:
-            permission=S.OneOf('*', *permissions)
+            permission = S.OneOf('*', *permissions)
         super(ACE, self).__init__(
             fields=dict(
                 access=S.OneOf(self.ALLOW, self.DENY),
@@ -68,6 +73,7 @@ class ACE(S.Object):
             ace.role_id in (role_id, EVERYONE)
             and ace.permission in (permission, ALL_PERMISSIONS))
 
+
 class ACL(S.Array):
 
     def __init__(self, permissions=None, **kwargs):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/refresh_last_commits.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/refresh_last_commits.py b/Allura/allura/scripts/refresh_last_commits.py
index 3721795..b5cda5c 100644
--- a/Allura/allura/scripts/refresh_last_commits.py
+++ b/Allura/allura/scripts/refresh_last_commits.py
@@ -37,6 +37,7 @@ log = logging.getLogger(__name__)
 
 
 class RefreshLastCommits(ScriptTask):
+
     @classmethod
     def parser(cls):
         def _repo_type_list(s):
@@ -45,36 +46,38 @@ class RefreshLastCommits(ScriptTask):
                 repo_type = repo_type.strip()
                 if repo_type not in ['git', 'hg']:
                     raise argparse.ArgumentTypeError(
-                            '{0} is not a valid repo type.'.format(repo_type))
+                        '{0} is not a valid repo type.'.format(repo_type))
                 repo_types.append(repo_type)
             return repo_types
         parser = argparse.ArgumentParser(description='Using existing commit data, '
-                'refresh the last commit metadata in MongoDB. Run for all repos (no args), '
-                'or restrict by neighborhood, project, or code tool mount point.')
+                                         'refresh the last commit metadata in MongoDB. Run for all repos (no args), '
+                                         'or restrict by neighborhood, project, or code tool mount point.')
         parser.add_argument('--nbhd', action='store', default='', dest='nbhd',
-                help='Restrict update to a particular neighborhood, e.g. /p/.')
-        parser.add_argument('--project', action='store', default='', dest='project',
-                help='Restrict update to a particular project. To specify a '
-                'subproject, use a slash: project/subproject.')
+                            help='Restrict update to a particular neighborhood, e.g. /p/.')
+        parser.add_argument(
+            '--project', action='store', default='', dest='project',
+            help='Restrict update to a particular project. To specify a '
+            'subproject, use a slash: project/subproject.')
         parser.add_argument('--project-regex', action='store', default='',
-                dest='project_regex',
-                help='Restrict update to projects for which the shortname matches '
-                'the provided regex.')
-        parser.add_argument('--repo-types', action='store', type=_repo_type_list,
-                default=['git', 'hg'], dest='repo_types',
-                help='Only refresh last commits for repos of the given type(s). Defaults to: '
-                'git,hg. Example: --repo-types=git')
+                            dest='project_regex',
+                            help='Restrict update to projects for which the shortname matches '
+                            'the provided regex.')
+        parser.add_argument(
+            '--repo-types', action='store', type=_repo_type_list,
+            default=['git', 'hg'], dest='repo_types',
+            help='Only refresh last commits for repos of the given type(s). Defaults to: '
+            'git,hg. Example: --repo-types=git')
         parser.add_argument('--mount-point', default='', dest='mount_point',
-                help='Restrict update to repos at the given tool mount point. ')
+                            help='Restrict update to repos at the given tool mount point. ')
         parser.add_argument('--clean', action='store_true', dest='clean',
-                default=False, help='Remove last commit mongo docs for '
-                'project(s) being refreshed before doing the refresh.')
+                            default=False, help='Remove last commit mongo docs for '
+                            'project(s) being refreshed before doing the refresh.')
         parser.add_argument('--dry-run', action='store_true', dest='dry_run',
-                default=False, help='Log names of projects that would have their ')
+                            default=False, help='Log names of projects that would have their last commit data refreshed (no changes are made).')
         parser.add_argument('--diffs', action='store_true', dest='diffs',
-                default=False, help='Refresh / clean diffs as well as LCDs')
+                            default=False, help='Refresh / clean diffs as well as LCDs')
         parser.add_argument('--limit', action='store', type=int, dest='limit',
-                default=False, help='Limit of how many commits to process')
+                            default=False, help='Limit of how many commits to process')
         return parser
 
     @classmethod
@@ -94,7 +97,8 @@ class RefreshLastCommits(ScriptTask):
 
         for chunk in chunked_find(M.Project, q_project):
             for p in chunk:
-                log.info("Refreshing last commit data for project '%s'." % p.shortname)
+                log.info("Refreshing last commit data for project '%s'." %
+                         p.shortname)
                 if options.dry_run:
                     continue
                 c.project = p
@@ -109,17 +113,19 @@ class RefreshLastCommits(ScriptTask):
                         continue
                     if c.app.repo.tool.lower() not in options.repo_types:
                         log.info("Skipping %r: wrong type (%s)", c.app.repo,
-                                c.app.repo.tool.lower())
+                                 c.app.repo.tool.lower())
                         continue
 
                     c.app.repo.status = 'analyzing'
                     session(c.app.repo).flush(c.app.repo)
                     try:
-                        ci_ids = list(reversed(list(c.app.repo.all_commit_ids())))
+                        ci_ids = list(
+                            reversed(list(c.app.repo.all_commit_ids())))
                         if options.clean:
                             cls._clean(ci_ids, options.diffs)
 
-                        log.info('Refreshing all last commits in %r', c.app.repo)
+                        log.info('Refreshing all last commits in %r',
+                                 c.app.repo)
                         cls.refresh_repo_lcds(ci_ids, options)
                         new_commit_ids = app.repo.unknown_commit_ids()
                         if len(new_commit_ids) > 0:
@@ -141,14 +147,15 @@ class RefreshLastCommits(ScriptTask):
             for i, commit_id in enumerate(commit_ids):
                 commit = M.repo.Commit.query.get(_id=commit_id)
                 with time(timings):
-                    M.repo_refresh.compute_diffs(c.app.repo._id, tree_cache, commit)
+                    M.repo_refresh.compute_diffs(
+                        c.app.repo._id, tree_cache, commit)
                 if i % 1000 == 0:
                     cls._print_stats(i, timings, 1000)
 
         model_cache = M.repo.ModelCache(
-                max_instances={M.repo.LastCommit: 4000},
-                max_queries={M.repo.LastCommit: 4000},
-            )
+            max_instances={M.repo.LastCommit: 4000},
+            max_queries={M.repo.LastCommit: 4000},
+        )
         lcid_cache = {}
         timings = []
         print 'Processing last commits'
@@ -171,13 +178,17 @@ class RefreshLastCommits(ScriptTask):
     def _clean(cls, commit_ids, clean_diffs):
         if clean_diffs:
             # delete DiffInfoDocs
-            i = M.repo.DiffInfoDoc.m.find(dict(_id={'$in': commit_ids})).count()
-            log.info("Deleting %i DiffInfoDoc docs for %i commits...", i, len(commit_ids))
+            i = M.repo.DiffInfoDoc.m.find(
+                dict(_id={'$in': commit_ids})).count()
+            log.info("Deleting %i DiffInfoDoc docs for %i commits...",
+                     i, len(commit_ids))
             M.repo.DiffInfoDoc.m.remove(dict(_id={'$in': commit_ids}))
 
         # delete LastCommitDocs
-        i = M.repo.LastCommitDoc.m.find(dict(commit_id={'$in': commit_ids})).count()
-        log.info("Deleting %i LastCommitDoc docs for %i commits...", i, len(commit_ids))
+        i = M.repo.LastCommitDoc.m.find(
+            dict(commit_id={'$in': commit_ids})).count()
+        log.info("Deleting %i LastCommitDoc docs for %i commits...",
+                 i, len(commit_ids))
         M.repo.LastCommitDoc.m.remove(dict(commit_id={'$in': commit_ids}))
 
     @classmethod
@@ -187,7 +198,7 @@ class RefreshLastCommits(ScriptTask):
         at = tt / len(timings)
         mat = sum(timings[-debug_step:]) / debug_step
         print '  Processed %d commits (max: %f, avg: %f, mavg: %f, tot: %f)' % (
-                processed, mt, at, mat, tt)
+            processed, mt, at, mat, tt)
 
 
 @contextmanager
@@ -197,7 +208,6 @@ def time(timings):
     timings.append((datetime.utcnow() - s).total_seconds())
 
 
-
 if __name__ == '__main__':
     faulthandler.enable()
     RefreshLastCommits.main()


[26/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/repository.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/repository.py b/Allura/allura/lib/repository.py
index 65b68c8..6e5ef42 100644
--- a/Allura/allura/lib/repository.py
+++ b/Allura/allura/lib/repository.py
@@ -40,7 +40,7 @@ log = logging.getLogger(__name__)
 
 
 class RepositoryApp(Application):
-    END_OF_REF_ESCAPE='~'
+    END_OF_REF_ESCAPE = '~'
     __version__ = version.__version__
     permissions = [
         'read', 'write', 'create',
@@ -56,19 +56,19 @@ class RepositoryApp(Application):
         ConfigOption('cloned_from_project_id', ObjectId, None),
         ConfigOption('cloned_from_repo_id', ObjectId, None),
         ConfigOption('init_from_url', str, None)
-        ]
-    tool_label='Repository'
-    default_mount_label='Code'
-    default_mount_point='code'
-    relaxed_mount_points=True
-    ordinal=2
-    forkable=False
-    default_branch_name=None # master or default or some such
-    repo=None # override with a property in child class
-    icons={
-        24:'images/code_24.png',
-        32:'images/code_32.png',
-        48:'images/code_48.png'
+    ]
+    tool_label = 'Repository'
+    default_mount_label = 'Code'
+    default_mount_point = 'code'
+    relaxed_mount_points = True
+    ordinal = 2
+    forkable = False
+    default_branch_name = None  # master or default or some such
+    repo = None  # override with a property in child class
+    icons = {
+        24: 'images/code_24.png',
+        32: 'images/code_32.png',
+        48: 'images/code_48.png'
     }
 
     def __init__(self, project, config):
@@ -79,9 +79,9 @@ class RepositoryApp(Application):
         '''Apps should provide their entries to be added to the main nav
         :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
         '''
-        return [ SitemapEntry(
-                self.config.options.mount_label,
-                '.')]
+        return [SitemapEntry(
+            self.config.options.mount_label,
+            '.')]
 
     @property
     @h.exceptionless([], log)
@@ -89,13 +89,17 @@ class RepositoryApp(Application):
         menu_id = self.config.options.mount_label
         with h.push_config(c, app=self):
             return [
-                SitemapEntry(menu_id, '.')[self.sidebar_menu()] ]
+                SitemapEntry(menu_id, '.')[self.sidebar_menu()]]
 
     def admin_menu(self):
-        admin_url = c.project.url()+'admin/'+self.config.options.mount_point+'/'
-        links = [SitemapEntry('Viewable Files', admin_url + 'extensions', className='admin_modal')]
+        admin_url = c.project.url() + 'admin/' + \
+            self.config.options.mount_point + '/'
+        links = [
+            SitemapEntry('Viewable Files', admin_url + 'extensions', className='admin_modal')]
         links.append(SitemapEntry('Refresh Repository',
-                                  c.project.url() + self.config.options.mount_point + '/refresh',
+                                  c.project.url() +
+                                  self.config.options.mount_point +
+                                  '/refresh',
                                   ))
         links += super(RepositoryApp, self).admin_menu()
         [links.remove(l) for l in links[:] if l.label == 'Options']
@@ -105,41 +109,47 @@ class RepositoryApp(Application):
     def sidebar_menu(self):
         if not self.repo or self.repo.status != 'ready':
             return []
-        links = [SitemapEntry('Browse Commits', c.app.url + 'commit_browser', ui_icon=g.icons['folder'])]
+        links = [SitemapEntry('Browse Commits', c.app.url +
+                              'commit_browser', ui_icon=g.icons['folder'])]
         if self.forkable and self.repo.status == 'ready':
-            links.append(SitemapEntry('Fork', c.app.url + 'fork', ui_icon=g.icons['fork']))
-        merge_request_count = self.repo.merge_requests_by_statuses('open').count()
+            links.append(
+                SitemapEntry('Fork', c.app.url + 'fork', ui_icon=g.icons['fork']))
+        merge_request_count = self.repo.merge_requests_by_statuses(
+            'open').count()
         if merge_request_count:
             links += [
                 SitemapEntry(
                     'Merge Requests', c.app.url + 'merge-requests/',
-                    small=merge_request_count) ]
+                    small=merge_request_count)]
         if self.repo.forks:
             links += [
-                SitemapEntry('Forks', c.app.url + 'forks/', small=len(self.repo.forks))
+                SitemapEntry('Forks', c.app.url + 'forks/',
+                             small=len(self.repo.forks))
             ]
         if self.repo.upstream_repo.name:
-            repo_path_parts = self.repo.upstream_repo.name.strip('/').split('/')
+            repo_path_parts = self.repo.upstream_repo.name.strip(
+                '/').split('/')
             links += [
                 SitemapEntry('Clone of'),
                 SitemapEntry('%s / %s' %
-                    (repo_path_parts[1], repo_path_parts[-1]),
-                    self.repo.upstream_repo.name)
-                ]
+                             (repo_path_parts[1], repo_path_parts[-1]),
+                             self.repo.upstream_repo.name)
+            ]
             if not c.app.repo.is_empty() and has_access(c.app.repo, 'admin'):
                 merge_url = c.app.url + 'request_merge'
                 if getattr(c, 'revision', None):
                     merge_url = merge_url + '?branch=' + h.urlquote(c.revision)
                 links.append(SitemapEntry('Request Merge', merge_url,
                              ui_icon=g.icons['merge'],
-                             ))
+                                          ))
             pending_upstream_merges = self.repo.pending_upstream_merges()
             if pending_upstream_merges:
                 links.append(SitemapEntry(
-                        'Pending Merges',
-                        self.repo.upstream_repo.name + 'merge-requests/',
-                        small=pending_upstream_merges))
-        ref_url = self.repo.url_for_commit(self.default_branch_name, url_type='ref')
+                    'Pending Merges',
+                    self.repo.upstream_repo.name + 'merge-requests/',
+                    small=pending_upstream_merges))
+        ref_url = self.repo.url_for_commit(
+            self.default_branch_name, url_type='ref')
         branches = self.repo.get_branches()
         if branches:
             links.append(SitemapEntry('Branches'))
@@ -151,28 +161,28 @@ class RepositoryApp(Application):
             max_branches = 10
             for branch in branches[:max_branches]:
                 links.append(SitemapEntry(
-                        branch.name,
-                        quote(self.repo.url_for_commit(branch.name) + 'tree/')))
+                    branch.name,
+                    quote(self.repo.url_for_commit(branch.name) + 'tree/')))
             if len(branches) > max_branches:
                 links.append(
                     SitemapEntry(
                         'More Branches',
                         ref_url + 'branches/',
-                        ))
+                    ))
         tags = self.repo.get_tags()
         if tags:
             links.append(SitemapEntry('Tags'))
             max_tags = 10
             for b in tags[:max_tags]:
                 links.append(SitemapEntry(
-                        b.name,
-                        quote(self.repo.url_for_commit(b.name) + 'tree/')))
+                    b.name,
+                    quote(self.repo.url_for_commit(b.name) + 'tree/')))
             if len(tags) > max_tags:
                 links.append(
                     SitemapEntry(
                         'More Tags',
                         ref_url + 'tags/',
-                        ))
+                    ))
         return links
 
     def install(self, project):
@@ -191,11 +201,12 @@ class RepositoryApp(Application):
             M.ACE.allow(role_developer, 'moderate'),
             M.ACE.allow(role_admin, 'configure'),
             M.ACE.allow(role_admin, 'admin'),
-            ]
+        ]
 
     def uninstall(self, project):
         allura.tasks.repo_tasks.uninstall.post()
 
+
 class RepoAdminController(DefaultAdminController):
 
     def __init__(self, app):
@@ -224,7 +235,8 @@ class RepoAdminController(DefaultAdminController):
     @expose()
     @require_post()
     def set_extensions(self, **post_data):
-        self.repo.additional_viewable_extensions = post_data['additional_viewable_extensions']
+        self.repo.additional_viewable_extensions = post_data[
+            'additional_viewable_extensions']
 
     @without_trailing_slash
     @expose('jinja:allura:templates/repo/default_branch.html')
@@ -235,4 +247,3 @@ class RepoAdminController(DefaultAdminController):
         else:
             return dict(app=self.app,
                         default_branch_name=self.app.default_branch_name)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/rest_api.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/rest_api.py b/Allura/allura/lib/rest_api.py
index f0d74f8..dcc0d7d 100644
--- a/Allura/allura/lib/rest_api.py
+++ b/Allura/allura/lib/rest_api.py
@@ -30,6 +30,7 @@ from formencode import variabledecode
 
 log = logging.getLogger(__name__)
 
+
 class RestClient(object):
 
     def __init__(self, api_key, secret_key, base_uri,
@@ -49,13 +50,18 @@ class RestClient(object):
             self._opener = urllib2.build_opener(redirect_handler)
 
     def sign_request(self, path, params):
-        if hasattr(params, 'items'): params = params.items()
+        if hasattr(params, 'items'):
+            params = params.items()
         has_api_key = has_api_timestamp = has_api_signature = False
-        for k,v in params:
-            if k == 'api_key': has_api_key = True
-            if k == 'api_timestamp': has_api_timestamp = True
-            if k == 'api_signature': has_api_signature = True
-        if not has_api_key: params.append(('api_key', self._api_key))
+        for k, v in params:
+            if k == 'api_key':
+                has_api_key = True
+            if k == 'api_timestamp':
+                has_api_timestamp = True
+            if k == 'api_signature':
+                has_api_signature = True
+        if not has_api_key:
+            params.append(('api_key', self._api_key))
         if not has_api_timestamp:
             params.append(('api_timestamp', datetime.utcnow().isoformat()))
         if not has_api_signature:
@@ -71,15 +77,17 @@ class RestClient(object):
 
     def _redirect_handler_class(self):
         client = self
+
         class RedirectHandler(urllib2.HTTPRedirectHandler):
+
             def redirect_request(self, req, fp, code, msg, headers, newurl):
                 m = req.get_method()
                 if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
-                    or code in (301, 302, 303) and m == "POST"):
+                        or code in (301, 302, 303) and m == "POST"):
                         newurl = newurl.replace(' ', '%20')
-                        newheaders = dict((k,v) for k,v in req.headers.items()
+                        newheaders = dict((k, v) for k, v in req.headers.items()
                                           if k.lower() not in ("content-length", "content-type")
-                                         )
+                                          )
                         result = urlparse(newurl)
                         log.debug('Redirect to %s' % result.path)
                         return client.Request(
@@ -88,39 +96,51 @@ class RestClient(object):
                             origin_req_host=req.get_origin_req_host(),
                             unverifiable=True)
                 else:
-                        raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
+                        raise urllib2.HTTPError(
+                            req.get_full_url(), code, msg, headers, fp)
         return RedirectHandler
 
     def _request_class(self):
         client = self
+
         class Request(urllib2.Request):
+
             def __init__(self, method, path, params=None, **kwargs):
-                if params is None: params = {}
-                params = variabledecode.variable_encode(params, add_repetitions=False)
+                if params is None:
+                    params = {}
+                params = variabledecode.variable_encode(
+                    params, add_repetitions=False)
                 params = client.sign_request(path, params)
                 self._method = method.upper()
                 if self._method == 'GET':
-                    url = urljoin(client.base_uri, path) + '?' + urlencode(params)
-                    data=None
+                    url = urljoin(client.base_uri, path) + \
+                        '?' + urlencode(params)
+                    data = None
                 else:
                     url = urljoin(client.base_uri, path)
-                    data=urlencode(params)
+                    data = urlencode(params)
                 urllib2.Request.__init__(self, url, data=data, **kwargs)
+
             def get_method(self):
                 return self._method
         return Request
 
+
 def generate_smart_str(params):
-    if isinstance(params, dict): iterparams = params.iteritems()
-    else: iterparams = iter(params)
+    if isinstance(params, dict):
+        iterparams = params.iteritems()
+    else:
+        iterparams = iter(params)
     for key, value in iterparams:
-        if value is None: continue
+        if value is None:
+            continue
         if isinstance(value, (list, tuple)):
             for item in value:
                 yield smart_str(key), smart_str(item)
         else:
             yield smart_str(key), smart_str(value)
 
+
 def urlencode(params):
     """
     A version of Python's urllib.urlencode() function that can operate on

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/search.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/search.py b/Allura/allura/lib/search.py
index d43af9c..419a473 100644
--- a/Allura/allura/lib/search.py
+++ b/Allura/allura/lib/search.py
@@ -34,10 +34,13 @@ from .markdown_extensions import ForgeExtension
 
 log = getLogger(__name__)
 
+
 def solarize(obj):
-    if obj is None: return None
+    if obj is None:
+        return None
     doc = obj.index()
-    if doc is None: return None
+    if doc is None:
+        return None
     # if index() returned doc without text, assume empty text
     if not doc.get('text'):
         doc['text'] = ''
@@ -48,16 +51,19 @@ def solarize(obj):
     doc['text'] = jinja2.Markup.escape(text).striptags()
     return doc
 
+
 class SearchError(SolrError):
     pass
 
+
 def inject_user(q, user=None):
     '''Replace $USER with current user's name.'''
     if user is None:
         user = c.user
     return q.replace('$USER', '"%s"' % user.username) if q else q
 
-def search(q,short_timeout=False,ignore_errors=True,**kw):
+
+def search(q, short_timeout=False, ignore_errors=True, **kw):
     q = inject_user(q)
     try:
         if short_timeout:
@@ -68,7 +74,9 @@ def search(q,short_timeout=False,ignore_errors=True,**kw):
         log.exception('Error in solr search')
         if not ignore_errors:
             match = re.search(r'<pre>(.*)</pre>', str(e))
-            raise SearchError('Error running search query: %s' % (match.group(1) if match else e))
+            raise SearchError('Error running search query: %s' %
+                              (match.group(1) if match else e))
+
 
 def search_artifact(atype, q, history=False, rows=10, short_timeout=False, **kw):
     """Performs SOLR search.
@@ -77,14 +85,15 @@ def search_artifact(atype, q, history=False, rows=10, short_timeout=False, **kw)
     """
     # first, grab an artifact and get the fields that it indexes
     a = atype.query.find().first()
-    if a is None: return # if there are no instance of atype, we won't find anything
+    if a is None:
+        return  # if there are no instances of atype, we won't find anything
     fields = a.index()
     # Now, we'll translate all the fld:
     q = atype.translate_query(q, fields)
     fq = [
         'type_s:%s' % fields['type_s'],
         'project_id_s:%s' % c.project._id,
-        'mount_point_s:%s' % c.app.config.options.mount_point ]
+        'mount_point_s:%s' % c.app.config.options.mount_point]
     if not history:
         fq.append('is_history_b:False')
     return search(q, fq=fq, rows=rows, short_timeout=short_timeout, ignore_errors=False, **kw)
@@ -97,8 +106,10 @@ def search_app(q='', fq=None, app=True, **kw):
     """
     history = kw.pop('history', None)
     if app and kw.pop('project', False):
-        # Used from app's search controller. If `project` is True, redirect to 'entire project search' page
-        redirect(c.project.url() + 'search/?' + urlencode(dict(q=q, history=history)))
+        # Used from app's search controller. If `project` is True, redirect to
+        # 'entire project search' page
+        redirect(c.project.url() + 'search/?' +
+                 urlencode(dict(q=q, history=history)))
     search_comments = kw.pop('search_comments', None)
     limit = kw.pop('limit', None)
     page = kw.pop('page', 0)
@@ -122,10 +133,11 @@ def search_app(q='', fq=None, app=True, **kw):
             allowed_types += ['Post']
         if app:
             fq = [
-                'project_id_s:%s'  % c.project._id,
+                'project_id_s:%s' % c.project._id,
                 'mount_point_s:%s' % c.app.config.options.mount_point,
                 '-deleted_b:true',
-                'type_s:(%s)' % ' OR '.join(['"%s"' % t for t in allowed_types])
+                'type_s:(%s)' % ' OR '.join(
+                    ['"%s"' % t for t in allowed_types])
             ] + fq
         search_params = {
             'qt': 'dismax',
@@ -138,7 +150,7 @@ def search_app(q='', fq=None, app=True, **kw):
             'sort': sort,
         }
         if not history:
-           search_params['fq'].append('is_history_b:False')
+            search_params['fq'].append('is_history_b:False')
         if parser == 'standard':
             search_params.pop('qt', None)
             search_params.pop('qf', None)
@@ -152,11 +164,14 @@ def search_app(q='', fq=None, app=True, **kw):
         if results:
             count = results.hits
             matches = results.highlighting
+
             def historize_urls(doc):
                 if doc.get('type_s', '').endswith(' Snapshot'):
                     if doc.get('url_s'):
-                        doc['url_s'] = doc['url_s'] + '?version=%s' % doc.get('version_i')
+                        doc['url_s'] = doc['url_s'] + \
+                            '?version=%s' % doc.get('version_i')
                 return doc
+
             def add_matches(doc):
                 m = matches.get(doc['id'], {})
                 title = h.get_first(m, 'title')
@@ -172,6 +187,7 @@ def search_app(q='', fq=None, app=True, **kw):
                 doc['title_match'] = title
                 doc['text_match'] = text or h.get_first(doc, 'text')
                 return doc
+
             def paginate_comment_urls(doc):
                 if doc.get('type_s', '') == 'Post':
                     aref = ArtifactReference.query.get(_id=doc.get('id'))
@@ -211,4 +227,4 @@ def find_shortlinks(text):
         output_format='html4')
     md.convert(text)
     link_index = md.treeprocessors['links'].alinks
-    return [ link for link in link_index if link is not None]
+    return [link for link in link_index if link is not None]

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/security.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/security.py b/Allura/allura/lib/security.py
index 49d6415..a0d6c1e 100644
--- a/Allura/allura/lib/security.py
+++ b/Allura/allura/lib/security.py
@@ -31,7 +31,9 @@ from allura.lib.utils import TruthyCallable
 
 log = logging.getLogger(__name__)
 
+
 class Credentials(object):
+
     '''
     Role graph logic & caching
     '''
@@ -58,7 +60,8 @@ class Credentials(object):
 
     def clear_user(self, user_id, project_id=None):
         if project_id == '*':
-            to_remove = [(uid, pid) for uid, pid in self.users if uid == user_id]
+            to_remove = [(uid, pid)
+                         for uid, pid in self.users if uid == user_id]
         else:
             to_remove = [(user_id, project_id)]
         for uid, pid in to_remove:
@@ -68,8 +71,10 @@ class Credentials(object):
     def load_user_roles(self, user_id, *project_ids):
         '''Load the credentials with all user roles for a set of projects'''
         # Don't reload roles
-        project_ids = [ pid for pid in project_ids if self.users.get((user_id, pid)) is None ]
-        if not project_ids: return
+        project_ids = [
+            pid for pid in project_ids if self.users.get((user_id, pid)) is None]
+        if not project_ids:
+            return
         if user_id is None:
             q = self.project_role.find({
                 'user_id': None,
@@ -94,8 +99,10 @@ class Credentials(object):
     def load_project_roles(self, *project_ids):
         '''Load the credentials with all user roles for a set of projects'''
         # Don't reload roles
-        project_ids = [ pid for pid in project_ids if self.projects.get(pid) is None ]
-        if not project_ids: return
+        project_ids = [
+            pid for pid in project_ids if self.projects.get(pid) is None]
+        if not project_ids:
+            return
         q = self.project_role.find({
             'project_id': {'$in': project_ids}})
         roles_by_project = dict((pid, []) for pid in project_ids)
@@ -134,17 +141,18 @@ class Credentials(object):
 
     def user_has_any_role(self, user_id, project_id, role_ids):
         user_roles = self.user_roles(user_id=user_id, project_id=project_id)
-        return bool(set(role_ids)  & user_roles.reaching_ids_set)
+        return bool(set(role_ids) & user_roles.reaching_ids_set)
 
     def users_with_named_role(self, project_id, name):
         """ returns in sorted order """
         roles = self.project_roles(project_id)
-        return sorted(RoleCache(self, roles.find(name=name)).users_that_reach, key=lambda u:u.username)
+        return sorted(RoleCache(self, roles.find(name=name)).users_that_reach, key=lambda u: u.username)
 
     def userids_with_named_role(self, project_id, name):
         roles = self.project_roles(project_id)
         return RoleCache(self, roles.find(name=name)).userids_that_reach
 
+
 class RoleCache(object):
 
     def __init__(self, cred, q):
@@ -153,19 +161,23 @@ class RoleCache(object):
 
     def find(self, **kw):
         tests = kw.items()
+
         def _iter():
             for r in self:
-                for k,v in tests:
+                for k, v in tests:
                     val = r.get(k)
                     if callable(v):
-                        if not v(val): break
-                    elif v != val: break
+                        if not v(val):
+                            break
+                    elif v != val:
+                        break
                 else:
                     yield r
         return RoleCache(self.cred, _iter())
 
     def get(self, **kw):
-        for x in self.find(**kw): return x
+        for x in self.find(**kw):
+            return x
         return None
 
     def __iter__(self):
@@ -199,10 +211,12 @@ class RoleCache(object):
             to_visit = list(self)
             while to_visit:
                 r = to_visit.pop(0)
-                if r['_id'] in visited: continue
+                if r['_id'] in visited:
+                    continue
                 visited.add(r['_id'])
                 yield r
-                pr_rindex = self.cred.project_roles(r['project_id']).reverse_index
+                pr_rindex = self.cred.project_roles(
+                    r['project_id']).reverse_index
                 to_visit += pr_rindex[r['_id']]
         return RoleCache(self.cred, _iter())
 
@@ -214,7 +228,7 @@ class RoleCache(object):
 
     @LazyProperty
     def userids_that_reach(self):
-        return [ r['user_id'] for r in self.roles_that_reach ]
+        return [r['user_id'] for r in self.roles_that_reach]
 
     @LazyProperty
     def reaching_roles(self):
@@ -223,7 +237,8 @@ class RoleCache(object):
             visited = set()
             while to_visit:
                 (rid, role) = to_visit.pop()
-                if rid in visited: continue
+                if rid in visited:
+                    continue
                 yield role
                 pr_index = self.cred.project_roles(role['project_id']).index
                 if rid in pr_index:
@@ -234,12 +249,13 @@ class RoleCache(object):
 
     @LazyProperty
     def reaching_ids(self):
-        return [ r['_id'] for r in self.reaching_roles ]
+        return [r['_id'] for r in self.reaching_roles]
 
     @LazyProperty
     def reaching_ids_set(self):
         return set(self.reaching_ids)
 
+
 def has_access(obj, permission, user=None, project=None):
     '''Return whether the given user has the permission name on the given object.
 
@@ -280,11 +296,13 @@ def has_access(obj, permission, user=None, project=None):
       3. Otherwise, DENY access to the resource.
     '''
     from allura import model as M
+
     def predicate(obj=obj, user=user, project=project, roles=None):
         if obj is None:
             return False
         if roles is None:
-            if user is None: user = c.user
+            if user is None:
+                user = c.user
             assert user, 'c.user should always be at least M.User.anonymous()'
             cred = Credentials.get()
             if project is None:
@@ -298,12 +316,13 @@ def has_access(obj, permission, user=None, project=None):
                 else:
                     project = getattr(obj, 'project', None) or c.project
                     project = project.root_project
-            roles = cred.user_roles(user_id=user._id, project_id=project._id).reaching_ids
+            roles = cred.user_roles(
+                user_id=user._id, project_id=project._id).reaching_ids
 
         # TODO: move deny logic into loop below; see ticket [#6715]
         if user != M.User.anonymous():
             user_roles = Credentials.get().user_roles(user_id=user._id,
-                    project_id=project.root_project._id)
+                                                      project_id=project.root_project._id)
             for r in user_roles:
                 deny_user = M.ACE.deny(r['_id'], permission)
                 if M.ACL.contains(deny_user, obj.acl):
@@ -337,6 +356,7 @@ def has_access(obj, permission, user=None, project=None):
         return result
     return TruthyCallable(predicate)
 
+
 def all_allowed(obj, user_or_role=None, project=None):
     '''
     List all the permission names that a given user or named role
@@ -380,7 +400,8 @@ def all_allowed(obj, user_or_role=None, project=None):
         roles += [anon]  # auth inherits from anon
     else:
         roles += [auth, anon]  # named group or user inherits from auth + anon
-    role_ids = RoleCache(Credentials.get(), roles).reaching_ids  # match rules applicable to us
+    # match rules applicable to us
+    role_ids = RoleCache(Credentials.get(), roles).reaching_ids
     perms = set()
     denied = defaultdict(set)
     while obj:  # traverse parent contexts
@@ -395,13 +416,15 @@ def all_allowed(obj, user_or_role=None, project=None):
                     else:
                         # explicit DENY overrides any ALLOW for this permission
                         # for this role_id in this ACL or parent(s) (but an ALLOW
-                        # for a different role could still grant this permission)
+                        # for a different role could still grant this
+                        # permission)
                         denied[role_id].add(ace.permission)
         obj = obj.parent_security_context()
     if M.ALL_PERMISSIONS in perms:
         return set([M.ALL_PERMISSIONS])
     return perms
 
+
 def require(predicate, message=None):
     '''
     Example: require(has_access(c.app, 'read'))
@@ -412,7 +435,8 @@ def require(predicate, message=None):
     '''
 
     from allura import model as M
-    if predicate(): return
+    if predicate():
+        return
     if not message:
         message = """You don't have permission to do that.
                      You must ask a project administrator for rights to perform this task.
@@ -423,12 +447,15 @@ def require(predicate, message=None):
     else:
         raise exc.HTTPUnauthorized()
 
+
 def require_access(obj, permission, **kwargs):
     if obj is not None:
         predicate = has_access(obj, permission, **kwargs)
         return require(predicate, message='%s access required' % permission.capitalize())
     else:
-        raise exc.HTTPForbidden(detail="Could not verify permissions for this page.")
+        raise exc.HTTPForbidden(
+            detail="Could not verify permissions for this page.")
+
 
 def require_authenticated():
     '''
@@ -438,12 +465,15 @@ def require_authenticated():
     if c.user == M.User.anonymous():
         raise exc.HTTPUnauthorized()
 
+
 def simple_grant(acl, role_id, permission):
     from allura.model.types import ACE
     for ace in acl:
-        if ace.role_id == role_id and ace.permission == permission: return
+        if ace.role_id == role_id and ace.permission == permission:
+            return
     acl.append(ACE.allow(role_id, permission))
 
+
 def simple_revoke(acl, role_id, permission):
     remove = []
     for i, ace in enumerate(acl):
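
A standalone sketch (not part of the patch) of the precedence rule spelled out in
the all_allowed() comments above: ACLs are scanned from the object out through its
parent security contexts, an explicit DENY for a role shadows any later ALLOW for
that same role, but an ALLOW for a different role can still grant the permission.
Names and data here are illustrative only.

    from collections import defaultdict

    def allowed_permissions(acl_chain, role_ids):
        # acl_chain: ACLs ordered from the object to the root context; each ACL
        # is a list of (access, role_id, permission) tuples.
        perms = set()
        denied = defaultdict(set)       # role_id -> permissions already denied
        for acl in acl_chain:           # traverse parent contexts
            for access, role_id, permission in acl:
                if role_id not in role_ids:
                    continue
                if permission in denied[role_id]:
                    continue            # a closer DENY already shadows this role
                if access == 'ALLOW':
                    perms.add(permission)
                else:
                    denied[role_id].add(permission)
        return perms

    # The tool-level DENY hides the project-level ALLOW for role 1, but the
    # ALLOW for role 2 still grants 'read'.
    acl_chain = [[('DENY', 1, 'read')],
                 [('ALLOW', 1, 'read'), ('ALLOW', 2, 'read')]]
    assert allowed_permissions(acl_chain, set([1, 2])) == set(['read'])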

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/solr.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/solr.py b/Allura/allura/lib/solr.py
index 4fa0c77..22adbe6 100644
--- a/Allura/allura/lib/solr.py
+++ b/Allura/allura/lib/solr.py
@@ -37,6 +37,7 @@ def make_solr_from_config(push_servers, query_server=None, **kwargs):
 
 
 class Solr(object):
+
     """Solr interface that pushes updates to multiple solr instances.
 
     `push_servers`: list of servers to push to.
@@ -88,6 +89,7 @@ class Solr(object):
 class MockSOLR(object):
 
     class MockHits(list):
+
         @property
         def hits(self):
             return len(self)
@@ -113,7 +115,8 @@ class MockSOLR(object):
         # Parse query
         preds = []
         q_parts = shlex.split(q)
-        if fq: q_parts += fq
+        if fq:
+            q_parts += fq
         for part in q_parts:
             if part == '&&':
                 continue
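
MockSOLR.search() above tokenizes the query with shlex so that quoted phrases stay
intact before being split into field:value predicates. A quick standalone check of
that behaviour (field names are illustrative, not part of the patch):

    import shlex

    q = 'type_s:Ticket && text:"foo bar"'
    parts = [p for p in shlex.split(q) if p != '&&']
    preds = [tuple(p.split(':', 1)) for p in parts if ':' in p]
    assert preds == [('type_s', 'Ticket'), ('text', 'foo bar')]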

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/spam/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/spam/__init__.py b/Allura/allura/lib/spam/__init__.py
index cfb5c41..4bf3917 100644
--- a/Allura/allura/lib/spam/__init__.py
+++ b/Allura/allura/lib/spam/__init__.py
@@ -23,7 +23,9 @@ log = logging.getLogger(__name__)
 
 
 class SpamFilter(object):
+
     """Defines the spam checker interface and provides a default no-op impl."""
+
     def __init__(self, config):
         pass
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/spam/akismetfilter.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/spam/akismetfilter.py b/Allura/allura/lib/spam/akismetfilter.py
index 7020632..48f24cc 100644
--- a/Allura/allura/lib/spam/akismetfilter.py
+++ b/Allura/allura/lib/spam/akismetfilter.py
@@ -34,6 +34,7 @@ log = logging.getLogger(__name__)
 
 
 class AkismetSpamFilter(SpamFilter):
+
     """Spam checking implementation via Akismet service.
 
     To enable Akismet spam filtering in your Allura instance, first
@@ -47,10 +48,12 @@ class AkismetSpamFilter(SpamFilter):
         spam.method = akismet
         spam.key = <your Akismet key here>
     """
+
     def __init__(self, config):
         if not AKISMET_AVAILABLE:
             raise ImportError('akismet not available')
-        self.service = akismet.Akismet(config.get('spam.key'), config.get('base_url'))
+        self.service = akismet.Akismet(
+            config.get('spam.key'), config.get('base_url'))
         self.service.verify_key()
 
     def get_data(self, text, artifact=None, user=None, content_type='comment', **kw):
@@ -62,7 +65,8 @@ class AkismetSpamFilter(SpamFilter):
         user = user or c.user
         if user:
             kw['comment_author'] = user.display_name or user.username
-            kw['comment_author_email'] = user.email_addresses[0] if user.email_addresses else ''
+            kw['comment_author_email'] = user.email_addresses[
+                0] if user.email_addresses else ''
         user_ip = request.headers.get('X_FORWARDED_FOR', request.remote_addr)
         kw['user_ip'] = user_ip.split(',')[0].strip()
         kw['user_agent'] = request.headers.get('USER_AGENT')
@@ -93,8 +97,8 @@ class AkismetSpamFilter(SpamFilter):
 
     def submit_ham(self, text, artifact=None, user=None, content_type='comment'):
         self.service.submit_ham(text,
-                                 data=self.get_data(text=text,
-                                                    artifact=artifact,
-                                                    user=user,
-                                                    content_type=content_type),
-                                 build_data=False)
+                                data=self.get_data(text=text,
+                                                   artifact=artifact,
+                                                   user=user,
+                                                   content_type=content_type),
+                                build_data=False)
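
Both the Akismet filter above and the Mollom filter below derive the client address
the same way: prefer X_FORWARDED_FOR when the request came through a proxy, fall
back to the raw remote address, and keep only the left-most (original client) entry.
A minimal standalone sketch with made-up addresses, not part of the patch:

    def client_ip(headers, remote_addr):
        forwarded = headers.get('X_FORWARDED_FOR', remote_addr)
        return forwarded.split(',')[0].strip()

    assert client_ip({'X_FORWARDED_FOR': '203.0.113.7, 10.0.0.2'}, '10.0.0.2') == '203.0.113.7'
    assert client_ip({}, '198.51.100.4') == '198.51.100.4'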

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/spam/mollomfilter.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/spam/mollomfilter.py b/Allura/allura/lib/spam/mollomfilter.py
index b00f978..d76ff59 100644
--- a/Allura/allura/lib/spam/mollomfilter.py
+++ b/Allura/allura/lib/spam/mollomfilter.py
@@ -34,6 +34,7 @@ log = logging.getLogger(__name__)
 
 
 class MollomSpamFilter(SpamFilter):
+
     """Spam checking implementation via Mollom service.
 
     To enable Mollom spam filtering in your Allura instance, first
@@ -48,6 +49,7 @@ class MollomSpamFilter(SpamFilter):
         spam.public_key = <your Mollom public key here>
         spam.private_key = <your Mollom private key here>
     """
+
     def __init__(self, config):
         if not MOLLOM_AVAILABLE:
             raise ImportError('Mollom not available')
@@ -71,7 +73,8 @@ class MollomSpamFilter(SpamFilter):
         user = user or c.user
         if user:
             kw['authorName'] = user.display_name or user.username
-            kw['authorMail'] = user.email_addresses[0] if user.email_addresses else ''
+            kw['authorMail'] = user.email_addresses[
+                0] if user.email_addresses else ''
         user_ip = request.headers.get('X_FORWARDED_FOR', request.remote_addr)
         kw['authorIP'] = user_ip.split(',')[0].strip()
         # kw will be urlencoded, need to utf8-encode
@@ -79,7 +82,7 @@ class MollomSpamFilter(SpamFilter):
             kw[k] = h.really_unicode(v).encode('utf8')
         cc = self.service.checkContent(**kw)
         res = cc['spam'] == 2
-        artifact.spam_check_id = cc.get('session_id','')
+        artifact.spam_check_id = cc.get('session_id', '')
         log.info("spam=%s (mollom): %s" % (str(res), log_msg))
         return res
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/stats.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/stats.py b/Allura/allura/lib/stats.py
index ef6dc01..9864e30 100644
--- a/Allura/allura/lib/stats.py
+++ b/Allura/allura/lib/stats.py
@@ -20,6 +20,7 @@ from time import time
 from contextlib import contextmanager
 from pylons import request
 
+
 class StatsRecord(object):
 
     def __init__(self, request, active):
@@ -34,8 +35,8 @@ class StatsRecord(object):
 
     def __repr__(self):
         stats = ' '.join(
-            ('%s=%.0fms' % (k,v*1000))
-            for k,v in sorted(self.timers.iteritems()))
+            ('%s=%.0fms' % (k, v * 1000))
+            for k, v in sorted(self.timers.iteritems()))
         return '%s: %s' % (self.url, stats)
 
     def asdict(self):
@@ -53,12 +54,14 @@ class StatsRecord(object):
                 yield
             finally:
                 end = time()
-                self.timers[name] += end-begin
+                self.timers[name] += end - begin
                 self._now_timing.remove(name)
         else:
             yield
 
+
 class timing(object):
+
     '''Decorator to time a method call'''
 
     def __init__(self, timer):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/utils.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/utils.py b/Allura/allura/lib/utils.py
index d35b17e..95622c9 100644
--- a/Allura/allura/lib/utils.py
+++ b/Allura/allura/lib/utils.py
@@ -74,6 +74,7 @@ def guess_mime_type(filename):
 
 
 class ConfigProxy(object):
+
     '''Wrapper for loading config values at module-scope so we don't
     have problems when a module is imported before tg.config is initialized
     '''
@@ -92,6 +93,7 @@ class ConfigProxy(object):
 
 
 class lazy_logger(object):
+
     '''Lazy instatiation of a logger, to ensure that it does not get
     created before logging is configured (which would make it disabled)'''
 
@@ -103,15 +105,18 @@ class lazy_logger(object):
         return logging.getLogger(self._name)
 
     def __getattr__(self, name):
-        if name.startswith('_'): raise AttributeError, name
+        if name.startswith('_'):
+            raise AttributeError, name
         return getattr(self._logger, name)
 
+
 class TimedRotatingHandler(logging.handlers.BaseRotatingHandler):
 
     def __init__(self, strftime_pattern):
         self.pattern = strftime_pattern
         self.last_filename = self.current_filename()
-        logging.handlers.BaseRotatingHandler.__init__(self, self.last_filename, 'a')
+        logging.handlers.BaseRotatingHandler.__init__(
+            self, self.last_filename, 'a')
 
     def current_filename(self):
         return os.path.abspath(datetime.datetime.utcnow().strftime(self.pattern))
@@ -128,9 +133,11 @@ class TimedRotatingHandler(logging.handlers.BaseRotatingHandler):
         else:
             self.stream = open(self.baseFilename, 'w')
 
+
 class StatsHandler(TimedRotatingHandler):
-    fields=('action', 'action_type', 'tool_type', 'tool_mount', 'project', 'neighborhood',
-            'username', 'url', 'ip_address')
+    fields = (
+        'action', 'action_type', 'tool_type', 'tool_mount', 'project', 'neighborhood',
+        'username', 'url', 'ip_address')
 
     def __init__(self,
                  strftime_pattern,
@@ -151,13 +158,14 @@ class StatsHandler(TimedRotatingHandler):
             kwpairs[name] = getattr(record, name, None)
         kwpairs.update(getattr(record, 'kwpairs', {}))
         record.kwpairs = ','.join(
-            '%s=%s' % (k,v) for k,v in sorted(kwpairs.iteritems())
+            '%s=%s' % (k, v) for k, v in sorted(kwpairs.iteritems())
             if v is not None)
-        record.exc_info = None # Never put tracebacks in the rtstats log
+        record.exc_info = None  # Never put tracebacks in the rtstats log
         TimedRotatingHandler.emit(self, record)
 
 
 class CustomWatchedFileHandler(logging.handlers.WatchedFileHandler):
+
     """Custom log handler for Allura"""
 
     def format(self, record):
@@ -179,7 +187,8 @@ def chunked_find(cls, query=None, pagesize=1024, sort_key='_id', sort_dir=1):
     Pass an indexed sort_key for efficient queries.  Default _id should work
     in most cases.
     '''
-    if query is None: query = {}
+    if query is None:
+        query = {}
     page = 0
     max_id = None
     while True:
@@ -200,6 +209,7 @@ def chunked_find(cls, query=None, pagesize=1024, sort_key='_id', sort_dir=1):
         yield results
         page += 1
 
+
 def lsub_utf8(s, n):
     '''Useful for returning n bytes of a UTF-8 string, rather than characters'''
     while len(s) > n:
@@ -209,22 +219,26 @@ def lsub_utf8(s, n):
         return s[:k]
     return s
 
+
 def chunked_list(l, n):
     """ Yield successive n-sized chunks from l.
     """
     for i in xrange(0, len(l), n):
-        yield l[i:i+n]
+        yield l[i:i + n]
+
 
 def chunked_iter(iterable, max_size):
     '''return iterable 'chunks' from the iterable of max size max_size'''
     eiter = enumerate(iterable)
-    keyfunc = lambda (i,x): i//max_size
+    keyfunc = lambda (i, x): i // max_size
     for _, chunk in groupby(eiter, keyfunc):
-        yield (x for i,x in chunk)
+        yield (x for i, x in chunk)
+
 
 class AntiSpam(object):
+
     '''Helper class for bot-protecting forms'''
-    honey_field_template=string.Template('''<p class="$honey_class">
+    honey_field_template = string.Template('''<p class="$honey_class">
     <label for="$fld_id">You seem to have CSS turned off.
         Please don't fill out this field.</label><br>
     <input id="$fld_id" name="$fld_name" type="text"><br></p>''')
@@ -244,7 +258,7 @@ class AntiSpam(object):
             self.timestamp = int(self.timestamp_text)
             self.spinner = self._unwrap(self.spinner_text)
         self.spinner_ord = map(ord, self.spinner)
-        self.random_padding = [ random.randint(0,255) for x in self.spinner ]
+        self.random_padding = [random.randint(0, 255) for x in self.spinner]
         self.honey_class = self.enc(self.spinner_text, css_safe=True)
 
         # The counter is to ensure that multiple forms in the same page
@@ -289,10 +303,10 @@ class AntiSpam(object):
         '''
         # Plain starts with its length, includes the ordinals for its
         #   characters, and is padded with random data
-        plain = ([ len(plain) ]
+        plain = ([len(plain)]
                  + map(ord, plain)
                  + self.random_padding[:len(self.spinner_ord) - len(plain) - 1])
-        enc = ''.join(chr(p^s) for p, s in zip(plain, self.spinner_ord))
+        enc = ''.join(chr(p ^ s) for p, s in zip(plain, self.spinner_ord))
         enc = self._wrap(enc)
         if css_safe:
             enc = ''.join(ch for ch in enc if ch.isalpha())
@@ -301,8 +315,8 @@ class AntiSpam(object):
     def dec(self, enc):
         enc = self._unwrap(enc)
         enc = list(map(ord, enc))
-        plain = [e^s for e,s in zip(enc, self.spinner_ord)]
-        plain = plain[1:1+plain[0]]
+        plain = [e ^ s for e, s in zip(enc, self.spinner_ord)]
+        plain = plain[1:1 + plain[0]]
         plain = ''.join(map(chr, plain))
         return plain
 
@@ -313,15 +327,17 @@ class AntiSpam(object):
             fld_name = self.enc('honey%d' % (fldno))
             fld_id = self.enc('honey%d%d' % (self.counter, fldno))
             yield literal(self.honey_field_template.substitute(
-                    honey_class=self.honey_class,
-                    fld_id=fld_id,
-                    fld_name=fld_name))
+                honey_class=self.honey_class,
+                fld_id=fld_id,
+                fld_name=fld_name))
         self.counter += 1
 
     def make_spinner(self, timestamp=None):
-        if timestamp is None: timestamp = self.timestamp
+        if timestamp is None:
+            timestamp = self.timestamp
         try:
-            client_ip = self.request.headers.get('X_FORWARDED_FOR', self.request.remote_addr)
+            client_ip = self.request.headers.get(
+                'X_FORWARDED_FOR', self.request.remote_addr)
             client_ip = client_ip.split(',')[0].strip()
         except (TypeError, AttributeError), err:
             client_ip = '127.0.0.1'
@@ -331,17 +347,20 @@ class AntiSpam(object):
 
     @classmethod
     def validate_request(cls, request=None, now=None, params=None):
-        if request is None: request = pylons.request
-        if params is None: params = request.params
+        if request is None:
+            request = pylons.request
+        if params is None:
+            params = request.params
         new_params = dict(params)
         if not request.method == 'GET':
             new_params.pop('timestamp', None)
             new_params.pop('spinner', None)
             obj = cls(request)
-            if now is None: now = time.time()
+            if now is None:
+                now = time.time()
             if obj.timestamp > now + 5:
                 raise ValueError, 'Post from the future'
-            if now - obj.timestamp > 24*60*60:
+            if now - obj.timestamp > 24 * 60 * 60:
                 raise ValueError, 'Post from the distant past'
             if obj.spinner != obj.make_spinner(obj.timestamp):
                 raise ValueError, 'Bad spinner value'
@@ -365,21 +384,27 @@ class AntiSpam(object):
                 raise Invalid(error_msg, params, None)
         return before_validate(antispam_hook)
 
+
 class TruthyCallable(object):
+
     '''
     Wraps a callable to make it truthy in a boolean context.
 
     Assumes the callable returns a truthy value and can be called with no args.
     '''
+
     def __init__(self, callable):
         self.callable = callable
+
     def __call__(self, *args, **kw):
         return self.callable(*args, **kw)
+
     def __nonzero__(self):
         return self.callable()
 
 
 class TransformedDict(collections.MutableMapping):
+
     """
     A dictionary which applies an arbitrary
     key-altering function before accessing the keys.
@@ -389,7 +414,7 @@ class TransformedDict(collections.MutableMapping):
 
     def __init__(self, *args, **kwargs):
         self.store = dict()
-        self.update(dict(*args, **kwargs)) # use the free update to set keys
+        self.update(dict(*args, **kwargs))  # use the free update to set keys
 
     def __getitem__(self, key):
         return self.store[self.__keytransform__(key)]
@@ -416,17 +441,20 @@ class CaseInsensitiveDict(TransformedDict):
         return key.lower()
 
 
-def postmortem_hook(etype, value, tb): # pragma no cover
-    import sys, pdb, traceback
+def postmortem_hook(etype, value, tb):  # pragma no cover
+    import sys
+    import pdb
+    import traceback
     try:
-        from IPython.ipapi import make_session; make_session()
+        from IPython.ipapi import make_session
+        make_session()
         from IPython.Debugger import Pdb
         sys.stderr.write('Entering post-mortem IPDB shell\n')
         p = Pdb(color_scheme='Linux')
         p.reset()
         p.setup(None, tb)
         p.print_stack_trace()
-        sys.stderr.write('%s: %s\n' % ( etype, value))
+        sys.stderr.write('%s: %s\n' % (etype, value))
         p.cmdloop()
         p.forget()
         # p.interaction(None, tb)
@@ -435,7 +463,9 @@ def postmortem_hook(etype, value, tb): # pragma no cover
         traceback.print_exception(etype, value, tb)
         pdb.post_mortem(tb)
 
+
 class LineAnchorCodeHtmlFormatter(HtmlFormatter):
+
     def _wrap_pre(self, inner):
         style = []
         if self.prestyles:
@@ -451,6 +481,7 @@ class LineAnchorCodeHtmlFormatter(HtmlFormatter):
             num += 1
         yield 0, '</pre>'
 
+
 def generate_code_stats(blob):
     stats = {'line_count': 0,
              'code_size': 0,
@@ -486,7 +517,8 @@ def serve_file(fp, filename, content_type, last_modified=None, cache_expires=Non
         etag_cache(etag)
     pylons.response.headers['Content-Type'] = ''
     pylons.response.content_type = content_type.encode('utf-8')
-    pylons.response.cache_expires = cache_expires or asint(tg.config.get('files_expires_header_secs', 60 * 60))
+    pylons.response.cache_expires = cache_expires or asint(
+        tg.config.get('files_expires_header_secs', 60 * 60))
     pylons.response.last_modified = last_modified
     if size:
         pylons.response.content_length = size
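
The honeypot field names that AntiSpam.enc()/dec() generate come from a simple XOR
scheme against a per-request "spinner": the plaintext is prefixed with its length,
padded with random bytes out to the spinner length, and XOR'd byte for byte. A
standalone round-trip sketch of just that core (the real class also base64-wraps
the result and strips non-alphabetic characters for CSS class names), not part of
the patch:

    import random

    spinner = [random.randint(0, 255) for _ in range(16)]  # stand-in for the real spinner

    def enc(plain, spinner_ord):
        padding = [random.randint(0, 255) for _ in spinner_ord]
        data = [len(plain)] + [ord(ch) for ch in plain]
        data += padding[:len(spinner_ord) - len(data)]
        return [p ^ s for p, s in zip(data, spinner_ord)]

    def dec(encoded, spinner_ord):
        data = [e ^ s for e, s in zip(encoded, spinner_ord)]
        return ''.join(chr(o) for o in data[1:1 + data[0]])

    assert dec(enc('honey0', spinner), spinner) == 'honey0'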

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/validators.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/validators.py b/Allura/allura/lib/validators.py
index 3556fe0..1c0acfd 100644
--- a/Allura/allura/lib/validators.py
+++ b/Allura/allura/lib/validators.py
@@ -23,6 +23,7 @@ from pylons import tmpl_context as c
 from . import helpers as h
 from datetime import datetime
 
+
 class Ming(fev.FancyValidator):
 
     def __init__(self, cls, **kw):
@@ -41,15 +42,18 @@ class Ming(fev.FancyValidator):
     def _from_python(self, value, state):
         return value._id
 
+
 class UniqueOAuthApplicationName(fev.UnicodeString):
 
     def _to_python(self, value, state):
         from allura import model as M
         app = M.OAuthConsumerToken.query.get(name=value)
         if app is not None:
-            raise fe.Invalid('That name is already taken, please choose another', value, state)
+            raise fe.Invalid(
+                'That name is already taken, please choose another', value, state)
         return value
 
+
 class NullValidator(fev.Validator):
 
     def to_python(self, value, state):
@@ -61,21 +65,25 @@ class NullValidator(fev.Validator):
     def validate(self, value, state):
         return value
 
+
 class MaxBytesValidator(fev.FancyValidator):
-    max=255
+    max = 255
 
     def _to_python(self, value, state):
         value = h.really_unicode(value or '').encode('utf-8')
         if len(value) > self.max:
-            raise fe.Invalid("Please enter a value less than %s bytes long." % self.max, value, state)
+            raise fe.Invalid("Please enter a value less than %s bytes long." %
+                             self.max, value, state)
         return value
 
     def from_python(self, value, state):
         return h.really_unicode(value or '')
 
+
 class MountPointValidator(fev.UnicodeString):
+
     def __init__(self, app_class,
-            reserved_mount_points=('feed', 'index', 'icon', '_nav.json'), **kw):
+                 reserved_mount_points=('feed', 'index', 'icon', '_nav.json'), **kw):
         super(self.__class__, self).__init__(**kw)
         self.app_class = app_class
         self.reserved_mount_points = reserved_mount_points
@@ -86,13 +94,14 @@ class MountPointValidator(fev.UnicodeString):
             mount_point = mount_point.lower()
         if not App.validate_mount_point(mount_point):
             raise fe.Invalid('Mount point "%s" is invalid' % mount_point,
-                    value, state)
+                             value, state)
         if mount_point in self.reserved_mount_points:
             raise fe.Invalid('Mount point "%s" is reserved' % mount_point,
-                    value, state)
+                             value, state)
         if c.project and c.project.app_instance(mount_point) is not None:
-            raise fe.Invalid('Mount point "%s" is already in use' % mount_point,
-                    value, state)
+            raise fe.Invalid(
+                'Mount point "%s" is already in use' % mount_point,
+                value, state)
         return mount_point
 
     def empty_value(self, value):
@@ -104,13 +113,15 @@ class MountPointValidator(fev.UnicodeString):
             mount_point = base_mount_point + '-%d' % i
             i += 1
 
+
 class TaskValidator(fev.FancyValidator):
+
     def _to_python(self, value, state):
         try:
             mod, func = value.rsplit('.', 1)
         except ValueError:
             raise fe.Invalid('Invalid task name. Please provide the full '
-                    'dotted path to the python callable.', value, state)
+                             'dotted path to the python callable.', value, state)
         try:
             mod = __import__(mod, fromlist=[str(func)])
         except ImportError:
@@ -119,13 +130,16 @@ class TaskValidator(fev.FancyValidator):
         try:
             task = getattr(mod, func)
         except AttributeError:
-            raise fe.Invalid('Module has no attribute "%s"' % func, value, state)
+            raise fe.Invalid('Module has no attribute "%s"' %
+                             func, value, state)
 
         if not hasattr(task, 'post'):
             raise fe.Invalid('"%s" is not a task.' % value, value, state)
         return task
 
+
 class UserValidator(fev.FancyValidator):
+
     def _to_python(self, value, state):
         from allura import model as M
         user = M.User.by_username(value)
@@ -133,14 +147,16 @@ class UserValidator(fev.FancyValidator):
             raise fe.Invalid('Invalid username', value, state)
         return user
 
+
 class PathValidator(fev.FancyValidator):
+
     def _to_python(self, value, state):
         from allura import model as M
 
         parts = value.strip('/').split('/')
         if len(parts) < 2:
             raise fe.Invalid("You must specify at least a neighborhood and "
-                "project, i.e. '/nbhd/project'", value, state)
+                             "project, i.e. '/nbhd/project'", value, state)
         elif len(parts) == 2:
             nbhd_name, project_name, app_name = parts[0], parts[1], None
         elif len(parts) > 2:
@@ -150,24 +166,31 @@ class PathValidator(fev.FancyValidator):
         nbhd_url_prefix = '/%s/' % nbhd_name
         nbhd = M.Neighborhood.query.get(url_prefix=nbhd_url_prefix)
         if not nbhd:
-            raise fe.Invalid('Invalid neighborhood: %s' % nbhd_url_prefix, value, state)
+            raise fe.Invalid('Invalid neighborhood: %s' %
+                             nbhd_url_prefix, value, state)
 
-        project = M.Project.query.get(shortname=nbhd.shortname_prefix + project_name,
-                neighborhood_id=nbhd._id)
+        project = M.Project.query.get(
+            shortname=nbhd.shortname_prefix + project_name,
+            neighborhood_id=nbhd._id)
         if not project:
-            raise fe.Invalid('Invalid project: %s' % project_name, value, state)
+            raise fe.Invalid('Invalid project: %s' %
+                             project_name, value, state)
 
         path_parts['project'] = project
         if app_name:
             app = project.app_instance(app_name)
             if not app:
-                raise fe.Invalid('Invalid app mount point: %s' % app_name, value, state)
+                raise fe.Invalid('Invalid app mount point: %s' %
+                                 app_name, value, state)
             path_parts['app'] = app
 
         return path_parts
 
+
 class JsonValidator(fev.FancyValidator):
+
     """Validates a string as JSON and returns the original string"""
+
     def _to_python(self, value, state):
         try:
             json.loads(value)
@@ -175,8 +198,11 @@ class JsonValidator(fev.FancyValidator):
             raise fe.Invalid('Invalid JSON: ' + str(e), value, state)
         return value
 
+
 class JsonConverter(fev.FancyValidator):
+
     """Deserializes a string to JSON and returns a Python object"""
+
     def _to_python(self, value, state):
         try:
             obj = json.loads(value)
@@ -184,14 +210,19 @@ class JsonConverter(fev.FancyValidator):
             raise fe.Invalid('Invalid JSON: ' + str(e), value, state)
         return obj
 
+
 class JsonFile(fev.FieldStorageUploadConverter):
+
     """Validates that a file is JSON and returns the deserialized Python object
 
     """
+
     def _to_python(self, value, state):
         return JsonConverter.to_python(value.value)
 
+
 class UserMapJsonFile(JsonFile):
+
     """Validates that a JSON file conforms to this format:
 
     {str:str, ...}
@@ -199,6 +230,7 @@ class UserMapJsonFile(JsonFile):
     and returns a deserialized or stringified copy of it.
 
     """
+
     def __init__(self, as_string=False):
         self.as_string = as_string
 
@@ -211,8 +243,9 @@ class UserMapJsonFile(JsonFile):
             return json.dumps(value) if self.as_string else value
         except:
             raise fe.Invalid(
-                    'User map file must contain mapping of {str:str, ...}',
-                    value, state)
+                'User map file must contain mapping of {str:str, ...}',
+                value, state)
+
 
 class CreateTaskSchema(fe.Schema):
     task = TaskValidator(not_empty=True, strip=True)
@@ -220,7 +253,9 @@ class CreateTaskSchema(fe.Schema):
     user = UserValidator(strip=True, if_missing=None)
     path = PathValidator(strip=True, if_missing={}, if_empty={})
 
+
 class DateValidator(fev.FancyValidator):
+
     def _to_python(self, value, state):
         value = convertDate(value)
         if not value:
@@ -229,7 +264,9 @@ class DateValidator(fev.FancyValidator):
                 value, state)
         return value
 
+
 class TimeValidator(fev.FancyValidator):
+
     def _to_python(self, value, state):
         value = convertTime(value)
         if not value:
@@ -238,8 +275,10 @@ class TimeValidator(fev.FancyValidator):
                 value, state)
         return value
 
+
 class OneOfValidator(fev.FancyValidator):
-    def __init__(self, validvalues, not_empty = True):
+
+    def __init__(self, validvalues, not_empty=True):
         self.validvalues = validvalues
         self.not_empty = not_empty
         super(OneOfValidator, self).__init__()
@@ -257,12 +296,14 @@ class OneOfValidator(fev.FancyValidator):
                     allowed = allowed + ', '
                 allowed = allowed + '"%s"' % v
             raise fe.Invalid(
-                "Invalid value. The allowed values are %s." %allowed,
+                "Invalid value. The allowed values are %s." % allowed,
                 value, state)
         return value
 
+
 class MapValidator(fev.FancyValidator):
-    def __init__(self, mapvalues, not_empty = True):
+
+    def __init__(self, mapvalues, not_empty=True):
         self.map = mapvalues
         self.not_empty = not_empty
         super(MapValidator, self).__init__()
@@ -280,25 +321,27 @@ class MapValidator(fev.FancyValidator):
                 value, state)
         return conv_value
 
+
 def convertDate(datestring):
     formats = ['%Y-%m-%d', '%Y.%m.%d', '%Y/%m/%d', '%Y\%m\%d', '%Y %m %d',
                '%d-%m-%Y', '%d.%m.%Y', '%d/%m/%Y', '%d\%m\%Y', '%d %m %Y']
 
     for f in formats:
         try:
-            date = datetime.strptime(datestring, f)       
+            date = datetime.strptime(datestring, f)
             return date
         except:
             pass
     return None
 
+
 def convertTime(timestring):
     formats = ['%H:%M', '%H.%M', '%H %M', '%H,%M']
 
     for f in formats:
         try:
-            time = datetime.strptime(timestring, f)       
-            return {'h':time.hour, 'm':time.minute}
+            time = datetime.strptime(timestring, f)
+            return {'h': time.hour, 'm': time.minute}
         except:
             pass
     return None
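
convertDate() and convertTime() simply probe a list of strptime patterns and return
the first one that parses. A quick standalone check, using a subset of the format
list, of what that accepts (not part of the patch):

    from datetime import datetime

    formats = ['%Y-%m-%d', '%Y.%m.%d', '%Y/%m/%d', '%d-%m-%Y', '%d.%m.%Y', '%d/%m/%Y']

    def convert_date(datestring):
        for f in formats:
            try:
                return datetime.strptime(datestring, f)
            except ValueError:
                pass
        return None

    assert convert_date('2014-01-10') == datetime(2014, 1, 10)
    assert convert_date('10/01/2014') == datetime(2014, 1, 10)
    assert convert_date('Jan 10 2014') is None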

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/analytics.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/analytics.py b/Allura/allura/lib/widgets/analytics.py
index cf31715..c2e85c5 100644
--- a/Allura/allura/lib/widgets/analytics.py
+++ b/Allura/allura/lib/widgets/analytics.py
@@ -17,8 +17,9 @@
 
 import ew
 
+
 class GoogleAnalytics(ew.Widget):
-    template='jinja:allura:templates/widgets/analytics.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/analytics.html'
+    defaults = dict(
         ew.Widget.defaults,
         accounts=['UA-XXXXX-X'])

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/auth_widgets.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/auth_widgets.py b/Allura/allura/lib/widgets/auth_widgets.py
index 93e8826..f6c1a28 100644
--- a/Allura/allura/lib/widgets/auth_widgets.py
+++ b/Allura/allura/lib/widgets/auth_widgets.py
@@ -28,9 +28,10 @@ from .forms import ForgeForm
 from allura.lib import plugin
 from allura import model as M
 
+
 class LoginForm(ForgeForm):
-    submit_text='Login'
-    style='wide'
+    submit_text = 'Login'
+    style = 'wide'
 
     @property
     def fields(self):
@@ -40,7 +41,8 @@ class LoginForm(ForgeForm):
         ]
         if plugin.AuthenticationProvider.get(request).forgotten_password_process:
             # only show link if auth provider has method of recovering password
-            fields.append(ew.HTMLField(name='link', text='<a href="forgotten_password">Forgot password?</a>'))
+            fields.append(
+                ew.HTMLField(name='link', text='<a href="forgotten_password">Forgot password?</a>'))
         return fields
 
     class hidden_fields(ew_core.NameList):
@@ -49,7 +51,8 @@ class LoginForm(ForgeForm):
     @validator
     def validate(self, value, state=None):
         try:
-            value['username'] = plugin.AuthenticationProvider.get(request).login()
+            value['username'] = plugin.AuthenticationProvider.get(
+                request).login()
         except exc.HTTPUnauthorized:
             msg = 'Invalid login'
             raise Invalid(
@@ -60,8 +63,8 @@ class LoginForm(ForgeForm):
 
 
 class ForgottenPasswordForm(ForgeForm):
-    submit_text='Recover password'
-    style='wide'
+    submit_text = 'Recover password'
+    style = 'wide'
 
     class fields(ew_core.NameList):
         email = ew.TextField(label='Your e-mail')
@@ -73,6 +76,6 @@ class ForgottenPasswordForm(ForgeForm):
         user = M.User.by_email_address(email)
         if user is None or not email_record.confirmed:
             raise Invalid(
-                    'Unable to recover password for this email',
-                    {'email': email}, None)
+                'Unable to recover password for this email',
+                {'email': email}, None)
         return value

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/discuss.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/discuss.py b/Allura/allura/lib/widgets/discuss.py
index 151a0c2..30dc128 100644
--- a/Allura/allura/lib/widgets/discuss.py
+++ b/Allura/allura/lib/widgets/discuss.py
@@ -27,34 +27,44 @@ from allura.lib.widgets import form_fields as ffw
 from allura.lib.widgets import forms as ff
 from allura import model as M
 
+
 class NullValidator(fev.FancyValidator):
-    perform_validation=True
+    perform_validation = True
+
+    def _to_python(self, value, state):
+        return value
 
-    def _to_python(self, value, state): return value
-    def _from_python(self, value, state): return value
+    def _from_python(self, value, state):
+        return value
 
 # Discussion forms
+
+
 class ModerateThread(ff.CsrfForm):
-    defaults=dict(
+    defaults = dict(
         ew.SimpleForm.defaults,
         submit_text=None)
+
     class buttons(ew_core.NameList):
-        delete=ew.SubmitButton(label='Delete Thread')
+        delete = ew.SubmitButton(label='Delete Thread')
+
 
 class ModeratePost(ew.SimpleForm):
-    template='jinja:allura:templates/widgets/moderate_post.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/moderate_post.html'
+    defaults = dict(
         ew.SimpleForm.defaults,
         submit_text=None)
 
+
 class FlagPost(ew.SimpleForm):
-    template='jinja:allura:templates/widgets/flag_post.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/flag_post.html'
+    defaults = dict(
         ew.SimpleForm.defaults,
         submit_text=None)
 
+
 class AttachPost(ff.ForgeForm):
-    defaults=dict(
+    defaults = dict(
         ff.ForgeForm.defaults,
         submit_text='Attach File',
         enctype='multipart/form-data')
@@ -62,17 +72,21 @@ class AttachPost(ff.ForgeForm):
     @property
     def fields(self):
         fields = [
-            ew.InputField(name='file_info', field_type='file', label='New Attachment')
+            ew.InputField(name='file_info', field_type='file',
+                          label='New Attachment')
         ]
         return fields
 
+
 class ModeratePosts(ew.SimpleForm):
-    template='jinja:allura:templates/widgets/moderate_posts.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/moderate_posts.html'
+    defaults = dict(
         ew.SimpleForm.defaults,
         submit_text=None)
+
     def resources(self):
-        for r in super(ModeratePosts, self).resources(): yield r
+        for r in super(ModeratePosts, self).resources():
+            yield r
         yield ew.JSScript('''
       (function($){
           var tbl = $('form table');
@@ -86,8 +100,9 @@ class ModeratePosts(ew.SimpleForm):
           });
       }(jQuery));''')
 
+
 class PostFilter(ff.ForgeForm):
-    defaults=dict(
+    defaults = dict(
         ew.SimpleForm.defaults,
         submit_text=None,
         method='GET')
@@ -96,22 +111,24 @@ class PostFilter(ff.ForgeForm):
             name='page',
             validator=fev.Int()),
         ew.FieldSet(label='Post Filter', fields=[
-                ew.SingleSelectField(
+            ew.SingleSelectField(
                     name='status',
                     label='Show posts with status',
                     options=[
                         ew.Option(py_value='-', label='Any'),
                         ew.Option(py_value='spam', label='Spam'),
-                        ew.Option(py_value='pending', label='Pending moderation'),
+                        ew.Option(py_value='pending',
+                                  label='Pending moderation'),
                         ew.Option(py_value='ok', label='Ok')],
                     if_missing='pending'),
-                ew.IntField(name='flag',
-                            label='Show posts with at least "n" flags',
-                            css_class='text',
-                            if_missing=0),
-                ew.SubmitButton(label='Filter Posts')
-                ])
-        ]
+            ew.IntField(name='flag',
+                        label='Show posts with at least "n" flags',
+                        css_class='text',
+                        if_missing=0),
+            ew.SubmitButton(label='Filter Posts')
+        ])
+    ]
+
 
 class TagPost(ff.ForgeForm):
 
@@ -123,15 +140,17 @@ class TagPost(ff.ForgeForm):
         result['buttons'] = [submit_button]
         return result
 
-    fields=[ffw.LabelEdit(label='Labels',name='labels', className='title')]
+    fields = [ffw.LabelEdit(label='Labels', name='labels', className='title')]
 
     def resources(self):
-        for r in ffw.LabelEdit(name='labels').resources(): yield r
+        for r in ffw.LabelEdit(name='labels').resources():
+            yield r
+
 
 class EditPost(ff.ForgeForm):
-    template='jinja:allura:templates/widgets/edit_post.html'
-    antispam=True
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/edit_post.html'
+    antispam = True
+    defaults = dict(
         ff.ForgeForm.defaults,
         show_subject=False,
         value=None,
@@ -142,12 +161,13 @@ class EditPost(ff.ForgeForm):
         fields = ew_core.NameList()
         fields.append(ffw.MarkdownEdit(
             name='text',
-            attrs={'style':'height:7em; width:97%'}))
+            attrs={'style': 'height:7em; width:97%'}))
         fields.append(ew.HiddenField(name='forum', if_missing=None))
         if ew_core.widget_context.widget:
             # we are being displayed
             if ew_core.widget_context.render_context.get('show_subject', self.show_subject):
-                fields.append(ew.TextField(name='subject',attrs=dict(style="width:97%")))
+                fields.append(
+                    ew.TextField(name='subject', attrs=dict(style="width:97%")))
         else:
             # We are being validated
             validator = fev.UnicodeString(not_empty=True, if_missing='')
@@ -156,8 +176,10 @@ class EditPost(ff.ForgeForm):
         return fields
 
     def resources(self):
-        for r in ew.TextField(name='subject').resources(): yield r
-        for r in ffw.MarkdownEdit(name='text').resources(): yield r
+        for r in ew.TextField(name='subject').resources():
+            yield r
+        for r in ffw.MarkdownEdit(name='text').resources():
+            yield r
         yield ew.JSScript('''$(document).ready(function () {
             $("a.attachment_form_add_button").click(function(evt){
                 $(this).hide();
@@ -171,48 +193,57 @@ class EditPost(ff.ForgeForm):
             });
          });''')
 
+
 class NewTopicPost(EditPost):
-    template='jinja:allura:templates/widgets/new_topic_post.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/new_topic_post.html'
+    defaults = dict(
         EditPost.defaults,
-        show_subject = True,
+        show_subject=True,
         forums=None)
 
+
 class _ThreadsTable(ew.TableField):
-    template='jinja:allura:templates/widgets/threads_table.html'
+    template = 'jinja:allura:templates/widgets/threads_table.html'
+
     class fields(ew_core.NameList):
-        _id=ew.HiddenField(validator=V.Ming(M.Thread))
-        subscription=ew.Checkbox(suppress_label=True)
-        subject=ffw.DisplayOnlyField(label='Topic')
-        url=ffw.DisplayOnlyField()
-        num_replies=ffw.DisplayOnlyField(label='Posts')
-        num_views=ffw.DisplayOnlyField(label='Views')
-        last_post=ffw.DisplayOnlyField(label='Last Post')
+        _id = ew.HiddenField(validator=V.Ming(M.Thread))
+        subscription = ew.Checkbox(suppress_label=True)
+        subject = ffw.DisplayOnlyField(label='Topic')
+        url = ffw.DisplayOnlyField()
+        num_replies = ffw.DisplayOnlyField(label='Posts')
+        num_views = ffw.DisplayOnlyField(label='Views')
+        last_post = ffw.DisplayOnlyField(label='Last Post')
+
 
 class SubscriptionForm(ew.SimpleForm):
-    template='jinja:allura:templates/widgets/subscription_form.html'
-    value=None
-    threads=None
-    show_subject=False
-    allow_create_thread=False
-    limit=None
-    page=0
-    count=0
-    submit_text='Update Subscriptions'
-    params=['value', 'threads', 'limit', 'page', 'count',
-            'show_subject', 'allow_create_thread']
+    template = 'jinja:allura:templates/widgets/subscription_form.html'
+    value = None
+    threads = None
+    show_subject = False
+    allow_create_thread = False
+    limit = None
+    page = 0
+    count = 0
+    submit_text = 'Update Subscriptions'
+    params = ['value', 'threads', 'limit', 'page', 'count',
+              'show_subject', 'allow_create_thread']
+
     class fields(ew_core.NameList):
-        page_list=ffw.PageList()
-        page_size=ffw.PageSize()
-        threads=_ThreadsTable()
+        page_list = ffw.PageList()
+        page_size = ffw.PageSize()
+        threads = _ThreadsTable()
+
     def resources(self):
-        for r in super(SubscriptionForm, self).resources(): yield r
+        for r in super(SubscriptionForm, self).resources():
+            yield r
         yield ew.JSScript('''
         $(window).load(function () {
             $('tbody').children(':even').addClass('even');
         });''')
 
 # Widgets
+
+
 class HierWidget(ew_core.Widget):
     widgets = {}
 
@@ -228,37 +259,41 @@ class HierWidget(ew_core.Widget):
             for r in w.resources():
                 yield r
 
+
 class Attachment(ew_core.Widget):
-    template='jinja:allura:templates/widgets/attachment.html'
-    params=['value', 'post']
-    value=None
-    post=None
+    template = 'jinja:allura:templates/widgets/attachment.html'
+    params = ['value', 'post']
+    value = None
+    post = None
+
 
 class DiscussionHeader(HierWidget):
-    template='jinja:allura:templates/widgets/discussion_header.html'
-    params=['value']
-    value=None
-    widgets=dict(
+    template = 'jinja:allura:templates/widgets/discussion_header.html'
+    params = ['value']
+    value = None
+    widgets = dict(
         edit_post=EditPost(submit_text='New Thread'))
 
+
 class ThreadHeader(HierWidget):
-    template='jinja:allura:templates/widgets/thread_header.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/thread_header.html'
+    defaults = dict(
         HierWidget.defaults,
         value=None,
         page=None,
         limit=None,
         count=None,
         show_moderate=False)
-    widgets=dict(
+    widgets = dict(
         page_list=ffw.PageList(),
         page_size=ffw.PageSize(),
         moderate_thread=ModerateThread(),
         tag_post=TagPost())
 
+
 class Post(HierWidget):
-    template='jinja:allura:templates/widgets/post_widget.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/post_widget.html'
+    defaults = dict(
         HierWidget.defaults,
         value=None,
         indent=0,
@@ -266,13 +301,15 @@ class Post(HierWidget):
         limit=25,
         show_subject=False,
         suppress_promote=False)
-    widgets=dict(
+    widgets = dict(
         moderate_post=ModeratePost(),
         edit_post=EditPost(submit_text='Post'),
         attach_post=AttachPost(submit_text='Attach'),
         attachment=Attachment())
+
     def resources(self):
-        for r in super(Post, self).resources(): yield r
+        for r in super(Post, self).resources():
+            yield r
         for w in self.widgets.itervalues():
             for r in w.resources():
                 yield r
@@ -362,9 +399,10 @@ class Post(HierWidget):
         }());
         ''')
 
+
 class PostThread(ew_core.Widget):
-    template='jinja:allura:templates/widgets/post_thread.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/post_thread.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         value=None,
         indent=0,
@@ -375,10 +413,11 @@ class PostThread(ew_core.Widget):
         parent=None,
         children=None)
 
+
 class Thread(HierWidget):
-    template='jinja:allura:templates/widgets/thread_widget.html'
-    name='thread'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/thread_widget.html'
+    name = 'thread'
+    defaults = dict(
         HierWidget.defaults,
         value=None,
         page=None,
@@ -386,14 +425,16 @@ class Thread(HierWidget):
         count=None,
         show_subject=False,
         new_post_text='+ New Comment')
-    widgets=dict(
+    widgets = dict(
         page_list=ffw.PageList(),
         thread_header=ThreadHeader(),
         post_thread=PostThread(),
         post=Post(),
         edit_post=EditPost(submit_text='Submit'))
+
     def resources(self):
-        for r in super(Thread, self).resources(): yield r
+        for r in super(Thread, self).resources():
+            yield r
         for w in self.widgets.itervalues():
             for r in w.resources():
                 yield r
@@ -441,18 +482,20 @@ class Thread(HierWidget):
         });
         ''')
 
+
 class Discussion(HierWidget):
-    template='jinja:allura:templates/widgets/discussion.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/discussion.html'
+    defaults = dict(
         HierWidget.defaults,
         value=None,
         threads=None,
         show_subject=False,
         allow_create_thread=False)
-    widgets=dict(
+    widgets = dict(
         discussion_header=DiscussionHeader(),
         edit_post=EditPost(submit_text='New Topic'),
         subscription_form=SubscriptionForm())
 
     def resources(self):
-        for r in super(Discussion, self).resources(): yield r
+        for r in super(Discussion, self).resources():
+            yield r


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/reclone_repo.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/reclone_repo.py b/Allura/allura/command/reclone_repo.py
index 5ab8e3f..7cc06a5 100644
--- a/Allura/allura/command/reclone_repo.py
+++ b/Allura/allura/command/reclone_repo.py
@@ -26,8 +26,8 @@ from . import base
 
 
 class RecloneRepoCommand(base.Command):
-    min_args=3
-    max_args=None
+    min_args = 3
+    max_args = None
     usage = '<ini file> [-n nbhd] <project_shortname> <mount_point>'
     summary = 'Reinitialize a repo from the original clone source'
     parser = base.Command.standard_parser(verbose=True)
@@ -55,11 +55,13 @@ class RecloneRepoCommand(base.Command):
         c.user = M.User.query.get(username='sfrobot')
         nbhd = M.Neighborhood.query.get(url_prefix='/%s/' % self.options.nbhd)
         assert nbhd, 'Neighborhood with prefix %s not found' % self.options.nbhd
-        c.project = M.Project.query.get(shortname=self.args[1], neighborhood_id=nbhd._id)
-        assert c.project, 'Project with shortname %s not found in neighborhood %s' % (self.args[1], nbhd.name)
+        c.project = M.Project.query.get(
+            shortname=self.args[1], neighborhood_id=nbhd._id)
+        assert c.project, 'Project with shortname %s not found in neighborhood %s' % (
+            self.args[1], nbhd.name)
         c.app = c.project.app_instance(self.args[2])
-        assert c.app, 'Mount point %s not found on project %s' % (self.args[2], c.project.shortname)
-
+        assert c.app, 'Mount point %s not found on project %s' % (
+            self.args[2], c.project.shortname)
 
     def _clone_repo(self):
         '''Initiate the repo clone.'''

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/script.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/script.py b/Allura/allura/command/script.py
index 8c124b5..1f2c93a 100644
--- a/Allura/allura/command/script.py
+++ b/Allura/allura/command/script.py
@@ -29,9 +29,10 @@ from allura.lib import helpers as h
 from allura.lib import utils
 from . import base
 
+
 class ScriptCommand(base.Command):
-    min_args=2
-    max_args=None
+    min_args = 2
+    max_args = None
     usage = '<ini file> <script> ...'
     summary = 'Run a script as if it were being run at the paster shell prompt'
     parser = base.Command.standard_parser(verbose=True)
@@ -50,7 +51,7 @@ class ScriptCommand(base.Command):
                 warnings.simplefilter("ignore", category=exc.SAWarning)
             self.basic_setup()
             request = webob.Request.blank('--script--', environ={
-                    'paste.registry':self.registry})
+                'paste.registry': self.registry})
             self.registry.register(pylons.request, request)
             if self.options.pdb:
                 base.log.info('Installing exception hook')
@@ -59,13 +60,15 @@ class ScriptCommand(base.Command):
                 ns = dict(__name__='__main__')
                 sys.argv = self.args[1:]
                 if self.options.profile:
-                    cProfile.run(fp, '%s.profile' % os.path.basename(self.args[1]))
+                    cProfile.run(fp, '%s.profile' %
+                                 os.path.basename(self.args[1]))
                 else:
                     exec fp in ns
 
+
 class SetToolAccessCommand(base.Command):
-    min_args=3
-    max_args=None
+    min_args = 3
+    max_args = None
     usage = '<ini file> <project_shortname> <neighborhood_name> <access_level>...'
     summary = ('Set the tool statuses that are permitted to be installed on a'
                ' given project')
@@ -77,7 +80,7 @@ class SetToolAccessCommand(base.Command):
         extra_status = []
         for s in self.args[3:]:
             s = s.lower()
-            if s=='production':
+            if s == 'production':
                 print ('All projects always have access to prodcution tools,'
                        ' so removing from list.')
                 continue

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/set_neighborhood_features.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/set_neighborhood_features.py b/Allura/allura/command/set_neighborhood_features.py
index 53469c0..c4de43c 100644
--- a/Allura/allura/command/set_neighborhood_features.py
+++ b/Allura/allura/command/set_neighborhood_features.py
@@ -25,7 +25,10 @@ from allura.lib import plugin, exceptions
 from ming.orm import session
 
 # Example usage:
-# paster set-neighborhood-features development.ini 4f50c898610b270c92000286 max_projects 50
+# paster set-neighborhood-features development.ini
+# 4f50c898610b270c92000286 max_projects 50
+
+
 class SetNeighborhoodFeaturesCommand(base.Command):
     min_args = 4
     max_args = 4
@@ -50,40 +53,40 @@ class SetNeighborhoodFeaturesCommand(base.Command):
         except ValueError:
             n_value = self.args[3]
         if n_feature not in ["max_projects", "css", "google_analytics", "private_projects"]:
-            raise exceptions.NoSuchNBFeatureError("%s is not a valid " \
-                "neighborhood feature. The valid features are \"max_projects\", " \
-                "\"css\", \"google_analytics\" and \"private_projects\"" % n_feature)
+            raise exceptions.NoSuchNBFeatureError("%s is not a valid "
+                                                  "neighborhood feature. The valid features are \"max_projects\", "
+                                                  "\"css\", \"google_analytics\" and \"private_projects\"" % n_feature)
 
         n = M.Neighborhood.query.get(name=n_id)
         if not n:
             n = M.Neighborhood.query.get(_id=ObjectId(n_id))
 
         if not n:
-            raise exceptions.NoSuchNeighborhoodError("The neighborhood %s " \
-                "could not be found in the database" % n_id)
+            raise exceptions.NoSuchNeighborhoodError("The neighborhood %s "
+                                                     "could not be found in the database" % n_id)
         else:
             if n_feature == "max_projects":
                 if isinstance(n_value, int) or n_value is None:
                     n.features['max_projects'] = n_value
                 else:
-                    raise exceptions.InvalidNBFeatureValueError("max_projects must be " \
-                        "an int or None.")
+                    raise exceptions.InvalidNBFeatureValueError("max_projects must be "
+                                                                "an int or None.")
             elif n_feature == "css":
                 if n_value in ['none', 'custom', 'picker']:
                     n.features['css'] = n_value
                 else:
-                    raise exceptions.InvalidNBFeatureValueError("css must be " \
-                        "'none', 'custom', or 'picker'")
+                    raise exceptions.InvalidNBFeatureValueError("css must be "
+                                                                "'none', 'custom', or 'picker'")
             elif n_feature == "google_analytics":
                 if isinstance(n_value, bool):
                     n.features['google_analytics'] = n_value
                 else:
-                    raise exceptions.InvalidNBFeatureValueError("google_analytics must be " \
-                        "a boolean")
+                    raise exceptions.InvalidNBFeatureValueError("google_analytics must be "
+                                                                "a boolean")
             else:
                 if isinstance(n_value, bool):
                     n.features['private_projects'] = n_value
                 else:
-                    raise exceptions.InvalidNBFeatureValueError("private_projects must be " \
-                        "a boolean")
+                    raise exceptions.InvalidNBFeatureValueError("private_projects must be "
+                                                                "a boolean")
             session(M.Neighborhood).flush()
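
The command above amounts to a per-feature validity check before flushing the
neighborhood document. A compact standalone sketch of that check (helper names are
illustrative, not the actual code):

    VALID = {
        'max_projects': lambda v: isinstance(v, int) or v is None,
        'css': lambda v: v in ('none', 'custom', 'picker'),
        'google_analytics': lambda v: isinstance(v, bool),
        'private_projects': lambda v: isinstance(v, bool),
    }

    def check_feature(name, value):
        if name not in VALID:
            raise ValueError('%s is not a valid neighborhood feature' % name)
        if not VALID[name](value):
            raise ValueError('invalid value for %s: %r' % (name, value))
        return value

    assert check_feature('max_projects', 50) == 50
    assert check_feature('css', 'picker') == 'picker'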

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/show_models.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/show_models.py b/Allura/allura/command/show_models.py
index f4d2784..76d95ac 100644
--- a/Allura/allura/command/show_models.py
+++ b/Allura/allura/command/show_models.py
@@ -35,8 +35,8 @@ from . import base
 
 
 class ShowModelsCommand(base.Command):
-    min_args=1
-    max_args=1
+    min_args = 1
+    max_args = 1
     usage = '<ini file>'
     summary = 'Show the inheritance graph of all Ming models'
     parser = base.Command.standard_parser(verbose=True)
@@ -50,8 +50,8 @@ class ShowModelsCommand(base.Command):
 
 
 class ReindexCommand(base.Command):
-    min_args=1
-    max_args=1
+    min_args = 1
+    max_args = 1
     usage = '<ini file>'
     summary = 'Reindex and re-shortlink all artifacts'
     parser = base.Command.standard_parser(verbose=True)
@@ -60,13 +60,15 @@ class ReindexCommand(base.Command):
     parser.add_option('--project-regex', dest='project_regex', default='',
                       help='Restrict reindex to projects for which the shortname matches '
                       'the provided regex.')
-    parser.add_option('-n', '--neighborhood', dest='neighborhood', default=None,
-                      help='neighborhood to reindex (e.g. p)')
+    parser.add_option(
+        '-n', '--neighborhood', dest='neighborhood', default=None,
+        help='neighborhood to reindex (e.g. p)')
 
     parser.add_option('--solr', action='store_true', dest='solr',
                       help='Solr needs artifact references to already exist.')
-    parser.add_option('--skip-solr-delete', action='store_true', dest='skip_solr_delete',
-                      help='Skip clearing solr index.')
+    parser.add_option(
+        '--skip-solr-delete', action='store_true', dest='skip_solr_delete',
+        help='Skip clearing solr index.')
     parser.add_option('--refs', action='store_true', dest='refs',
                       help='Update artifact references and shortlinks')
     parser.add_option('--tasks', action='store_true', dest='tasks',
@@ -75,10 +77,11 @@ class ReindexCommand(base.Command):
                            'which are needed for some markdown macros to run properly')
     parser.add_option('--solr-hosts', dest='solr_hosts',
                       help='Override the solr host(s) to post to.  Comma-separated list of solr server URLs')
-    parser.add_option('--max-chunk', dest='max_chunk', type=int, default=100*1000,
-                      help='Max number of artifacts to index in one Solr update command')
+    parser.add_option(
+        '--max-chunk', dest='max_chunk', type=int, default=100 * 1000,
+        help='Max number of artifacts to index in one Solr update command')
     parser.add_option('--ming-config', dest='ming_config', help='Path (absolute, or relative to '
-                        'Allura root) to .ini file defining ming configuration.')
+                      'Allura root) to .ini file defining ming configuration.')
 
     def command(self):
         from allura import model as M
@@ -107,9 +110,10 @@ class ReindexCommand(base.Command):
                 if self.options.solr and not self.options.skip_solr_delete:
                     g.solr.delete(q='project_id_s:%s' % p._id)
                 if self.options.refs:
-                    M.ArtifactReference.query.remove({'artifact_reference.project_id':p._id})
-                    M.Shortlink.query.remove({'project_id':p._id})
-                app_config_ids = [ ac._id for ac in p.app_configs ]
+                    M.ArtifactReference.query.remove(
+                        {'artifact_reference.project_id': p._id})
+                    M.Shortlink.query.remove({'project_id': p._id})
+                app_config_ids = [ac._id for ac in p.app_configs]
                 # Traverse the inheritance graph, finding all artifacts that
                 # belong to this project
                 for _, a_cls in dfs(M.Artifact, graph):
@@ -124,7 +128,8 @@ class ReindexCommand(base.Command):
                                 M.ArtifactReference.from_artifact(a)
                                 M.Shortlink.from_artifact(a)
                             except:
-                                base.log.exception('Making ArtifactReference/Shortlink from %s', a)
+                                base.log.exception(
+                                    'Making ArtifactReference/Shortlink from %s', a)
                                 continue
                         ref_ids.append(a.index_id())
                     M.main_orm_session.flush()
@@ -132,7 +137,8 @@ class ReindexCommand(base.Command):
                     try:
                         self._chunked_add_artifacts(ref_ids)
                     except CompoundError, err:
-                        base.log.exception('Error indexing artifacts:\n%r', err)
+                        base.log.exception(
+                            'Error indexing artifacts:\n%r', err)
                         base.log.error('%s', err.format_error())
                     M.main_orm_session.flush()
                     M.main_orm_session.clear()
@@ -141,7 +147,7 @@ class ReindexCommand(base.Command):
     @property
     def add_artifact_kwargs(self):
         if self.options.solr_hosts:
-           return {'solr_hosts': self.options.solr_hosts.split(',')}
+            return {'solr_hosts': self.options.solr_hosts.split(',')}
         return {}
 
     def _chunked_add_artifacts(self, ref_ids):
@@ -169,7 +175,8 @@ class ReindexCommand(base.Command):
                                    update_refs=self.options.refs,
                                    **self.add_artifact_kwargs)
         except InvalidDocument as e:
-            # there are many types of InvalidDocument, only recurse if its expected to help
+            # there are many types of InvalidDocument, only recurse if its
+            # expected to help
             if str(e).startswith('BSON document too large'):
                 self._post_add_artifacts(chunk[:len(chunk) // 2])
                 self._post_add_artifacts(chunk[len(chunk) // 2:])
@@ -193,8 +200,8 @@ class ReindexCommand(base.Command):
 
 
 class EnsureIndexCommand(base.Command):
-    min_args=1
-    max_args=1
+    min_args = 1
+    max_args = 1
     usage = '[<ini file>]'
     summary = 'Run ensure_index on all mongo objects'
     parser = base.Command.standard_parser(verbose=True)
@@ -202,12 +209,13 @@ class EnsureIndexCommand(base.Command):
     def command(self):
         from allura import model as M
         main_session_classes = [M.main_orm_session, M.repository_orm_session,
-                M.task_orm_session]
+                                M.task_orm_session]
         if asbool(self.config.get('activitystream.recording.enabled', False)):
             from activitystream.storage.mingstorage import activity_orm_session
             main_session_classes.append(activity_orm_session)
         self.basic_setup()
-        main_indexes = defaultdict(lambda: defaultdict(list))  # by db, then collection name
+        # by db, then collection name
+        main_indexes = defaultdict(lambda: defaultdict(list))
         project_indexes = defaultdict(list)  # by collection name
         base.log.info('Collecting indexes...')
         for m in Mapper.all_mappers():
@@ -237,7 +245,8 @@ class EnsureIndexCommand(base.Command):
 
     def _update_indexes(self, collection, indexes):
         uindexes = dict(
-            (tuple(i.index_spec), i)  # convert list to tuple so it's hashable for 'set'
+            # convert list to tuple so it's hashable for 'set'
+            (tuple(i.index_spec), i)
             for i in indexes
             if i.unique)
         indexes = dict(
@@ -284,30 +293,36 @@ class EnsureIndexCommand(base.Command):
         # Drop obsolete indexes
         for iname, keys in prev_indexes.iteritems():
             if keys not in indexes:
-                base.log.info('...... drop index %s:%s', collection.name, iname)
+                base.log.info('...... drop index %s:%s',
+                              collection.name, iname)
                 collection.drop_index(iname)
         for iname, keys in prev_uindexes.iteritems():
             if keys not in uindexes:
-                base.log.info('...... drop index %s:%s', collection.name, iname)
+                base.log.info('...... drop index %s:%s',
+                              collection.name, iname)
                 collection.drop_index(iname)
 
     def _recreate_index(self, collection, iname, keys, **creation_options):
         '''Recreate an index with new creation options, using a temporary index
         so that at no time is an index missing from the specified keys'''
         superset_keys = keys + [('temporary_extra_field_for_indexing', 1)]
-        base.log.info('...... ensure index %s:%s', collection.name, superset_keys)
+        base.log.info('...... ensure index %s:%s',
+                      collection.name, superset_keys)
         superset_index = collection.ensure_index(superset_keys)
         base.log.info('...... drop index %s:%s', collection.name, iname)
         collection.drop_index(iname)
-        base.log.info('...... ensure index %s:%s %s', collection.name, keys, creation_options)
+        base.log.info('...... ensure index %s:%s %s',
+                      collection.name, keys, creation_options)
         collection.ensure_index(keys, **creation_options)
-        base.log.info('...... drop index %s:%s', collection.name, superset_index)
+        base.log.info('...... drop index %s:%s',
+                      collection.name, superset_index)
         collection.drop_index(superset_index)
 
     def _remove_dupes(self, collection, spec):
         iname = collection.create_index(spec)
-        fields = [ f[0] for f in spec ]
+        fields = [f[0] for f in spec]
         q = collection.find({}, fields=fields).sort(spec)
+
         def keyfunc(doc):
             return tuple(doc.get(f, None) for f in fields)
         dupes = []
@@ -315,47 +330,53 @@ class EnsureIndexCommand(base.Command):
             docs = list(doc_iter)
             if len(docs) > 1:
                 base.log.info('Found dupes with %s', key)
-                dupes += [ doc['_id'] for doc in docs[1:] ]
+                dupes += [doc['_id'] for doc in docs[1:]]
         collection.drop_index(iname)
-        collection.remove(dict(_id={'$in':dupes}))
+        collection.remove(dict(_id={'$in': dupes}))
+
 
 def build_model_inheritance_graph():
     graph = dict((m.mapped_class, ([], [])) for m in Mapper.all_mappers())
-    for cls, (parents, children)  in graph.iteritems():
+    for cls, (parents, children) in graph.iteritems():
         for b in cls.__bases__:
-            if b not in graph: continue
+            if b not in graph:
+                continue
             parents.append(b)
             graph[b][1].append(cls)
     return graph
 
+
 def dump_cls(depth, cls):
-    indent = ' '*4*depth
+    indent = ' ' * 4 * depth
     yield indent + '%s.%s' % (cls.__module__, cls.__name__)
     m = mapper(cls)
     for p in m.properties:
-        s = indent*2 + ' - ' + str(p)
+        s = indent * 2 + ' - ' + str(p)
         if hasattr(p, 'field_type'):
             s += ' (%s)' % p.field_type
         yield s
 
+
 def dfs(root, graph, depth=0):
     yield depth, root
     for c in graph[root][1]:
-        for r in dfs(c, graph, depth+1):
+        for r in dfs(c, graph, depth + 1):
             yield r
 
 
-def pm(etype, value, tb): # pragma no cover
-    import pdb, traceback
+def pm(etype, value, tb):  # pragma no cover
+    import pdb
+    import traceback
     try:
-        from IPython.ipapi import make_session; make_session()
+        from IPython.ipapi import make_session
+        make_session()
         from IPython.Debugger import Pdb
         sys.stderr.write('Entering post-mortem IPDB shell\n')
         p = Pdb(color_scheme='Linux')
         p.reset()
         p.setup(None, tb)
         p.print_stack_trace()
-        sys.stderr.write('%s: %s\n' % ( etype, value))
+        sys.stderr.write('%s: %s\n' % (etype, value))
         p.cmdloop()
         p.forget()
         # p.interaction(None, tb)
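
Most of the show_models.py churn is mechanical: "import pdb, traceback" becomes
one import per statement, top-level definitions gain the two blank lines pep8
expects, class attributes get spaces around "=", and comprehensions lose the
padding spaces inside their brackets. A small sketch of the same conventions,
with purely illustrative names:

    class ExampleCommand(object):
        min_args = 1    # was written as "min_args=1"
        max_args = 1

        def config_ids(self, app_configs):
            # padding inside brackets goes away:
            # "[ ac._id for ac in p.app_configs ]" -> "[ac._id for ac in ...]"
            return [ac._id for ac in app_configs]


    def indent(depth):
        # operators gain surrounding spaces: "' '*4*depth" -> "' ' * 4 * depth"
        return ' ' * 4 * depth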

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/smtp_server.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/smtp_server.py b/Allura/allura/command/smtp_server.py
index ca26cb4..b113072 100644
--- a/Allura/allura/command/smtp_server.py
+++ b/Allura/allura/command/smtp_server.py
@@ -27,9 +27,10 @@ from allura.command import base
 
 from paste.deploy.converters import asint
 
+
 class SMTPServerCommand(base.Command):
-    min_args=1
-    max_args=1
+    min_args = 1
+    max_args = 1
     usage = '<ini file>'
     summary = 'Handle incoming emails, routing them to RabbitMQ'
     parser = command.Command.standard_parser(verbose=True)
@@ -45,6 +46,7 @@ class SMTPServerCommand(base.Command):
                    None)
         asyncore.loop()
 
+
 class MailServer(smtpd.SMTPServer):
 
     def process_message(self, peer, mailfrom, rcpttos, data):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/taskd.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/taskd.py b/Allura/allura/command/taskd.py
index 94c6221..f05a4b3 100644
--- a/Allura/allura/command/taskd.py
+++ b/Allura/allura/command/taskd.py
@@ -40,6 +40,7 @@ status_log = logging.getLogger('taskdstatus')
 
 log = logging.getLogger(__name__)
 
+
 @contextmanager
 def proctitle(title):
     """Temporarily change the process title, then restore it."""
@@ -77,23 +78,29 @@ class TaskdCommand(base.Command):
         self.worker()
 
     def graceful_restart(self, signum, frame):
-        base.log.info('taskd pid %s recieved signal %s preparing to do a graceful restart' % (os.getpid(), signum))
+        base.log.info(
+            'taskd pid %s recieved signal %s preparing to do a graceful restart' %
+            (os.getpid(), signum))
         self.keep_running = False
         self.restart_when_done = True
 
     def graceful_stop(self, signum, frame):
-        base.log.info('taskd pid %s recieved signal %s preparing to do a graceful stop' % (os.getpid(), signum))
+        base.log.info(
+            'taskd pid %s recieved signal %s preparing to do a graceful stop' %
+            (os.getpid(), signum))
         self.keep_running = False
 
     def log_current_task(self, signum, frame):
-        entry = 'taskd pid %s is currently handling task %s' % (os.getpid(), getattr(self, 'task', None))
+        entry = 'taskd pid %s is currently handling task %s' % (
+            os.getpid(), getattr(self, 'task', None))
         status_log.info(entry)
         base.log.info(entry)
 
     def worker(self):
         from allura import model as M
         name = '%s pid %s' % (os.uname()[1], os.getpid())
-        wsgi_app = loadapp('config:%s#task' % self.args[0],relative_to=os.getcwd())
+        wsgi_app = loadapp('config:%s#task' %
+                           self.args[0], relative_to=os.getcwd())
         poll_interval = asint(pylons.config.get('monq.poll_interval', 10))
         only = self.options.only
         if only:
@@ -101,8 +108,9 @@ class TaskdCommand(base.Command):
 
         def start_response(status, headers, exc_info=None):
             if status != '200 OK':
-                log.warn('Unexpected http response from taskd request: %s.  Headers: %s',
-                             status, headers)
+                log.warn(
+                    'Unexpected http response from taskd request: %s.  Headers: %s',
+                    status, headers)
 
         def waitfunc_amqp():
             try:
@@ -132,23 +140,26 @@ class TaskdCommand(base.Command):
             try:
                 while self.keep_running:
                     self.task = M.MonQTask.get(
-                            process=name,
-                            waitfunc=waitfunc,
-                            only=only)
+                        process=name,
+                        waitfunc=waitfunc,
+                        only=only)
                     if self.task:
                         with(proctitle("taskd:{0}:{1}".format(
                                 self.task.task_name, self.task._id))):
                             # Build the (fake) request
-                            request_path = '/--%s--/%s/' % (self.task.task_name, self.task._id)
+                            request_path = '/--%s--/%s/' % (self.task.task_name,
+                                                            self.task._id)
                             r = Request.blank(request_path,
-                                              base_url=tg.config['base_url'].rstrip('/') + request_path,
+                                              base_url=tg.config['base_url'].rstrip(
+                                                  '/') + request_path,
                                               environ={'task': self.task,
-                                               })
+                                                       })
                             list(wsgi_app(r.environ, start_response))
                             self.task = None
             except Exception as e:
                 if self.keep_running:
-                    base.log.exception('taskd error %s; pausing for 10s before taking more tasks' % e)
+                    base.log.exception(
+                        'taskd error %s; pausing for 10s before taking more tasks' % e)
                     time.sleep(10)
                 else:
                     base.log.exception('taskd error %s' % e)
@@ -211,7 +222,8 @@ class TaskCommand(base.Command):
     def _timeout(self):
         '''Reset tasks that have been busy too long to 'ready' state'''
         from allura import model as M
-        base.log.info('Reset tasks stuck for %ss or more', self.options.timeout)
+        base.log.info('Reset tasks stuck for %ss or more',
+                      self.options.timeout)
         cutoff = datetime.utcnow() - timedelta(seconds=self.options.timeout)
         M.MonQTask.timeout_tasks(cutoff)
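
The taskd.py hunks are almost entirely long-line rewraps: an over-long log call
is broken so its arguments move onto continuation lines with a hanging indent.
A minimal sketch of that rewrap (the logger name, message, and signal value are
placeholders, not taken from the code above):

    import logging
    import os

    log = logging.getLogger('taskd.example')


    def announce(signum):
        # the single-line form ran past 79 columns; the arguments now sit on
        # their own continuation lines under a four-space hanging indent
        log.info(
            'pid %s received signal %s, preparing a graceful restart',
            os.getpid(), signum)


    announce(15)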
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/taskd_cleanup.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/taskd_cleanup.py b/Allura/allura/command/taskd_cleanup.py
index 3652758..517917f 100644
--- a/Allura/allura/command/taskd_cleanup.py
+++ b/Allura/allura/command/taskd_cleanup.py
@@ -25,16 +25,17 @@ from ming.orm.ormsession import ThreadLocalORMSession
 from allura import model as M
 import base
 
+
 class TaskdCleanupCommand(base.Command):
     summary = 'Tasks cleanup command'
     parser = base.Command.standard_parser(verbose=True)
     parser.add_option('-k', '--kill-stuck-taskd',
-            dest='kill', action='store_true',
-            help='automatically kill stuck taskd processes.  Be careful with this, a taskd process '
-                 'may just be very busy on certain operations and not able to respond to our status request')
+                      dest='kill', action='store_true',
+                      help='automatically kill stuck taskd processes.  Be careful with this, a taskd process '
+                      'may just be very busy on certain operations and not able to respond to our status request')
     parser.add_option('-n', '--num-retry-status-check',
-            dest='num_retry', type='int', default=5,
-            help='number of retries to read taskd status log after sending USR1 signal (5 by default)')
+                      dest='num_retry', type='int', default=5,
+                      help='number of retries to read taskd status log after sending USR1 signal (5 by default)')
     usage = '<ini file> [-k] <taskd status log file>'
     min_args = 2
     max_args = 2
@@ -48,12 +49,14 @@ class TaskdCleanupCommand(base.Command):
         self.suspicious_tasks = []
 
         taskd_pids = self._taskd_pids()
-        base.log.info('Taskd processes on %s: %s' % (self.hostname, taskd_pids))
+        base.log.info('Taskd processes on %s: %s' %
+                      (self.hostname, taskd_pids))
 
         # find stuck taskd processes
         base.log.info('Seeking for stuck taskd processes')
         for pid in taskd_pids:
-            base.log.info('...sending USR1 to %s and watching status log' % (pid))
+            base.log.info('...sending USR1 to %s and watching status log' %
+                          (pid))
             status = self._check_taskd_status(int(pid))
             if status != 'OK':
                 base.log.info('...taskd pid %s has stuck' % pid)
@@ -68,21 +71,25 @@ class TaskdCleanupCommand(base.Command):
         base.log.info('Seeking for forsaken busy tasks')
         tasks = [t for t in self._busy_tasks()
                  if t not in self.error_tasks]  # skip seen tasks
-        base.log.info('Found %s busy tasks on %s' % (len(tasks), self.hostname))
+        base.log.info('Found %s busy tasks on %s' %
+                      (len(tasks), self.hostname))
         for task in tasks:
             base.log.info('Verifying task %s' % task)
             pid = task.process.split()[-1]
             if pid not in taskd_pids:
                 # 'forsaken' task
                 base.log.info('Task is forsaken '
-                    '(can\'t find taskd with given pid). '
-                    'Setting state to \'error\'')
+                              '(can\'t find taskd with given pid). '
+                              'Setting state to \'error\'')
                 task.state = 'error'
                 task.result = 'Can\'t find taskd with given pid'
                 self.error_tasks.append(task)
             else:
-                # check if taskd with given pid really processing this task now:
-                base.log.info('Checking that taskd pid %s is really processing task %s' % (pid, task._id))
+                # check if taskd with given pid really processing this task
+                # now:
+                base.log.info(
+                    'Checking that taskd pid %s is really processing task %s' %
+                    (pid, task._id))
                 status = self._check_task(pid, task)
                 if status != 'OK':
                     # maybe task moved quickly and now is complete
@@ -106,7 +113,8 @@ class TaskdCleanupCommand(base.Command):
             if self.options.kill:
                 base.log.info('...stuck taskd processes were killed')
             else:
-                base.log.info('...to kill these processes run command with -k flag if you are sure they are really stuck')
+                base.log.info(
+                    '...to kill these processes run command with -k flag if you are sure they are really stuck')
         if self.error_tasks:
             base.log.info('Tasks marked as \'error\': %s' % self.error_tasks)
 
@@ -122,8 +130,8 @@ class TaskdCleanupCommand(base.Command):
     def _taskd_pids(self):
         # space after "taskd" to ensure no match on taskd_cleanup (ourself)
         p = subprocess.Popen(['pgrep', '-f', '/paster taskd '],
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE)
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
         stdout, stderr = p.communicate()
         tasks = []
         if p.returncode == 0:
@@ -134,11 +142,12 @@ class TaskdCleanupCommand(base.Command):
         if not retry:
             os.kill(int(pid), signal.SIGUSR1)
         p = subprocess.Popen(['tail', '-n1', self.taskd_status_log],
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE)
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
         stdout, stderr = p.communicate()
         if p.returncode != 0:
-            base.log.error('Can\'t read taskd status log %s' % self.taskd_status_log)
+            base.log.error('Can\'t read taskd status log %s' %
+                           self.taskd_status_log)
             exit(1)
         return stdout
 
@@ -156,7 +165,8 @@ class TaskdCleanupCommand(base.Command):
         for i in range(self.options.num_retry):
             retry = False if i == 0 else True
             status = self._taskd_status(taskd_pid, retry)
-            line = 'taskd pid %s is currently handling task %s' % (taskd_pid, task)
+            line = 'taskd pid %s is currently handling task %s' % (
+                taskd_pid, task)
             if line in status:
                 return 'OK'
             base.log.info('retrying after one second')
@@ -168,7 +178,7 @@ class TaskdCleanupCommand(base.Command):
         # find all 'busy' tasks for this pid and mark them as 'error'
         tasks = list(self._busy_tasks(pid=pid))
         base.log.info('...taskd pid %s has assigned tasks: %s. '
-                'setting state to \'error\' for all of them' % (pid, tasks))
+                      'setting state to \'error\' for all of them' % (pid, tasks))
         for task in tasks:
             task.state = 'error'
             task.result = 'Taskd has stuck with this task'
@@ -178,7 +188,7 @@ class TaskdCleanupCommand(base.Command):
         complete_tasks = M.MonQTask.query.find({
             'state': 'complete',
             '_id': {'$in': [t._id for t in self.suspicious_tasks]}
-        });
+        })
         return [t._id for t in complete_tasks]
 
     def _check_suspicious_tasks(self):
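
The commit does not say which tool produced these edits, so the following is
only a hedged illustration of how a tree can be checked against the same rules
with the pep8 library (later renamed pycodestyle); the path argument is an
assumption, not part of the change:

    import pep8  # "import pycodestyle" on newer installations

    # assumed path; point this at whichever checkout you want to verify
    style = pep8.StyleGuide(max_line_length=79)
    report = style.check_files(['Allura/allura/command'])
    print('%d style problems remaining' % report.total_errors)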

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/config/app_cfg.py
----------------------------------------------------------------------
diff --git a/Allura/allura/config/app_cfg.py b/Allura/allura/config/app_cfg.py
index 6f15406..6a60689 100644
--- a/Allura/allura/config/app_cfg.py
+++ b/Allura/allura/config/app_cfg.py
@@ -48,6 +48,7 @@ from allura.lib.package_path_loader import PackagePathLoader
 
 log = logging.getLogger(__name__)
 
+
 class ForgeConfig(AppConfig):
 
     def __init__(self, root_controller='root'):
@@ -86,7 +87,7 @@ class ForgeConfig(AppConfig):
                 bcc = FileSystemBytecodeCache()
         except:
             log.exception("Error encountered while setting up a" +
-                        " %s-backed bytecode cache for Jinja" % cache_type)
+                          " %s-backed bytecode cache for Jinja" % cache_type)
         return bcc
 
     def setup_jinja_renderer(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/config/environment.py
----------------------------------------------------------------------
diff --git a/Allura/allura/config/environment.py b/Allura/allura/config/environment.py
index 3db89d1..35676fd 100644
--- a/Allura/allura/config/environment.py
+++ b/Allura/allura/config/environment.py
@@ -27,6 +27,5 @@ from allura.config.app_cfg import base_config
 
 __all__ = ['load_environment']
 
-#Use base_config to setup the environment loader function
+# Use base_config to setup the environment loader function
 load_environment = base_config.make_load_environment()
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/config/middleware.py
----------------------------------------------------------------------
diff --git a/Allura/allura/config/middleware.py b/Allura/allura/config/middleware.py
index fc02ffb..cface6d 100644
--- a/Allura/allura/config/middleware.py
+++ b/Allura/allura/config/middleware.py
@@ -59,6 +59,7 @@ def make_app(global_conf, full_stack=True, **app_conf):
     root = app_conf.get('override_root', 'root')
     return _make_core_app(root, global_conf, full_stack, **app_conf)
 
+
 def _make_core_app(root, global_conf, full_stack=True, **app_conf):
     """
     Set allura up with the settings found in the PasteDeploy configuration
@@ -124,7 +125,7 @@ def _make_core_app(root, global_conf, full_stack=True, **app_conf):
                                " the zarkov.host setting in your ini file."
 
     app = tg.TGApp()
-    if asbool(config.get('auth.method', 'local')=='sfx'):
+    if asbool(config.get('auth.method', 'local') == 'sfx'):
         import sfx.middleware
         d = h.config_with_prefix(config, 'auth.')
         d.update(h.config_with_prefix(config, 'sfx.'))
@@ -146,7 +147,7 @@ def _make_core_app(root, global_conf, full_stack=True, **app_conf):
     if (asbool(app_conf.get('auth.method', 'local') == 'sfx')
             and config.get('override_root') != 'task'):
         app = SSLMiddleware(app, app_conf.get('no_redirect.pattern'),
-                app_conf.get('force_ssl.pattern'))
+                            app_conf.get('force_ssl.pattern'))
     # Setup resource manager, widget context SOP
     app = ew.WidgetMiddleware(
         app,
@@ -166,21 +167,25 @@ def _make_core_app(root, global_conf, full_stack=True, **app_conf):
     # "task" wsgi would get a 2nd request to /error/document if we used this middleware
     if config.get('override_root') != 'task':
         # Converts exceptions to HTTP errors, shows traceback in debug mode
-        tg.error.footer_html = '<!-- %s %s -->'  # don't use TG footer with extra CSS & images that take time to load
-        app = tg.error.ErrorHandler(app, global_conf, **config['pylons.errorware'])
+        # don't use TG footer with extra CSS & images that take time to load
+        tg.error.footer_html = '<!-- %s %s -->'
+        app = tg.error.ErrorHandler(
+            app, global_conf, **config['pylons.errorware'])
 
         # Make sure that the wsgi.scheme is set appropriately when we
         # have the funky HTTP_X_SFINC_SSL  environ var
-        if asbool(app_conf.get('auth.method', 'local')=='sfx'):
+        if asbool(app_conf.get('auth.method', 'local') == 'sfx'):
             app = set_scheme_middleware(app)
-        
+
         # Redirect some status codes to /error/document
         if asbool(config['debug']):
             app = StatusCodeRedirect(app, base_config.handle_status_codes)
         else:
-            app = StatusCodeRedirect(app, base_config.handle_status_codes + [500])
+            app = StatusCodeRedirect(
+                app, base_config.handle_status_codes + [500])
     return app
 
+
 def set_scheme_middleware(app):
     def SchemeMiddleware(environ, start_response):
         if asbool(environ.get('HTTP_X_SFINC_SSL', 'false')):
@@ -188,17 +193,21 @@ def set_scheme_middleware(app):
         return app(environ, start_response)
     return SchemeMiddleware
 
+
 def allura_globals_middleware(app):
     def AlluraGlobalsMiddleware(environ, start_response):
         import allura.lib.security
         import allura.lib.app_globals
         registry = environ['paste.registry']
-        registry.register(allura.credentials, allura.lib.security.Credentials())
+        registry.register(allura.credentials,
+                          allura.lib.security.Credentials())
         return app(environ, start_response)
     return AlluraGlobalsMiddleware
 
+
 def get_tg_vars(context):
-    import pylons, tg
+    import pylons
+    import tg
     from allura.lib import helpers as h
     from urllib import quote, quote_plus
     context.setdefault('g', pylons.app_globals)
@@ -208,8 +217,8 @@ def get_tg_vars(context):
     context.setdefault('response', pylons.response)
     context.setdefault('url', pylons.url)
     context.setdefault('tg', dict(
-            config=tg.config,
-            flash_obj=tg.flash,
-            quote=quote,
-            quote_plus=quote_plus,
-            url=tg.url))
+        config=tg.config,
+        flash_obj=tg.flash,
+        quote=quote,
+        quote_plus=quote_plus,
+        url=tg.url))
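
In middleware.py the over-long trailing comment on tg.error.footer_html moves
onto its own line, and the wrapped keyword arguments of the final dict() call
drop to a uniform four-space hanging indent. A short sketch of both moves, with
placeholder names:

    # a long explanatory note moves from a trailing comment onto its own line
    # so the statement underneath stays inside 79 columns
    footer_html = '<!-- %s %s -->'


    def template_vars(config, url_for):
        # wrapped keyword arguments use a four-space hanging indent instead of
        # the deeper indent they carried before
        return dict(
            config=config,
            quote=str,
            url=url_for)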

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/config/resources.py
----------------------------------------------------------------------
diff --git a/Allura/allura/config/resources.py b/Allura/allura/config/resources.py
index b289416..e8717fe 100644
--- a/Allura/allura/config/resources.py
+++ b/Allura/allura/config/resources.py
@@ -24,6 +24,7 @@ from allura.lib.helpers import iter_entry_points
 
 log = logging.getLogger(__name__)
 
+
 def register_ew_resources(manager):
     manager.register_directory(
         'js', pkg_resources.resource_filename('allura', 'lib/widgets/resources/js'))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/attachments.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/attachments.py b/Allura/allura/controllers/attachments.py
index 5b98881..fc0fbbc 100644
--- a/Allura/allura/controllers/attachments.py
+++ b/Allura/allura/controllers/attachments.py
@@ -24,6 +24,7 @@ from ming.utils import LazyProperty
 from allura.lib.security import require, has_access, require_access
 from .base import BaseController
 
+
 class AttachmentsController(BaseController):
     AttachmentControllerClass = None
 
@@ -35,11 +36,12 @@ class AttachmentsController(BaseController):
         if filename:
             if not args:
                 filename = request.path.rsplit('/', 1)[-1]
-            filename=unquote(filename)
+            filename = unquote(filename)
             return self.AttachmentControllerClass(filename, self.artifact), args
         else:
             raise exc.HTTPNotFound
 
+
 class AttachmentController(BaseController):
     AttachmentClass = None
     edit_perm = 'edit'
@@ -55,7 +57,8 @@ class AttachmentController(BaseController):
     def attachment(self):
         metadata = self.AttachmentClass.metadata_for(self.artifact)
         metadata['type'] = 'attachment'
-        attachment = self.AttachmentClass.query.get(filename=self.filename, **metadata)
+        attachment = self.AttachmentClass.query.get(
+            filename=self.filename, **metadata)
         if attachment is None:
             raise exc.HTTPNotFound
         return attachment
@@ -64,7 +67,8 @@ class AttachmentController(BaseController):
     def thumbnail(self):
         metadata = self.AttachmentClass.metadata_for(self.artifact)
         metadata['type'] = 'thumbnail'
-        attachment = self.AttachmentClass.query.get(filename=self.filename, **metadata)
+        attachment = self.AttachmentClass.query.get(
+            filename=self.filename, **metadata)
         if attachment is None:
             raise exc.HTTPNotFound
         return attachment
@@ -81,7 +85,7 @@ class AttachmentController(BaseController):
                 except exc.HTTPNotFound:
                     pass
             redirect(request.referer)
-        embed=False
+        embed = False
         if self.attachment.content_type and self.attachment.content_type.startswith('image/'):
             embed = True
         return self.attachment.serve(embed=embed)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/auth.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/auth.py b/Allura/allura/controllers/auth.py
index bb6e98f..587d151 100644
--- a/Allura/allura/controllers/auth.py
+++ b/Allura/allura/controllers/auth.py
@@ -15,7 +15,9 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-import logging, string, os
+import logging
+import string
+import os
 from urllib import urlencode
 import datetime
 
@@ -48,7 +50,7 @@ from allura.controllers import BaseController
 
 log = logging.getLogger(__name__)
 
-OID_PROVIDERS=[
+OID_PROVIDERS = [
     ('OpenID', '${username}'),
     ('Yahoo!', 'http://yahoo.com'),
     ('Google', 'https://www.google.com/accounts/o8/id'),
@@ -60,16 +62,18 @@ OID_PROVIDERS=[
     ('Vidoop', 'http://${username}.myvidoop.com/'),
     ('Verisign', 'http://${username}.pip.verisignlabs.com/'),
     ('ClaimID', 'http://openid.claimid.com/${username}/'),
-    ('AOL', 'http://openid.aol.com/${username}/') ]
+    ('AOL', 'http://openid.aol.com/${username}/')]
+
 
 class F(object):
     login_form = LoginForm()
     recover_password_change_form = forms.PasswordChangeBase()
     forgotten_password_form = ForgottenPasswordForm()
-    subscription_form=SubscriptionForm()
+    subscription_form = SubscriptionForm()
     registration_form = forms.RegistrationForm(action='/auth/save_new')
     oauth_application_form = OAuthApplicationForm(action='register')
-    oauth_revocation_form = OAuthRevocationForm(action='/auth/preferences/revoke_oauth')
+    oauth_revocation_form = OAuthRevocationForm(
+        action='/auth/preferences/revoke_oauth')
     change_personal_data_form = forms.PersonalDataForm()
     add_socialnetwork_form = forms.AddSocialNetworkForm()
     remove_socialnetwork_form = forms.RemoveSocialNetworkForm()
@@ -84,6 +88,7 @@ class F(object):
     save_skill_form = forms.AddUserSkillForm()
     remove_skill_form = forms.RemoveSkillForm()
 
+
 class AuthController(BaseController):
 
     def __init__(self):
@@ -125,7 +130,8 @@ class AuthController(BaseController):
         else:
             oid_url = username
         return verify_oid(oid_url, failure_redirect='.',
-                          return_to='login_process_oid?%s' % urlencode(dict(return_to=return_to)),
+                          return_to='login_process_oid?%s' % urlencode(
+                              dict(return_to=return_to)),
                           title='OpenID Login',
                           prompt='Click below to continue')
 
@@ -160,11 +166,13 @@ class AuthController(BaseController):
         login_url = config.get('auth.login_url', '/auth/')
         if not hash:
             redirect(login_url)
-        user_record = M.User.query.find({'tool_data.AuthPasswordReset.hash': hash}).first()
+        user_record = M.User.query.find(
+            {'tool_data.AuthPasswordReset.hash': hash}).first()
         if not user_record:
             flash('Unable to process reset, please try again')
             redirect(login_url)
-        hash_expiry = user_record.get_tool_data('AuthPasswordReset', 'hash_expiry')
+        hash_expiry = user_record.get_tool_data(
+            'AuthPasswordReset', 'hash_expiry')
         if not hash_expiry or hash_expiry < datetime.datetime.utcnow():
             flash('Unable to process reset, please try again')
             redirect(login_url)
@@ -330,18 +338,19 @@ To reset your password on %s, please visit the following URL:
             return 'No project at %s' % repo_path
         if not rest:
             return '%s does not include a repo mount point' % repo_path
-        h.set_context(project.shortname, rest[0], neighborhood=project.neighborhood)
+        h.set_context(project.shortname,
+                      rest[0], neighborhood=project.neighborhood)
         if c.app is None or not getattr(c.app, 'repo'):
             return 'Cannot find repo at %s' % repo_path
         allura.tasks.repo_tasks.refresh.post()
         return '%r refresh queued.\n' % c.app.repo
 
-
     def _auth_repos(self, user):
         def _unix_group_name(neighborhood, shortname):
             'shameless copied from sfx_api.py'
-            path = neighborhood.url_prefix + shortname[len(neighborhood.shortname_prefix):]
-            parts = [ p for p in path.split('/') if p ]
+            path = neighborhood.url_prefix + \
+                shortname[len(neighborhood.shortname_prefix):]
+            parts = [p for p in path.split('/') if p]
             if len(parts) == 2 and parts[0] == 'p':
                 parts = parts[1:]
             return '.'.join(reversed(parts))
@@ -361,7 +370,6 @@ To reset your password on %s, please visit the following URL:
         repos.sort()
         return repos
 
-
     @expose('json:')
     def repo_permissions(self, repo_path=None, username=None, **kw):
         """Expects repo_path to be a filesystem path like
@@ -371,11 +379,12 @@ To reset your password on %s, please visit the following URL:
 
         Returns JSON describing this user's permissions on that repo.
         """
-        disallow = dict(allow_read=False, allow_write=False, allow_create=False)
+        disallow = dict(allow_read=False, allow_write=False,
+                        allow_create=False)
         # Find the user
         user = M.User.by_username(username)
         if not user:
-            response.status=404
+            response.status = 404
             return dict(disallow, error='unknown user')
         if not repo_path:
             return dict(allow_write=self._auth_repos(user))
@@ -387,7 +396,7 @@ To reset your password on %s, please visit the following URL:
             project, neighborhood = parts[0].split('.')
         else:
             project, neighborhood = parts[0], 'p'
-        parts = [ neighborhood, project ] + parts[1:]
+        parts = [neighborhood, project] + parts[1:]
         project_path = '/' + '/'.join(parts)
         project, rest = h.find_project(project_path)
         if project is None:
@@ -407,6 +416,7 @@ To reset your password on %s, please visit the following URL:
                     allow_write=has_access(c.app, 'write')(user=user),
                     allow_create=has_access(c.app, 'create')(user=user))
 
+
 class PreferencesController(BaseController):
 
     def _check_security(self):
@@ -419,9 +429,9 @@ class PreferencesController(BaseController):
         menu = provider.account_navigation()
         api_token = M.ApiToken.query.get(user_id=c.user._id)
         return dict(
-                menu=menu,
-                api_token=api_token,
-            )
+            menu=menu,
+            api_token=api_token,
+        )
 
     @h.vardec
     @expose()
@@ -436,14 +446,15 @@ class PreferencesController(BaseController):
                **kw):
         if config.get('auth.method', 'local') == 'local':
             if not preferences.get('display_name'):
-                flash("Display Name cannot be empty.",'error')
+                flash("Display Name cannot be empty.", 'error')
                 redirect('.')
             c.user.set_pref('display_name', preferences['display_name'])
             for i, (old_a, data) in enumerate(zip(c.user.email_addresses, addr or [])):
                 obj = c.user.address_object(old_a)
                 if data.get('delete') or not obj:
                     del c.user.email_addresses[i]
-                    if obj: obj.delete()
+                    if obj:
+                        obj.delete()
             c.user.set_pref('email_address', primary_addr)
             if new_addr.get('claim'):
                 if M.EmailAddress.query.get(_id=new_addr['addr'], confirmed=True):
@@ -451,14 +462,15 @@ class PreferencesController(BaseController):
                 else:
                     c.user.email_addresses.append(new_addr['addr'])
                     em = M.EmailAddress.upsert(new_addr['addr'])
-                    em.claimed_by_user_id=c.user._id
+                    em.claimed_by_user_id = c.user._id
                     em.send_verification_link()
             for i, (old_oid, data) in enumerate(zip(c.user.open_ids, oid or [])):
                 obj = c.user.openid_object(old_oid)
                 if data.get('delete') or not obj:
                     del c.user.open_ids[i]
-                    if obj: obj.delete()
-            for k,v in preferences.iteritems():
+                    if obj:
+                        obj.delete()
+            for k, v in preferences.iteritems():
                 if k == 'results_per_page':
                     v = int(v)
                 c.user.set_pref(k, v)
@@ -478,7 +490,8 @@ class PreferencesController(BaseController):
     @require_post()
     def del_api_token(self):
         tok = M.ApiToken.query.get(user_id=c.user._id)
-        if tok is None: return
+        if tok is None:
+            return
         tok.delete()
         redirect(request.referer)
 
@@ -513,6 +526,7 @@ class PreferencesController(BaseController):
         c.user.set_pref('disable_user_messages', not allow_user_messages)
         redirect(request.referer)
 
+
 class UserInfoController(BaseController):
 
     def __init__(self, *args, **kwargs):
@@ -537,13 +551,14 @@ class UserInfoController(BaseController):
         require_authenticated()
         c.user.set_pref('sex', kw['sex'])
         c.user.set_pref('birthdate', kw.get('birthdate'))
-        localization={'country':kw.get('country'), 'city':kw.get('city')}
+        localization = {'country': kw.get('country'), 'city': kw.get('city')}
         c.user.set_pref('localization', localization)
         c.user.set_pref('timezone', kw['timezone'])
 
         flash('Your personal data was successfully updated!')
         redirect('.')
 
+
 class UserSkillsController(BaseController):
 
     def __init__(self, category=None):
@@ -564,11 +579,13 @@ class UserSkillsController(BaseController):
         l = []
         parents = []
         if kw.get('selected_category') is not None:
-            selected_skill = M.TroveCategory.query.get(trove_cat_id=int(kw.get('selected_category')))
+            selected_skill = M.TroveCategory.query.get(
+                trove_cat_id=int(kw.get('selected_category')))
         elif self.category:
             selected_skill = self.category
         else:
-            l = M.TroveCategory.query.find(dict(trove_parent_id=0, show_as_skill=True)).all()
+            l = M.TroveCategory.query.find(
+                dict(trove_parent_id=0, show_as_skill=True)).all()
             selected_skill = None
         if selected_skill:
             l = [scat for scat in selected_skill.subcategories
@@ -580,10 +597,10 @@ class UserSkillsController(BaseController):
         provider = plugin.AuthenticationProvider.get(request)
         menu = provider.account_navigation()
         return dict(
-            skills_list = l,
-            selected_skill = selected_skill,
-            parents = parents,
-            menu = menu,
+            skills_list=l,
+            selected_skill=selected_skill,
+            parents=parents,
+            menu=menu,
             add_details_fields=(len(l) == 0))
 
     @expose()
@@ -618,6 +635,7 @@ class UserSkillsController(BaseController):
         flash('Your skills list was successfully updated!')
         redirect('.')
 
+
 class UserContactsController(BaseController):
 
     def _check_security(self):
@@ -693,6 +711,7 @@ class UserContactsController(BaseController):
         flash('Your personal contacts were successfully updated!')
         redirect('.')
 
+
 class UserAvailabilityController(BaseController):
 
     def _check_security(self):
@@ -741,6 +760,7 @@ class UserAvailabilityController(BaseController):
         flash('Your availability timeslots were successfully updated!')
         redirect('.')
 
+
 class SubscriptionsController(BaseController):
 
     def _check_security(self):
@@ -752,16 +772,17 @@ class SubscriptionsController(BaseController):
         c.form = F.subscription_form
         c.revoke_access = F.oauth_revocation_form
         subscriptions = []
-        mailboxes = M.Mailbox.query.find(dict(user_id=c.user._id, is_flash=False))
+        mailboxes = M.Mailbox.query.find(
+            dict(user_id=c.user._id, is_flash=False))
         mailboxes = list(mailboxes.ming_cursor)
         project_collection = M.Project.query.mapper.collection
         app_collection = M.AppConfig.query.mapper.collection
         projects = dict(
             (p._id, p) for p in project_collection.m.find(dict(
-                    _id={'$in': [mb.project_id for mb in mailboxes ]})))
+                _id={'$in': [mb.project_id for mb in mailboxes]})))
         app_index = dict(
             (ac._id, ac) for ac in app_collection.m.find(dict(
-                    _id={'$in': [mb.app_config_id for mb in mailboxes]})))
+                _id={'$in': [mb.app_config_id for mb in mailboxes]})))
 
         for mb in mailboxes:
             project = projects.get(mb.project_id, None)
@@ -772,15 +793,16 @@ class SubscriptionsController(BaseController):
             if app_config is None:
                 continue
             subscriptions.append(dict(
-                    subscription_id=mb._id,
-                    project_name=project.name,
-                    mount_point=app_config.options['mount_point'],
-                    artifact_title=dict(text=mb.artifact_title, href=mb.artifact_url),
-                    topic=mb.topic,
-                    type=mb.type,
-                    frequency=mb.frequency.unit,
-                    artifact=mb.artifact_index_id,
-                    subscribed=True))
+                subscription_id=mb._id,
+                project_name=project.name,
+                mount_point=app_config.options['mount_point'],
+                artifact_title=dict(
+                    text=mb.artifact_title, href=mb.artifact_url),
+                topic=mb.topic,
+                type=mb.type,
+                frequency=mb.frequency.unit,
+                artifact=mb.artifact_index_id,
+                subscribed=True))
 
         my_projects = dict((p._id, p) for p in c.user.my_projects())
         my_tools = app_collection.m.find(dict(
@@ -788,7 +810,7 @@ class SubscriptionsController(BaseController):
         for tool in my_tools:
             p_id = tool.project_id
             subscribed = M.Mailbox.subscribed(
-                    project_id=p_id, app_config_id=tool._id)
+                project_id=p_id, app_config_id=tool._id)
             if not subscribed:
                 subscriptions.append(dict(
                     tool_id=tool._id,
@@ -826,6 +848,7 @@ class SubscriptionsController(BaseController):
 
         redirect(request.referer)
 
+
 class OAuthController(BaseController):
 
     def _check_security(self):
@@ -839,16 +862,17 @@ class OAuthController(BaseController):
         access_tokens = M.OAuthAccessToken.for_user(c.user)
         provider = plugin.AuthenticationProvider.get(request)
         return dict(
-                menu=provider.account_navigation(),
-                consumer_tokens=consumer_tokens,
-                access_tokens=access_tokens,
-            )
+            menu=provider.account_navigation(),
+            consumer_tokens=consumer_tokens,
+            access_tokens=access_tokens,
+        )
 
     @expose()
     @require_post()
     @validate(F.oauth_application_form, error_handler=index)
     def register(self, application_name=None, application_description=None, **kw):
-        M.OAuthConsumerToken(name=application_name, description=application_description)
+        M.OAuthConsumerToken(name=application_name,
+                             description=application_description)
         flash('OAuth Application registered')
         redirect('.')
 
@@ -886,18 +910,18 @@ class OAuthController(BaseController):
             flash('Invalid app ID', 'error')
             redirect('.')
         request_token = M.OAuthRequestToken(
-                consumer_token_id=consumer_token._id,
-                user_id=c.user._id,
-                callback='manual',
-                validation_pin=h.nonce(20),
-                is_bearer=True,
-            )
+            consumer_token_id=consumer_token._id,
+            user_id=c.user._id,
+            callback='manual',
+            validation_pin=h.nonce(20),
+            is_bearer=True,
+        )
         access_token = M.OAuthAccessToken(
-                consumer_token_id=consumer_token._id,
-                request_token_id=c.user._id,
-                user_id=request_token.user_id,
-                is_bearer=True,
-            )
+            consumer_token_id=consumer_token._id,
+            request_token_id=c.user._id,
+            user_id=request_token.user_id,
+            is_bearer=True,
+        )
         redirect('.')
 
     @expose()
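
Among the auth.py edits, one-line compound statements such as
"if obj: obj.delete()" are split so each statement sits on its own line
(pep8 E701). A minimal sketch of that split on a hypothetical helper:

    def delete_token(token):
        # "if token is None: return" becomes two lines, one statement per line
        if token is None:
            return
        token.delete()

    delete_token(None)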

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/base.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/base.py b/Allura/allura/controllers/base.py
index d599e21..215b19d 100644
--- a/Allura/allura/controllers/base.py
+++ b/Allura/allura/controllers/base.py
@@ -21,6 +21,7 @@ from tg.controllers.dispatcher import ObjectDispatcher
 
 
 class BaseController(object):
+
     @expose()
     def _lookup(self, name=None, *remainder):
         """Provide explicit default lookup to avoid dispatching backtracking
@@ -29,6 +30,7 @@ class BaseController(object):
 
 
 class DispatchIndex(object):
+
     """Rewrite default url dispatching for controller.
 
     Catch url that ends with `index.*` and pass it to the `_lookup()`

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/basetest_project_root.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/basetest_project_root.py b/Allura/allura/controllers/basetest_project_root.py
index ceb597b..91cf690 100644
--- a/Allura/allura/controllers/basetest_project_root.py
+++ b/Allura/allura/controllers/basetest_project_root.py
@@ -29,7 +29,7 @@ from webob import exc
 from tg import expose
 from tg.decorators import without_trailing_slash
 
-import  ming.orm.ormsession
+import ming.orm.ormsession
 
 import allura
 from allura.lib.base import WsgiDispatchController
@@ -49,7 +49,9 @@ __all__ = ['RootController']
 
 log = logging.getLogger(__name__)
 
+
 class BasetestProjectRootController(WsgiDispatchController, ProjectController):
+
     '''Root controller for testing -- it behaves just like a
     ProjectController for test/ except that all tools are mounted,
     on-demand, at the mount point that is the same as their entry point
@@ -84,14 +86,17 @@ class BasetestProjectRootController(WsgiDispatchController, ProjectController):
 
     def _setup_request(self):
         # This code fixes a race condition in our tests
-        c.project = M.Project.query.get(shortname='test', neighborhood_id=self.p_nbhd._id)
+        c.project = M.Project.query.get(
+            shortname='test', neighborhood_id=self.p_nbhd._id)
         c.memoize_cache = {}
         count = 20
         while c.project is None:
-            import sys, time
+            import sys
+            import time
             time.sleep(0.5)
             log.warning('Project "test" not found, retrying...')
-            c.project = M.Project.query.get(shortname='test', neighborhood_id=self.p_nbhd._id)
+            c.project = M.Project.query.get(
+                shortname='test', neighborhood_id=self.p_nbhd._id)
             count -= 1
             assert count > 0, 'Timeout waiting for test project to appear'
 
@@ -102,8 +107,9 @@ class BasetestProjectRootController(WsgiDispatchController, ProjectController):
     def _lookup(self, name, *remainder):
         if not h.re_project_name.match(name):
             raise exc.HTTPNotFound, name
-        subproject = M.Project.query.get(shortname=c.project.shortname + '/' + name,
-                                         neighborhood_id=self.p_nbhd._id)
+        subproject = M.Project.query.get(
+            shortname=c.project.shortname + '/' + name,
+            neighborhood_id=self.p_nbhd._id)
         if subproject:
             c.project = subproject
             c.app = None
@@ -123,7 +129,8 @@ class BasetestProjectRootController(WsgiDispatchController, ProjectController):
 
     def __call__(self, environ, start_response):
         c.app = None
-        c.project = M.Project.query.get(shortname='test', neighborhood_id=self.p_nbhd._id)
+        c.project = M.Project.query.get(
+            shortname='test', neighborhood_id=self.p_nbhd._id)
         auth = plugin.AuthenticationProvider.get(request)
         user = auth.by_username(environ.get('username', 'test-admin'))
         if not user:
@@ -132,6 +139,7 @@ class BasetestProjectRootController(WsgiDispatchController, ProjectController):
         c.user = auth.authenticate_request()
         return WsgiDispatchController.__call__(self, environ, start_response)
 
+
 class DispatchTest(object):
 
     @expose()
@@ -141,6 +149,7 @@ class DispatchTest(object):
         else:
             raise exc.HTTPNotFound()
 
+
 class NamedController(object):
 
     def __init__(self, name):
@@ -154,6 +163,7 @@ class NamedController(object):
     def _default(self, *args):
         return 'default(%s)(%r)' % (self.name, args)
 
+
 class SecurityTests(object):
 
     @expose()
@@ -163,16 +173,18 @@ class SecurityTests(object):
             c.user = M.User.anonymous()
         return SecurityTest(), args
 
+
 class SecurityTest(object):
 
     def __init__(self):
         from forgewiki import model as WM
         c.app = c.project.app_instance('wiki')
-        self.page = WM.Page.query.get(app_config_id=c.app.config._id, title='Home')
+        self.page = WM.Page.query.get(
+            app_config_id=c.app.config._id, title='Home')
 
     @expose()
     def forbidden(self):
-        require(lambda:False, 'Never allowed')
+        require(lambda: False, 'Never allowed')
         return ''
 
     @expose()

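The _setup_request() retry above works around a race in the test harness: the
"test" project may not yet be queryable when a request begins. A minimal,
self-contained sketch of the same poll-with-timeout idea, using illustrative
names only (wait_for is not part of Allura):

    import time

    def wait_for(fetch, attempts=20, delay=0.5):
        # Call fetch() until it returns something other than None,
        # failing loudly once the attempts are exhausted.
        value = fetch()
        while value is None:
            attempts -= 1
            assert attempts > 0, 'Timeout waiting for resource to appear'
            time.sleep(delay)
            value = fetch()
        return value

    # e.g. wait_for(lambda: M.Project.query.get(
    #     shortname='test', neighborhood_id=nbhd_id))
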
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/discuss.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/discuss.py b/Allura/allura/controllers/discuss.py
index b80b4e9..2eef299 100644
--- a/Allura/allura/controllers/discuss.py
+++ b/Allura/allura/controllers/discuss.py
@@ -42,16 +42,20 @@ from .feed import FeedArgs, FeedController
 
 log = logging.getLogger(__name__)
 
+
 class pass_validator(object):
+
     def validate(self, v, s):
         return v
-pass_validator=pass_validator()
+pass_validator = pass_validator()
+
 
 class ModelConfig(object):
-    Discussion=M.Discussion
-    Thread=M.Thread
-    Post=M.Post
-    Attachment=M.DiscussionAttachment
+    Discussion = M.Discussion
+    Thread = M.Thread
+    Post = M.Post
+    Attachment = M.DiscussionAttachment
+
 
 class WidgetConfig(object):
     # Forms
@@ -61,7 +65,7 @@ class WidgetConfig(object):
     moderate_post = DW.ModeratePost()
     flag_post = DW.FlagPost()
     post_filter = DW.PostFilter()
-    moderate_posts=DW.ModeratePosts()
+    moderate_posts = DW.ModeratePosts()
     # Other widgets
     discussion = DW.Discussion()
     thread = DW.Thread()
@@ -70,9 +74,11 @@ class WidgetConfig(object):
     discussion_header = DW.DiscussionHeader()
 
 # Controllers
+
+
 class DiscussionController(BaseController, FeedController):
-    M=ModelConfig
-    W=WidgetConfig
+    M = ModelConfig
+    W = WidgetConfig
 
     def __init__(self):
         if not hasattr(self, 'ThreadController'):
@@ -115,7 +121,8 @@ class DiscussionController(BaseController, FeedController):
 
         """
         return FeedArgs(
-            dict(ref_id={'$in': [t.index_id() for t in self.discussion.threads]}),
+            dict(ref_id={'$in': [t.index_id()
+                 for t in self.discussion.threads]}),
             'Recent posts to %s' % self.discussion.name,
             self.discussion.url())
 
@@ -124,16 +131,20 @@ class AppDiscussionController(DiscussionController):
 
     @LazyProperty
     def discussion(self):
-        return self.M.Discussion.query.get(shortname=c.app.config.options.mount_point,
-                                           app_config_id=c.app.config._id)
+        return self.M.Discussion.query.get(
+            shortname=c.app.config.options.mount_point,
+            app_config_id=c.app.config._id)
+
 
 class ThreadsController(BaseController):
-    __metaclass__=h.ProxiedAttrMeta
-    M=h.attrproxy('_discussion_controller', 'M')
-    W=h.attrproxy('_discussion_controller', 'W')
-    ThreadController=h.attrproxy('_discussion_controller', 'ThreadController')
-    PostController=h.attrproxy('_discussion_controller', 'PostController')
-    AttachmentController=h.attrproxy('_discussion_controller', 'AttachmentController')
+    __metaclass__ = h.ProxiedAttrMeta
+    M = h.attrproxy('_discussion_controller', 'M')
+    W = h.attrproxy('_discussion_controller', 'W')
+    ThreadController = h.attrproxy(
+        '_discussion_controller', 'ThreadController')
+    PostController = h.attrproxy('_discussion_controller', 'PostController')
+    AttachmentController = h.attrproxy(
+        '_discussion_controller', 'AttachmentController')
 
     def __init__(self, discussion_controller):
         self._discussion_controller = discussion_controller
@@ -141,18 +152,21 @@ class ThreadsController(BaseController):
     @expose()
     def _lookup(self, id=None, *remainder):
         if id:
-            id=unquote(id)
+            id = unquote(id)
             return self.ThreadController(self._discussion_controller, id), remainder
         else:
             raise exc.HTTPNotFound()
 
+
 class ThreadController(BaseController, FeedController):
-    __metaclass__=h.ProxiedAttrMeta
-    M=h.attrproxy('_discussion_controller', 'M')
-    W=h.attrproxy('_discussion_controller', 'W')
-    ThreadController=h.attrproxy('_discussion_controller', 'ThreadController')
-    PostController=h.attrproxy('_discussion_controller', 'PostController')
-    AttachmentController=h.attrproxy('_discussion_controller', 'AttachmentController')
+    __metaclass__ = h.ProxiedAttrMeta
+    M = h.attrproxy('_discussion_controller', 'M')
+    W = h.attrproxy('_discussion_controller', 'W')
+    ThreadController = h.attrproxy(
+        '_discussion_controller', 'ThreadController')
+    PostController = h.attrproxy('_discussion_controller', 'PostController')
+    AttachmentController = h.attrproxy(
+        '_discussion_controller', 'AttachmentController')
 
     def _check_security(self):
         require_access(self.thread, 'read')
@@ -168,7 +182,7 @@ class ThreadController(BaseController, FeedController):
 
     @expose()
     def _lookup(self, id, *remainder):
-        id=unquote(id)
+        id = unquote(id)
         return self.PostController(self._discussion_controller, self.thread, id), remainder
 
     @expose('jinja:allura:templates/discussion/thread.html')
@@ -177,7 +191,8 @@ class ThreadController(BaseController, FeedController):
         c.thread_header = self.W.thread_header
         limit, page, start = g.handle_paging(limit, page)
         self.thread.num_views += 1
-        M.session.artifact_orm_session._get().skip_mod_date = True # the update to num_views shouldn't affect it
+        # the update to num_views shouldn't affect it
+        M.session.artifact_orm_session._get().skip_mod_date = True
         count = self.thread.query_posts(page=page, limit=int(limit)).count()
         return dict(discussion=self.thread.discussion,
                     thread=self.thread,
@@ -200,7 +215,8 @@ class ThreadController(BaseController, FeedController):
             require_access(self.thread.ref.artifact, 'post')
         kw = self.W.edit_post.to_python(kw, None)
         if not kw['text']:
-            flash('Your post was not saved. You must provide content.', 'error')
+            flash('Your post was not saved. You must provide content.',
+                  'error')
             redirect(request.referer)
 
         file_info = kw.get('file_info', None)
@@ -241,12 +257,14 @@ class ThreadController(BaseController, FeedController):
 
 
 class PostController(BaseController):
-    __metaclass__=h.ProxiedAttrMeta
-    M=h.attrproxy('_discussion_controller', 'M')
-    W=h.attrproxy('_discussion_controller', 'W')
-    ThreadController=h.attrproxy('_discussion_controller', 'ThreadController')
-    PostController=h.attrproxy('_discussion_controller', 'PostController')
-    AttachmentController=h.attrproxy('_discussion_controller', 'AttachmentController')
+    __metaclass__ = h.ProxiedAttrMeta
+    M = h.attrproxy('_discussion_controller', 'M')
+    W = h.attrproxy('_discussion_controller', 'W')
+    ThreadController = h.attrproxy(
+        '_discussion_controller', 'ThreadController')
+    PostController = h.attrproxy('_discussion_controller', 'PostController')
+    AttachmentController = h.attrproxy(
+        '_discussion_controller', 'AttachmentController')
 
     def _check_security(self):
         require_access(self.post, 'read')
@@ -259,7 +277,8 @@ class PostController(BaseController):
 
     @LazyProperty
     def post(self):
-        post = self.M.Post.query.get(slug=self._post_slug, thread_id=self.thread._id)
+        post = self.M.Post.query.get(
+            slug=self._post_slug, thread_id=self.thread._id)
         if post:
             return post
         post = self.M.Post.query.get(slug=self._post_slug)
@@ -279,7 +298,7 @@ class PostController(BaseController):
             post_fields = self.W.edit_post.to_python(kw, None)
             file_info = post_fields.pop('file_info', None)
             self.post.add_multiple_attachments(file_info)
-            for k,v in post_fields.iteritems():
+            for k, v in post_fields.iteritems():
                 try:
                     setattr(self.post, k, v)
                 except AttributeError:
@@ -289,14 +308,16 @@ class PostController(BaseController):
             self.post.last_edit_by_id = c.user._id
             self.post.commit()
             g.director.create_activity(c.user, 'modified', self.post,
-                    target=self.post.thread.artifact or self.post.thread,
-                    related_nodes=[self.post.app_config.project])
+                                       target=self.post.thread.artifact or self.post.thread,
+                                       related_nodes=[self.post.app_config.project])
             redirect(request.referer)
-        elif request.method=='GET':
+        elif request.method == 'GET':
             if version is not None:
                 HC = self.post.__mongometa__.history_class
-                ss = HC.query.find({'artifact_id':self.post._id, 'version':int(version)}).first()
-                if not ss: raise exc.HTTPNotFound
+                ss = HC.query.find(
+                    {'artifact_id': self.post._id, 'version': int(version)}).first()
+                if not ss:
+                    raise exc.HTTPNotFound
                 post = Object(
                     ss.data,
                     acl=self.post.acl,
@@ -307,9 +328,9 @@ class PostController(BaseController):
                     attachments=self.post.attachments,
                     related_artifacts=self.post.related_artifacts,
                     parent_security_context=lambda: None,
-                    )
+                )
             else:
-                post=self.post
+                post = self.post
             return dict(discussion=self.post.discussion,
                         post=post)
 
@@ -341,9 +362,10 @@ class PostController(BaseController):
             self.post.spam()
         elif kw.pop('approve', None):
             self.post.status = 'ok'
-            g.spam_checker.submit_ham(self.post.text, artifact=self.post, user=c.user)
+            g.spam_checker.submit_ham(
+                self.post.text, artifact=self.post, user=c.user)
         self.thread.update_stats()
-        return dict(result ='success')
+        return dict(result='success')
 
     @h.vardec
     @expose()
@@ -366,26 +388,31 @@ class PostController(BaseController):
 
     @expose()
     def _lookup(self, id, *remainder):
-        id=unquote(id)
+        id = unquote(id)
         return self.PostController(
             self._discussion_controller,
             self.thread, self._post_slug + '/' + id), remainder
 
+
 class DiscussionAttachmentController(AttachmentController):
-    AttachmentClass=M.DiscussionAttachment
-    edit_perm='moderate'
+    AttachmentClass = M.DiscussionAttachment
+    edit_perm = 'moderate'
+
 
 class DiscussionAttachmentsController(AttachmentsController):
-    AttachmentControllerClass=DiscussionAttachmentController
+    AttachmentControllerClass = DiscussionAttachmentController
+
 
 class ModerationController(BaseController):
-    __metaclass__=h.ProxiedAttrMeta
+    __metaclass__ = h.ProxiedAttrMeta
     PostModel = M.Post
-    M=h.attrproxy('_discussion_controller', 'M')
-    W=h.attrproxy('_discussion_controller', 'W')
-    ThreadController=h.attrproxy('_discussion_controller', 'ThreadController')
-    PostController=h.attrproxy('_discussion_controller', 'PostController')
-    AttachmentController=h.attrproxy('_discussion_controller', 'AttachmentController')
+    M = h.attrproxy('_discussion_controller', 'M')
+    W = h.attrproxy('_discussion_controller', 'W')
+    ThreadController = h.attrproxy(
+        '_discussion_controller', 'ThreadController')
+    PostController = h.attrproxy('_discussion_controller', 'PostController')
+    AttachmentController = h.attrproxy(
+        '_discussion_controller', 'AttachmentController')
 
     def _check_security(self):
         require_access(self.discussion, 'moderate')
@@ -413,7 +440,7 @@ class ModerationController(BaseController):
         if status != '-':
             query['status'] = status
         if flag:
-            query['flags'] = {'$gte': int(flag) }
+            query['flags'] = {'$gte': int(flag)}
         q = self.PostModel.query.find(query)
         count = q.count()
         if not page:
@@ -437,7 +464,9 @@ class ModerationController(BaseController):
             if 'checked' in p:
                 posted = self.PostModel.query.get(
                     _id=p['_id'],
-                    discussion_id=self.discussion._id,  # make sure nobody hacks the HTML form to moderate other posts
+                    # make sure nobody hacks the HTML form to moderate other
+                    # posts
+                    discussion_id=self.discussion._id,
                 )
                 if posted:
                     if delete:
@@ -450,13 +479,15 @@ class ModerationController(BaseController):
                         posted.spam()
                     elif approve and posted.status != 'ok':
                         posted.status = 'ok'
-                        g.spam_checker.submit_ham(posted.text, artifact=posted, user=c.user)
+                        g.spam_checker.submit_ham(
+                            posted.text, artifact=posted, user=c.user)
                         posted.thread.last_post_date = max(
                             posted.thread.last_post_date,
                             posted.mod_date)
                         posted.thread.num_replies += 1
         redirect(request.referer)
 
+
 class PostRestController(PostController):
 
     @expose('json:')
@@ -474,6 +505,7 @@ class PostRestController(PostController):
         self.thread.num_replies += 1
         redirect(post.slug.split('/')[-1] + '/')
 
+
 class ThreadRestController(ThreadController):
 
     @expose('json:')
@@ -490,6 +522,7 @@ class ThreadRestController(ThreadController):
         p = self.thread.add_post(**kw)
         redirect(p.slug + '/')
 
+
 class AppDiscussionRestController(AppDiscussionController):
     ThreadController = ThreadRestController
     PostController = PostRestController

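The ModelConfig and WidgetConfig hooks above are the extension point that lets
a tool reuse DiscussionController with its own document classes. A hedged
sketch of that pattern, assuming a hypothetical forum tool whose models are
imported as DM (the Forum* names below are assumptions, not code from this
patch):

    class ForumModelConfig(ModelConfig):
        # Swap in tool-specific document classes; these four attribute
        # names are the contract DiscussionController relies on.
        Discussion = DM.Forum
        Thread = DM.ForumThread
        Post = DM.ForumPost
        Attachment = DM.ForumAttachment

    class ForumDiscussionController(DiscussionController):
        M = ForumModelConfig
        W = WidgetConfig
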
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/feed.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/feed.py b/Allura/allura/controllers/feed.py
index f6e1420..7aaa789 100644
--- a/Allura/allura/controllers/feed.py
+++ b/Allura/allura/controllers/feed.py
@@ -26,12 +26,14 @@ from allura.lib import helpers as h
 
 
 class FeedArgs(object):
+
     """A facade for the arguments required by
     :meth:`allura.model.artifact.Feed.feed`.
 
     Used by :meth:`FeedController.feed` to create a real feed.
 
     """
+
     def __init__(self, query, title, url, description=None):
         self.query = query
         self.title = title
@@ -40,6 +42,7 @@ class FeedArgs(object):
 
 
 class FeedController(object):
+
     """Mixin class which adds RSS and Atom feed endpoints to an existing
     controller.
 
@@ -70,10 +73,10 @@ class FeedController(object):
     @without_trailing_slash
     @expose()
     @validate(dict(
-            since=h.DateTimeConverter(if_empty=None, if_invalid=None),
-            until=h.DateTimeConverter(if_empty=None, if_invalid=None),
-            page=V.Int(if_empty=None, if_invalid=None),
-            limit=V.Int(if_empty=None, if_invalid=None)))
+        since=h.DateTimeConverter(if_empty=None, if_invalid=None),
+        until=h.DateTimeConverter(if_empty=None, if_invalid=None),
+        page=V.Int(if_empty=None, if_invalid=None),
+        limit=V.Int(if_empty=None, if_invalid=None)))
     def feed(self, since=None, until=None, page=None, limit=None, **kw):
         """Return a utf8-encoded XML feed (RSS or Atom) to the browser.
         """

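FeedController is designed to be mixed into an existing controller, which then
only describes its feed by returning a FeedArgs, as DiscussionController does
above. A minimal sketch under that assumption (PageController and its page
attribute are illustrative; the get_feed signature mirrors the one used by
Allura's controllers and should be treated as an assumption):

    class PageController(BaseController, FeedController):

        def get_feed(self, project, app, user):
            # Describe the feed; FeedController turns this description
            # into the actual RSS or Atom response.
            return FeedArgs(
                dict(ref_id=self.page.index_id()),
                'Recent changes to %s' % self.page.title,
                self.page.url())
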

[19/36] PEP8 cleanup

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_auth.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_auth.py b/Allura/allura/tests/functional/test_auth.py
index 5ac17c6..d9362d4 100644
--- a/Allura/allura/tests/functional/test_auth.py
+++ b/Allura/allura/tests/functional/test_auth.py
@@ -22,13 +22,13 @@ from urlparse import urlparse, parse_qs
 
 import mock
 from nose.tools import (
-        assert_equal,
-        assert_not_equal,
-        assert_is_none,
-        assert_is_not_none,
-        assert_in,
-        assert_true
-    )
+    assert_equal,
+    assert_not_equal,
+    assert_is_none,
+    assert_is_not_none,
+    assert_in,
+    assert_true
+)
 from pylons import tmpl_context as c
 from allura.tests import TestController
 from allura.tests import decorators as td
@@ -46,72 +46,77 @@ from pylons import request
 def unentity(s):
     return s.replace('&quot;', '"')
 
+
 class TestAuth(TestController):
 
     def test_login(self):
         result = self.app.get('/auth/')
-        r = self.app.post('/auth/send_verification_link', params=dict(a='test@example.com'))
+        r = self.app.post('/auth/send_verification_link',
+                          params=dict(a='test@example.com'))
         email = M.User.query.get(username='test-admin').email_addresses[0]
         r = self.app.post('/auth/send_verification_link', params=dict(a=email))
         ThreadLocalORMSession.flush_all()
         r = self.app.get('/auth/verify_addr', params=dict(a='foo'))
-        assert json.loads(self.webflash(r))['status'] == 'error', self.webflash(r)
+        assert json.loads(self.webflash(r))[
+            'status'] == 'error', self.webflash(r)
         ea = M.EmailAddress.query.find().first()
         r = self.app.get('/auth/verify_addr', params=dict(a=ea.nonce))
         assert json.loads(self.webflash(r))['status'] == 'ok', self.webflash(r)
         r = self.app.get('/auth/logout')
         r = self.app.post('/auth/do_login', params=dict(
-                username='test-user', password='foo'))
+            username='test-user', password='foo'))
         r = self.app.post('/auth/do_login', params=dict(
-                username='test-user', password='food'))
+            username='test-user', password='food'))
         assert 'Invalid login' in str(r), r.showbrowser()
         r = self.app.post('/auth/do_login', params=dict(
-                username='test-usera', password='foo'))
+            username='test-usera', password='foo'))
         assert 'Invalid login' in str(r), r.showbrowser()
 
     @td.with_user_project('test-admin')
     def test_prefs(self):
-        r = self.app.get('/auth/preferences/', extra_environ=dict(username='test-admin'))
+        r = self.app.get('/auth/preferences/',
+                         extra_environ=dict(username='test-admin'))
         assert 'test@example.com' not in r
         r = self.app.post('/auth/preferences/update', params={
-                 'preferences.display_name':'Test Admin',
-                 'new_addr.addr':'test@example.com',
-                 'new_addr.claim':'Claim Address',
-                 'primary_addr':'test-admin@users.localhost',
-                 'preferences.email_format':'plain'},
-                extra_environ=dict(username='test-admin'))
+            'preferences.display_name': 'Test Admin',
+            'new_addr.addr': 'test@example.com',
+            'new_addr.claim': 'Claim Address',
+            'primary_addr': 'test-admin@users.localhost',
+            'preferences.email_format': 'plain'},
+            extra_environ=dict(username='test-admin'))
         r = self.app.get('/auth/preferences/')
         assert 'test@example.com' in r
         r = self.app.post('/auth/preferences/update', params={
-                 'preferences.display_name':'Test Admin',
-                 'addr-1.ord':'1',
-                 'addr-2.ord':'1',
-                 'addr-2.delete':'on',
-                 'new_addr.addr':'',
-                 'primary_addr':'test-admin@users.localhost',
-                 'preferences.email_format':'plain'},
-                extra_environ=dict(username='test-admin'))
+            'preferences.display_name': 'Test Admin',
+            'addr-1.ord': '1',
+            'addr-2.ord': '1',
+            'addr-2.delete': 'on',
+            'new_addr.addr': '',
+            'primary_addr': 'test-admin@users.localhost',
+            'preferences.email_format': 'plain'},
+            extra_environ=dict(username='test-admin'))
         r = self.app.get('/auth/preferences/')
         assert 'test@example.com' not in r
         ea = M.EmailAddress.query.get(_id='test-admin@users.localhost')
         ea.confirmed = True
         ThreadLocalORMSession.flush_all()
         r = self.app.post('/auth/preferences/update', params={
-                 'preferences.display_name':'Test Admin',
-                 'new_addr.addr':'test-admin@users.localhost',
-                 'new_addr.claim':'Claim Address',
-                 'primary_addr':'test-admin@users.localhost',
-                 'preferences.email_format':'plain'},
-                extra_environ=dict(username='test-admin'))
+            'preferences.display_name': 'Test Admin',
+            'new_addr.addr': 'test-admin@users.localhost',
+            'new_addr.claim': 'Claim Address',
+            'primary_addr': 'test-admin@users.localhost',
+            'preferences.email_format': 'plain'},
+            extra_environ=dict(username='test-admin'))
 
     @td.with_user_project('test-admin')
     def test_prefs_subscriptions(self):
         r = self.app.get('/auth/subscriptions/',
-                extra_environ=dict(username='test-admin'))
+                         extra_environ=dict(username='test-admin'))
         subscriptions = M.Mailbox.query.find(dict(
             user_id=c.user._id, is_flash=False)).all()
         # make sure page actually lists all the user's subscriptions
-        assert len(subscriptions) > 0, 'Test user has no subscriptions, cannot verify that they are shown'
+        assert len(
+            subscriptions) > 0, 'Test user has no subscriptions, cannot verify that they are shown'
         for m in subscriptions:
             assert m._id in r, "Page doesn't list subscription for Mailbox._id = %s" % m._id
 
@@ -130,7 +135,7 @@ class TestAuth(TestController):
         for f in r.forms.itervalues():
             if f.action == 'update_subscriptions':
                 form = f
-                break;
+                break
         assert form is not None, "Can't find subscriptions form"
         return form
 
@@ -149,7 +154,7 @@ class TestAuth(TestController):
     @td.with_user_project('test-admin')
     def test_prefs_subscriptions_subscribe(self):
         resp = self.app.get('/auth/subscriptions/',
-                extra_environ=dict(username='test-admin'))
+                            extra_environ=dict(username='test-admin'))
         form = self._find_subscriptions_form(resp)
         # find not subscribed tool, subscribe and verify
         field_name = self._find_subscriptions_field(form, subscribed=False)
@@ -165,7 +170,7 @@ class TestAuth(TestController):
     @td.with_user_project('test-admin')
     def test_prefs_subscriptions_unsubscribe(self):
         resp = self.app.get('/auth/subscriptions/',
-                extra_environ=dict(username='test-admin'))
+                            extra_environ=dict(username='test-admin'))
         form = self._find_subscriptions_form(resp)
         field_name = self._find_subscriptions_field(form, subscribed=True)
         s_id = ObjectId(form.fields[field_name + '.subscription_id'][0].value)
@@ -177,59 +182,62 @@ class TestAuth(TestController):
         assert not s, "User still has subscription with Mailbox._id %s" % s_id
 
     def test_format_email(self):
-        self.app.post('/auth/subscriptions/update_subscriptions', params={'email_format': 'html', 'subscriptions': ''})
+        self.app.post('/auth/subscriptions/update_subscriptions',
+                      params={'email_format': 'html', 'subscriptions': ''})
         r = self.app.get('/auth/subscriptions/')
         assert '<option selected value="html">HTML</option>' in r
-        self.app.post('/auth/subscriptions/update_subscriptions', params={'email_format': 'plain', 'subscriptions': ''})
+        self.app.post('/auth/subscriptions/update_subscriptions',
+                      params={'email_format': 'plain', 'subscriptions': ''})
         r = self.app.get('/auth/subscriptions/')
         assert '<option selected value="plain">Plain Text</option>' in r
-        self.app.post('/auth/subscriptions/update_subscriptions', params={'email_format': 'both', 'subscriptions': ''})
+        self.app.post('/auth/subscriptions/update_subscriptions',
+                      params={'email_format': 'both', 'subscriptions': ''})
         r = self.app.get('/auth/subscriptions/')
         assert '<option selected value="both">Combined</option>' in r
 
     def test_api_key(self):
-         r = self.app.get('/auth/preferences/')
-         assert 'No API token generated' in r
-         r = self.app.post('/auth/preferences/gen_api_token', status=302)
-         r = self.app.get('/auth/preferences/')
-         assert 'No API token generated' not in r
-         assert 'API Key:' in r
-         assert 'Secret Key:' in r
-         r = self.app.post('/auth/preferences/del_api_token', status=302)
-         r = self.app.get('/auth/preferences/')
-         assert 'No API token generated' in r
+        r = self.app.get('/auth/preferences/')
+        assert 'No API token generated' in r
+        r = self.app.post('/auth/preferences/gen_api_token', status=302)
+        r = self.app.get('/auth/preferences/')
+        assert 'No API token generated' not in r
+        assert 'API Key:' in r
+        assert 'Secret Key:' in r
+        r = self.app.post('/auth/preferences/del_api_token', status=302)
+        r = self.app.get('/auth/preferences/')
+        assert 'No API token generated' in r
 
     @mock.patch('allura.controllers.auth.verify_oid')
     def test_login_verify_oid_with_provider(self, verify_oid):
         verify_oid.return_value = dict()
         result = self.app.get('/auth/login_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
-                status=200)
+            provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
+            status=200)
         verify_oid.assert_called_with('http://www.google.com/accounts/o8/id',
-                failure_redirect='.',
-                return_to='login_process_oid?return_to=None',
-                title='OpenID Login',
-                prompt='Click below to continue');
+                                      failure_redirect='.',
+                                      return_to='login_process_oid?return_to=None',
+                                      title='OpenID Login',
+                                      prompt='Click below to continue')
 
     @mock.patch('allura.controllers.auth.verify_oid')
     def test_login_verify_oid_without_provider(self, verify_oid):
         verify_oid.return_value = dict()
         result = self.app.get('/auth/login_verify_oid', params=dict(
-                provider='', username='rick446@usa.net'),
-                status=200)
+            provider='', username='rick446@usa.net'),
+            status=200)
         verify_oid.assert_called_with('rick446@usa.net',
-                failure_redirect='.',
-                return_to='login_process_oid?return_to=None',
-                title='OpenID Login',
-                prompt='Click below to continue');
+                                      failure_redirect='.',
+                                      return_to='login_process_oid?return_to=None',
+                                      title='OpenID Login',
+                                      prompt='Click below to continue')
 
     @mock.patch('allura.lib.oid_helper.consumer.Consumer')
     def test_login_verify_oid_good_provider_no_redirect(self, Consumer):
         Consumer().begin().shouldSendRedirect.return_value = False
         Consumer().begin().formMarkup.return_value = "<!-- I'm a mock object! -->"
         result = self.app.get('/auth/login_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
-                status=200)
+            provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
+            status=200)
         flash = self.webflash(result)
         assert_equal(flash, '')
 
@@ -238,18 +246,19 @@ class TestAuth(TestController):
         Consumer().begin().shouldSendRedirect.return_value = True
         Consumer().begin().redirectURL.return_value = 'http://some.url/'
         result = self.app.get('/auth/login_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
-                status=302)
+            provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
+            status=302)
         assert_equal(result.headers['Location'], 'http://some.url/')
         flash = self.webflash(result)
         assert_equal(flash, '')
 
     @mock.patch('allura.lib.oid_helper.consumer.Consumer')
     def test_login_verify_oid_bad_provider(self, Consumer):
-        Consumer().begin.side_effect = oid_helper.consumer.DiscoveryFailure('bad', mock.Mock('response'))
+        Consumer().begin.side_effect = oid_helper.consumer.DiscoveryFailure(
+            'bad', mock.Mock('response'))
         result = self.app.get('/auth/login_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/', username='rick446@usa.net'),
-                              status=302)
+            provider='http://www.google.com/accounts/', username='rick446@usa.net'),
+            status=302)
         flash = self.webflash(result)
         assert_equal(flash, '{"status": "error", "message": "bad"}')
 
@@ -257,42 +266,43 @@ class TestAuth(TestController):
     def test_login_verify_oid_bad_provider2(self, Consumer):
         Consumer().begin.return_value = None
         result = self.app.get('/auth/login_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/', username='rick446@usa.net'),
-                              status=302)
+            provider='http://www.google.com/accounts/', username='rick446@usa.net'),
+            status=302)
         flash = self.webflash(result)
-        assert_equal(flash, '{"status": "error", "message": "No openid services found for <code>http://www.google.com/accounts/</code>"}')
+        assert_equal(
+            flash, '{"status": "error", "message": "No openid services found for <code>http://www.google.com/accounts/</code>"}')
 
     @mock.patch('allura.controllers.auth.verify_oid')
     def test_claim_verify_oid_with_provider(self, verify_oid):
         verify_oid.return_value = dict()
         result = self.app.get('/auth/claim_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
-                status=200)
+            provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
+            status=200)
         verify_oid.assert_called_with('http://www.google.com/accounts/o8/id',
-                failure_redirect='claim_oid',
-                return_to='claim_process_oid',
-                title='Claim OpenID',
-                prompt='Click below to continue');
+                                      failure_redirect='claim_oid',
+                                      return_to='claim_process_oid',
+                                      title='Claim OpenID',
+                                      prompt='Click below to continue')
 
     @mock.patch('allura.controllers.auth.verify_oid')
     def test_claim_verify_oid_without_provider(self, verify_oid):
         verify_oid.return_value = dict()
         result = self.app.get('/auth/claim_verify_oid', params=dict(
-                provider='', username='rick446@usa.net'),
-                status=200)
+            provider='', username='rick446@usa.net'),
+            status=200)
         verify_oid.assert_called_with('rick446@usa.net',
-                failure_redirect='claim_oid',
-                return_to='claim_process_oid',
-                title='Claim OpenID',
-                prompt='Click below to continue');
+                                      failure_redirect='claim_oid',
+                                      return_to='claim_process_oid',
+                                      title='Claim OpenID',
+                                      prompt='Click below to continue')
 
     @mock.patch('allura.lib.oid_helper.consumer.Consumer')
     def test_claim_verify_oid_good_provider_no_redirect(self, Consumer):
         Consumer().begin().shouldSendRedirect.return_value = False
         Consumer().begin().formMarkup.return_value = "<!-- I'm a mock object! -->"
         result = self.app.get('/auth/claim_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
-                status=200)
+            provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
+            status=200)
         flash = self.webflash(result)
         assert_equal(flash, '')
 
@@ -301,18 +311,19 @@ class TestAuth(TestController):
         Consumer().begin().shouldSendRedirect.return_value = True
         Consumer().begin().redirectURL.return_value = 'http://some.url/'
         result = self.app.get('/auth/claim_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
-                status=302)
+            provider='http://www.google.com/accounts/o8/id', username='rick446@usa.net'),
+            status=302)
         assert_equal(result.headers['Location'], 'http://some.url/')
         flash = self.webflash(result)
         assert_equal(flash, '')
 
     @mock.patch('allura.lib.oid_helper.consumer.Consumer')
     def test_claim_verify_oid_bad_provider(self, Consumer):
-        Consumer().begin.side_effect = oid_helper.consumer.DiscoveryFailure('bad', mock.Mock('response'))
+        Consumer().begin.side_effect = oid_helper.consumer.DiscoveryFailure(
+            'bad', mock.Mock('response'))
         result = self.app.get('/auth/claim_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/', username='rick446@usa.net'),
-                              status=302)
+            provider='http://www.google.com/accounts/', username='rick446@usa.net'),
+            status=302)
         flash = self.webflash(result)
         assert_equal(flash, '{"status": "error", "message": "bad"}')
 
@@ -320,36 +331,41 @@ class TestAuth(TestController):
     def test_claim_verify_oid_bad_provider2(self, Consumer):
         Consumer().begin.return_value = None
         result = self.app.get('/auth/claim_verify_oid', params=dict(
-                provider='http://www.google.com/accounts/', username='rick446@usa.net'),
-                              status=302)
+            provider='http://www.google.com/accounts/', username='rick446@usa.net'),
+            status=302)
         flash = self.webflash(result)
-        assert_equal(flash, '{"status": "error", "message": "No openid services found for <code>http://www.google.com/accounts/</code>"}')
+        assert_equal(
+            flash, '{"status": "error", "message": "No openid services found for <code>http://www.google.com/accounts/</code>"}')
 
     def test_setup_openid_user_current_user(self):
         r = self.app.get('/auth/setup_openid_user')
         r = self.app.post('/auth/do_setup_openid_user', params=dict(
-                username='test-admin', display_name='Test Admin'))
+            username='test-admin', display_name='Test Admin'))
         flash = self.webflash(r)
-        assert_equal(flash, '{"status": "ok", "message": "Your username has been set to test-admin."}')
+        assert_equal(
+            flash, '{"status": "ok", "message": "Your username has been set to test-admin."}')
 
     def test_setup_openid_user_taken_user(self):
         r = self.app.get('/auth/setup_openid_user')
         r = self.app.post('/auth/do_setup_openid_user', params=dict(
-                username='test-user', display_name='Test User'))
+            username='test-user', display_name='Test User'))
         flash = self.webflash(r)
-        assert_equal(flash, '{"status": "error", "message": "That username is already taken.  Please choose another."}')
+        assert_equal(
+            flash, '{"status": "error", "message": "That username is already taken.  Please choose another."}')
 
     def test_setup_openid_user_new_user(self):
         r = self.app.get('/auth/setup_openid_user')
         r = self.app.post('/auth/do_setup_openid_user', params=dict(
-                username='test-alkajs', display_name='Test Alkajs'))
+            username='test-alkajs', display_name='Test Alkajs'))
         flash = self.webflash(r)
-        assert_equal(flash, '{"status": "ok", "message": "Your username has been set to test-alkajs."}')
+        assert_equal(
+            flash, '{"status": "ok", "message": "Your username has been set to test-alkajs."}')
 
     def test_create_account(self):
         r = self.app.get('/auth/create_account')
         assert 'Create an Account' in r
-        r = self.app.post('/auth/save_new', params=dict(username='aaa',pw='123'))
+        r = self.app.post('/auth/save_new',
+                          params=dict(username='aaa', pw='123'))
         assert 'Enter a value 8 characters long or more' in r
         r = self.app.post(
             '/auth/save_new',
@@ -381,14 +397,17 @@ class TestAuth(TestController):
         p_nbhd = M.Neighborhood.query.get(name='Projects')
         p = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
         self.app.post('/auth/save_new', params=dict(
-                username='aaa',
-                pw='12345678',
-                pw2='12345678',
-                display_name='Test Me')).follow()
+            username='aaa',
+            pw='12345678',
+            pw2='12345678',
+            display_name='Test Me')).follow()
         user = M.User.query.get(username='aaa')
-        assert M.ProjectRole.query.find(dict(user_id=user._id, project_id=p._id)).count() == 0
-        r = self.app.get('/p/test/admin/permissions',extra_environ=dict(username='aaa'), status=403)
-        assert M.ProjectRole.query.find(dict(user_id=user._id, project_id=p._id)).count() <= 1
+        assert M.ProjectRole.query.find(
+            dict(user_id=user._id, project_id=p._id)).count() == 0
+        r = self.app.get('/p/test/admin/permissions',
+                         extra_environ=dict(username='aaa'), status=403)
+        assert M.ProjectRole.query.find(
+            dict(user_id=user._id, project_id=p._id)).count() <= 1
 
     def test_default_lookup(self):
         # Make sure that default _lookup() throws 404
@@ -398,16 +417,19 @@ class TestAuth(TestController):
         user = M.User.query.get(username='test-admin')
         sess = session(user)
         assert not user.disabled
-        r = self.app.get('/p/test/admin/', extra_environ={'username':'test-admin'})
+        r = self.app.get('/p/test/admin/',
+                         extra_environ={'username': 'test-admin'})
         assert_equal(r.status_int, 200, 'Redirect to %s' % r.location)
         user.disabled = True
         sess.save(user)
         sess.flush()
         user = M.User.query.get(username='test-admin')
         assert user.disabled
-        r = self.app.get('/p/test/admin/', extra_environ={'username':'test-admin'})
+        r = self.app.get('/p/test/admin/',
+                         extra_environ={'username': 'test-admin'})
         assert_equal(r.status_int, 302)
-        assert_equal(r.location, 'http://localhost/auth/?return_to=%2Fp%2Ftest%2Fadmin%2F')
+        assert_equal(r.location,
+                     'http://localhost/auth/?return_to=%2Fp%2Ftest%2Fadmin%2F')
 
 
 class TestPreferences(TestController):
@@ -419,14 +441,14 @@ class TestPreferences(TestController):
             ('Male', '19/08/1988', 'IT', 'Milan', 'Europe/Rome')
         result = self.app.get('/auth/user_info/')
 
-        #Check if personal data is properly set
+        # Check if personal data is properly set
         r = self.app.post('/auth/user_info/change_personal_data',
-             params=dict(
-                 sex=setsex,
-                 birthdate=setbirthdate,
-                 country=setcountry,
-                 city=setcity,
-                 timezone=settimezone))
+                          params=dict(
+                              sex=setsex,
+                              birthdate=setbirthdate,
+                              country=setcountry,
+                              city=setcity,
+                              timezone=settimezone))
         user = M.User.query.get(username='test-admin')
         sex = user.sex
         assert sex == setsex
@@ -439,9 +461,9 @@ class TestPreferences(TestController):
         timezone = user.timezone
         assert timezone == settimezone
 
-        #Check if setting a wrong date everything works correctly
+        # Check if setting a wrong date everything works correctly
         r = self.app.post('/auth/user_info/change_personal_data',
-             params=dict(birthdate='30/02/1998'))
+                          params=dict(birthdate='30/02/1998'))
         assert 'Please enter a valid date' in str(r)
         user = M.User.query.get(username='test-admin')
         sex = user.sex
@@ -455,108 +477,114 @@ class TestPreferences(TestController):
         timezone = user.timezone
         assert timezone == settimezone
 
-        #Check deleting birthdate
+        # Check deleting birthdate
         r = self.app.post('/auth/user_info/change_personal_data',
-             params=dict(
-                 sex=setsex,
-                 birthdate='',
-                 country=setcountry,
-                 city=setcity,
-                 timezone=settimezone))
+                          params=dict(
+                              sex=setsex,
+                              birthdate='',
+                              country=setcountry,
+                              city=setcity,
+                              timezone=settimezone))
         user = M.User.query.get(username='test-admin')
         assert user.birthdate is None
 
     @td.with_user_project('test-admin')
     def test_contacts(self):
-        #Add skype account
+        # Add skype account
         testvalue = 'testaccount'
         result = self.app.get('/auth/user_info/contacts/')
         r = self.app.post('/auth/user_info/contacts/skype_account',
-             params=dict(skypeaccount=testvalue))
+                          params=dict(skypeaccount=testvalue))
         user = M.User.query.get(username='test-admin')
         assert user.skypeaccount == testvalue
 
-        #Add social network account
+        # Add social network account
         socialnetwork = 'Facebook'
         accounturl = 'http://www.facebook.com/test'
         r = self.app.post('/auth/user_info/contacts/add_social_network',
-             params=dict(socialnetwork=socialnetwork,
-                         accounturl = accounturl))
+                          params=dict(socialnetwork=socialnetwork,
+                                      accounturl=accounturl))
         user = M.User.query.get(username='test-admin')
         assert len(user.socialnetworks) == 1 and \
-               user.socialnetworks[0].socialnetwork == socialnetwork and \
-               user.socialnetworks[0].accounturl == accounturl
+            user.socialnetworks[0].socialnetwork == socialnetwork and \
+            user.socialnetworks[0].accounturl == accounturl
 
-        #Add second social network account
+        # Add second social network account
         socialnetwork2 = 'Twitter'
         accounturl2 = 'http://twitter.com/test'
         r = self.app.post('/auth/user_info/contacts/add_social_network',
-             params=dict(socialnetwork=socialnetwork2,
-                         accounturl = '@test'))
+                          params=dict(socialnetwork=socialnetwork2,
+                                      accounturl='@test'))
         user = M.User.query.get(username='test-admin')
         assert len(user.socialnetworks) == 2 and \
-               ({'socialnetwork':socialnetwork, 'accounturl':accounturl} in user.socialnetworks and \
-                {'socialnetwork':socialnetwork2, 'accounturl':accounturl2} in user.socialnetworks)
+            ({'socialnetwork': socialnetwork, 'accounturl': accounturl} in user.socialnetworks and
+             {'socialnetwork': socialnetwork2, 'accounturl': accounturl2} in user.socialnetworks)
 
-        #Remove first social network account
+        # Remove first social network account
         r = self.app.post('/auth/user_info/contacts/remove_social_network',
-             params=dict(socialnetwork=socialnetwork,
-                         account = accounturl))
+                          params=dict(socialnetwork=socialnetwork,
+                                      account=accounturl))
         user = M.User.query.get(username='test-admin')
         assert len(user.socialnetworks) == 1 and \
-               {'socialnetwork':socialnetwork2, 'accounturl':accounturl2} in user.socialnetworks
+            {'socialnetwork': socialnetwork2, 'accounturl':
+             accounturl2} in user.socialnetworks
 
-        #Add empty social network account
+        # Add empty social network account
         r = self.app.post('/auth/user_info/contacts/add_social_network',
-             params=dict(accounturl = accounturl, socialnetwork=''))
+                          params=dict(accounturl=accounturl, socialnetwork=''))
         user = M.User.query.get(username='test-admin')
         assert len(user.socialnetworks) == 1 and \
-               {'socialnetwork':socialnetwork2, 'accounturl':accounturl2} in user.socialnetworks
+            {'socialnetwork': socialnetwork2, 'accounturl':
+             accounturl2} in user.socialnetworks
 
-        #Add invalid social network account
+        # Add invalid social network account
         r = self.app.post('/auth/user_info/contacts/add_social_network',
-             params=dict(accounturl = accounturl, socialnetwork='invalid'))
+                          params=dict(accounturl=accounturl, socialnetwork='invalid'))
         user = M.User.query.get(username='test-admin')
         assert len(user.socialnetworks) == 1 and \
-               {'socialnetwork':socialnetwork2, 'accounturl':accounturl2} in user.socialnetworks
+            {'socialnetwork': socialnetwork2, 'accounturl':
+             accounturl2} in user.socialnetworks
 
-        #Add telephone number
+        # Add telephone number
         telnumber = '+3902123456'
         r = self.app.post('/auth/user_info/contacts/add_telnumber',
-             params=dict(newnumber=telnumber))
+                          params=dict(newnumber=telnumber))
         user = M.User.query.get(username='test-admin')
-        assert (len(user.telnumbers) == 1 and (user.telnumbers[0] == telnumber))
+        assert (len(user.telnumbers)
+                == 1 and (user.telnumbers[0] == telnumber))
 
-        #Add second telephone number
+        # Add second telephone number
         telnumber2 = '+3902654321'
         r = self.app.post('/auth/user_info/contacts/add_telnumber',
-             params=dict(newnumber=telnumber2))
+                          params=dict(newnumber=telnumber2))
         user = M.User.query.get(username='test-admin')
-        assert (len(user.telnumbers) == 2 and telnumber in user.telnumbers and telnumber2 in user.telnumbers)
+        assert (len(user.telnumbers)
+                == 2 and telnumber in user.telnumbers and telnumber2 in user.telnumbers)
 
-        #Remove first telephone number
+        # Remove first telephone number
         r = self.app.post('/auth/user_info/contacts/remove_telnumber',
-             params=dict(oldvalue=telnumber))
+                          params=dict(oldvalue=telnumber))
         user = M.User.query.get(username='test-admin')
         assert (len(user.telnumbers) == 1 and telnumber2 in user.telnumbers)
 
-        #Add website
+        # Add website
         website = 'http://www.testurl.com'
         r = self.app.post('/auth/user_info/contacts/add_webpage',
-             params=dict(newwebsite=website))
+                          params=dict(newwebsite=website))
         user = M.User.query.get(username='test-admin')
         assert (len(user.webpages) == 1 and (website in user.webpages))
 
-        #Add second website
+        # Add second website
         website2 = 'http://www.testurl2.com'
         r = self.app.post('/auth/user_info/contacts/add_webpage',
-             params=dict(newwebsite=website2))
+                          params=dict(newwebsite=website2))
         user = M.User.query.get(username='test-admin')
-        assert (len(user.webpages) == 2 and website in user.webpages and website2 in user.webpages)
+        assert (len(user.webpages)
+                == 2 and website in user.webpages and website2 in user.webpages)
 
-        #Remove first website
+        # Remove first website
         r = self.app.post('/auth/user_info/contacts/remove_webpage',
-             params=dict(oldvalue=website))
+                          params=dict(oldvalue=website))
         user = M.User.query.get(username='test-admin')
         assert (len(user.webpages) == 1 and website2 in user.webpages)
 
@@ -564,169 +592,187 @@ class TestPreferences(TestController):
     def test_availability(self):
         from datetime import time
 
-        #Add availability timeslot
+        # Add availability timeslot
         weekday = 'Monday'
-        starttime = time(9,0,0)
+        starttime = time(9, 0, 0)
         endtime = time(12, 0, 0)
 
         result = self.app.get('/auth/user_info/availability/')
         r = self.app.post('/auth/user_info/availability/add_timeslot',
-             params=dict(
-                 weekday=weekday,
-                 starttime=starttime.strftime('%H:%M'),
-                 endtime=endtime.strftime('%H:%M')))
+                          params=dict(
+                              weekday=weekday,
+                              starttime=starttime.strftime('%H:%M'),
+                              endtime=endtime.strftime('%H:%M')))
         user = M.User.query.get(username='test-admin')
-        timeslot1dict = dict(week_day=weekday, start_time=starttime, end_time=endtime)
-        assert len(user.availability) == 1 and timeslot1dict in user.get_availability_timeslots()
+        timeslot1dict = dict(
+            week_day=weekday, start_time=starttime, end_time=endtime)
+        assert len(
+            user.availability) == 1 and timeslot1dict in user.get_availability_timeslots()
 
         weekday2 = 'Tuesday'
-        starttime2 = time(14,0,0)
+        starttime2 = time(14, 0, 0)
         endtime2 = time(16, 0, 0)
 
-        #Add second availability timeslot
+        # Add second availability timeslot
         r = self.app.post('/auth/user_info/availability/add_timeslot',
-             params=dict(
-                 weekday=weekday2,
-                 starttime=starttime2.strftime('%H:%M'),
-                 endtime=endtime2.strftime('%H:%M')))
+                          params=dict(
+                              weekday=weekday2,
+                              starttime=starttime2.strftime('%H:%M'),
+                              endtime=endtime2.strftime('%H:%M')))
         user = M.User.query.get(username='test-admin')
-        timeslot2dict = dict(week_day=weekday2, start_time=starttime2, end_time=endtime2)
+        timeslot2dict = dict(week_day=weekday2,
+                             start_time=starttime2, end_time=endtime2)
         assert len(user.availability) == 2 and timeslot1dict in user.get_availability_timeslots() \
-               and timeslot2dict in user.get_availability_timeslots()
+            and timeslot2dict in user.get_availability_timeslots()
 
-        #Remove availability timeslot
+        # Remove availability timeslot
         r = self.app.post('/auth/user_info/availability/remove_timeslot',
-             params=dict(
-                 weekday=weekday,
-                 starttime=starttime.strftime('%H:%M'),
-                 endtime=endtime.strftime('%H:%M')))
+                          params=dict(
+                              weekday=weekday,
+                              starttime=starttime.strftime('%H:%M'),
+                              endtime=endtime.strftime('%H:%M')))
         user = M.User.query.get(username='test-admin')
-        assert len(user.availability) == 1 and timeslot2dict in user.get_availability_timeslots()
+        assert len(
+            user.availability) == 1 and timeslot2dict in user.get_availability_timeslots()
 
-        #Add invalid availability timeslot
+        # Add invalid availability timeslot
         r = self.app.post('/auth/user_info/availability/add_timeslot',
-             params=dict(
-                 weekday=weekday2,
-                 starttime=endtime2.strftime('%H:%M'),
-                 endtime=starttime2.strftime('%H:%M')))
+                          params=dict(
+                              weekday=weekday2,
+                              starttime=endtime2.strftime('%H:%M'),
+                              endtime=starttime2.strftime('%H:%M')))
         assert 'Invalid period:' in str(r)
         user = M.User.query.get(username='test-admin')
-        timeslot2dict = dict(week_day=weekday2, start_time=starttime2, end_time=endtime2)
-        assert len(user.availability) == 1 and timeslot2dict in user.get_availability_timeslots()
+        timeslot2dict = dict(week_day=weekday2,
+                             start_time=starttime2, end_time=endtime2)
+        assert len(
+            user.availability) == 1 and timeslot2dict in user.get_availability_timeslots()
 
     @td.with_user_project('test-admin')
     def test_inactivity(self):
         from datetime import datetime, timedelta
 
-        #Add inactivity period
+        # Add inactivity period
         now = datetime.utcnow().date()
         now = datetime(now.year, now.month, now.day)
         startdate = now + timedelta(days=1)
         enddate = now + timedelta(days=7)
         result = self.app.get('/auth/user_info/availability/')
         r = self.app.post('/auth/user_info/availability/add_inactive_period',
-             params=dict(
-                 startdate=startdate.strftime('%d/%m/%Y'),
-                 enddate=enddate.strftime('%d/%m/%Y')))
+                          params=dict(
+                              startdate=startdate.strftime('%d/%m/%Y'),
+                              enddate=enddate.strftime('%d/%m/%Y')))
         user = M.User.query.get(username='test-admin')
         period1dict = dict(start_date=startdate, end_date=enddate)
-        assert len(user.inactiveperiod) == 1 and period1dict in user.get_inactive_periods()
+        assert len(
+            user.inactiveperiod) == 1 and period1dict in user.get_inactive_periods()
 
-        #Add second inactivity period
-        startdate2 =  now + timedelta(days=24)
+        # Add second inactivity period
+        startdate2 = now + timedelta(days=24)
         enddate2 = now + timedelta(days=28)
         r = self.app.post('/auth/user_info/availability/add_inactive_period',
-             params=dict(
-                 startdate=startdate2.strftime('%d/%m/%Y'),
-                 enddate=enddate2.strftime('%d/%m/%Y')))
+                          params=dict(
+                              startdate=startdate2.strftime('%d/%m/%Y'),
+                              enddate=enddate2.strftime('%d/%m/%Y')))
         user = M.User.query.get(username='test-admin')
         period2dict = dict(start_date=startdate2, end_date=enddate2)
         assert len(user.inactiveperiod) == 2 and period1dict in user.get_inactive_periods() \
-               and period2dict in user.get_inactive_periods()
+            and period2dict in user.get_inactive_periods()
 
-        #Remove first inactivity period
-        r = self.app.post('/auth/user_info/availability/remove_inactive_period',
-             params=dict(
-                 startdate=startdate.strftime('%d/%m/%Y'),
-                 enddate=enddate.strftime('%d/%m/%Y')))
+        # Remove first inactivity period
+        r = self.app.post(
+            '/auth/user_info/availability/remove_inactive_period',
+            params=dict(
+                startdate=startdate.strftime('%d/%m/%Y'),
+                enddate=enddate.strftime('%d/%m/%Y')))
         user = M.User.query.get(username='test-admin')
-        assert len(user.inactiveperiod) == 1 and period2dict in user.get_inactive_periods()
+        assert len(
+            user.inactiveperiod) == 1 and period2dict in user.get_inactive_periods()
 
-        #Add invalid inactivity period
+        # Add invalid inactivity period
         r = self.app.post('/auth/user_info/availability/add_inactive_period',
-             params=dict(
-                 startdate='NOT/A/DATE',
-                 enddate=enddate2.strftime('%d/%m/%Y')))
+                          params=dict(
+                              startdate='NOT/A/DATE',
+                              enddate=enddate2.strftime('%d/%m/%Y')))
         user = M.User.query.get(username='test-admin')
         assert 'Please enter a valid date' in str(r)
-        assert len(user.inactiveperiod) == 1 and period2dict in user.get_inactive_periods()
+        assert len(
+            user.inactiveperiod) == 1 and period2dict in user.get_inactive_periods()
 
     @td.with_user_project('test-admin')
     def test_skills(self):
         from datetime import datetime
 
-        #Add a skill
+        # Add a skill
         skill_cat = M.TroveCategory.query.get(show_as_skill=True)
         level = 'low'
         comment = 'test comment'
         result = self.app.get('/auth/user_info/skills/')
         r = self.app.post('/auth/user_info/skills/save_skill',
-             params=dict(
-                 level=level,
-                 comment=comment,
-                 selected_skill=str(skill_cat.trove_cat_id)))
+                          params=dict(
+                              level=level,
+                              comment=comment,
+                              selected_skill=str(skill_cat.trove_cat_id)))
         user = M.User.query.get(username='test-admin')
-        skilldict = dict(category_id=skill_cat._id, comment=comment, level=level)
+        skilldict = dict(category_id=skill_cat._id,
+                         comment=comment, level=level)
         assert len(user.skills) == 1 and skilldict in user.skills
 
-        #Add the same skill again
+        # Add the same skill again
         level = 'medium'
         comment = 'test comment 2'
         result = self.app.get('/auth/user_info/skills/')
         r = self.app.post('/auth/user_info/skills/save_skill',
-             params=dict(
-                 level=level,
-                 comment=comment,
-                 selected_skill=str(skill_cat.trove_cat_id)))
+                          params=dict(
+                              level=level,
+                              comment=comment,
+                              selected_skill=str(skill_cat.trove_cat_id)))
         user = M.User.query.get(username='test-admin')
-        skilldict = dict(category_id=skill_cat._id, comment=comment, level=level)
+        skilldict = dict(category_id=skill_cat._id,
+                         comment=comment, level=level)
         assert len(user.skills) == 1 and skilldict in user.skills
 
-        #Add an invalid skill
+        # Add an invalid skill
         level2 = 'not a level'
         comment2 = 'test comment 2'
         r = self.app.post('/auth/user_info/skills/save_skill',
-             params=dict(
-                 level=level2,
-                 comment=comment2,
-                 selected_skill=str(skill_cat.trove_cat_id)))
+                          params=dict(
+                              level=level2,
+                              comment=comment2,
+                              selected_skill=str(skill_cat.trove_cat_id)))
         user = M.User.query.get(username='test-admin')
-        #Check that everything is as it was before
+        # Check that everything is as it was before
         assert len(user.skills) == 1 and skilldict in user.skills
 
-        #Remove a skill
+        # Remove a skill
         result = self.app.get('/auth/user_info/skills/')
         r = self.app.post('/auth/user_info/skills/remove_skill',
-             params=dict(
-                 categoryid=str(skill_cat.trove_cat_id)))
+                          params=dict(
+                              categoryid=str(skill_cat.trove_cat_id)))
         user = M.User.query.get(username='test-admin')
         assert len(user.skills) == 0
 
     @td.with_user_project('test-admin')
     def test_user_message(self):
-        assert not M.User.query.get(username='test-admin').get_pref('disable_user_messages')
+        assert not M.User.query.get(
+            username='test-admin').get_pref('disable_user_messages')
         self.app.post('/auth/preferences/user_message')
-        assert M.User.query.get(username='test-admin').get_pref('disable_user_messages')
-        self.app.post('/auth/preferences/user_message', params={'allow_user_messages': 'on'})
-        assert not M.User.query.get(username='test-admin').get_pref('disable_user_messages')
+        assert M.User.query.get(
+            username='test-admin').get_pref('disable_user_messages')
+        self.app.post('/auth/preferences/user_message',
+                      params={'allow_user_messages': 'on'})
+        assert not M.User.query.get(
+            username='test-admin').get_pref('disable_user_messages')
 
 
 class TestPasswordReset(TestController):
+
     @patch('allura.tasks.mail_tasks.sendmail')
     @patch('allura.lib.helpers.gen_message_id')
     def test_email_unconfirmed(self, gen_message_id, sendmail):
         user = M.User.query.get(username='test-admin')
-        email = M.EmailAddress.query.find({'claimed_by_user_id': user._id}).first()
+        email = M.EmailAddress.query.find(
+            {'claimed_by_user_id': user._id}).first()
         email.confirmed = False
         ThreadLocalORMSession.flush_all()
         r = self.app.post('/auth/password_recovery_hash', {'email': email._id})
@@ -737,7 +783,8 @@ class TestPasswordReset(TestController):
     @patch('allura.lib.helpers.gen_message_id')
     def test_user_disabled(self, gen_message_id, sendmail):
         user = M.User.query.get(username='test-admin')
-        email = M.EmailAddress.query.find({'claimed_by_user_id': user._id}).first()
+        email = M.EmailAddress.query.find(
+            {'claimed_by_user_id': user._id}).first()
         user.disabled = True
         ThreadLocalORMSession.flush_all()
         r = self.app.post('/auth/password_recovery_hash', {'email': email._id})
@@ -748,7 +795,8 @@ class TestPasswordReset(TestController):
     @patch('allura.lib.helpers.gen_message_id')
     def test_password_reset(self, gen_message_id, sendmail):
         user = M.User.query.get(username='test-admin')
-        email = M.EmailAddress.query.find({'claimed_by_user_id': user._id}).first()
+        email = M.EmailAddress.query.find(
+            {'claimed_by_user_id': user._id}).first()
         email.confirmed = True
         ThreadLocalORMSession.flush_all()
         old_pw_hash = user.password
@@ -793,16 +841,19 @@ To reset your password on %s, please visit the following URL:
     @patch('allura.lib.helpers.gen_message_id')
     def test_hash_expired(self, gen_message_id, sendmail):
         user = M.User.query.get(username='test-admin')
-        email = M.EmailAddress.query.find({'claimed_by_user_id': user._id}).first()
+        email = M.EmailAddress.query.find(
+            {'claimed_by_user_id': user._id}).first()
         email.confirmed = True
         ThreadLocalORMSession.flush_all()
         r = self.app.post('/auth/password_recovery_hash', {'email': email._id})
         user = M.User.by_username('test-admin')
         hash = user.get_tool_data('AuthPasswordReset', 'hash')
-        user.set_tool_data('AuthPasswordReset', hash_expiry=datetime.datetime(2000, 10, 10))
+        user.set_tool_data('AuthPasswordReset',
+                           hash_expiry=datetime.datetime(2000, 10, 10))
         r = self.app.get('/auth/forgotten_password/%s' % hash.encode('utf-8'))
         assert_in('Unable to process reset, please try again', r.follow().body)
-        r = self.app.post('/auth/set_new_password/%s' % hash.encode('utf-8'), {'pw': '154321', 'pw2': '154321'})
+        r = self.app.post('/auth/set_new_password/%s' %
+                          hash.encode('utf-8'), {'pw': '154321', 'pw2': '154321'})
         assert_in('Unable to process reset, please try again', r.follow().body)
 
     @patch('allura.lib.plugin.AuthenticationProvider')
@@ -812,8 +863,10 @@ To reset your password on %s, please visit the following URL:
         ap.forgotten_password_process = False
         ap.authenticate_request()._id = user._id
         self.app.get('/auth/forgotten_password', status=404)
-        self.app.post('/auth/set_new_password', {'pw': 'foo', 'pw2': 'foo'}, status=404)
-        self.app.post('/auth/password_recovery_hash', {'email': 'foo'}, status=404)
+        self.app.post('/auth/set_new_password',
+                      {'pw': 'foo', 'pw2': 'foo'}, status=404)
+        self.app.post('/auth/password_recovery_hash',
+                      {'email': 'foo'}, status=404)
 
 
 class TestOAuth(TestController):
@@ -821,7 +874,8 @@ class TestOAuth(TestController):
     def test_register_deregister_app(self):
         # register
         r = self.app.get('/auth/oauth/')
-        r = self.app.post('/auth/oauth/register', params={'application_name': 'oautstapp', 'application_description': 'Oauth rulez'}).follow()
+        r = self.app.post('/auth/oauth/register',
+                          params={'application_name': 'oautstapp', 'application_description': 'Oauth rulez'}).follow()
         assert 'oautstapp' in r
         # deregister
         assert_equal(r.forms[0].action, 'deregister')
@@ -831,18 +885,21 @@ class TestOAuth(TestController):
 
     def test_generate_revoke_access_token(self):
         # generate
-        r = self.app.post('/auth/oauth/register', params={'application_name': 'oautstapp', 'application_description': 'Oauth rulez'}).follow()
+        r = self.app.post('/auth/oauth/register',
+                          params={'application_name': 'oautstapp', 'application_description': 'Oauth rulez'}).follow()
         assert_equal(r.forms[1].action, 'generate_access_token')
         r.forms[1].submit()
         r = self.app.get('/auth/oauth/')
         assert 'Bearer Token:' in r
-        assert_not_equal(M.OAuthAccessToken.for_user(M.User.by_username('test-admin')), [])
+        assert_not_equal(
+            M.OAuthAccessToken.for_user(M.User.by_username('test-admin')), [])
         # revoke
         assert_equal(r.forms[0].action, 'revoke_access_token')
         r.forms[0].submit()
         r = self.app.get('/auth/oauth/')
         assert_not_equal(r.forms[0].action, 'revoke_access_token')
-        assert_equal(M.OAuthAccessToken.for_user(M.User.by_username('test-admin')), [])
+        assert_equal(
+            M.OAuthAccessToken.for_user(M.User.by_username('test-admin')), [])
 
     @mock.patch('allura.controllers.rest.oauth.Server')
     @mock.patch('allura.controllers.rest.oauth.Request')
@@ -850,27 +907,28 @@ class TestOAuth(TestController):
         M.OAuthConsumerToken.consumer = mock.Mock()
         user = M.User.by_username('test-admin')
         consumer_token = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         ThreadLocalORMSession.flush_all()
         req = Request.from_request.return_value = {
-                'oauth_consumer_key': 'api_key',
-                'oauth_callback': 'http://my.domain.com/callback',
-            }
+            'oauth_consumer_key': 'api_key',
+            'oauth_callback': 'http://my.domain.com/callback',
+        }
         r = self.app.post('/rest/oauth/request_token', params={})
         rtok = parse_qs(r.body)['oauth_token'][0]
-        r = self.app.post('/rest/oauth/authorize', params={'oauth_token':rtok})
+        r = self.app.post('/rest/oauth/authorize',
+                          params={'oauth_token': rtok})
         r = r.forms[0].submit('yes')
         assert r.location.startswith('http://my.domain.com/callback')
         pin = parse_qs(urlparse(r.location).query)['oauth_verifier'][0]
         #pin = r.html.find(text=re.compile('^PIN: ')).split()[1]
         req = Request.from_request.return_value = {
-                'oauth_consumer_key': 'api_key',
-                'oauth_token': rtok,
-                'oauth_verifier': pin,
-            }
+            'oauth_consumer_key': 'api_key',
+            'oauth_token': rtok,
+            'oauth_verifier': pin,
+        }
         r = self.app.get('/rest/oauth/access_token')
         atok = parse_qs(r.body)
         assert_equal(len(atok['oauth_token']), 1)
@@ -882,27 +940,33 @@ class TestOAuth(TestController):
         M.OAuthConsumerToken.consumer = mock.Mock()
         user = M.User.by_username('test-user')
         consumer_token = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-            )
+            api_key='api_key',
+            user_id=user._id,
+        )
         ThreadLocalORMSession.flush_all()
-        req = Request.from_request.return_value = {'oauth_consumer_key': 'api_key'}
-        r = self.app.post('/rest/oauth/request_token', params={'key':'value'})
+        req = Request.from_request.return_value = {
+            'oauth_consumer_key': 'api_key'}
+        r = self.app.post('/rest/oauth/request_token', params={'key': 'value'})
         Request.from_request.assert_called_once_with(
-                'POST', 'http://localhost/rest/oauth/request_token',
-                headers={'Host': 'localhost:80', 'Content-Type': 'application/x-www-form-urlencoded; charset="utf-8"'},
-                parameters={'key':'value'},
-                query_string='')
-        Server().verify_request.assert_called_once_with(req, consumer_token.consumer, None)
-        request_token = M.OAuthRequestToken.query.get(consumer_token_id=consumer_token._id)
+            'POST', 'http://localhost/rest/oauth/request_token',
+            headers={'Host': 'localhost:80', 'Content-Type':
+                    'application/x-www-form-urlencoded; charset="utf-8"'},
+            parameters={'key': 'value'},
+            query_string='')
+        Server().verify_request.assert_called_once_with(
+            req, consumer_token.consumer, None)
+        request_token = M.OAuthRequestToken.query.get(
+            consumer_token_id=consumer_token._id)
         assert_is_not_none(request_token)
         assert_equal(r.body, request_token.to_string())
 
     @mock.patch('allura.controllers.rest.oauth.Server')
     @mock.patch('allura.controllers.rest.oauth.Request')
     def test_request_token_no_consumer_token(self, Request, Server):
-        req = Request.from_request.return_value = {'oauth_consumer_key': 'api_key'}
-        r = self.app.post('/rest/oauth/request_token', params={'key':'value'}, status=403)
+        req = Request.from_request.return_value = {
+            'oauth_consumer_key': 'api_key'}
+        r = self.app.post('/rest/oauth/request_token',
+                          params={'key': 'value'}, status=403)
 
     @mock.patch('allura.controllers.rest.oauth.Server')
     @mock.patch('allura.controllers.rest.oauth.Request')
@@ -911,147 +975,157 @@ class TestOAuth(TestController):
         M.OAuthConsumerToken.consumer = mock.Mock()
         user = M.User.by_username('test-user')
         consumer_token = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-            )
+            api_key='api_key',
+            user_id=user._id,
+        )
         ThreadLocalORMSession.flush_all()
-        req = Request.from_request.return_value = {'oauth_consumer_key': 'api_key'}
-        r = self.app.post('/rest/oauth/request_token', params={'key':'value'}, status=403)
+        req = Request.from_request.return_value = {
+            'oauth_consumer_key': 'api_key'}
+        r = self.app.post('/rest/oauth/request_token',
+                          params={'key': 'value'}, status=403)
 
     def test_authorize_ok(self):
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         rtok = M.OAuthRequestToken(
-                api_key='api_key',
-                consumer_token_id=ctok._id,
-                callback='oob',
-                user_id=user._id,
-            )
+            api_key='api_key',
+            consumer_token_id=ctok._id,
+            callback='oob',
+            user_id=user._id,
+        )
         ThreadLocalORMSession.flush_all()
-        r = self.app.post('/rest/oauth/authorize', params={'oauth_token':'api_key'})
+        r = self.app.post('/rest/oauth/authorize',
+                          params={'oauth_token': 'api_key'})
         assert_in('ctok_desc', r.body)
         assert_in('api_key', r.body)
 
     def test_authorize_invalid(self):
-        r = self.app.post('/rest/oauth/authorize', params={'oauth_token':'api_key'}, status=403)
+        r = self.app.post('/rest/oauth/authorize',
+                          params={'oauth_token': 'api_key'}, status=403)
 
     def test_do_authorize_no(self):
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         rtok = M.OAuthRequestToken(
-                api_key='api_key',
-                consumer_token_id=ctok._id,
-                callback='oob',
-                user_id=user._id,
-            )
+            api_key='api_key',
+            consumer_token_id=ctok._id,
+            callback='oob',
+            user_id=user._id,
+        )
         ThreadLocalORMSession.flush_all()
-        r = self.app.post('/rest/oauth/do_authorize', params={'no': '1', 'oauth_token': 'api_key'})
+        r = self.app.post('/rest/oauth/do_authorize',
+                          params={'no': '1', 'oauth_token': 'api_key'})
         assert_is_none(M.OAuthRequestToken.query.get(api_key='api_key'))
 
     def test_do_authorize_oob(self):
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         rtok = M.OAuthRequestToken(
-                api_key='api_key',
-                consumer_token_id=ctok._id,
-                callback='oob',
-                user_id=user._id,
-            )
+            api_key='api_key',
+            consumer_token_id=ctok._id,
+            callback='oob',
+            user_id=user._id,
+        )
         ThreadLocalORMSession.flush_all()
-        r = self.app.post('/rest/oauth/do_authorize', params={'yes': '1', 'oauth_token': 'api_key'})
+        r = self.app.post('/rest/oauth/do_authorize',
+                          params={'yes': '1', 'oauth_token': 'api_key'})
         assert_is_not_none(r.html.find(text=re.compile('^PIN: ')))
 
     def test_do_authorize_cb(self):
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         rtok = M.OAuthRequestToken(
-                api_key='api_key',
-                consumer_token_id=ctok._id,
-                callback='http://my.domain.com/callback',
-                user_id=user._id,
-            )
+            api_key='api_key',
+            consumer_token_id=ctok._id,
+            callback='http://my.domain.com/callback',
+            user_id=user._id,
+        )
         ThreadLocalORMSession.flush_all()
-        r = self.app.post('/rest/oauth/do_authorize', params={'yes': '1', 'oauth_token': 'api_key'})
-        assert r.location.startswith('http://my.domain.com/callback?oauth_token=api_key&oauth_verifier=')
+        r = self.app.post('/rest/oauth/do_authorize',
+                          params={'yes': '1', 'oauth_token': 'api_key'})
+        assert r.location.startswith(
+            'http://my.domain.com/callback?oauth_token=api_key&oauth_verifier=')
 
     def test_do_authorize_cb_params(self):
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         rtok = M.OAuthRequestToken(
-                api_key='api_key',
-                consumer_token_id=ctok._id,
-                callback='http://my.domain.com/callback?myparam=foo',
-                user_id=user._id,
-            )
+            api_key='api_key',
+            consumer_token_id=ctok._id,
+            callback='http://my.domain.com/callback?myparam=foo',
+            user_id=user._id,
+        )
         ThreadLocalORMSession.flush_all()
-        r = self.app.post('/rest/oauth/do_authorize', params={'yes': '1', 'oauth_token': 'api_key'})
-        assert r.location.startswith('http://my.domain.com/callback?myparam=foo&oauth_token=api_key&oauth_verifier=')
+        r = self.app.post('/rest/oauth/do_authorize',
+                          params={'yes': '1', 'oauth_token': 'api_key'})
+        assert r.location.startswith(
+            'http://my.domain.com/callback?myparam=foo&oauth_token=api_key&oauth_verifier=')
 
     @mock.patch('allura.controllers.rest.oauth.Request')
     def test_access_token_no_consumer(self, Request):
         req = Request.from_request.return_value = {
-                'oauth_consumer_key': 'api_key',
-                'oauth_token': 'api_key',
-                'oauth_verifier': 'good',
-            }
+            'oauth_consumer_key': 'api_key',
+            'oauth_token': 'api_key',
+            'oauth_verifier': 'good',
+        }
         self.app.get('/rest/oauth/access_token', status=403)
 
     @mock.patch('allura.controllers.rest.oauth.Request')
     def test_access_token_no_request(self, Request):
         req = Request.from_request.return_value = {
-                'oauth_consumer_key': 'api_key',
-                'oauth_token': 'api_key',
-                'oauth_verifier': 'good',
-            }
+            'oauth_consumer_key': 'api_key',
+            'oauth_token': 'api_key',
+            'oauth_verifier': 'good',
+        }
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         ThreadLocalORMSession.flush_all()
         self.app.get('/rest/oauth/access_token', status=403)
 
     @mock.patch('allura.controllers.rest.oauth.Request')
     def test_access_token_bad_pin(self, Request):
         req = Request.from_request.return_value = {
-                'oauth_consumer_key': 'api_key',
-                'oauth_token': 'api_key',
-                'oauth_verifier': 'bad',
-            }
+            'oauth_consumer_key': 'api_key',
+            'oauth_token': 'api_key',
+            'oauth_verifier': 'bad',
+        }
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         rtok = M.OAuthRequestToken(
-                api_key='api_key',
-                consumer_token_id=ctok._id,
-                callback='http://my.domain.com/callback?myparam=foo',
-                user_id=user._id,
-                validation_pin='good',
-            )
+            api_key='api_key',
+            consumer_token_id=ctok._id,
+            callback='http://my.domain.com/callback?myparam=foo',
+            user_id=user._id,
+            validation_pin='good',
+        )
         ThreadLocalORMSession.flush_all()
         self.app.get('/rest/oauth/access_token', status=403)
 
@@ -1059,23 +1133,23 @@ class TestOAuth(TestController):
     @mock.patch('allura.controllers.rest.oauth.Request')
     def test_access_token_bad_sig(self, Request, Server):
         req = Request.from_request.return_value = {
-                'oauth_consumer_key': 'api_key',
-                'oauth_token': 'api_key',
-                'oauth_verifier': 'good',
-            }
+            'oauth_consumer_key': 'api_key',
+            'oauth_token': 'api_key',
+            'oauth_verifier': 'good',
+        }
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         rtok = M.OAuthRequestToken(
-                api_key='api_key',
-                consumer_token_id=ctok._id,
-                callback='http://my.domain.com/callback?myparam=foo',
-                user_id=user._id,
-                validation_pin='good',
-            )
+            api_key='api_key',
+            consumer_token_id=ctok._id,
+            callback='http://my.domain.com/callback?myparam=foo',
+            user_id=user._id,
+            validation_pin='good',
+        )
         ThreadLocalORMSession.flush_all()
         Server().verify_request.side_effect = ValueError
         self.app.get('/rest/oauth/access_token', status=403)
@@ -1084,23 +1158,23 @@ class TestOAuth(TestController):
     @mock.patch('allura.controllers.rest.oauth.Request')
     def test_access_token_ok(self, Request, Server):
         req = Request.from_request.return_value = {
-                'oauth_consumer_key': 'api_key',
-                'oauth_token': 'api_key',
-                'oauth_verifier': 'good',
-            }
+            'oauth_consumer_key': 'api_key',
+            'oauth_token': 'api_key',
+            'oauth_verifier': 'good',
+        }
         user = M.User.by_username('test-admin')
         ctok = M.OAuthConsumerToken(
-                api_key='api_key',
-                user_id=user._id,
-                description='ctok_desc',
-            )
+            api_key='api_key',
+            user_id=user._id,
+            description='ctok_desc',
+        )
         rtok = M.OAuthRequestToken(
-                api_key='api_key',
-                consumer_token_id=ctok._id,
-                callback='http://my.domain.com/callback?myparam=foo',
-                user_id=user._id,
-                validation_pin='good',
-            )
+            api_key='api_key',
+            consumer_token_id=ctok._id,
+            callback='http://my.domain.com/callback?myparam=foo',
+            user_id=user._id,
+            validation_pin='good',
+        )
         ThreadLocalORMSession.flush_all()
         r = self.app.get('/rest/oauth/access_token')
         atok = parse_qs(r.body)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_discuss.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_discuss.py b/Allura/allura/tests/functional/test_discuss.py
index 981fc6e..816b3d5 100644
--- a/Allura/allura/tests/functional/test_discuss.py
+++ b/Allura/allura/tests/functional/test_discuss.py
@@ -20,40 +20,41 @@ from mock import patch
 from allura.tests import TestController
 from allura import model as M
 
+
 class TestDiscuss(TestController):
 
     def test_subscribe_unsubscribe(self):
         home = self.app.get('/wiki/_discuss/')
-        subscribed = [ i for i in home.html.findAll('input')
-                       if i.get('type') == 'checkbox'][0]
+        subscribed = [i for i in home.html.findAll('input')
+                      if i.get('type') == 'checkbox'][0]
         assert 'checked' not in subscribed.attrMap
-        link = [ a for a in home.html.findAll('a')
-                 if 'thread' in a['href'] ][0]
+        link = [a for a in home.html.findAll('a')
+                if 'thread' in a['href']][0]
         params = {
-            'threads-0._id':link['href'][len('/p/test/wiki/_discuss/thread/'):-1],
-            'threads-0.subscription':'on' }
+            'threads-0._id': link['href'][len('/p/test/wiki/_discuss/thread/'):-1],
+            'threads-0.subscription': 'on'}
         r = self.app.post('/wiki/_discuss/subscribe',
                           params=params,
-                          headers={'Referer':'/wiki/_discuss/'})
+                          headers={'Referer': '/wiki/_discuss/'})
         r = r.follow()
-        subscribed = [ i for i in r.html.findAll('input')
-                       if i.get('type') == 'checkbox'][0]
+        subscribed = [i for i in r.html.findAll('input')
+                      if i.get('type') == 'checkbox'][0]
         assert 'checked' in subscribed.attrMap
         params = {
-            'threads-0._id':link['href'][len('/p/test/wiki/_discuss/thread/'):-1]
-            }
+            'threads-0._id': link['href'][len('/p/test/wiki/_discuss/thread/'):-1]
+        }
         r = self.app.post('/wiki/_discuss/subscribe',
                           params=params,
-                          headers={'Referer':'/wiki/_discuss/'})
+                          headers={'Referer': '/wiki/_discuss/'})
         r = r.follow()
-        subscribed = [ i for i in r.html.findAll('input')
-                       if i.get('type') == 'checkbox'][0]
+        subscribed = [i for i in r.html.findAll('input')
+                      if i.get('type') == 'checkbox'][0]
         assert 'checked' not in subscribed.attrMap
 
     def _make_post(self, text):
         home = self.app.get('/wiki/_discuss/')
-        thread_link = [ a for a in home.html.findAll('a')
-                 if 'thread' in a['href'] ][0]['href']
+        thread_link = [a for a in home.html.findAll('a')
+                       if 'thread' in a['href']][0]['href']
         thread = self.app.get(thread_link)
         for f in thread.html.findAll('form'):
             if f.get('action', '').endswith('/post'):
@@ -62,10 +63,11 @@ class TestDiscuss(TestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = text
         r = self.app.post(f['action'].encode('utf-8'), params=params,
-                          headers={'Referer':thread_link.encode("utf-8")},
+                          headers={'Referer': thread_link.encode("utf-8")},
                           extra_environ=dict(username='root'))
         r = r.follow()
         return r
@@ -73,49 +75,54 @@ class TestDiscuss(TestController):
     @patch('allura.controllers.discuss.g.spam_checker.submit_spam')
     def test_post(self, submit_spam):
         home = self.app.get('/wiki/_discuss/')
-        thread_link = [ a for a in home.html.findAll('a')
-                 if 'thread' in a['href'] ][0]['href']
+        thread_link = [a for a in home.html.findAll('a')
+                       if 'thread' in a['href']][0]['href']
         r = self._make_post('This is a post')
         assert 'This is a post' in r, r
-        post_link = str(r.html.find('div',{'class':'edit_post_form reply'}).find('form')['action'])
+        post_link = str(
+            r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
         r = self.app.get(post_link[:-2], status=302)
         r = self.app.get(post_link)
-        post_form = r.html.find('form',{'action':post_link})
+        post_form = r.html.find('form', {'action': post_link})
         params = dict()
         inputs = post_form.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[post_form.find('textarea')['name']] = 'This is a new post'
         r = self.app.post(post_link,
                           params=params,
-                          headers={'Referer':thread_link.encode("utf-8")})
+                          headers={'Referer': thread_link.encode("utf-8")})
         r = r.follow()
         assert 'This is a new post' in r, r
         r = self.app.get(post_link)
         assert str(r).count('This is a new post') == 3
-        post_form = r.html.find('form',{'action':post_link + 'reply'})
+        post_form = r.html.find('form', {'action': post_link + 'reply'})
         params = dict()
         inputs = post_form.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[post_form.find('textarea')['name']] = 'Tis a reply'
         r = self.app.post(post_link + 'reply',
                           params=params,
-                          headers={'Referer':post_link.encode("utf-8")})
+                          headers={'Referer': post_link.encode("utf-8")})
         r = self.app.get(thread_link)
         assert 'Tis a reply' in r, r
-        permalinks = [post.find('form')['action'].encode('utf-8') for post in r.html.findAll('div',{'class':'edit_post_form reply'})]
-        self.app.post(permalinks[1]+'flag')
-        self.app.post(permalinks[1]+'moderate', params=dict(delete='delete'))
-        self.app.post(permalinks[0]+'moderate', params=dict(spam='spam'))
-        assert submit_spam.call_args[0] ==('This is a new post',), submit_spam.call_args[0]
+        permalinks = [post.find('form')['action'].encode('utf-8')
+                      for post in r.html.findAll('div', {'class': 'edit_post_form reply'})]
+        self.app.post(permalinks[1] + 'flag')
+        self.app.post(permalinks[1] + 'moderate', params=dict(delete='delete'))
+        self.app.post(permalinks[0] + 'moderate', params=dict(spam='spam'))
+        assert submit_spam.call_args[0] == (
+            'This is a new post',), submit_spam.call_args[0]
 
     def test_permissions(self):
         home = self.app.get('/wiki/_discuss/')
-        thread_url = [ a for a in home.html.findAll('a')
-                 if 'thread' in a['href'] ][0]['href']
+        thread_url = [a for a in home.html.findAll('a')
+                      if 'thread' in a['href']][0]['href']
         thread_id = thread_url.rstrip('/').split('/')[-1]
         thread = M.Thread.query.get(_id=thread_id)
 
@@ -126,7 +133,8 @@ class TestDiscuss(TestController):
 
         # set wiki page private
         from forgewiki.model import Page
-        page = Page.query.get(_id=thread.ref.artifact._id)  # need to look up the page directly, so ming is aware of our change
+        # need to look up the page directly, so ming is aware of our change
+        page = Page.query.get(_id=thread.ref.artifact._id)
         project = M.Project.query.get(shortname='test')
         role_admin = M.ProjectRole.by_name('Admin', project)._id
         page.acl = [
@@ -134,25 +142,28 @@ class TestDiscuss(TestController):
             M.DENY_ALL,
         ]
 
-        self.app.get(thread_url, status=200, # ok
+        self.app.get(thread_url, status=200,  # ok
                      extra_environ=dict(username='test-admin'))
-        self.app.get(thread_url, status=403, # forbidden
+        self.app.get(thread_url, status=403,  # forbidden
                      extra_environ=dict(username=non_admin))
 
     def test_spam_link(self):
         r = self._make_post('Test post')
         assert '<span>Spam</span>' in r
-        r = self.app.get('/wiki/_discuss/', extra_environ={'username': 'test-user-1'})
+        r = self.app.get('/wiki/_discuss/',
+                         extra_environ={'username': 'test-user-1'})
         assert '<span>Spam</span>' not in r, 'User without moderate perm must not see Spam link'
 
     @patch('allura.controllers.discuss.g.spam_checker.submit_spam')
     def test_moderate(self, submit_spam):
         r = self._make_post('Test post')
-        post_link = str(r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
+        post_link = str(
+            r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
         post = M.Post.query.find().first()
         post.status = 'pending'
         self.app.post(post_link + 'moderate', params=dict(spam='spam'))
-        assert submit_spam.call_args[0] ==('Test post',), submit_spam.call_args[0]
+        assert submit_spam.call_args[0] == (
+            'Test post',), submit_spam.call_args[0]
         post = M.Post.query.find().first()
         assert post.status == 'spam'
         self.app.post(post_link + 'moderate', params=dict(approve='approve'))
@@ -163,8 +174,8 @@ class TestDiscuss(TestController):
 
     def test_post_paging(self):
         home = self.app.get('/wiki/_discuss/')
-        thread_link = [ a for a in home.html.findAll('a')
-                 if 'thread' in a['href'] ][0]['href']
+        thread_link = [a for a in home.html.findAll('a')
+                       if 'thread' in a['href']][0]['href']
         # just make sure it doesn't 500
         r = self.app.get('%s?limit=50&page=0' % thread_link)
 
@@ -175,31 +186,37 @@ class TestDiscuss(TestController):
         assert create_activity.call_args[0][1] == 'posted'
         create_activity.reset_mock()
         thread_url = r.request.url
-        reply_form = r.html.find('div',{'class':'edit_post_form reply'}).find('form')
+        reply_form = r.html.find(
+            'div', {'class': 'edit_post_form reply'}).find('form')
         post_link = str(reply_form['action'])
-        assert 'This is a post' in str(r.html.find('div',{'class':'display_post'}))
-        assert 'Last edit:' not in str(r.html.find('div',{'class':'display_post'}))
+        assert 'This is a post' in str(
+            r.html.find('div', {'class': 'display_post'}))
+        assert 'Last edit:' not in str(
+            r.html.find('div', {'class': 'display_post'}))
         params = dict()
         inputs = reply_form.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[reply_form.find('textarea')['name']] = 'zzz'
         self.app.post(post_link, params)
         assert create_activity.call_count == 1, create_activity.call_count
         assert create_activity.call_args[0][1] == 'modified'
         r = self.app.get(thread_url)
-        assert 'zzz' in str(r.html.find('div',{'class':'display_post'}))
-        assert 'Last edit: Test Admin less than 1 minute ago' in str(r.html.find('div',{'class':'display_post'}))
+        assert 'zzz' in str(r.html.find('div', {'class': 'display_post'}))
+        assert 'Last edit: Test Admin less than 1 minute ago' in str(
+            r.html.find('div', {'class': 'display_post'}))
+
 
 class TestAttachment(TestController):
 
     def setUp(self):
         super(TestAttachment, self).setUp()
         home = self.app.get('/wiki/_discuss/')
-        self.thread_link = [ a['href'].encode("utf-8")
-                             for a in home.html.findAll('a')
-                             if 'thread' in a['href'] ][0]
+        self.thread_link = [a['href'].encode("utf-8")
+                            for a in home.html.findAll('a')
+                            if 'thread' in a['href']][0]
         thread = self.app.get(self.thread_link)
         for f in thread.html.findAll('form'):
             if f.get('action', '').endswith('/post'):
@@ -209,12 +226,14 @@ class TestAttachment(TestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'Test Post'
         r = self.app.post(f['action'].encode('utf-8'), params=params,
-                          headers={'Referer':self.thread_link})
+                          headers={'Referer': self.thread_link})
         r = r.follow()
-        self.post_link = str(r.html.find('div',{'class':'edit_post_form reply'}).find('form')['action'])
+        self.post_link = str(
+            r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
 
     def test_attach(self):
         r = self.app.post(self.post_link + 'attach',
@@ -237,13 +256,14 @@ class TestAttachment(TestController):
     def test_reply_attach(self, notify):
         notify.return_value = True
         r = self.app.get(self.thread_link)
-        post_form = r.html.find('form', {'action':self.post_link + 'reply'})
+        post_form = r.html.find('form', {'action': self.post_link + 'reply'})
         params = dict()
         inputs = post_form.findAll('input')
 
         for field in inputs:
-            if field.has_key('name') and (field['name']!='file_info'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+            if field.has_key('name') and (field['name'] != 'file_info'):
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[post_form.find('textarea')['name']] = 'Reply'
         r = self.app.post(self.post_link + 'reply',
                           params=params,

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_feeds.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_feeds.py b/Allura/allura/tests/functional/test_feeds.py
index 000ab74..54416a5 100644
--- a/Allura/allura/tests/functional/test_feeds.py
+++ b/Allura/allura/tests/functional/test_feeds.py
@@ -20,7 +20,9 @@ from formencode.variabledecode import variable_encode
 from allura.tests import TestController
 from allura.tests import decorators as td
 
+
 class TestFeeds(TestController):
+
     def setUp(self):
         TestController.setUp(self)
         self._setUp()
@@ -33,7 +35,7 @@ class TestFeeds(TestController):
         self.app.post(
             '/bugs/save_ticket',
             params=variable_encode(dict(
-                    ticket_form=dict(
+                ticket_form=dict(
                     ticket_num='',
                     labels='',
                     assigned_to='',
@@ -49,7 +51,7 @@ class TestFeeds(TestController):
                 title=title,
                 text="Nothing much",
                 labels='',
-                ),
+            ),
             status=302)
         self.app.get('/wiki/%s/' % title)
 
@@ -65,10 +67,10 @@ class TestFeeds(TestController):
     @td.with_wiki
     def test_wiki_page_feed(self):
         self.app.post('/wiki/Root/update', params={
-                'title':'Root',
-                'text':'',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'Root',
+            'text': '',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         self.app.get('/wiki/Root/feed.rss')
         self.app.get('/wiki/Root/feed.atom')
 
@@ -82,14 +84,13 @@ class TestFeeds(TestController):
         self.app.get('/bugs/1/feed.rss')
         r = self.app.get('/bugs/1/feed.atom')
         self.app.post('/bugs/1/update_ticket', params=dict(
-                assigned_to='',
-                ticket_num='',
-                labels='',
-                summary='This is a new ticket',
-                status='unread',
-                milestone='',
-                description='This is another description'), extra_environ=dict(username='root'))
+            assigned_to='',
+            ticket_num='',
+            labels='',
+            summary='This is a new ticket',
+            status='unread',
+            milestone='',
+            description='This is another description'), extra_environ=dict(username='root'))
         r = self.app.get('/bugs/1/feed.atom')
         assert '=&amp;gt' in r
         assert '\n+' in r
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_gravatar.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_gravatar.py b/Allura/allura/tests/functional/test_gravatar.py
index 4f6940b..bea43f3 100644
--- a/Allura/allura/tests/functional/test_gravatar.py
+++ b/Allura/allura/tests/functional/test_gravatar.py
@@ -33,7 +33,8 @@ class TestGravatar(TestController):
         email = u'Vin\u00EDcius@example.com'
         expected_id = 'e00968255d68523b034a6a39c522efdb'
         actual_id = gravatar.id(email)
-        assert expected_id == actual_id, 'Expected gravatar ID %s, got %s' % (repr(expected_id), repr(actual_id))
+        assert expected_id == actual_id, 'Expected gravatar ID %s, got %s' % (
+            repr(expected_id), repr(actual_id))
 
     def test_url(self):
         email = 'Wolf@example.com'


[33/36] git commit: [#6484] ticket:492 Move mediawiki import script to separate repo

Posted by jo...@apache.org.
[#6484] ticket:492 Move mediawiki import script to separate repo


Project: http://git-wip-us.apache.org/repos/asf/incubator-allura/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-allura/commit/4f9f2161
Tree: http://git-wip-us.apache.org/repos/asf/incubator-allura/tree/4f9f2161
Diff: http://git-wip-us.apache.org/repos/asf/incubator-allura/diff/4f9f2161

Branch: refs/heads/cj/6484
Commit: 4f9f216162bdd3955557072cf7a31c49ad2d16bf
Parents: c93733a
Author: Igor Bondarenko <je...@gmail.com>
Authored: Mon Dec 30 12:47:20 2013 +0200
Committer: Cory Johns <cj...@slashdotmedia.com>
Committed: Fri Jan 10 18:56:53 2014 +0000

----------------------------------------------------------------------
 .../forgewiki/scripts/wiki2markdown/__init__.py |  18 -
 .../scripts/wiki2markdown/extractors.py         | 192 -----------
 .../forgewiki/scripts/wiki2markdown/loaders.py  | 201 -----------
 .../scripts/wiki2markdown/wiki2markdown.py      | 132 --------
 ForgeWiki/forgewiki/tests/test_wiki2markdown.py | 329 -------------------
 ForgeWiki/setup.py                              |   3 -
 6 files changed, 875 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/4f9f2161/ForgeWiki/forgewiki/scripts/wiki2markdown/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki2markdown/__init__.py b/ForgeWiki/forgewiki/scripts/wiki2markdown/__init__.py
deleted file mode 100644
index f60b66d..0000000
--- a/ForgeWiki/forgewiki/scripts/wiki2markdown/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-from wiki2markdown import Wiki2Markdown

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/4f9f2161/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py b/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py
deleted file mode 100644
index b70672c..0000000
--- a/ForgeWiki/forgewiki/scripts/wiki2markdown/extractors.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import logging
-import os
-import shutil
-import json
-import hashlib
-
-log = logging.getLogger(__name__)
-
-
-class MediawikiExtractor(object):
-
-    """Base class for MediaWiki data provider"""
-
-    def __init__(self, options):
-        self.options = options
-        if os.path.exists(self.options.dump_dir):
-            # clear dump_dir before extraction (there may be old data)
-            shutil.rmtree(self.options.dump_dir)
-        os.makedirs(self.options.dump_dir)
-
-    def extract(self):
-        """Extract pages with history, attachments, talk-pages, etc"""
-        raise NotImplementedError("subclass must override this")
-
-
-class MySQLExtractor(MediawikiExtractor):
-
-    """Extract MediaWiki data to json.
-
-    Use connection to MySQL database as a data source.
-    """
-
-    def __init__(self, options):
-        super(MySQLExtractor, self).__init__(options)
-        self._connection = None
-        self.db_options = {
-            'host': self.options.host or 'localhost',
-            'user': self.options.user,
-            'passwd': self.options.password,
-            'db': self.options.db_name,
-            'port': self.options.port or 3306
-        }
-
-    def connection(self):
-        try:
-            import MySQLdb
-        except ImportError:
-            raise ImportError(
-                'GPL library MySQL-python is required for this operation')
-
-        if not self._connection:
-            self._connection = MySQLdb.connect(**self.db_options)
-        return self._connection
-
-    def _save(self, content, *paths):
-        """Save json to file in local filesystem"""
-        out_file = os.path.join(self.options.dump_dir, *paths)
-        if not os.path.exists(os.path.dirname(out_file)):
-            os.makedirs(os.path.dirname(out_file))
-        with open(out_file, 'w') as out:
-            out.write(content.encode('utf-8'))
-
-    def _save_attachment(self, filepath, *paths):
-        """Save attachment in dump directory.
-
-        Copy from mediawiki dump directory to our internal dump directory.
-
-        args:
-        filepath - path to attachment in mediawiki dump.
-        *paths - path to internal dump directory.
-        """
-        out_dir = os.path.join(self.options.dump_dir, *paths)
-        if not os.path.exists(out_dir):
-            os.makedirs(out_dir)
-        shutil.copy(filepath, out_dir)
-
-    def _pages(self):
-        """Yield page_data for next wiki page"""
-        c = self.connection().cursor()
-        c.execute('select page.page_id, page.page_title '
-                  'from page where page.page_namespace = 0')
-        for row in c:
-            _id, title = row
-            page_data = {
-                'page_id': _id,
-                'title': title,
-            }
-            yield page_data
-
-    def _history(self, page_id):
-        """Yield page_data for next revision of wiki page"""
-        c = self.connection().cursor()
-        c.execute('select revision.rev_timestamp, text.old_text, '
-                  'revision.rev_user_text '
-                  'from revision '
-                  'left join text on revision.rev_text_id = text.old_id '
-                  'where revision.rev_page = %s', page_id)
-        for row in c:
-            timestamp, text, username = row
-            page_data = {
-                'timestamp': timestamp,
-                'text': text or '',
-                'username': username
-            }
-            yield page_data
-
-    def _talk(self, page_title):
-        """Return page_data for talk page with `page_title` title"""
-        c = self.connection().cursor()
-        query_attrs = (page_title, 1)  # page_namespace == 1 - talk pages
-        c.execute('select text.old_text, revision.rev_timestamp, '
-                  'revision.rev_user_text '
-                  'from page '
-                  'left join revision on revision.rev_id = page.page_latest '
-                  'left join text on text.old_id = revision.rev_text_id '
-                  'where page.page_title = %s and page.page_namespace = %s '
-                  'limit 1', query_attrs)
-
-        row = c.fetchone()
-        if row:
-            text, timestamp, username = row
-            return {'text': text, 'timestamp': timestamp, 'username': username}
-
-    def _attachments(self, page_id):
-        """Yield path to next file attached to wiki page"""
-        c = self.connection().cursor()
-        c.execute('select il_to from imagelinks '
-                  'where il_from = %s' % page_id)
-        for row in c:
-            name = row[0]
-            # mediawiki stores attachments in subdirectories
-            # based on md5-hash of filename
-            # so we need to build the path to the file as follows
-            md5 = hashlib.md5(name).hexdigest()
-            path = os.path.join(self.options.attachments_dir,
-                                md5[:1], md5[:2], name)
-            if os.path.isfile(path):
-                yield path
-
-    def extract(self):
-        self.extract_pages()
-
-    def extract_pages(self):
-        log.info('Extracting pages...')
-        for page in self._pages():
-            self.extract_history(page)
-            self.extract_talk(page)
-            self.extract_attachments(page)
-        log.info('Extracting pages done')
-
-    def extract_history(self, page):
-        page_id = page['page_id']
-        for page_data in self._history(page_id):
-            page_data.update(page)
-            self._save(json.dumps(page_data), 'pages', str(page_id),
-                       'history', str(page_data['timestamp']) + '.json')
-        log.info('Extracted history for page %s (%s)', page_id, page['title'])
-
-    def extract_talk(self, page):
-        page_id = page['page_id']
-        talk_page_data = self._talk(page['title'])
-        if talk_page_data:
-            self._save(json.dumps(talk_page_data), 'pages', str(page_id),
-                       'discussion.json')
-            log.info('Extracted talk for page %s (%s)', page_id, page['title'])
-        else:
-            log.info('No talk for page %s (%s)', page_id, page['title'])
-
-    def extract_attachments(self, page):
-        page_id = page['page_id']
-        for filepath in self._attachments(page_id):
-            self._save_attachment(filepath, 'pages', str(page_id),
-                                  'attachments')
-        log.info('Extracted attachments for page %s (%s)',
-                 page_id, page['title'])

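As an aside for anyone reading the removed extractor above: MediaWiki keeps uploaded files in hashed subdirectories, named after the first one and two hex characters of the md5 of the filename. A minimal, self-contained sketch of that path construction (directory and filename are made-up examples; this mirrors the deleted _attachments helper and is Python 2 like the rest of the code):

    import hashlib
    import os

    def attachment_path(attachments_dir, name):
        # MediaWiki stores uploads under <dir>/<md5[0]>/<md5[0:2]>/<name>
        md5 = hashlib.md5(name).hexdigest()
        return os.path.join(attachments_dir, md5[:1], md5[:2], name)

    print attachment_path('/var/mediawiki/images', 'Example.png')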
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/4f9f2161/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py b/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py
deleted file mode 100644
index a3e1e29..0000000
--- a/ForgeWiki/forgewiki/scripts/wiki2markdown/loaders.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import logging
-import os
-import json
-import datetime
-from pylons import tmpl_context as c
-from ming.orm.ormsession import ThreadLocalORMSession
-
-from allura import model as M
-from forgewiki import model as WM
-from forgewiki.converters import mediawiki2markdown
-from forgewiki.converters import mediawiki_internal_links2markdown
-from allura.lib import helpers as h
-from allura.lib import utils
-from allura.model.session import artifact_orm_session
-
-log = logging.getLogger(__name__)
-
-
-class MediawikiLoader(object):
-
-    """Load MediaWiki data from json to Allura wiki tool"""
-    TIMESTAMP_FMT = '%Y%m%d%H%M%S'
-
-    def __init__(self, options):
-        self.options = options
-        self.nbhd = M.Neighborhood.query.get(name=options.nbhd)
-        if not self.nbhd:
-            raise ValueError("Can't find neighborhood with name %s"
-                             % options.nbhd)
-        self.project = M.Project.query.get(shortname=options.project,
-                                           neighborhood_id=self.nbhd._id)
-        if not self.project:
-            raise ValueError("Can't find project with shortname %s "
-                             "and neighborhood_id %s"
-                             % (options.project, self.nbhd._id))
-
-        self.wiki = self.project.app_instance('wiki')
-        if not self.wiki:
-            raise ValueError("Can't find wiki app in given project")
-
-        h.set_context(self.project.shortname, 'wiki', neighborhood=self.nbhd)
-
-    def load(self):
-        try:
-            self.project.notifications_disabled = True
-            artifact_orm_session._get().skip_mod_date = True
-            self.load_pages()
-            ThreadLocalORMSession.flush_all()
-            log.info('Loading wiki done')
-        finally:
-            self.project.notifications_disabled = False
-            artifact_orm_session._get().skip_mod_date = False
-
-    def _pages(self):
-        """Yield path to page dump directory for next wiki page"""
-        pages_dir = os.path.join(self.options.dump_dir, 'pages')
-        pages = []
-        if not os.path.isdir(pages_dir):
-            return
-        pages = os.listdir(pages_dir)
-        for directory in pages:
-            dir_path = os.path.join(pages_dir, directory)
-            if os.path.isdir(dir_path):
-                yield dir_path
-
-    def _history(self, page_dir):
-        """Yield page_data for next wiki page in edit history"""
-        page_dir = os.path.join(page_dir, 'history')
-        if not os.path.isdir(page_dir):
-            return
-        pages = os.listdir(page_dir)
-        pages.sort()  # ensure that history is in the right order
-        for page in pages:
-            fn = os.path.join(page_dir, page)
-            try:
-                with open(fn, 'r') as pages_file:
-                    page_data = json.load(pages_file)
-            except IOError, e:
-                log.error("Can't open file: %s", str(e))
-                raise
-            except ValueError, e:
-                log.error("Can't load data from file %s: %s", fn, str(e))
-                raise
-            yield page_data
-
-    def _talk(self, page_dir):
-        """Return talk data from json dump"""
-        filename = os.path.join(page_dir, 'discussion.json')
-        if not os.path.isfile(filename):
-            return
-        try:
-            with open(filename, 'r') as talk_file:
-                talk_data = json.load(talk_file)
-        except IOError, e:
-            log.error("Can't open file: %s", str(e))
-            raise
-        except ValueError, e:
-            log.error("Can't load data from file %s: %s", filename, str(e))
-            raise
-        return talk_data
-
-    def _attachments(self, page_dir):
-        """Yield (filename, full path) to next attachment for given page."""
-        attachments_dir = os.path.join(page_dir, 'attachments')
-        if not os.path.isdir(attachments_dir):
-            return
-        attachments = os.listdir(attachments_dir)
-        for filename in attachments:
-            yield filename, os.path.join(attachments_dir, filename)
-
-    def load_pages(self):
-        """Load pages with edit history from json to Allura wiki tool"""
-        log.info('Loading pages into allura...')
-        for page_dir in self._pages():
-            for page in self._history(page_dir):
-                p = WM.Page.upsert(page['title'])
-                p.viewable_by = ['all']
-                p.text = mediawiki_internal_links2markdown(
-                    mediawiki2markdown(page['text']),
-                    page['title'])
-                timestamp = datetime.datetime.strptime(page['timestamp'],
-                                                       self.TIMESTAMP_FMT)
-                p.mod_date = timestamp
-                c.user = (M.User.query.get(username=page['username'].lower())
-                          or M.User.anonymous())
-                ss = p.commit()
-                ss.mod_date = ss.timestamp = timestamp
-
-            # set home to main page
-            if page['title'] == 'Main_Page':
-                gl = WM.Globals.query.get(app_config_id=self.wiki.config._id)
-                if gl is not None:
-                    gl.root = page['title']
-            log.info('Loaded history of page %s (%s)',
-                     page['page_id'], page['title'])
-
-            self.load_talk(page_dir, page['title'])
-            self.load_attachments(page_dir, page['title'])
-
-    def load_talk(self, page_dir, page_title):
-        """Load talk for page.
-
-        page_dir - path to directory with page dump.
-        page_title - page title in Allura Wiki
-        """
-        talk_data = self._talk(page_dir)
-        if not talk_data:
-            return
-        text = mediawiki2markdown(talk_data['text'])
-        page = WM.Page.query.get(app_config_id=self.wiki.config._id,
-                                 title=page_title)
-        if not page:
-            return
-        thread = M.Thread.query.get(ref_id=page.index_id())
-        if not thread:
-            return
-        timestamp = datetime.datetime.strptime(talk_data['timestamp'],
-                                               self.TIMESTAMP_FMT)
-        c.user = (M.User.query.get(username=talk_data['username'].lower())
-                  or M.User.anonymous())
-        thread.add_post(
-            text=text,
-            discussion_id=thread.discussion_id,
-            thread_id=thread._id,
-            timestamp=timestamp,
-            ignore_security=True)
-        log.info('Loaded talk for page %s', page_title)
-
-    def load_attachments(self, page_dir, page_title):
-        """Load attachments for page.
-
-        page_dir - path to directory with page dump.
-        """
-        page = WM.Page.query.get(app_config_id=self.wiki.config._id,
-                                 title=page_title)
-        for filename, path in self._attachments(page_dir):
-            try:
-                with open(path) as fp:
-                    page.attach(filename, fp,
-                                content_type=utils.guess_mime_type(filename))
-            except IOError, e:
-                log.error("Can't open file: %s", str(e))
-                raise
-        log.info('Loaded attachments for page %s.', page_title)

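For orientation, the extractor and loader removed above share an on-disk dump layout under dump_dir; roughly (page ids, timestamps and filenames are illustrative):

    pages/<page_id>/history/<timestamp>.json    one JSON document per revision, replayed in sorted order
    pages/<page_id>/discussion.json             latest talk-page revision, if any
    pages/<page_id>/attachments/<filename>      files copied from the MediaWiki images directory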
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/4f9f2161/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py b/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py
deleted file mode 100644
index 8a6e79e..0000000
--- a/ForgeWiki/forgewiki/scripts/wiki2markdown/wiki2markdown.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import argparse
-import logging
-import shutil
-import tempfile
-
-from tg import config
-
-from allura.lib import helpers as h
-from allura.scripts import ScriptTask
-
-from forgewiki.scripts.wiki2markdown.extractors import MySQLExtractor
-from forgewiki.scripts.wiki2markdown.loaders import MediawikiLoader
-
-log = logging.getLogger(__name__)
-
-
-class Wiki2Markdown(ScriptTask):
-
-    """Import MediaWiki to Allura Wiki tool"""
-    @classmethod
-    def parser(cls):
-        parser = argparse.ArgumentParser(description='Import wiki from '
-                                         'mediawiki-dump to allura wiki')
-        parser.add_argument('-e', '--extract-only', action='store_true',
-                            dest='extract',
-                            help='Store data from the mediawiki-dump '
-                            'on the local filesystem; not load into Allura')
-        parser.add_argument(
-            '-l', '--load-only', action='store_true', dest='load',
-            help='Load into Allura previously-extracted data')
-        parser.add_argument('-d', '--dump-dir', dest='dump_dir', default='',
-                            help='Directory for dump files')
-        parser.add_argument('-n', '--neighborhood', dest='nbhd', default='',
-                            help='Neighborhood name to load data')
-        parser.add_argument('-p', '--project', dest='project', default='',
-                            help='Project shortname to load data into')
-        parser.add_argument('-a', '--attachments-dir', dest='attachments_dir',
-                            help='Path to directory with mediawiki attachments dump',
-                            default='')
-        parser.add_argument('--db_config_prefix', dest='db_config_prefix',
-                            help='Key prefix (e.g. "legacy.") in ini file to '
-                            'use instead of commandline db params')
-        parser.add_argument('-s', '--source', dest='source', default='mysql',
-                            help='Database type to extract from (only mysql for now)')
-        parser.add_argument('--db_name', dest='db_name', default='mediawiki',
-                            help='Database name')
-        parser.add_argument('--host', dest='host', default='localhost',
-                            help='Database host')
-        parser.add_argument('--port', dest='port', type=int, default=0,
-                            help='Database port')
-        parser.add_argument('--user', dest='user', default='',
-                            help='User for database connection')
-        parser.add_argument('--password', dest='password', default='',
-                            help='Password for database connection')
-        parser.add_argument(
-            '--keep-dumps', action='store_true', dest='keep_dumps',
-            help='Leave dump files on disk after run')
-        return parser
-
-    @classmethod
-    def execute(cls, options):
-        options = cls.handle_options(options)
-
-        try:
-            if options.extract:
-                MySQLExtractor(options).extract()
-            if options.load:
-                MediawikiLoader(options).load()
-        finally:
-            if not options.keep_dumps:
-                shutil.rmtree(options.dump_dir)
-
-    @classmethod
-    def handle_options(cls, options):
-        if not options.extract and not options.load:
-            # if no action is specified, do both
-            options.extract = True
-            options.load = True
-
-        if not options.dump_dir:
-            if options.load and not options.extract:
-                raise ValueError(
-                    'You must specify directory containing dump files')
-            else:
-                options.dump_dir = tempfile.mkdtemp()
-                log.info("Writing temp files to %s", options.dump_dir)
-
-        if options.load and (not options.project or not options.nbhd):
-            raise ValueError('You must specify neighborhood and project '
-                             'to load data')
-
-        if options.extract:
-            if options.db_config_prefix:
-                for k, v in h.config_with_prefix(config, options.db_config_prefix).iteritems():
-                    if k == 'port':
-                        v = int(v)
-                    setattr(options, k, v)
-
-            if options.source == 'mysql':
-                pass
-            elif options.source in ('sqlite', 'postgres', 'sql-dump'):
-                raise ValueError(
-                    'This source is not implemented yet. Only mysql for now')
-            else:
-                raise ValueError('You must specify a valid data source')
-
-            if not options.attachments_dir:
-                raise ValueError(
-                    'You must specify the path to the directory with the mediawiki attachments dump.')
-
-        return options
-
-
-if __name__ == '__main__':
-    Wiki2Markdown.main()

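A note on the --db_config_prefix option in the removed script above: database settings can come from the ini file rather than the command line. The real code goes through h.config_with_prefix and then setattr's each key onto options; a rough sketch of the assumed mapping (the 'legacy.' prefix, keys and values are invented for illustration):

    # illustrative only: prefixed ini keys mapped onto the extractor's db options
    ini = {'legacy.host': 'db.example.com',
           'legacy.port': '3306',
           'legacy.db_name': 'mediawiki'}
    prefix = 'legacy.'
    db_opts = dict((k[len(prefix):], v) for k, v in ini.items()
                   if k.startswith(prefix))
    db_opts['port'] = int(db_opts['port'])  # port is cast to int, as in handle_options
    # db_opts -> {'host': 'db.example.com', 'port': 3306, 'db_name': 'mediawiki'}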
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/4f9f2161/ForgeWiki/forgewiki/tests/test_wiki2markdown.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/tests/test_wiki2markdown.py b/ForgeWiki/forgewiki/tests/test_wiki2markdown.py
deleted file mode 100644
index f021742..0000000
--- a/ForgeWiki/forgewiki/tests/test_wiki2markdown.py
+++ /dev/null
@@ -1,329 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import os
-import json
-from datetime import datetime
-
-import mock
-from IPython.testing.decorators import module_not_available, skipif
-from pylons import app_globals as g
-
-from forgewiki.scripts.wiki2markdown.extractors import MySQLExtractor
-from forgewiki.scripts.wiki2markdown.loaders import MediawikiLoader
-from alluratest.controller import setup_basic_test
-from allura import model as M
-from forgewiki import model as WM
-from allura.lib import helpers as h
-
-from pylons import tmpl_context as context
-
-
-class TestMySQLExtractor(object):
-
-    def setUp(self):
-        setup_basic_test()
-        self.options = mock.Mock()
-        self.options.dump_dir = os.path.join(g.tmpdir, 'w2m_test')
-
-        # monkey-patch MySQLExtractor for test
-        def pages(self):
-            yield {'page_id': 1, 'title': 'Test title'}
-            yield {'page_id': 2, 'title': 'Main_Page'}
-            yield {'page_id': 3, 'title': 'Test'}
-
-        def history(self, page_id):
-            data = {
-                1: [
-                    {'timestamp': 1, 'text': "Test", 'username': 'test-user'},
-                    {'timestamp': 2, 'text': "Test Text", 'username': 'bad'}
-                ],
-                2: [
-                    {'timestamp': 1, 'text': "Main_Page", 'username': 'b'},
-                    {'timestamp': 2, 'text': "Main_Page text", 'username': 'b'}
-                ],
-                3: [
-                    {'timestamp': 1, 'text': "Some test text", 'username': ''},
-                    {'timestamp': 2, 'text': "", 'username': ''}
-                ]
-            }
-            revisions = data[page_id]
-            for rev in revisions:
-                yield rev
-
-        def talk(self, page_title):
-            return {
-                'text': 'Talk for page %s.' % page_title,
-                'timestamp': 1,
-                'username': 'test-user'
-            }
-
-        def attachments(self, *args, **kwargs):
-            # make 'empty' iterator
-            if False:
-                yield
-
-        MySQLExtractor._pages = pages
-        MySQLExtractor._history = history
-        MySQLExtractor._talk = talk
-        MySQLExtractor._attachments = attachments
-        self.extractor = MySQLExtractor(self.options)
-
-    def test_extract_pages(self):
-        """Test that pages and edit history extracted properly"""
-        self.extractor.extract_pages()
-
-        # rev 1 of page 1
-        with open(os.path.join(self.options.dump_dir, 'pages/1/history/1.json'), 'r') as f:
-            page = json.load(f)
-        res_page = {
-            'timestamp': 1,
-            'text': 'Test',
-            'page_id': 1,
-            'title': 'Test title',
-            'username': 'test-user'
-        }
-        assert page == res_page
-
-        # rev 2 of page 1
-        with open(os.path.join(self.options.dump_dir, 'pages/1/history/2.json'), 'r') as f:
-            page = json.load(f)
-        res_page = {
-            'timestamp': 2,
-            'text': 'Test Text',
-            'page_id': 1,
-            'title': 'Test title',
-            'username': 'bad'
-        }
-        assert page == res_page
-
-        # rev 1 of page 2
-        with open(os.path.join(self.options.dump_dir, 'pages/2/history/1.json'), 'r') as f:
-            page = json.load(f)
-        res_page = {
-            'timestamp': 1,
-            'text': 'Main_Page',
-            'page_id': 2,
-            'title': 'Main_Page',
-            'username': 'b'
-        }
-        assert page == res_page
-
-        # rev 2 of page 2
-        with open(os.path.join(self.options.dump_dir, 'pages/2/history/2.json'), 'r') as f:
-            page = json.load(f)
-        res_page = {
-            'timestamp': 2,
-            'text': 'Main_Page text',
-            'page_id': 2,
-            'title': 'Main_Page',
-            'username': 'b'
-        }
-        assert page == res_page
-
-        # rev 1 of page 3
-        with open(os.path.join(self.options.dump_dir, 'pages/3/history/1.json'), 'r') as f:
-            page = json.load(f)
-        res_page = {
-            'timestamp': 1,
-            'text': 'Some test text',
-            'page_id': 3,
-            'title': 'Test',
-            'username': ''
-        }
-        assert page == res_page
-
-        # rev 2 of page 3
-        with open(os.path.join(self.options.dump_dir, 'pages/3/history/2.json'), 'r') as f:
-            page = json.load(f)
-        res_page = {
-            'timestamp': 2,
-            'text': '',
-            'page_id': 3,
-            'title': 'Test',
-            'username': ''
-        }
-        assert page == res_page
-
-    def test_extract_talk(self):
-        """Test that talk pages extracted properly."""
-        pages = [
-            {'page_id': 1, 'title': 'Test 1'},
-            {'page_id': 2, 'title': 'Test 2'},
-            {'page_id': 3, 'title': 'Test 3'},
-        ]
-        for page in pages:
-            self.extractor.extract_talk(page)
-
-        with open(os.path.join(self.options.dump_dir, 'pages/1/discussion.json'), 'r') as f:
-            page = json.load(f)
-        assert page == {
-            'text': 'Talk for page Test 1.',
-            'username': 'test-user',
-            'timestamp': 1}
-
-        with open(os.path.join(self.options.dump_dir, 'pages/2/discussion.json'), 'r') as f:
-            page = json.load(f)
-        assert page == {
-            'text': 'Talk for page Test 2.',
-            'timestamp': 1,
-            'username': 'test-user'}
-
-        with open(os.path.join(self.options.dump_dir, 'pages/3/discussion.json'), 'r') as f:
-            page = json.load(f)
-        assert page == {
-            'text': 'Talk for page Test 3.',
-            'timestamp': 1,
-            'username': 'test-user'}
-
-
-class TestMediawikiLoader(object):
-
-    def setUp(self):
-        setup_basic_test()
-        self.options = mock.Mock()
-        # need test project with installed wiki app
-        self.options.nbhd = 'Adobe'
-        self.options.project = '--init--'
-
-        nbhd = M.Neighborhood.query.get(name=self.options.nbhd)
-        h.set_context(self.options.project, 'wiki', neighborhood=nbhd)
-
-        # monkey-patch MediawikiLoader for test
-        def pages(self):
-            yield 1
-            yield 2
-
-        def history(self, page_dir):
-            data = {
-                1: [
-                    {
-                        'title': 'Test title',
-                        'text': "'''bold''' ''italics''",
-                        'page_id': 1,
-                        'timestamp': '20120808000001',
-                        'username': 'test-user'
-                    },
-                    {
-                        'title': 'Test title',
-                        'text': "'''bold'''",
-                        'page_id': 1,
-                        'timestamp': '20120809000001',
-                        'username': 'test-user'
-                    },
-                ],
-                2: [
-                    {
-                        'title': 'Main',
-                        'text': "Main text rev 1",
-                        'page_id': 2,
-                        'timestamp': '20120808000001',
-                        'username': 'bad-user'
-                    },
-                    {
-                        'title': 'Main',
-                        'text': "Main text rev 2",
-                        'page_id': 2,
-                        'timestamp': '20120809000001',
-                        'username': 'bad-user'
-                    },
-
-                ],
-            }
-            for page in data[page_dir]:
-                yield page
-
-        def talk(self, page_dir):
-            data = {
-                1: {
-                    'text': "''Talk page'' for page 1.",
-                    'username': 'test-user',
-                    'timestamp': '20120809000001'
-                },
-                2: {
-                    'text': "''Talk page'' for page 2.",
-                    'username': 'bad-user',
-                    'timestamp': '20120809000001'
-                },
-            }
-            return data[page_dir]
-
-        def attachments(self, *args, **kwargs):
-            # make 'empty' iterator
-            if False:
-                yield
-
-        MediawikiLoader._pages = pages
-        MediawikiLoader._history = history
-        MediawikiLoader._talk = talk
-        MediawikiLoader._attachments = attachments
-        self.loader = MediawikiLoader(self.options)
-
-    def get_page(self, title):
-        return WM.Page.query.get(app_config_id=context.app.config._id,
-                                 title=title)
-
-    def get_post(self, title):
-        page = self.get_page(title)
-        thread = M.Thread.query.get(ref_id=page.index_id())
-        return M.Post.query.get(discussion_id=thread.discussion_id,
-                                thread_id=thread._id)
-
-    @skipif(module_not_available('mediawiki'))
-    @mock.patch('allura.model.discuss.g.director')
-    def test_load_pages(self, director):
-        """Test that pages, edit history and talk loaded properly"""
-        self.loader.load_pages()
-        page = self.get_page('Test title')
-
-        assert page.mod_date == datetime.strptime('20120809000001',
-                                                  self.loader.TIMESTAMP_FMT)
-        assert page.authors()[0].username == 'test-user'
-        assert '**bold**' in page.text
-        # _italics_ should only be in the first revision of the page
-        assert '_italics_' not in page.text
-
-        page = page.get_version(1)
-        assert '**bold** _italics_' in page.text
-        assert page.mod_date == datetime.strptime('20120808000001',
-                                                  self.loader.TIMESTAMP_FMT)
-        assert page.authors()[0].username == 'test-user'
-
-        page = self.get_page('Main')
-        assert page.mod_date == datetime.strptime('20120809000001',
-                                                  self.loader.TIMESTAMP_FMT)
-        assert page.authors()[0].username == '*anonymous'
-        assert 'Main text rev 2' in page.text
-
-        page = page.get_version(1)
-        assert page.mod_date == datetime.strptime('20120808000001',
-                                                  self.loader.TIMESTAMP_FMT)
-        assert page.authors()[0].username == '*anonymous'
-        assert 'Main text rev 1' in page.text
-
-        # Check that talk pages loaded
-        post = self.get_post('Test title')
-        assert post.timestamp == datetime.strptime('20120809000001',
-                                                   self.loader.TIMESTAMP_FMT)
-        assert post.author().username == 'test-user'
-        assert '_Talk page_ for page 1.' in post.text
-
-        post = self.get_post('Main')
-        assert post.timestamp == datetime.strptime('20120809000001',
-                                                   self.loader.TIMESTAMP_FMT)
-        assert post.author().username == '*anonymous'
-        assert '_Talk page_ for page 2.' in post.text

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/4f9f2161/ForgeWiki/setup.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/setup.py b/ForgeWiki/setup.py
index 6bc7944..864e7f1 100644
--- a/ForgeWiki/setup.py
+++ b/ForgeWiki/setup.py
@@ -44,8 +44,5 @@ setup(name='ForgeWiki',
       # -*- Entry points: -*-
       [allura]
       Wiki=forgewiki.wiki_main:ForgeWikiApp
-
-      [paste.paster_command]
-      wiki2markdown = forgewiki.command.wiki2markdown:Wiki2MarkDownCommand
       """,
       )

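With the paste.paster_command registration gone, the Wiki tool is exposed only through the [allura] entry point above. Such entry points are discoverable with standard setuptools tooling; a small sketch (not Allura's actual loading code):

    import pkg_resources

    for ep in pkg_resources.iter_entry_points('allura'):
        print ep.name, ep.module_name  # e.g. "Wiki forgewiki.wiki_main"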

[24/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/auth.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/auth.py b/Allura/allura/model/auth.py
index fb8eaf7..c2e9097 100644
--- a/Allura/allura/model/auth.py
+++ b/Allura/allura/model/auth.py
@@ -54,6 +54,7 @@ from .timeline import ActivityNode, ActivityObject
 
 log = logging.getLogger(__name__)
 
+
 def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
     """
     Returns a bytestring version of 's', encoded as specified in 'encoding'.
@@ -73,7 +74,7 @@ def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
                 # know how to print itself properly. We shouldn't raise a
                 # further exception.
                 return ' '.join([smart_str(arg, encoding, strings_only,
-                        errors) for arg in s])
+                                           errors) for arg in s])
             return unicode(s).encode(encoding, errors)
     elif isinstance(s, unicode):
         r = s.encode(encoding, errors)
@@ -83,10 +84,12 @@ def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
     else:
         return s
 
+
 def generate_smart_str(params):
     for (key, value) in params:
         yield smart_str(key), smart_str(value)
 
+
 def urlencode(params):
     """
     A version of Python's urllib.urlencode() function that can operate on
@@ -102,12 +105,14 @@ class ApiAuthMixIn(object):
         try:
             # Validate timestamp
             timestamp = iso8601.parse_date(params['api_timestamp'])
-            timestamp_utc = timestamp.replace(tzinfo=None) - timestamp.utcoffset()
+            timestamp_utc = timestamp.replace(
+                tzinfo=None) - timestamp.utcoffset()
             if abs(datetime.utcnow() - timestamp_utc) > timedelta(minutes=10):
                 return False
             # Validate signature
             api_signature = params['api_signature']
-            params = sorted((k,v) for k,v in params.iteritems() if k != 'api_signature')
+            params = sorted((k, v)
+                            for k, v in params.iteritems() if k != 'api_signature')
             string_to_sign = path + '?' + urlencode(params)
             digest = hmac.new(self.secret_key, string_to_sign, hashlib.sha256)
             return digest.hexdigest() == api_signature
@@ -115,18 +120,23 @@ class ApiAuthMixIn(object):
             return False
 
     def sign_request(self, path, params):
-        if hasattr(params, 'items'): params = params.items()
+        if hasattr(params, 'items'):
+            params = params.items()
         has_api_key = has_api_timestamp = has_api_signature = False
-        for k,v in params:
-            if k == 'api_key': has_api_key = True
-            if k == 'api_timestamp': has_api_timestamp = True
-            if k == 'api_signature': has_api_signature = True
+        for k, v in params:
+            if k == 'api_key':
+                has_api_key = True
+            if k == 'api_timestamp':
+                has_api_timestamp = True
+            if k == 'api_signature':
+                has_api_signature = True
         if not has_api_key:
             params.append(('api_key', self.api_key))
         if not has_api_timestamp:
             params.append(('api_timestamp', datetime.utcnow().isoformat()))
         if not has_api_signature:
-            string_to_sign = urllib.quote(path) + '?' + urlencode(sorted(params))
+            string_to_sign = urllib.quote(path) + \
+                '?' + urlencode(sorted(params))
             digest = hmac.new(self.secret_key, string_to_sign, hashlib.sha256)
             params.append(('api_signature', digest.hexdigest()))
         return params
@@ -136,14 +146,15 @@ class ApiAuthMixIn(object):
 
 
 class ApiToken(MappedClass, ApiAuthMixIn):
+
     class __mongometa__:
-        name='api_token'
+        name = 'api_token'
         session = main_orm_session
-        unique_indexes = [ 'user_id' ]
+        unique_indexes = ['user_id']
 
     _id = FieldProperty(S.ObjectId)
     user_id = ForeignIdProperty('User')
-    api_key = FieldProperty(str, if_missing=lambda:str(uuid.uuid4()))
+    api_key = FieldProperty(str, if_missing=lambda: str(uuid.uuid4()))
     secret_key = FieldProperty(str, if_missing=h.cryptographic_nonce)
 
     user = RelationProperty('User')
@@ -154,17 +165,19 @@ class ApiToken(MappedClass, ApiAuthMixIn):
 
 
 class ApiTicket(MappedClass, ApiAuthMixIn):
+
     class __mongometa__:
-        name='api_ticket'
+        name = 'api_ticket'
         session = main_orm_session
     PREFIX = 'tck'
 
     _id = FieldProperty(S.ObjectId)
     user_id = ForeignIdProperty('User')
-    api_key = FieldProperty(str, if_missing=lambda: ApiTicket.PREFIX + h.nonce(20))
+    api_key = FieldProperty(
+        str, if_missing=lambda: ApiTicket.PREFIX + h.nonce(20))
     secret_key = FieldProperty(str, if_missing=h.cryptographic_nonce)
     expires = FieldProperty(datetime, if_missing=None)
-    capabilities = FieldProperty({str:None})
+    capabilities = FieldProperty({str: None})
     mod_date = FieldProperty(datetime, if_missing=datetime.utcnow)
 
     user = RelationProperty('User')
@@ -183,16 +196,18 @@ class ApiTicket(MappedClass, ApiAuthMixIn):
     def get_capability(self, key):
         return self.capabilities.get(key)
 
+
 class EmailAddress(MappedClass):
     re_format = re.compile('^.* <(.*)>$')
+
     class __mongometa__:
-        name='email_address'
+        name = 'email_address'
         session = main_orm_session
         indexes = [
             'claimed_by_user_id']
 
     _id = FieldProperty(str)
-    claimed_by_user_id=FieldProperty(S.ObjectId, if_missing=None)
+    claimed_by_user_id = FieldProperty(S.ObjectId, if_missing=None)
     confirmed = FieldProperty(bool)
     nonce = FieldProperty(str)
 
@@ -236,14 +251,16 @@ please visit the following URL:
             message_id=h.gen_message_id(),
             text=text)
 
+
 class OpenId(MappedClass):
+
     class __mongometa__:
-        name='openid'
+        name = 'openid'
         session = main_orm_session
 
     _id = FieldProperty(str)
-    claimed_by_user_id=FieldProperty(S.ObjectId, if_missing=None)
-    display_identifier=FieldProperty(str)
+    claimed_by_user_id = FieldProperty(S.ObjectId, if_missing=None)
+    display_identifier = FieldProperty(str)
 
     @classmethod
     def upsert(cls, url, display_identifier):
@@ -256,8 +273,9 @@ class OpenId(MappedClass):
 
     def claimed_by_user(self):
         if self.claimed_by_user_id:
-            result = User.query.get(_id=self.claimed_by_user_id, disabled=False)
-        else: # pragma no cover
+            result = User.query.get(
+                _id=self.claimed_by_user_id, disabled=False)
+        else:  # pragma no cover
             result = User.register(
                 dict(username=None, password=None,
                      display_name=self.display_identifier,
@@ -266,9 +284,11 @@ class OpenId(MappedClass):
             self.claimed_by_user_id = result._id
         return result
 
+
 class AuthGlobals(MappedClass):
+
     class __mongometa__:
-        name='auth_globals'
+        name = 'auth_globals'
         session = main_orm_session
 
     _id = FieldProperty(int)
@@ -277,12 +297,13 @@ class AuthGlobals(MappedClass):
     @classmethod
     def upsert(cls):
         r = cls.query.get()
-        if r is not None: return r
+        if r is not None:
+            return r
         try:
             r = cls(_id=0)
             session(r).flush(r)
             return r
-        except pymongo.errors.DuplicateKeyError: # pragma no cover
+        except pymongo.errors.DuplicateKeyError:  # pragma no cover
             session(r).flush(r)
             r = cls.query.get()
             return r
@@ -291,68 +312,70 @@ class AuthGlobals(MappedClass):
     def get_next_uid(cls):
         cls.upsert()
         g = cls.query.find_and_modify(
-            query={}, update={'$inc':{'next_uid': 1}},
+            query={}, update={'$inc': {'next_uid': 1}},
             new=True)
         return g.next_uid
 
 
 class User(MappedClass, ActivityNode, ActivityObject):
-    SALT_LEN=8
+    SALT_LEN = 8
+
     class __mongometa__:
-        name='user'
+        name = 'user'
         session = main_orm_session
-        indexes = [ 'tool_data.sfx.userid', 'tool_data.AuthPasswordReset.hash' ]
-        unique_indexes = [ 'username' ]
-
-    _id=FieldProperty(S.ObjectId)
-    sfx_userid=FieldProperty(S.Deprecated)
-    username=FieldProperty(str)
-    open_ids=FieldProperty([str])
-    email_addresses=FieldProperty([str])
-    password=FieldProperty(str)
-    projects=FieldProperty(S.Deprecated)
-    tool_preferences=FieldProperty({str:{str:None}}) # full mount point: prefs dict
-    tool_data = FieldProperty({str:{str:None}}) # entry point: prefs dict
-    display_name=FieldProperty(str)
-    disabled=FieldProperty(bool, if_missing=False)
+        indexes = ['tool_data.sfx.userid', 'tool_data.AuthPasswordReset.hash']
+        unique_indexes = ['username']
+
+    _id = FieldProperty(S.ObjectId)
+    sfx_userid = FieldProperty(S.Deprecated)
+    username = FieldProperty(str)
+    open_ids = FieldProperty([str])
+    email_addresses = FieldProperty([str])
+    password = FieldProperty(str)
+    projects = FieldProperty(S.Deprecated)
+    # full mount point: prefs dict
+    tool_preferences = FieldProperty({str: {str: None}})
+    tool_data = FieldProperty({str: {str: None}})  # entry point: prefs dict
+    display_name = FieldProperty(str)
+    disabled = FieldProperty(bool, if_missing=False)
     # Don't use directly, use get/set_pref() instead
-    preferences=FieldProperty(dict(
-            results_per_page=int,
-            email_address=str,
-            email_format=str,
-            disable_user_messages=bool))
-
-    #Personal data
-    sex=FieldProperty(
+    preferences = FieldProperty(dict(
+        results_per_page=int,
+        email_address=str,
+        email_format=str,
+        disable_user_messages=bool))
+
+    # Personal data
+    sex = FieldProperty(
         S.OneOf('Male', 'Female', 'Other', 'Unknown',
-        if_missing='Unknown'))
-    birthdate=FieldProperty(S.DateTime, if_missing=None)
+                if_missing='Unknown'))
+    birthdate = FieldProperty(S.DateTime, if_missing=None)
 
-    #Availability information
-    availability=FieldProperty([dict(
+    # Availability information
+    availability = FieldProperty([dict(
         week_day=str,
         start_time=dict(h=int, m=int),
         end_time=dict(h=int, m=int))])
-    localization=FieldProperty(dict(city=str,country=str))
-    timezone=FieldProperty(str)
-    sent_user_message_times=FieldProperty([S.DateTime])
-    inactiveperiod=FieldProperty([dict(
+    localization = FieldProperty(dict(city=str, country=str))
+    timezone = FieldProperty(str)
+    sent_user_message_times = FieldProperty([S.DateTime])
+    inactiveperiod = FieldProperty([dict(
         start_date=S.DateTime,
         end_date=S.DateTime)])
 
-    #Additional contacts
-    socialnetworks=FieldProperty([dict(socialnetwork=str,accounturl=str)])
-    telnumbers=FieldProperty([str])
-    skypeaccount=FieldProperty(str)
-    webpages=FieldProperty([str])
+    # Additional contacts
+    socialnetworks = FieldProperty([dict(socialnetwork=str, accounturl=str)])
+    telnumbers = FieldProperty([str])
+    skypeaccount = FieldProperty(str)
+    webpages = FieldProperty([str])
 
-    #Skills list
+    # Skills list
     skills = FieldProperty([dict(
-        category_id = S.ObjectId,
-        level = S.OneOf('low', 'high', 'medium'),
+        category_id=S.ObjectId,
+        level=S.OneOf('low', 'high', 'medium'),
         comment=str)])
 
-    #Statistics
+    # Statistics
     stats_id = FieldProperty(S.ObjectId, if_missing=None)
 
     def can_send_user_message(self):
@@ -366,7 +389,7 @@ class User(MappedClass, ActivityNode, ActivityObject):
         now = datetime.utcnow()
         time_interval = timedelta(seconds=g.user_message_time_interval)
         self.sent_user_message_times = [t for t in self.sent_user_message_times
-                if t + time_interval > now]
+                                        if t + time_interval > now]
         return len(self.sent_user_message_times) < g.user_message_max_messages
 
     def time_to_next_user_message(self):
@@ -379,14 +402,15 @@ class User(MappedClass, ActivityNode, ActivityObject):
         if self.can_send_user_message():
             return 0
         return self.sent_user_message_times[0] + \
-                timedelta(seconds=g.user_message_time_interval) - \
-                datetime.utcnow()
+            timedelta(seconds=g.user_message_time_interval) - \
+            datetime.utcnow()
 
     def send_user_message(self, user, subject, message, cc):
         """Send a user message (email) to ``user``.
 
         """
-        tmpl = g.jinja2_env.get_template('allura:ext/user_profile/templates/message.html')
+        tmpl = g.jinja2_env.get_template(
+            'allura:ext/user_profile/templates/message.html')
         tmpl_context = {
             'message_text': message,
             'site_name': config['site_name'],
@@ -438,7 +462,7 @@ class User(MappedClass, ActivityNode, ActivityObject):
 
     def remove_socialnetwork(self, socialnetwork, oldurl):
         for el in self.socialnetworks:
-            if el.socialnetwork==socialnetwork and el.accounturl==oldurl:
+            if el.socialnetwork == socialnetwork and el.accounturl == oldurl:
                 del self.socialnetworks[self.socialnetworks.index(el)]
                 return
 
@@ -447,7 +471,7 @@ class User(MappedClass, ActivityNode, ActivityObject):
 
     def remove_telephonenumber(self, oldvalue):
         for el in self.telnumbers:
-            if el==oldvalue:
+            if el == oldvalue:
                 del self.telnumbers[self.telnumbers.index(el)]
                 return
 
@@ -456,15 +480,15 @@ class User(MappedClass, ActivityNode, ActivityObject):
 
     def remove_webpage(self, oldvalue):
         for el in self.webpages:
-            if el==oldvalue:
+            if el == oldvalue:
                 del self.webpages[self.webpages.index(el)]
                 return
 
     def add_timeslot(self, weekday, starttime, endtime):
         self.availability.append(
-           dict(week_day=weekday,
-                start_time=starttime,
-                end_time=endtime))
+            dict(week_day=weekday,
+                 start_time=starttime,
+                 end_time=endtime))
 
     def remove_timeslot(self, weekday, starttime, endtime):
         oldel = dict(week_day=weekday, start_time=starttime, end_time=endtime)
@@ -475,8 +499,8 @@ class User(MappedClass, ActivityNode, ActivityObject):
 
     def add_inactive_period(self, startdate, enddate):
         self.inactiveperiod.append(
-           dict(start_date=startdate,
-                end_date=enddate))
+            dict(start_date=startdate,
+                 end_date=enddate))
 
     def remove_inactive_period(self, startdate, enddate):
         oldel = dict(start_date=startdate, end_date=enddate)
@@ -508,28 +532,28 @@ class User(MappedClass, ActivityNode, ActivityObject):
 
             dif_days_start = convtime1.weekday() - today.weekday()
             dif_days_end = convtime2.weekday() - today.weekday()
-            index = (week_day.index(t['week_day'])+dif_days_start) % 7
+            index = (week_day.index(t['week_day']) + dif_days_start) % 7
             week_day_start = week_day[index]
             week_day_end = week_day[index]
 
             if week_day_start == week_day_end:
                 retlist.append(dict(
-                    week_day = week_day_start,
-                    start_time = convtime1.time(),
-                    end_time = convtime2.time()))
+                    week_day=week_day_start,
+                    start_time=convtime1.time(),
+                    end_time=convtime2.time()))
             else:
                 retlist.append(dict(
-                    week_day = week_day_start,
-                    start_time = convtime1.time(),
-                    end_time = time(23, 59)))
+                    week_day=week_day_start,
+                    start_time=convtime1.time(),
+                    end_time=time(23, 59)))
                 retlist.append(dict(
-                    week_day = week_day_end,
-                    start_time = time(0, 0),
-                    end_time = convtime2.time()))
+                    week_day=week_day_end,
+                    start_time=time(0, 0),
+                    end_time=convtime2.time()))
 
         return sorted(
             retlist,
-            key=lambda k:(week_day.index(k['week_day']), k['start_time']))
+            key=lambda k: (week_day.index(k['week_day']), k['start_time']))
 
     def get_skills(self):
         from allura.model.project import TroveCategory
@@ -549,9 +573,9 @@ class User(MappedClass, ActivityNode, ActivityObject):
             (starth, startm) = (start.get('h'), start.get('m'))
             (endh, endm) = (end.get('h'), end.get('m'))
             newdict = dict(
-                week_day  = el.get('week_day'),
-                start_time= time(starth,startm,0),
-                end_time  = time(endh,endm,0))
+                week_day=el.get('week_day'),
+                start_time=time(starth, startm, 0),
+                end_time=time(endh, endm, 0))
             retval.append(newdict)
         return retval
 
@@ -559,7 +583,7 @@ class User(MappedClass, ActivityNode, ActivityObject):
         retval = []
         for el in self.inactiveperiod:
             d1, d2 = (el.get('start_date'), el.get('end_date'))
-            newdict = dict(start_date = d1, end_date = d2)
+            newdict = dict(start_date=d1, end_date=d2)
             if include_past_periods or newdict['end_date'] > datetime.today():
                 retval.append(newdict)
         return retval
@@ -573,10 +597,11 @@ class User(MappedClass, ActivityNode, ActivityObject):
         try:
             private_project = self.private_project()
         except:
-            log.warn('Error getting/creating user-project for %s', self.username, exc_info=True)
+            log.warn('Error getting/creating user-project for %s',
+                     self.username, exc_info=True)
             private_project = None
         if private_project and private_project.icon:
-            icon_url = self.url()+'user_icon'
+            icon_url = self.url() + 'user_icon'
         elif self.preferences.email_address:
             icon_url = g.gravatar(self.preferences.email_address)
         return icon_url
@@ -584,7 +609,8 @@ class User(MappedClass, ActivityNode, ActivityObject):
     @classmethod
     def upsert(cls, username):
         u = cls.query.get(username=username)
-        if u is not None: return u
+        if u is not None:
+            return u
         try:
             u = cls(username=username)
             session(u).flush(u)
@@ -596,7 +622,8 @@ class User(MappedClass, ActivityNode, ActivityObject):
     @classmethod
     def by_email_address(cls, addr):
         ea = EmailAddress.query.get(_id=addr)
-        if ea is None: return None
+        if ea is None:
+            return None
         return ea.claimed_by_user()
 
     @classmethod
@@ -629,14 +656,16 @@ class User(MappedClass, ActivityNode, ActivityObject):
     def claim_openid(self, oid_url):
         oid_obj = OpenId.upsert(oid_url, self.get_pref('display_name'))
         oid_obj.claimed_by_user_id = self._id
-        if oid_url in self.open_ids: return
+        if oid_url in self.open_ids:
+            return
         self.open_ids.append(oid_url)
 
     def claim_address(self, email_address):
         addr = EmailAddress.canonical(email_address)
         email_addr = EmailAddress.upsert(addr)
         email_addr.claimed_by_user_id = self._id
-        if addr in self.email_addresses: return
+        if addr in self.email_addresses:
+            return
         self.email_addresses.append(addr)
 
     def claim_only_addresses(self, *addresses):
@@ -644,12 +673,13 @@ class User(MappedClass, ActivityNode, ActivityObject):
         attribute to True on all.
         '''
         self.email_addresses = [
-            EmailAddress.canonical(a) for a in addresses ]
+            EmailAddress.canonical(a) for a in addresses]
         addresses = set(self.email_addresses)
         for addr in EmailAddress.query.find(
-            dict(claimed_by_user_id=self._id)):
+                dict(claimed_by_user_id=self._id)):
             if addr._id in addresses:
-                if not addr.confirmed: addr.confirmed = True
+                if not addr.confirmed:
+                    addr.confirmed = True
                 addresses.remove(addr._id)
             else:
                 addr.delete()
@@ -689,16 +719,21 @@ class User(MappedClass, ActivityNode, ActivityObject):
         n = self.neighborhood
         auth_provider = plugin.AuthenticationProvider.get(request)
         project_shortname = auth_provider.user_project_shortname(self)
-        p = M.Project.query.get(shortname=project_shortname, neighborhood_id=n._id)
+        p = M.Project.query.get(
+            shortname=project_shortname, neighborhood_id=n._id)
         if p and p.deleted:
-            # really delete it, since registering a new project would conflict with the "deleted" one
-            log.info('completely deleting user project (was already flagged as deleted) %s', project_shortname)
+            # really delete it, since registering a new project would conflict
+            # with the "deleted" one
+            log.info(
+                'completely deleting user project (was already flagged as deleted) %s',
+                project_shortname)
             p.delete()
             ThreadLocalORMSession.flush_all()
             p = None
         if not p and not self.is_anonymous():
             # create user-project on demand if it is missing
-            p = n.register_project(project_shortname, user=self, user_project=True)
+            p = n.register_project(
+                project_shortname, user=self, user_project=True)
         return p
 
     @property
@@ -714,8 +749,10 @@ class User(MappedClass, ActivityNode, ActivityObject):
         """
         if self.is_anonymous():
             return
-        reaching_role_ids = list(g.credentials.user_roles(user_id=self._id).reaching_ids_set)
-        reaching_roles = ProjectRole.query.find({'_id': {'$in': reaching_role_ids}}).all()
+        reaching_role_ids = list(
+            g.credentials.user_roles(user_id=self._id).reaching_ids_set)
+        reaching_roles = ProjectRole.query.find(
+            {'_id': {'$in': reaching_role_ids}}).all()
         if not role_name:
             named_roles = [r for r in reaching_roles
                            if r.name and r.project and not r.project.deleted]
@@ -724,7 +761,8 @@ class User(MappedClass, ActivityNode, ActivityObject):
                            if r.name == role_name and r.project and not r.project.deleted]
         seen_project_ids = set()
         for r in named_roles:
-            if r.project_id in seen_project_ids: continue
+            if r.project_id in seen_project_ids:
+                continue
             seen_project_ids.add(r.project_id)
             yield r.project
 
@@ -750,7 +788,7 @@ class User(MappedClass, ActivityNode, ActivityObject):
 
     @classmethod
     def withskill(cls, skill):
-        return cls.query.find({"skills.category_id" : skill._id})
+        return cls.query.find({"skills.category_id": skill._id})
 
     def __json__(self):
         return dict(
@@ -759,13 +797,17 @@ class User(MappedClass, ActivityNode, ActivityObject):
             url=h.absurl(self.url()),
         )
 
+
 class OldProjectRole(MappedClass):
+
     class __mongometa__:
         session = project_orm_session
-        name='user'
-        unique_indexes = [ ('user_id', 'project_id', 'name') ]
+        name = 'user'
+        unique_indexes = [('user_id', 'project_id', 'name')]
+
 
 class ProjectRole(MappedClass):
+
     """
     Per-project roles, called "Groups" in the UI.
     This can be a proxy for a single user.  It can also inherit roles.
@@ -778,13 +820,13 @@ class ProjectRole(MappedClass):
 
     class __mongometa__:
         session = main_orm_session
-        name='project_role'
-        unique_indexes = [ ('user_id', 'project_id', 'name') ]
+        name = 'project_role'
+        unique_indexes = [('user_id', 'project_id', 'name')]
         indexes = [
             ('user_id',),
-            ('project_id', 'name'), # used in ProjectRole.by_name()
+            ('project_id', 'name'),  # used in ProjectRole.by_name()
             ('roles',),
-            ]
+        ]
 
     _id = FieldProperty(S.ObjectId)
     user_id = ForeignIdProperty('User', if_missing=None)
@@ -800,34 +842,40 @@ class ProjectRole(MappedClass):
         super(ProjectRole, self).__init__(**kw)
 
     def display(self):
-        if self.name: return self.name
+        if self.name:
+            return self.name
         if self.user_id:
             u = self.user
-            if u.username: uname = u.username
-            elif u.get_pref('display_name'): uname = u.get_pref('display_name')
-            else: uname = u._id
+            if u.username:
+                uname = u.username
+            elif u.get_pref('display_name'):
+                uname = u.get_pref('display_name')
+            else:
+                uname = u._id
             return '*user-%s' % uname
-        return '**unknown name role: %s' % self._id # pragma no cover
+        return '**unknown name role: %s' % self._id  # pragma no cover
 
     @classmethod
     def by_user(cls, user, project=None, upsert=False):
-        if project is None: project = c.project
+        if project is None:
+            project = c.project
         if user.is_anonymous():
             return cls.anonymous(project)
         if upsert:
             return cls.upsert(
-                    user_id=user._id,
-                    project_id=project.root_project._id,
-                )
+                user_id=user._id,
+                project_id=project.root_project._id,
+            )
         else:
             return cls.query.get(
-                    user_id=user._id,
-                    project_id=project.root_project._id,
-                )
+                user_id=user._id,
+                project_id=project.root_project._id,
+            )
 
     @classmethod
     def by_name(cls, name, project=None):
-        if project is None: project = c.project
+        if project is None:
+            project = c.project
         if hasattr(project, 'root_project'):
             project = project.root_project
         if hasattr(project, '_id'):
@@ -850,7 +898,8 @@ class ProjectRole(MappedClass):
     @classmethod
     def upsert(cls, **kw):
         obj = cls.query.get(**kw)
-        if obj is not None: return obj
+        if obj is not None:
+            return obj
         try:
             obj = cls(**kw)
             session(obj).insert_now(obj, state(obj))
@@ -865,13 +914,13 @@ class ProjectRole(MappedClass):
             return '*' == self.name[0]
         if self.user_id:
             return True
-        return False # pragma no cover
+        return False  # pragma no cover
 
     @property
     def user(self):
         if (self.user_id is None
-            and self.name
-            and self.name != '*anonymous'):
+                and self.name
+                and self.name != '*anonymous'):
             return None
         return User.query.get(_id=self.user_id)
 
@@ -885,21 +934,21 @@ class ProjectRole(MappedClass):
         return self.query.find({'roles': self._id}).all()
 
     def child_roles(self):
-        to_check = []+self.roles
+        to_check = [] + self.roles
         found_roles = []
         while to_check:
             checking = to_check.pop()
             for role in self.query.find({'_id': checking}).all():
                 if role not in found_roles:
                     found_roles.append(role)
-                    to_check=to_check+role.roles
+                    to_check = to_check + role.roles
         return found_roles
 
     def users_with_role(self, project=None):
         if not project:
             project = c.project
         return self.query.find(dict(project_id=project._id,
-            user_id={'$ne': None}, roles=self._id)).all()
+                                    user_id={'$ne': None}, roles=self._id)).all()
 
 audit_log = collection(
     'audit_log', main_doc_session,
@@ -911,6 +960,7 @@ audit_log = collection(
     Field('url', str),
     Field('message', str))
 
+
 class AuditLog(object):
 
     @property
@@ -941,7 +991,7 @@ class AuditLog(object):
         return cls(project_id=project._id, user_id=user._id, url=url, message=message)
 
 main_orm_session.mapper(AuditLog, audit_log, properties=dict(
-        project_id=ForeignIdProperty('Project'),
-        project=RelationProperty('Project'),
-        user_id=ForeignIdProperty('User'),
-        user=RelationProperty('User')))
+    project_id=ForeignIdProperty('Project'),
+    project=RelationProperty('Project'),
+    user_id=ForeignIdProperty('User'),
+    user=RelationProperty('User')))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/discuss.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/discuss.py b/Allura/allura/model/discuss.py
index efa7b35..4815ceb 100644
--- a/Allura/allura/model/discuss.py
+++ b/Allura/allura/model/discuss.py
@@ -44,6 +44,7 @@ log = logging.getLogger(__name__)
 
 
 class Discussion(Artifact, ActivityObject):
+
     class __mongometa__:
         name = 'discussion'
     type_s = 'Discussion'
@@ -95,9 +96,9 @@ class Discussion(Artifact, ActivityObject):
     @LazyProperty
     def last_post(self):
         q = self.post_class().query.find(dict(
-                discussion_id=self._id,
-                status='ok'
-                ))\
+            discussion_id=self._id,
+            status='ok'
+        ))\
             .sort('timestamp', pymongo.DESCENDING)\
             .limit(1)
         return q.first()
@@ -132,6 +133,7 @@ class Discussion(Artifact, ActivityObject):
 
 
 class Thread(Artifact, ActivityObject):
+
     class __mongometa__:
         name = 'thread'
         indexes = [
@@ -141,7 +143,7 @@ class Thread(Artifact, ActivityObject):
              ('last_post_date', pymongo.DESCENDING),
              ('mod_date', pymongo.DESCENDING)),
             ('discussion_id',),
-            ]
+        ]
     type_s = 'Thread'
 
     _id = FieldProperty(str, if_missing=lambda: h.nonce(8))
@@ -174,7 +176,7 @@ class Thread(Artifact, ActivityObject):
                         attachments=[dict(bytes=attach.length,
                                           url=h.absurl(attach.url())) for attach in p.attachments])
                    for p in self.query_posts(status='ok', style='chronological', limit=limit, page=page)
-                ]
+                   ]
         )
 
     @property
@@ -193,7 +195,8 @@ class Thread(Artifact, ActivityObject):
                 session(thread).flush(thread)
                 return thread
             except DuplicateKeyError as err:
-                log.warning('Got DuplicateKeyError: attempt #%s, trying again. %s', i, err)
+                log.warning(
+                    'Got DuplicateKeyError: attempt #%s, trying again. %s', i, err)
                 if i == 4:
                     raise
                 session(thread).expunge(thread)
@@ -221,9 +224,9 @@ class Thread(Artifact, ActivityObject):
     @property
     def post_count(self):
         return Post.query.find(dict(
-                discussion_id=self.discussion_id,
-                thread_id=self._id,
-                status={'$in': ['ok', 'pending']})).count()
+            discussion_id=self.discussion_id,
+            thread_id=self._id,
+            status={'$in': ['ok', 'pending']})).count()
 
     def primary(self):
         if self.ref is None:
@@ -241,7 +244,8 @@ class Thread(Artifact, ActivityObject):
         if self.app.tool_label.lower() == 'tickets':
             link = p.url_paginated()
         if self.ref:
-            Feed.post(self.primary(), title=p.subject, description=p.text, link=link)
+            Feed.post(self.primary(), title=p.subject,
+                      description=p.text, link=link)
         return p
 
     def is_spam(self, post):
@@ -285,25 +289,25 @@ class Thread(Artifact, ActivityObject):
         ''' Notify moderators that a post needs approval [#2963] '''
         artifact = self.artifact or self
         subject = '[%s:%s] Moderation action required' % (
-                c.project.shortname, c.app.config.options.mount_point)
+            c.project.shortname, c.app.config.options.mount_point)
         author = post.author()
         url = self.discussion_class().query.get(_id=self.discussion_id).url()
         text = ('The following submission requires approval at %s before '
                 'it can be approved for posting:\n\n%s'
                 % (h.absurl(url + 'moderate'), post.text))
         n = Notification(
-                ref_id=artifact.index_id(),
-                topic='message',
-                link=artifact.url(),
-                _id=artifact.url() + post._id,
-                from_address=str(author._id) if author != User.anonymous()
-                                             else None,
-                reply_to_address=u'noreply@in.sf.net',
-                subject=subject,
-                text=text,
-                in_reply_to=post.parent_id,
-                author_id=author._id,
-                pubdate=datetime.utcnow())
+            ref_id=artifact.index_id(),
+            topic='message',
+            link=artifact.url(),
+            _id=artifact.url() + post._id,
+            from_address=str(author._id) if author != User.anonymous()
+            else None,
+            reply_to_address=u'noreply@in.sf.net',
+            subject=subject,
+            text=text,
+            in_reply_to=post.parent_id,
+            author_id=author._id,
+            pubdate=datetime.utcnow())
         users = self.app_config.project.users()
         for u in users:
             if (has_access(self, 'moderate', u)
@@ -318,7 +322,7 @@ class Thread(Artifact, ActivityObject):
     @property
     def last_post(self):
         q = self.post_class().query.find(dict(
-                thread_id=self._id)).sort('timestamp', pymongo.DESCENDING)
+            thread_id=self._id)).sort('timestamp', pymongo.DESCENDING)
         return q.first()
 
     def create_post_threads(self, posts):
@@ -337,10 +341,10 @@ class Thread(Artifact, ActivityObject):
                     timestamp=None, style='threaded', status=None):
         if timestamp:
             terms = dict(discussion_id=self.discussion_id, thread_id=self._id,
-                    status={'$in': ['ok', 'pending']}, timestamp=timestamp)
+                         status={'$in': ['ok', 'pending']}, timestamp=timestamp)
         else:
             terms = dict(discussion_id=self.discussion_id, thread_id=self._id,
-                    status={'$in': ['ok', 'pending']})
+                         status={'$in': ['ok', 'pending']})
         if status:
             terms['status'] = status
         q = self.post_class().query.find(terms)
@@ -371,10 +375,10 @@ class Thread(Artifact, ActivityObject):
     def index(self):
         result = Artifact.index(self)
         result.update(
-           title=self.subject or '(no subject)',
-           name_s=self.subject,
-           views_i=self.num_views,
-           text=self.subject)
+            title=self.subject or '(no subject)',
+            name_s=self.subject,
+            views_i=self.num_views,
+            text=self.subject)
         return result
 
     def _get_subscription(self):
@@ -400,6 +404,7 @@ class Thread(Artifact, ActivityObject):
 
 
 class PostHistory(Snapshot):
+
     class __mongometa__:
         name = 'post_history'
 
@@ -434,11 +439,13 @@ class PostHistory(Snapshot):
 
 
 class Post(Message, VersionedArtifact, ActivityObject):
+
     class __mongometa__:
         name = 'post'
         history_class = PostHistory
         indexes = [
-            ('discussion_id', 'status', 'timestamp'),  # used in general lookups, last_post, etc
+            # used in general lookups, last_post, etc
+            ('discussion_id', 'status', 'timestamp'),
             'thread_id'
         ]
     type_s = 'Post'
@@ -488,10 +495,10 @@ class Post(Message, VersionedArtifact, ActivityObject):
         artifact_access = True
         if self.thread.artifact:
             artifact_access = security.has_access(self.thread.artifact, perm,
-                    user, self.thread.artifact.project)
+                                                  user, self.thread.artifact.project)
 
         return artifact_access and security.has_access(self, perm, user,
-                self.project)
+                                                       self.project)
 
     @property
     def activity_extras(self):
@@ -500,7 +507,7 @@ class Post(Message, VersionedArtifact, ActivityObject):
         # strip all tags, and truncate near the 80 char mark
         LEN = 80
         summary = jinja2.Markup.escape(
-                g.markdown.cached_convert(self, 'text')).striptags()
+            g.markdown.cached_convert(self, 'text')).striptags()
         if len(summary) > LEN:
             split = max(summary.find(' ', LEN), LEN)
             summary = summary[:split] + '...'
@@ -595,6 +602,7 @@ class Post(Message, VersionedArtifact, ActivityObject):
             def find_i(posts):
                 '''Find the index number of this post in the display order'''
                 q = []
+
                 def traverse(posts):
                     for p in posts:
                         if p['post']._id == self._id:
@@ -617,7 +625,6 @@ class Post(Message, VersionedArtifact, ActivityObject):
             return '%s?limit=%s#%s' % (url, limit, slug)
         return '%s?limit=%s&page=%s#%s' % (url, limit, page, slug)
 
-
     def shorthand_id(self):
         if self.thread:
             return '%s#%s' % (self.thread.shorthand_id(), self.slug)
@@ -643,12 +650,13 @@ class Post(Message, VersionedArtifact, ActivityObject):
             return
         self.status = 'ok'
         author = self.author()
-        author_role = ProjectRole.by_user(author, project=self.project, upsert=True)
+        author_role = ProjectRole.by_user(
+            author, project=self.project, upsert=True)
         security.simple_grant(
             self.acl, author_role._id, 'moderate')
         self.commit()
         if (c.app.config.options.get('PostingPolicy') == 'ApproveOnceModerated'
-            and author._id != None):
+                and author._id != None):
             security.simple_grant(
                 self.acl, author_role._id, 'unmoderated_post')
         if notify:
@@ -663,7 +671,7 @@ class Post(Message, VersionedArtifact, ActivityObject):
             artifact.update_stats()
         if self.text:
             g.director.create_activity(author, 'posted', self, target=artifact,
-                    related_nodes=[self.app_config.project])
+                                       related_nodes=[self.app_config.project])
 
     def notify(self, file_info=None, check_dup=False):
         if self.project.notifications_disabled:
@@ -674,7 +682,8 @@ class Post(Message, VersionedArtifact, ActivityObject):
         if not n:
             n = Notification.post(artifact, 'message', post=self,
                                   file_info=file_info)
-        if not n: return
+        if not n:
+            return
         if (hasattr(artifact, "monitoring_email")
                 and artifact.monitoring_email):
             if hasattr(artifact, 'notify_post'):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/filesystem.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/filesystem.py b/Allura/allura/model/filesystem.py
index dbe1aa7..54b7a18 100644
--- a/Allura/allura/model/filesystem.py
+++ b/Allura/allura/model/filesystem.py
@@ -33,24 +33,26 @@ from allura.lib import utils
 
 log = logging.getLogger(__name__)
 
-SUPPORTED_BY_PIL=set([
-        'image/jpg',
-        'image/jpeg',
-        'image/pjpeg',
-        'image/png',
-        'image/x-png',
-        'image/gif'])
+SUPPORTED_BY_PIL = set([
+    'image/jpg',
+    'image/jpeg',
+    'image/pjpeg',
+    'image/png',
+    'image/x-png',
+    'image/gif'])
+
 
 class File(MappedClass):
+
     class __mongometa__:
         session = project_orm_session
         name = 'fs'
-        indexes = [ 'filename' ]
+        indexes = ['filename']
 
     _id = FieldProperty(schema.ObjectId)
     file_id = FieldProperty(schema.ObjectId)
-    filename=FieldProperty(str, if_missing='unknown')
-    content_type=FieldProperty(str)
+    filename = FieldProperty(str, if_missing='unknown')
+    content_type = FieldProperty(str)
 
     def __init__(self, **kw):
         super(File, self).__init__(**kw)
@@ -78,7 +80,8 @@ class File(MappedClass):
         with obj.wfile() as fp_w:
             while True:
                 s = stream.read()
-                if not s: break
+                if not s:
+                    break
                 fp_w.write(s)
         return obj
 
@@ -116,25 +119,25 @@ class File(MappedClass):
 
     @classmethod
     def save_thumbnail(cls, filename, image,
-                   content_type,
-                   thumbnail_size=None,
-                   thumbnail_meta=None,
-                   square=False):
+                       content_type,
+                       thumbnail_size=None,
+                       thumbnail_meta=None,
+                       square=False):
         format = image.format
         height = image.size[0]
         width = image.size[1]
         if square and height != width:
             sz = max(width, height)
             if 'transparency' in image.info:
-                new_image = PIL.Image.new('RGBA', (sz,sz))
+                new_image = PIL.Image.new('RGBA', (sz, sz))
             else:
-                new_image = PIL.Image.new('RGB', (sz,sz), 'white')
+                new_image = PIL.Image.new('RGB', (sz, sz), 'white')
             if height < width:
                 # image is wider than tall, so center horizontally
-                new_image.paste(image, ((width-height)/2, 0))
+                new_image.paste(image, ((width - height) / 2, 0))
             elif height > width:
                 # image is taller than wide, so center vertically
-                new_image.paste(image, (0, (height-width)/2))
+                new_image.paste(image, (0, (height - width) / 2))
             image = new_image
 
         if thumbnail_size:
@@ -145,7 +148,8 @@ class File(MappedClass):
             filename=filename, content_type=content_type, **thumbnail_meta)
         with thumbnail.wfile() as fp_w:
             if 'transparency' in image.info:
-                image.save(fp_w, format, transparency=image.info['transparency'])
+                image.save(fp_w,
+                           format, transparency=image.info['transparency'])
             else:
                 image.save(fp_w, format)
 
@@ -162,7 +166,8 @@ class File(MappedClass):
         if content_type is None:
             content_type = utils.guess_mime_type(filename)
         if not content_type.lower() in SUPPORTED_BY_PIL:
-            log.debug('Content type %s from file %s not supported', content_type, filename)
+            log.debug('Content type %s from file %s not supported',
+                      content_type, filename)
             return None, None
 
         try:
@@ -179,7 +184,8 @@ class File(MappedClass):
             with original.wfile() as fp_w:
                 try:
                     if 'transparency' in image.info:
-                        image.save(fp_w, format, transparency=image.info['transparency'])
+                        image.save(fp_w,
+                                   format, transparency=image.info['transparency'])
                     else:
                         image.save(fp_w, format)
                 except Exception as e:
@@ -189,7 +195,8 @@ class File(MappedClass):
         else:
             original = None
 
-        thumbnail = cls.save_thumbnail(filename, image, content_type, thumbnail_size, thumbnail_meta, square)
+        thumbnail = cls.save_thumbnail(
+            filename, image, content_type, thumbnail_size, thumbnail_meta, square)
 
         return original, thumbnail
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/index.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/index.py b/Allura/allura/model/index.py
index f7e3538..573414b 100644
--- a/Allura/allura/model/index.py
+++ b/Allura/allura/model/index.py
@@ -44,10 +44,10 @@ ArtifactReferenceDoc = collection(
     'artifact_reference', main_doc_session,
     Field('_id', str),
     Field('artifact_reference', dict(
-            cls=S.Binary(),
-            project_id=S.ObjectId(),
-            app_config_id=S.ObjectId(),
-            artifact_id=S.Anything(if_missing=None))),
+        cls=S.Binary(),
+        project_id=S.ObjectId(),
+        app_config_id=S.ObjectId(),
+        artifact_id=S.Anything(if_missing=None))),
     Field('references', [str], index=True),
     Index('artifact_reference.project_id'),  # used in ReindexCommand
 )
@@ -55,22 +55,28 @@ ArtifactReferenceDoc = collection(
 ShortlinkDoc = collection(
     'shortlink', main_doc_session,
     Field('_id', S.ObjectId()),
-    Field('ref_id', str, index=True),  # index needed for from_artifact() and index_tasks.py:del_artifacts
+    # index needed for from_artifact() and index_tasks.py:del_artifacts
+    Field('ref_id', str, index=True),
     Field('project_id', S.ObjectId()),
     Field('app_config_id', S.ObjectId()),
     Field('link', str),
     Field('url', str),
-    Index('project_id', 'link'), # used by from_links()  More helpful to have project_id first, for other queries
+    # used by from_links()  More helpful to have project_id first, for other
+    # queries
+    Index('project_id', 'link'),
 )
 
 # Class definitions
+
+
 class ArtifactReference(object):
 
     @classmethod
     def from_artifact(cls, artifact):
         '''Upsert logic to generate an ArtifactReference object from an artifact'''
         obj = cls.query.get(_id=artifact.index_id())
-        if obj is not None: return obj
+        if obj is not None:
+            return obj
         try:
             obj = cls(
                 _id=artifact.index_id(),
@@ -81,7 +87,7 @@ class ArtifactReference(object):
                     artifact_id=artifact._id))
             session(obj).flush(obj)
             return obj
-        except pymongo.errors.DuplicateKeyError: # pragma no cover
+        except pymongo.errors.DuplicateKeyError:  # pragma no cover
             session(obj).expunge(obj)
             return cls.query.get(_id=artifact.index_id())
 
@@ -97,7 +103,9 @@ class ArtifactReference(object):
             log.exception('Error loading artifact for %s: %r',
                           self._id, aref)
 
+
 class Shortlink(object):
+
     '''Collection mapping shorthand_ids for artifacts to ArtifactReferences'''
 
     # Regexes used to find shortlinks
@@ -107,14 +115,14 @@ class Shortlink(object):
             (?P<artifact_id>.*)             # artifact ID
     \])'''
     re_link_1 = re.compile(r'\s' + _core_re, re.VERBOSE)
-    re_link_2 = re.compile(r'^' +  _core_re, re.VERBOSE)
+    re_link_2 = re.compile(r'^' + _core_re, re.VERBOSE)
 
     def __repr__(self):
         return '<Shortlink %s %s %s -> %s>' % (
-                    self.project_id,
-                    self.app_config_id,
-                    self.link,
-                    self.ref_id)
+            self.project_id,
+            self.app_config_id,
+            self.link,
+            self.ref_id)
 
     @classmethod
     def lookup(cls, link):
@@ -126,11 +134,11 @@ class Shortlink(object):
         if result is None:
             try:
                 result = cls(
-                    ref_id = a.index_id(),
-                    project_id = a.app_config.project_id,
-                    app_config_id = a.app_config._id)
+                    ref_id=a.index_id(),
+                    project_id=a.app_config.project_id,
+                    app_config_id=a.app_config._id)
                 session(result).flush(result)
-            except pymongo.errors.DuplicateKeyError: # pragma no cover
+            except pymongo.errors.DuplicateKeyError:  # pragma no cover
                 session(result).expunge(result)
                 result = cls.query.get(ref_id=a.index_id())
         result.link = a.shorthand_id()
@@ -146,7 +154,8 @@ class Shortlink(object):
         if len(links):
             result = {}
             # Parse all the links
-            parsed_links = dict((link, cls._parse_link(link)) for link in links)
+            parsed_links = dict((link, cls._parse_link(link))
+                                for link in links)
             links_by_artifact = defaultdict(list)
             project_ids = set()
             for link, d in parsed_links.items():
@@ -156,20 +165,20 @@ class Shortlink(object):
                 else:
                     result[link] = parsed_links.pop(link)
             q = cls.query.find(dict(
-                    link={'$in': links_by_artifact.keys()},
-                    project_id={'$in': list(project_ids)}
-                ), validate=False)
+                link={'$in': links_by_artifact.keys()},
+                project_id={'$in': list(project_ids)}
+            ), validate=False)
             matches_by_artifact = dict(
                 (link, list(matches))
-                for link, matches in groupby(q, key=lambda s:unquote(s.link)))
+                for link, matches in groupby(q, key=lambda s: unquote(s.link)))
             for link, d in parsed_links.iteritems():
                 matches = matches_by_artifact.get(unquote(d['artifact']), [])
                 matches = (
                     m for m in matches
                     if m.project.shortname == d['project'] and
-                       m.project.neighborhood_id == d['nbhd'] and
-                       m.app_config is not None and
-                       m.project.app_instance(m.app_config.options.mount_point))
+                    m.project.neighborhood_id == d['nbhd'] and
+                    m.app_config is not None and
+                    m.project.app_instance(m.app_config.options.mount_point))
                 if d['app']:
                     matches = (
                         m for m in matches
@@ -233,9 +242,9 @@ class Shortlink(object):
 # Mapper definitions
 mapper(ArtifactReference, ArtifactReferenceDoc, main_orm_session)
 mapper(Shortlink, ShortlinkDoc, main_orm_session, properties=dict(
-    ref_id = ForeignIdProperty(ArtifactReference),
-    project_id = ForeignIdProperty('Project'),
-    app_config_id = ForeignIdProperty('AppConfig'),
-    project = RelationProperty('Project'),
-    app_config = RelationProperty('AppConfig'),
-    ref = RelationProperty(ArtifactReference)))
+    ref_id=ForeignIdProperty(ArtifactReference),
+    project_id=ForeignIdProperty('Project'),
+    app_config_id=ForeignIdProperty('AppConfig'),
+    project=RelationProperty('Project'),
+    app_config=RelationProperty('AppConfig'),
+    ref=RelationProperty(ArtifactReference)))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/monq_model.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/monq_model.py b/Allura/allura/model/monq_model.py
index c5d3431..d4359f5 100644
--- a/Allura/allura/model/monq_model.py
+++ b/Allura/allura/model/monq_model.py
@@ -39,6 +39,7 @@ log = logging.getLogger(__name__)
 
 
 class MonQTask(MappedClass):
+
     '''Task to be executed by the taskd daemon.
 
     Properties
@@ -60,6 +61,7 @@ class MonQTask(MappedClass):
     '''
     states = ('ready', 'busy', 'error', 'complete', 'skipped')
     result_types = ('keep', 'forget')
+
     class __mongometa__:
         session = task_orm_session
         name = 'monq_task'
@@ -90,12 +92,12 @@ class MonQTask(MappedClass):
     task_name = FieldProperty(str)
     process = FieldProperty(str)
     context = FieldProperty(dict(
-            project_id=S.ObjectId,
-            app_config_id=S.ObjectId,
-            user_id=S.ObjectId,
-            notifications_disabled=bool))
+        project_id=S.ObjectId,
+        app_config_id=S.ObjectId,
+        user_id=S.ObjectId,
+        notifications_disabled=bool))
     args = FieldProperty([])
-    kwargs = FieldProperty({None:None})
+    kwargs = FieldProperty({None: None})
     result = FieldProperty(None, if_missing=None)
 
     def __repr__(self):
@@ -137,8 +139,10 @@ class MonQTask(MappedClass):
              priority=10,
              delay=0):
         '''Create a new task object based on the current context.'''
-        if args is None: args = ()
-        if kwargs is None: kwargs = {}
+        if args is None:
+            args = ()
+        if kwargs is None:
+            kwargs = {}
         task_name = '%s.%s' % (
             function.__module__,
             function.__name__)
@@ -148,12 +152,13 @@ class MonQTask(MappedClass):
             user_id=None,
             notifications_disabled=False)
         if getattr(c, 'project', None):
-            context['project_id']=c.project._id
-            context['notifications_disabled']=c.project.notifications_disabled
+            context['project_id'] = c.project._id
+            context[
+                'notifications_disabled'] = c.project.notifications_disabled
         if getattr(c, 'app', None):
-            context['app_config_id']=c.app.config._id
+            context['app_config_id'] = c.app.config._id
         if getattr(c, 'user', None):
-            context['user_id']=c.user._id
+            context['user_id'] = c.user._id
         obj = cls(
             state='ready',
             priority=priority,
@@ -182,8 +187,8 @@ class MonQTask(MappedClass):
         StopIteration, stop waiting for a task
         '''
         sort = [
-                ('priority', ming.DESCENDING),
-                ('time_queue', ming.ASCENDING)]
+            ('priority', ming.DESCENDING),
+            ('time_queue', ming.ASCENDING)]
         while True:
             try:
                 query = dict(state=state)
@@ -196,10 +201,11 @@ class MonQTask(MappedClass):
                         '$set': dict(
                             state='busy',
                             process=process)
-                        },
+                    },
                     new=True,
                     sort=sort)
-                if obj is not None: return obj
+                if obj is not None:
+                    return obj
             except pymongo.errors.OperationFailure, exc:
                 if 'No matching object found' not in exc.args[0]:
                     raise
@@ -215,7 +221,7 @@ class MonQTask(MappedClass):
         '''Mark all busy tasks older than a certain datetime as 'ready' again.
         Used to retry 'stuck' tasks.'''
         spec = dict(state='busy')
-        spec['time_start'] = {'$lt':older_than}
+        spec['time_start'] = {'$lt': older_than}
         cls.query.update(spec, {'$set': dict(state='ready')}, multi=True)
 
     @classmethod
@@ -227,7 +233,7 @@ class MonQTask(MappedClass):
     @classmethod
     def run_ready(cls, worker=None):
         '''Run all the tasks that are currently ready'''
-        i=0
+        i = 0
         for i, task in enumerate(cls.query.find(dict(state='ready')).all()):
             task.process = worker
             task()
@@ -250,8 +256,10 @@ class MonQTask(MappedClass):
             c.project = M.Project.query.get(_id=self.context.project_id)
             c.app = None
             if c.project:
-                c.project.notifications_disabled = self.context.get('notifications_disabled', False)
-                app_config = M.AppConfig.query.get(_id=self.context.app_config_id)
+                c.project.notifications_disabled = self.context.get(
+                    'notifications_disabled', False)
+                app_config = M.AppConfig.query.get(
+                    _id=self.context.app_config_id)
                 if app_config:
                     c.app = c.project.app_instance(app_config)
             c.user = M.User.query.get(_id=self.context.user_id)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/neighborhood.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/neighborhood.py b/Allura/allura/model/neighborhood.py
index 7fd98e3..e2137e2 100644
--- a/Allura/allura/model/neighborhood.py
+++ b/Allura/allura/model/neighborhood.py
@@ -36,10 +36,12 @@ from .types import MarkdownCache
 
 log = logging.getLogger(__name__)
 
+
 class NeighborhoodFile(File):
+
     class __mongometa__:
         session = main_orm_session
-        indexes = [ 'neighborhood_id' ]
+        indexes = ['neighborhood_id']
 
     neighborhood_id = FieldProperty(S.ObjectId)
 
@@ -51,7 +53,9 @@ re_bgcolor_titlebar = re.compile('background\-color:([^;}]+);')
 re_color_titlebar = re.compile('color:([^;}]+);')
 re_icon_theme = re.compile('neo-icon-set-(ffffff|454545)-256x350.png')
 
+
 class Neighborhood(MappedClass):
+
     '''Provide a grouping of related projects.
 
     url_prefix - location of neighborhood (may include scheme and/or host)
@@ -60,11 +64,12 @@ class Neighborhood(MappedClass):
     class __mongometa__:
         session = main_orm_session
         name = 'neighborhood'
-        unique_indexes = [ 'url_prefix' ]
+        unique_indexes = ['url_prefix']
 
     _id = FieldProperty(S.ObjectId)
     name = FieldProperty(str)
-    url_prefix = FieldProperty(str) # e.g. http://adobe.openforge.com/ or projects/
+    # e.g. http://adobe.openforge.com/ or projects/
+    url_prefix = FieldProperty(str)
     shortname_prefix = FieldProperty(str, if_missing='')
     css = FieldProperty(str, if_missing='')
     homepage = FieldProperty(str, if_missing='')
@@ -107,7 +112,7 @@ class Neighborhood(MappedClass):
         if url.startswith('//'):
             try:
                 return request.scheme + ':' + url
-            except TypeError: # pragma no cover
+            except TypeError:  # pragma no cover
                 return 'http:' + url
         else:
             return url
@@ -154,12 +159,17 @@ class Neighborhood(MappedClass):
         return self.features['max_projects']
 
     def get_css_for_picker(self):
-        projecttitlefont = {'label': 'Project title, font', 'name': 'projecttitlefont', 'value':'', 'type': 'font'}
-        projecttitlecolor = {'label': 'Project title, color', 'name': 'projecttitlecolor', 'value':'', 'type': 'color'}
-        barontop = {'label': 'Bar on top', 'name': 'barontop', 'value': '', 'type': 'color'}
-        titlebarbackground = {'label': 'Title bar, background', 'name': 'titlebarbackground', 'value': '', 'type': 'color'}
-        titlebarcolor = {'label': 'Title bar, foreground', 'name': 'titlebarcolor', 'value': '', 'type': 'color',
-                         'additional': """<label>Icons theme:</label> <select name="css-addopt-icon-theme" class="add_opt">
+        projecttitlefont = {'label': 'Project title, font',
+                            'name': 'projecttitlefont', 'value': '', 'type': 'font'}
+        projecttitlecolor = {'label': 'Project title, color',
+                             'name': 'projecttitlecolor', 'value': '', 'type': 'color'}
+        barontop = {'label': 'Bar on top', 'name':
+                    'barontop', 'value': '', 'type': 'color'}
+        titlebarbackground = {'label': 'Title bar, background',
+                              'name': 'titlebarbackground', 'value': '', 'type': 'color'}
+        titlebarcolor = {
+            'label': 'Title bar, foreground', 'name': 'titlebarcolor', 'value': '', 'type': 'color',
+            'additional': """<label>Icons theme:</label> <select name="css-addopt-icon-theme" class="add_opt">
                         <option value="default">default</option>
                         <option value="dark"%(titlebarcolor_dark)s>dark</option>
                         <option value="white"%(titlebarcolor_white)s>white</option>
@@ -206,8 +216,9 @@ class Neighborhood(MappedClass):
                             elif icon_theme == "454545":
                                 titlebarcolor_white = ' selected="selected"'
 
-        titlebarcolor['additional'] = titlebarcolor['additional'] % {'titlebarcolor_dark': titlebarcolor_dark,
-                                                                     'titlebarcolor_white': titlebarcolor_white}
+        titlebarcolor[
+            'additional'] = titlebarcolor['additional'] % {'titlebarcolor_dark': titlebarcolor_dark,
+                                                           'titlebarcolor_white': titlebarcolor_white}
 
         styles_list = []
         styles_list.append(projecttitlefont)
@@ -226,32 +237,35 @@ class Neighborhood(MappedClass):
 
         css_text = ""
         if 'projecttitlefont' in css_form_dict and css_form_dict['projecttitlefont'] != '':
-           css_text += "/*projecttitlefont*/.project_title{font-family:%s;}\n" % (css_form_dict['projecttitlefont'])
+            css_text += "/*projecttitlefont*/.project_title{font-family:%s;}\n" % (
+                css_form_dict['projecttitlefont'])
 
         if 'projecttitlecolor' in css_form_dict and css_form_dict['projecttitlecolor'] != '':
-           css_text += "/*projecttitlecolor*/.project_title{color:%s;}\n" % (css_form_dict['projecttitlecolor'])
+            css_text += "/*projecttitlecolor*/.project_title{color:%s;}\n" % (
+                css_form_dict['projecttitlecolor'])
 
         if 'barontop' in css_form_dict and css_form_dict['barontop'] != '':
-           css_text += "/*barontop*/.pad h2.colored {background-color:%(bgcolor)s; background-image: none;}\n" % \
-                       {'bgcolor': css_form_dict['barontop']}
+            css_text += "/*barontop*/.pad h2.colored {background-color:%(bgcolor)s; background-image: none;}\n" % \
+                        {'bgcolor': css_form_dict['barontop']}
 
         if 'titlebarbackground' in css_form_dict and css_form_dict['titlebarbackground'] != '':
-           css_text += "/*titlebarbackground*/.pad h2.title{background-color:%(bgcolor)s; background-image: none;}\n" % \
-                       {'bgcolor': css_form_dict['titlebarbackground']}
+            css_text += "/*titlebarbackground*/.pad h2.title{background-color:%(bgcolor)s; background-image: none;}\n" % \
+                        {'bgcolor': css_form_dict['titlebarbackground']}
 
         if 'titlebarcolor' in css_form_dict and css_form_dict['titlebarcolor'] != '':
-           icon_theme = ''
-           if 'addopt-icon-theme' in css_form_dict:
-               if css_form_dict['addopt-icon-theme'] == "dark":
-                  icon_theme = ".pad h2.dark small b.ico {background-image: url('%s%s');}" % (
-                               g.theme_href(''),
-                               'images/neo-icon-set-ffffff-256x350.png')
-               elif css_form_dict['addopt-icon-theme'] == "white":
-                  icon_theme = ".pad h2.dark small b.ico {background-image: url('%s%s');}" % (
-                               g.theme_href(''),
-                               'images/neo-icon-set-454545-256x350.png')
-
-           css_text += "/*titlebarcolor*/.pad h2.title, .pad h2.title small a {color:%s;} %s\n" % (css_form_dict['titlebarcolor'], icon_theme)
+            icon_theme = ''
+            if 'addopt-icon-theme' in css_form_dict:
+                if css_form_dict['addopt-icon-theme'] == "dark":
+                    icon_theme = ".pad h2.dark small b.ico {background-image: url('%s%s');}" % (
+                                 g.theme_href(''),
+                        'images/neo-icon-set-ffffff-256x350.png')
+                elif css_form_dict['addopt-icon-theme'] == "white":
+                    icon_theme = ".pad h2.dark small b.ico {background-image: url('%s%s');}" % (
+                                 g.theme_href(''),
+                        'images/neo-icon-set-454545-256x350.png')
+
+            css_text += "/*titlebarcolor*/.pad h2.title, .pad h2.title small a {color:%s;} %s\n" % (
+                css_form_dict['titlebarcolor'], icon_theme)
 
         return css_text
 
@@ -262,7 +276,8 @@ class Neighborhood(MappedClass):
         if not self.anchored_tools:
             return dict()
         try:
-            anchored_tools = [at.strip() for at in self.anchored_tools.split(',')]
+            anchored_tools = [at.strip()
+                              for at in self.anchored_tools.split(',')]
             return OrderedDict((tool.split(':')[0].lower(), tool.split(':')[1]) for tool in anchored_tools)
         except Exception:
             log.warning("anchored_tools isn't valid", exc_info=True)


[21/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/refreshrepo.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/refreshrepo.py b/Allura/allura/scripts/refreshrepo.py
index f55a054..1e0c3ee 100644
--- a/Allura/allura/scripts/refreshrepo.py
+++ b/Allura/allura/scripts/refreshrepo.py
@@ -31,6 +31,7 @@ log = logging.getLogger(__name__)
 
 
 class RefreshRepo(ScriptTask):
+
     @classmethod
     def execute(cls, options):
         q_project = {}
@@ -62,73 +63,92 @@ class RefreshRepo(ScriptTask):
                         continue
                     if c.app.repo.tool.lower() not in options.repo_types:
                         log.info("Skipping %r: wrong type (%s)", c.app.repo,
-                                c.app.repo.tool.lower())
+                                 c.app.repo.tool.lower())
                         continue
 
                     if options.clean:
                         ci_ids = list(c.app.repo.all_commit_ids())
-                        log.info("Deleting mongo data for %i commits...", len(ci_ids))
+                        log.info("Deleting mongo data for %i commits...",
+                                 len(ci_ids))
                         # like the tree_ids themselves below, we need to process these in
                         # chunks to avoid hitting the BSON max size limit
                         tree_ids = []
                         for ci_ids_chunk in chunked_list(ci_ids, 3000):
                             tree_ids.extend([
-                                    tree_id for doc in
-                                    M.repo.TreesDoc.m.find({"_id": {"$in": ci_ids_chunk}},
-                                                           {"tree_ids": 1})
-                                    for tree_id in doc.get("tree_ids", [])])
-
-                            i = M.repo.CommitDoc.m.find({"_id": {"$in": ci_ids_chunk}}).count()
+                                tree_id for doc in
+                                M.repo.TreesDoc.m.find(
+                                    {"_id": {"$in": ci_ids_chunk}},
+                                    {"tree_ids": 1})
+                                for tree_id in doc.get("tree_ids", [])])
+
+                            i = M.repo.CommitDoc.m.find(
+                                {"_id": {"$in": ci_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i CommitDoc docs...", i)
-                                M.repo.CommitDoc.m.remove({"_id": {"$in": ci_ids_chunk}})
+                                M.repo.CommitDoc.m.remove(
+                                    {"_id": {"$in": ci_ids_chunk}})
 
                         # delete these in chunks, otherwise the query doc can
                         # exceed the max BSON size limit (16MB at the moment)
                         for tree_ids_chunk in chunked_list(tree_ids, 300000):
-                            i = M.repo.TreeDoc.m.find({"_id": {"$in": tree_ids_chunk}}).count()
+                            i = M.repo.TreeDoc.m.find(
+                                {"_id": {"$in": tree_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i TreeDoc docs...", i)
-                                M.repo.TreeDoc.m.remove({"_id": {"$in": tree_ids_chunk}})
+                                M.repo.TreeDoc.m.remove(
+                                    {"_id": {"$in": tree_ids_chunk}})
                         del tree_ids
 
                         # delete these after TreeDoc and LastCommitDoc so that if
                         # we crash, we don't lose the ability to delete those
                         for ci_ids_chunk in chunked_list(ci_ids, 3000):
                             # delete TreesDocs
-                            i = M.repo.TreesDoc.m.find({"_id": {"$in": ci_ids_chunk}}).count()
+                            i = M.repo.TreesDoc.m.find(
+                                {"_id": {"$in": ci_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i TreesDoc docs...", i)
-                                M.repo.TreesDoc.m.remove({"_id": {"$in": ci_ids_chunk}})
+                                M.repo.TreesDoc.m.remove(
+                                    {"_id": {"$in": ci_ids_chunk}})
 
                             # delete LastCommitDocs
-                            i = M.repo.LastCommitDoc.m.find(dict(commit_ids={'$in': ci_ids_chunk})).count()
+                            i = M.repo.LastCommitDoc.m.find(
+                                dict(commit_ids={'$in': ci_ids_chunk})).count()
                             if i:
-                                log.info("Deleting %i remaining LastCommitDoc docs, by repo id...", i)
-                                M.repo.LastCommitDoc.m.remove(dict(commit_ids={'$in': ci_ids_chunk}))
+                                log.info(
+                                    "Deleting %i remaining LastCommitDoc docs, by repo id...", i)
+                                M.repo.LastCommitDoc.m.remove(
+                                    dict(commit_ids={'$in': ci_ids_chunk}))
 
-                            i = M.repo.DiffInfoDoc.m.find({"_id": {"$in": ci_ids_chunk}}).count()
+                            i = M.repo.DiffInfoDoc.m.find(
+                                {"_id": {"$in": ci_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i DiffInfoDoc docs...", i)
-                                M.repo.DiffInfoDoc.m.remove({"_id": {"$in": ci_ids_chunk}})
+                                M.repo.DiffInfoDoc.m.remove(
+                                    {"_id": {"$in": ci_ids_chunk}})
 
-                            i = M.repo.CommitRunDoc.m.find({"commit_ids": {"$in": ci_ids_chunk}}).count()
+                            i = M.repo.CommitRunDoc.m.find(
+                                {"commit_ids": {"$in": ci_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i CommitRunDoc docs...", i)
-                                M.repo.CommitRunDoc.m.remove({"commit_ids": {"$in": ci_ids_chunk}})
+                                M.repo.CommitRunDoc.m.remove(
+                                    {"commit_ids": {"$in": ci_ids_chunk}})
                         del ci_ids
 
                     try:
                         if options.all:
-                            log.info('Refreshing ALL commits in %r', c.app.repo)
+                            log.info('Refreshing ALL commits in %r',
+                                     c.app.repo)
                         else:
-                            log.info('Refreshing NEW commits in %r', c.app.repo)
+                            log.info('Refreshing NEW commits in %r',
+                                     c.app.repo)
                         if options.profile:
                             import cProfile
-                            cProfile.runctx('c.app.repo.refresh(options.all, notify=options.notify)',
-                                    globals(), locals(), 'refresh.profile')
+                            cProfile.runctx(
+                                'c.app.repo.refresh(options.all, notify=options.notify)',
+                                globals(), locals(), 'refresh.profile')
                         else:
-                            c.app.repo.refresh(options.all, notify=options.notify)
+                            c.app.repo.refresh(
+                                options.all, notify=options.notify)
                     except:
                         log.exception('Error refreshing %r', c.app.repo)
             ThreadLocalORMSession.flush_all()
@@ -141,41 +161,44 @@ class RefreshRepo(ScriptTask):
                 repo_type = repo_type.strip()
                 if repo_type not in ['svn', 'git', 'hg']:
                     raise argparse.ArgumentTypeError(
-                            '{} is not a valid repo type.'.format(repo_type))
+                        '{} is not a valid repo type.'.format(repo_type))
                 repo_types.append(repo_type)
             return repo_types
 
         parser = argparse.ArgumentParser(description='Scan repos on filesystem and '
-                'update repo metadata in MongoDB. Run for all repos (no args), '
-                'or restrict by neighborhood, project, or code tool mount point.')
+                                         'update repo metadata in MongoDB. Run for all repos (no args), '
+                                         'or restrict by neighborhood, project, or code tool mount point.')
         parser.add_argument('--nbhd', action='store', default='', dest='nbhd',
-                help='Restrict update to a particular neighborhood, e.g. /p/.')
-        parser.add_argument('--project', action='store', default='', dest='project',
-                help='Restrict update to a particular project. To specify a '
-                'subproject, use a slash: project/subproject.')
+                            help='Restrict update to a particular neighborhood, e.g. /p/.')
+        parser.add_argument(
+            '--project', action='store', default='', dest='project',
+            help='Restrict update to a particular project. To specify a '
+            'subproject, use a slash: project/subproject.')
         parser.add_argument('--project-regex', action='store', default='',
-                dest='project_regex',
-                help='Restrict update to projects for which the shortname matches '
-                'the provided regex.')
-        parser.add_argument('--repo-types', action='store', type=repo_type_list,
-                default=['svn', 'git', 'hg'], dest='repo_types',
-                help='Only refresh repos of the given type(s). Defaults to: '
-                'svn,git,hg. Example: --repo-types=git,hg')
+                            dest='project_regex',
+                            help='Restrict update to projects for which the shortname matches '
+                            'the provided regex.')
+        parser.add_argument(
+            '--repo-types', action='store', type=repo_type_list,
+            default=['svn', 'git', 'hg'], dest='repo_types',
+            help='Only refresh repos of the given type(s). Defaults to: '
+            'svn,git,hg. Example: --repo-types=git,hg')
         parser.add_argument('--mount-point', default='', dest='mount_point',
-                help='Restrict update to repos at the given tool mount point. ')
+                            help='Restrict update to repos at the given tool mount point. ')
         parser.add_argument('--clean', action='store_true', dest='clean',
-                default=False, help='Remove repo-related mongo docs (for '
-                'project(s) being refreshed only) before doing the refresh.')
-        parser.add_argument('--all', action='store_true', dest='all', default=False,
-                help='Refresh all commits (not just the ones that are new).')
+                            default=False, help='Remove repo-related mongo docs (for '
+                            'project(s) being refreshed only) before doing the refresh.')
+        parser.add_argument(
+            '--all', action='store_true', dest='all', default=False,
+            help='Refresh all commits (not just the ones that are new).')
         parser.add_argument('--notify', action='store_true', dest='notify',
-                default=False, help='Send email notifications of new commits.')
+                            default=False, help='Send email notifications of new commits.')
         parser.add_argument('--dry-run', action='store_true', dest='dry_run',
-                default=False, help='Log names of projects that would have their '
-                'repos refreshed, but do not perform the actual refresh.')
+                            default=False, help='Log names of projects that would have their '
+                            'repos refreshed, but do not perform the actual refresh.')
         parser.add_argument('--profile', action='store_true', dest='profile',
-                default=False, help='Enable the profiler (slow). Will log '
-                'profiling output to ./refresh.profile')
+                            default=False, help='Enable the profiler (slow). Will log '
+                            'profiling output to ./refresh.profile')
         return parser
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/scripttask.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/scripttask.py b/Allura/allura/scripts/scripttask.py
index 229bbe6..0b9d2be 100644
--- a/Allura/allura/scripts/scripttask.py
+++ b/Allura/allura/scripts/scripttask.py
@@ -55,14 +55,16 @@ from allura.lib.decorators import task
 log = logging.getLogger(__name__)
 
 
-
 class ScriptTask(object):
+
     """Base class for a command-line script that is also executable as a task."""
 
     class __metaclass__(type):
+
         @property
         def __doc__(cls):
             return cls.parser().format_help()
+
         def __new__(meta, classname, bases, classDict):
             return task(type.__new__(meta, classname, bases, classDict))
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/trac_export.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/trac_export.py b/Allura/allura/scripts/trac_export.py
index 8be9fe0..f5cd7c3 100644
--- a/Allura/allura/scripts/trac_export.py
+++ b/Allura/allura/scripts/trac_export.py
@@ -49,12 +49,18 @@ def parse_options():
     optparser = OptionParser(usage=''' %prog <Trac URL>
 
 Export ticket data from a Trac instance''')
-    optparser.add_option('-o', '--out-file', dest='out_filename', help='Write to file (default stdout)')
-    optparser.add_option('--no-attachments', dest='do_attachments', action='store_false', default=True, help='Export attachment info')
-    optparser.add_option('--only-tickets', dest='only_tickets', action='store_true', help='Export only ticket list')
-    optparser.add_option('--start', dest='start_id', type='int', default=1, help='Start with given ticket numer (or next accessible)')
-    optparser.add_option('--limit', dest='limit', type='int', default=None, help='Limit number of tickets')
-    optparser.add_option('-v', '--verbose', dest='verbose', action='store_true', help='Verbose operation')
+    optparser.add_option('-o', '--out-file', dest='out_filename',
+                         help='Write to file (default stdout)')
+    optparser.add_option('--no-attachments', dest='do_attachments',
+                         action='store_false', default=True, help='Export attachment info')
+    optparser.add_option('--only-tickets', dest='only_tickets',
+                         action='store_true', help='Export only ticket list')
+    optparser.add_option('--start', dest='start_id', type='int', default=1,
+                         help='Start with given ticket number (or next accessible)')
+    optparser.add_option('--limit', dest='limit', type='int',
+                         default=None, help='Limit number of tickets')
+    optparser.add_option('-v', '--verbose', dest='verbose',
+                         action='store_true', help='Verbose operation')
     options, args = optparser.parse_args()
     if len(args) != 1:
         optparser.error("Wrong number of arguments.")
@@ -65,8 +71,9 @@ class TracExport(object):
 
     PAGE_SIZE = 100
     TICKET_URL = 'ticket/%d'
-    QUERY_MAX_ID_URL  = 'query?col=id&order=id&desc=1&max=2'
-    QUERY_BY_PAGE_URL = 'query?col=id&col=time&col=changetime&order=id&max=' + str(PAGE_SIZE)+ '&page=%d'
+    QUERY_MAX_ID_URL = 'query?col=id&order=id&desc=1&max=2'
+    QUERY_BY_PAGE_URL = 'query?col=id&col=time&col=changetime&order=id&max=' + \
+        str(PAGE_SIZE) + '&page=%d'
     ATTACHMENT_LIST_URL = 'attachment/ticket/%d/'
     ATTACHMENT_URL = 'raw-attachment/ticket/%d/%s'
 
@@ -108,7 +115,7 @@ class TracExport(object):
         if type is None:
             return url
         glue = '&' if '?' in suburl else '?'
-        return  url + glue + 'format=' + type
+        return url + glue + 'format=' + type
 
     def log_url(self, url):
         log.info(url)
@@ -134,7 +141,8 @@ class TracExport(object):
         # telling that access denied. So, we'll emulate 403 ourselves.
         # TODO: currently, any non-csv result treated as 403.
         if not f.info()['Content-Type'].startswith('text/csv'):
-            raise urllib2.HTTPError(url, 403, 'Forbidden - emulated', f.info(), f)
+            raise urllib2.HTTPError(
+                url, 403, 'Forbidden - emulated', f.info(), f)
         return f
 
     def parse_ticket(self, id):
@@ -154,12 +162,15 @@ class TracExport(object):
         d = BeautifulSoup(urlopen(url))
         self.clean_missing_wiki_links(d)
         desc = d.find('div', 'description').find('div', 'searchable')
-        ticket['description'] = html2text.html2text(desc.renderContents('utf8').decode('utf8')) if desc else ''
+        ticket['description'] = html2text.html2text(
+            desc.renderContents('utf8').decode('utf8')) if desc else ''
         comments = []
         for comment in d.findAll('form', action='#comment'):
             c = {}
-            c['submitter'] = re.sub(r'.* by ', '', comment.find('h3', 'change').text).strip()
-            c['date'] = self.trac2z_date(comment.find('a', 'timeline')['title'].replace(' in Timeline', ''))
+            c['submitter'] = re.sub(
+                r'.* by ', '', comment.find('h3', 'change').text).strip()
+            c['date'] = self.trac2z_date(
+                comment.find('a', 'timeline')['title'].replace(' in Timeline', ''))
             changes = unicode(comment.find('ul', 'changes') or '')
             body = comment.find('div', 'comment')
             body = body.renderContents('utf8').decode('utf8') if body else ''
@@ -189,18 +200,22 @@ class TracExport(object):
             size_s = attach.span['title']
             d['size'] = int(self.match_pattern(SIZE_PATTERN, size_s))
             timestamp_s = attach.find('a', {'class': 'timeline'})['title']
-            d['date'] = self.trac2z_date(self.match_pattern(TIMESTAMP_PATTERN, timestamp_s))
-            d['by'] = attach.find(text=re.compile('added by')).nextSibling.renderContents()
+            d['date'] = self.trac2z_date(
+                self.match_pattern(TIMESTAMP_PATTERN, timestamp_s))
+            d['by'] = attach.find(
+                text=re.compile('added by')).nextSibling.renderContents()
             d['description'] = ''
             # Skip whitespace
             while attach.nextSibling and type(attach.nextSibling) is NavigableString:
                 attach = attach.nextSibling
-            # if there's a description, there will be a <dd> element, other immediately next <dt>
+            # if there's a description, there will be a <dd> element,
+            # otherwise the immediately next <dt>
             if attach.nextSibling and attach.nextSibling.name == 'dd':
                 desc_el = attach.nextSibling
                 if desc_el:
                     # TODO: Convert to Allura link syntax as needed
-                    d['description'] = ''.join(desc_el.findAll(text=True)).strip()
+                    d['description'] = ''.join(
+                        desc_el.findAll(text=True)).strip()
             list.append(d)
         return list
 
@@ -245,7 +260,8 @@ class TracExport(object):
         for r in reader:
             if r and r[0].isdigit():
                 id = int(r[0])
-                extra = {'date': self.trac2z_date(r[1]), 'date_updated': self.trac2z_date(r[2])}
+                extra = {'date': self.trac2z_date(
+                    r[1]), 'date_updated': self.trac2z_date(r[2])}
                 res.append((id, extra))
         self.page += 1
 
@@ -276,6 +292,7 @@ class TracExport(object):
 
 
 class DateJSONEncoder(json.JSONEncoder):
+
     def default(self, obj):
         if isinstance(obj, time.struct_time):
             return time.strftime('%Y-%m-%dT%H:%M:%SZ', obj)
@@ -283,9 +300,9 @@ class DateJSONEncoder(json.JSONEncoder):
 
 
 def export(url, start_id=1, verbose=False, do_attachments=True,
-        only_tickets=False, limit=None):
+           only_tickets=False, limit=None):
     ex = TracExport(url, start_id=start_id,
-            verbose=verbose, do_attachments=do_attachments)
+                    verbose=verbose, do_attachments=do_attachments)
 
     doc = [t for t in islice(ex, limit)]
 
@@ -304,7 +321,8 @@ def main():
     out_file = sys.stdout
     if options.out_filename:
         out_file = open(options.out_filename, 'w')
-    out_file.write(json.dumps(doc, cls=DateJSONEncoder, indent=2, sort_keys=True))
+    out_file.write(
+        json.dumps(doc, cls=DateJSONEncoder, indent=2, sort_keys=True))
     # It's bad habit not to terminate lines
     out_file.write('\n')
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/update_checkout_url.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/update_checkout_url.py b/Allura/allura/scripts/update_checkout_url.py
index 7420dab..a1fc982 100644
--- a/Allura/allura/scripts/update_checkout_url.py
+++ b/Allura/allura/scripts/update_checkout_url.py
@@ -34,6 +34,7 @@ log = logging.getLogger(__name__)
 
 
 class UpdateCheckoutUrl(ScriptTask):
+
     @classmethod
     def execute(cls, options):
         query = {'tool_name': {'$regex': '^svn$', '$options': 'i'},
@@ -41,7 +42,8 @@ class UpdateCheckoutUrl(ScriptTask):
         for chunk in utils.chunked_find(M.AppConfig, query):
             for config in chunk:
                 repo = Repository.query.get(app_config_id=config._id)
-                trunk_path = "file://{0}{1}/trunk".format(repo.fs_path, repo.name)
+                trunk_path = "file://{0}{1}/trunk".format(repo.fs_path,
+                                                          repo.name)
                 if svn_path_exists(trunk_path):
                     config.options['checkout_url'] = "trunk"
                     log.info("Update checkout_url for: %s", trunk_path)
@@ -49,4 +51,3 @@ class UpdateCheckoutUrl(ScriptTask):
 
 if __name__ == '__main__':
     UpdateCheckoutUrl.main()
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/admin_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/admin_tasks.py b/Allura/allura/tasks/admin_tasks.py
index 44ed421..65f2775 100644
--- a/Allura/allura/tasks/admin_tasks.py
+++ b/Allura/allura/tasks/admin_tasks.py
@@ -35,5 +35,5 @@ install_app.__doc__ += '''
     Arguments::
 
         ''' + inspect.formatargspec(*inspect.getargspec(
-        M.Project.install_app
-    )).replace('self, ','')
+    M.Project.install_app
+)).replace('self, ', '')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/event_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/event_tasks.py b/Allura/allura/tasks/event_tasks.py
index 0f27266..8a2de35 100644
--- a/Allura/allura/tasks/event_tasks.py
+++ b/Allura/allura/tasks/event_tasks.py
@@ -20,6 +20,7 @@ import sys
 from allura.lib.decorators import task, event_handler
 from allura.lib.exceptions import CompoundError
 
+
 @task
 def event(event_type, *args, **kwargs):
     exceptions = []
@@ -33,4 +34,3 @@ def event(event_type, *args, **kwargs):
             raise exceptions[0][0], exceptions[0][1], exceptions[0][2]
         else:
             raise CompoundError(*exceptions)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/export_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/export_tasks.py b/Allura/allura/tasks/export_tasks.py
index 3fd9499..cafb288 100644
--- a/Allura/allura/tasks/export_tasks.py
+++ b/Allura/allura/tasks/export_tasks.py
@@ -48,9 +48,11 @@ def bulk_export(tools, filename=None, send_email=True):
 
 
 class BulkExport(object):
+
     def process(self, project, tools, user, filename=None, send_email=True):
         export_filename = filename or project.bulk_export_filename()
-        export_path = self.get_export_path(project.bulk_export_path(), export_filename)
+        export_path = self.get_export_path(
+            project.bulk_export_path(), export_filename)
         if not os.path.exists(export_path):
             os.makedirs(export_path)
         apps = [project.app_instance(tool) for tool in tools]
@@ -58,7 +60,8 @@ class BulkExport(object):
         results = [self.export(export_path, app) for app in exportable]
         exported = self.filter_successful(results)
         if exported:
-            zipdir(export_path, os.path.join(os.path.dirname(export_path), export_filename))
+            zipdir(export_path,
+                   os.path.join(os.path.dirname(export_path), export_filename))
         shutil.rmtree(export_path)
 
         if not user:
@@ -67,13 +70,14 @@ class BulkExport(object):
         if not send_email:
             return
 
-        tmpl = g.jinja2_env.get_template('allura:templates/mail/bulk_export.html')
+        tmpl = g.jinja2_env.get_template(
+            'allura:templates/mail/bulk_export.html')
         instructions = tg.config.get('bulk_export_download_instructions', '')
         instructions = instructions.format(
-                project=project.shortname,
-                filename=export_filename,
-                c=c,
-            )
+            project=project.shortname,
+            filename=export_filename,
+            c=c,
+        )
         exported_names = [a.config.options.mount_point for a in exported]
         tmpl_context = {
             'instructions': instructions,
@@ -110,7 +114,8 @@ class BulkExport(object):
             with open(json_file, 'w') as f:
                 app.bulk_export(f)
         except Exception as e:
-            log.error('Error exporting: %s on %s', tool, app.project.shortname, exc_info=True)
+            log.error('Error exporting: %s on %s', tool,
+                      app.project.shortname, exc_info=True)
             return None
         else:
             return app

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/index_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/index_tasks.py b/Allura/allura/tasks/index_tasks.py
index 0833bbb..7b6b4c6 100644
--- a/Allura/allura/tasks/index_tasks.py
+++ b/Allura/allura/tasks/index_tasks.py
@@ -27,6 +27,7 @@ from allura.lib.solr import make_solr_from_config
 
 log = logging.getLogger(__name__)
 
+
 @task
 def add_artifacts(ref_ids, update_solr=True, update_refs=True, solr_hosts=None):
     '''
@@ -69,19 +70,23 @@ def add_artifacts(ref_ids, update_solr=True, update_refs=True, solr_hosts=None):
     if exceptions:
         raise CompoundError(*exceptions)
 
+
 @task
 def del_artifacts(ref_ids):
     from allura import model as M
-    if not ref_ids: return
+    if not ref_ids:
+        return
     solr_query = 'id:({0})'.format(' || '.join(ref_ids))
     g.solr.delete(q=solr_query)
-    M.ArtifactReference.query.remove(dict(_id={'$in':ref_ids}))
-    M.Shortlink.query.remove(dict(ref_id={'$in':ref_ids}))
+    M.ArtifactReference.query.remove(dict(_id={'$in': ref_ids}))
+    M.Shortlink.query.remove(dict(ref_id={'$in': ref_ids}))
+
 
 @task
 def commit():
     g.solr.commit()
 
+
 @contextmanager
 def _indexing_disabled(session):
     session.disable_artifact_index = session.skip_mod_date = True

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/mail_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/mail_tasks.py b/Allura/allura/tasks/mail_tasks.py
index 0b87024..31c16f0 100644
--- a/Allura/allura/tasks/mail_tasks.py
+++ b/Allura/allura/tasks/mail_tasks.py
@@ -30,9 +30,10 @@ log = logging.getLogger(__name__)
 
 smtp_client = mail_util.SMTPClient()
 
+
 @task
 def route_email(
-    peer, mailfrom, rcpttos, data):
+        peer, mailfrom, rcpttos, data):
     '''Route messages according to their destination:
 
     <topic>@<mount_point>.<subproj2>.<subproj1>.<project>.projects.sourceforge.net
@@ -40,7 +41,7 @@ def route_email(
     '''
     try:
         msg = mail_util.parse_message(data)
-    except: # pragma no cover
+    except:  # pragma no cover
         log.exception('Parse Error: (%r,%r,%r)', peer, mailfrom, rcpttos)
         return
     if mail_util.is_autoreply(msg):
@@ -49,18 +50,21 @@ def route_email(
     mail_user = mail_util.identify_sender(peer, mailfrom, msg['headers'], msg)
     with h.push_config(c, user=mail_user):
         log.info('Received email from %s', c.user.username)
-        # For each of the addrs, determine the project/app and route appropriately
+        # For each of the addrs, determine the project/app and route
+        # appropriately
         for addr in rcpttos:
             try:
                 userpart, project, app = mail_util.parse_address(addr)
                 with h.push_config(c, project=project, app=app):
                     if not app.has_access(c.user, userpart):
-                        log.info('Access denied for %s to mailbox %s', c.user, userpart)
+                        log.info('Access denied for %s to mailbox %s',
+                                 c.user, userpart)
                     else:
                         if msg['multipart']:
                             msg_hdrs = msg['headers']
                             for part in msg['parts']:
-                                if part.get('content_type', '').startswith('multipart/'): continue
+                                if part.get('content_type', '').startswith('multipart/'):
+                                    continue
                                 msg = dict(
                                     headers=dict(msg_hdrs, **part['headers']),
                                     message_id=part['message_id'],
@@ -77,6 +81,7 @@ def route_email(
             except:
                 log.exception('Error routing mail to %s', addr)
 
+
 @task
 def sendmail(fromaddr, destinations, text, reply_to, subject,
              message_id, in_reply_to=None, sender=None, references=None):
@@ -110,11 +115,13 @@ def sendmail(fromaddr, destinations, text, reply_to, subject,
             addr = user.email_address_header()
             if not addr and user.email_addresses:
                 addr = user.email_addresses[0]
-                log.warning('User %s has not set primary email address, using %s',
-                            user._id, addr)
+                log.warning(
+                    'User %s has not set primary email address, using %s',
+                    user._id, addr)
             if not addr:
-                log.error("User %s (%s) has not set any email address, can't deliver",
-                          user._id, user.username)
+                log.error(
+                    "User %s (%s) has not set any email address, can't deliver",
+                    user._id, user.username)
                 continue
             if user.get_pref('email_format') == 'plain':
                 addrs_plain.append(addr)
@@ -137,18 +144,19 @@ def sendmail(fromaddr, destinations, text, reply_to, subject,
         addrs_html, fromaddr, reply_to, subject, message_id,
         in_reply_to, html_msg, sender=sender, references=references)
 
+
 @task
 def sendsimplemail(
-    fromaddr,
-    toaddr,
-    text,
-    reply_to,
-    subject,
-    message_id,
-    in_reply_to=None,
-    sender=None,
-    references=None,
-    cc=None):
+        fromaddr,
+        toaddr,
+        text,
+        reply_to,
+        subject,
+        message_id,
+        in_reply_to=None,
+        sender=None,
+        references=None,
+        cc=None):
     from allura import model as M
     if fromaddr is None:
         fromaddr = u'noreply@in.sf.net'

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/notification_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/notification_tasks.py b/Allura/allura/tasks/notification_tasks.py
index 888e0f2..a6b7564 100644
--- a/Allura/allura/tasks/notification_tasks.py
+++ b/Allura/allura/tasks/notification_tasks.py
@@ -17,6 +17,7 @@
 
 from allura.lib.decorators import task
 
+
 @task
 def notify(n_id, ref_id, topic):
     from allura import model as M

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/repo_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/repo_tasks.py b/Allura/allura/tasks/repo_tasks.py
index 978f027..df84337 100644
--- a/Allura/allura/tasks/repo_tasks.py
+++ b/Allura/allura/tasks/repo_tasks.py
@@ -24,6 +24,7 @@ from pylons import tmpl_context as c, app_globals as g
 from allura.lib.decorators import task
 from allura.lib.repository import RepositoryApp
 
+
 @task
 def init(**kwargs):
     from allura import model as M
@@ -33,6 +34,7 @@ def init(**kwargs):
         text='Repository %s/%s created' % (
             c.project.shortname, c.app.config.options.mount_point))
 
+
 @task
 def clone(cloned_from_path, cloned_from_name, cloned_from_url):
     from allura import model as M
@@ -46,7 +48,9 @@ def clone(cloned_from_path, cloned_from_name, cloned_from_url):
             text='Repository %s/%s created' % (
                 c.project.shortname, c.app.config.options.mount_point))
     except Exception, e:
-        g.post_event('repo_clone_task_failed', cloned_from_url, cloned_from_path, traceback.format_exc())
+        g.post_event('repo_clone_task_failed', cloned_from_url,
+                     cloned_from_path, traceback.format_exc())
+
 
 @task
 def reclone(*args, **kwargs):
@@ -58,14 +62,15 @@ def reclone(*args, **kwargs):
         repo.delete()
     ThreadLocalORMSession.flush_all()
     M.MergeRequest.query.remove(dict(
-            app_config_id=c.app.config._id))
+        app_config_id=c.app.config._id))
     clone(*args, **kwargs)
 
+
 @task
 def refresh(**kwargs):
     from allura import model as M
     log = logging.getLogger(__name__)
-    #don't create multiple refresh tasks
+    # don't create multiple refresh tasks
     q = {
         'task_name': 'allura.tasks.repo_tasks.refresh',
         'state': {'$in': ['busy', 'ready']},
@@ -73,16 +78,18 @@ def refresh(**kwargs):
         'context.project_id': c.project._id,
     }
     refresh_tasks_count = M.MonQTask.query.find(q).count()
-    if refresh_tasks_count <= 1: #only this task
+    if refresh_tasks_count <= 1:  # only this task
         c.app.repo.refresh()
-        #checking if we have new commits arrived
-        #during refresh and re-queue task if so
+        # checking if we have new commits arrived
+        # during refresh and re-queue task if so
         new_commit_ids = c.app.repo.unknown_commit_ids()
         if len(new_commit_ids) > 0:
             refresh.post()
             log.info('New refresh task is queued due to new commit(s).')
     else:
-        log.info('Refresh task for %s:%s skipped due to backlog', c.project.shortname, c.app.config.options.mount_point)
+        log.info('Refresh task for %s:%s skipped due to backlog',
+                 c.project.shortname, c.app.config.options.mount_point)
+
 
 @task
 def uninstall(**kwargs):
@@ -92,22 +99,25 @@ def uninstall(**kwargs):
         shutil.rmtree(repo.full_fs_path, ignore_errors=True)
         repo.delete()
     M.MergeRequest.query.remove(dict(
-            app_config_id=c.app.config._id))
+        app_config_id=c.app.config._id))
     super(RepositoryApp, c.app).uninstall(c.project)
     from ming.orm import ThreadLocalORMSession
     ThreadLocalORMSession.flush_all()
 
+
 @task
 def nop():
     log = logging.getLogger(__name__)
     log.info('nop')
 
+
 @task
 def reclone_repo(*args, **kwargs):
     from allura import model as M
     try:
         nbhd = M.Neighborhood.query.get(url_prefix='/%s/' % kwargs['prefix'])
-        c.project = M.Project.query.get(shortname=kwargs['shortname'], neighborhood_id=nbhd._id)
+        c.project = M.Project.query.get(
+            shortname=kwargs['shortname'], neighborhood_id=nbhd._id)
         c.app = c.project.app_instance(kwargs['mount_point'])
         source_url = c.app.config.options.get('init_from_url')
         source_path = c.app.config.options.get('init_from_path')
@@ -117,7 +127,9 @@ def reclone_repo(*args, **kwargs):
             text='Repository %s/%s created' % (
                 c.project.shortname, c.app.config.options.mount_point))
     except Exception, e:
-        g.post_event('repo_clone_task_failed', source_url, source_path, traceback.format_exc())
+        g.post_event('repo_clone_task_failed', source_url,
+                     source_path, traceback.format_exc())
+
 
 @task
 def tarball(revision, path):
@@ -126,13 +138,18 @@ def tarball(revision, path):
         repo = c.app.repo
         status = repo.get_tarball_status(revision, path)
         if status == 'complete':
-            log.info('Skipping snapshot for repository: %s:%s rev %s because it is already %s' %
-                     (c.project.shortname, c.app.config.options.mount_point, revision, status))
+            log.info(
+                'Skipping snapshot for repository: %s:%s rev %s because it is already %s' %
+                (c.project.shortname, c.app.config.options.mount_point, revision, status))
         else:
             try:
                 repo.tarball(revision, path)
             except:
-                log.error('Could not create snapshot for repository: %s:%s revision %s path %s' % (c.project.shortname, c.app.config.options.mount_point, revision, path), exc_info=True)
+                log.error(
+                    'Could not create snapshot for repository: %s:%s revision %s path %s' %
+                    (c.project.shortname, c.app.config.options.mount_point, revision, path), exc_info=True)
                 raise
     else:
-        log.warn('Skipped creation of snapshot: %s:%s because revision is not specified' % (c.project.shortname, c.app.config.options.mount_point))
+        log.warn(
+            'Skipped creation of snapshot: %s:%s because revision is not specified' %
+            (c.project.shortname, c.app.config.options.mount_point))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/__init__.py b/Allura/allura/tests/__init__.py
index c715a48..8047c6b 100644
--- a/Allura/allura/tests/__init__.py
+++ b/Allura/allura/tests/__init__.py
@@ -26,7 +26,9 @@ import alluratest.controller
 import socket
 socket.setdefaulttimeout(None)
 
+
 class TestController(alluratest.controller.TestController):
+
     """
     Base functional test case for the controllers.
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/decorators.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/decorators.py b/Allura/allura/tests/decorators.py
index 3e1c43d..521c76c 100644
--- a/Allura/allura/tests/decorators.py
+++ b/Allura/allura/tests/decorators.py
@@ -53,8 +53,8 @@ def NullContextManager():
 
 
 def with_tool(project_shortname, ep_name, mount_point=None, mount_label=None,
-        ordinal=None, post_install_hook=None, username='test-admin',
-        **override_options):
+              ordinal=None, post_install_hook=None, username='test-admin',
+              **override_options):
     def _with_tool(func):
         @wraps(func)
         def wrapped(*args, **kw):
@@ -62,7 +62,8 @@ def with_tool(project_shortname, ep_name, mount_point=None, mount_label=None,
             p = M.Project.query.get(shortname=project_shortname)
             c.project = p
             if mount_point and not p.app_instance(mount_point):
-                c.app = p.install_app(ep_name, mount_point, mount_label, ordinal, **override_options)
+                c.app = p.install_app(
+                    ep_name, mount_point, mount_label, ordinal, **override_options)
                 if post_install_hook:
                     post_install_hook(c.app)
 
@@ -87,7 +88,9 @@ with_tracker = with_tool('test', 'Tickets', 'bugs')
 with_wiki = with_tool('test', 'Wiki', 'wiki')
 with_url = with_tool('test', 'ShortUrl', 'url')
 
+
 class raises(object):
+
     '''
     Test helper in the form of a context manager, to assert that something raises an exception.
     After completion, the 'exc' attribute can be used to do further inspection of the exception
@@ -124,6 +127,7 @@ def without_module(*module_names):
 
 
 class patch_middleware_config(object):
+
     '''
     Context manager that patches the configuration used during middleware
     setup for Allura

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/__init__.py b/Allura/allura/tests/functional/__init__.py
index fde2aa9..40a4394 100644
--- a/Allura/allura/tests/functional/__init__.py
+++ b/Allura/allura/tests/functional/__init__.py
@@ -17,4 +17,4 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-"""Functional test suite for the controllers of the application."""
\ No newline at end of file
+"""Functional test suite for the controllers of the application."""


[02/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/000-fix-tracker-fields.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/000-fix-tracker-fields.py b/scripts/migrations/000-fix-tracker-fields.py
index d9911c3..fc9d77a 100644
--- a/scripts/migrations/000-fix-tracker-fields.py
+++ b/scripts/migrations/000-fix-tracker-fields.py
@@ -28,19 +28,25 @@ from forgetracker import model as TM
 
 log = logging.getLogger(__name__)
 
+
 def main():
     test = sys.argv[-1] == 'test'
     projects = M.Project.query.find().all()
     log.info('Fixing tracker fields')
     for p in projects:
-        if p.parent_id: continue
+        if p.parent_id:
+            continue
         c.project = p
         q = TM.Globals.query.find()
-        if not q.count(): continue
+        if not q.count():
+            continue
         for g in q:
-            if g.open_status_names: continue
-            if g.status_names is None: old_names = ['open', 'closed']
-            else: old_names = g.status_names.split() or ['open', 'closed']
+            if g.open_status_names:
+                continue
+            if g.status_names is None:
+                old_names = ['open', 'closed']
+            else:
+                old_names = g.status_names.split() or ['open', 'closed']
             if g.open_status_names is None:
                 g.open_status_names = ' '.join(
                     name for name in old_names if name != 'closed')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/001-restore-labels.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/001-restore-labels.py b/scripts/migrations/001-restore-labels.py
index 92814c9..ef68c09 100644
--- a/scripts/migrations/001-restore-labels.py
+++ b/scripts/migrations/001-restore-labels.py
@@ -27,6 +27,7 @@ from allura import model as M
 
 log = logging.getLogger(__name__)
 
+
 def main():
     test = sys.argv[-1] == 'test'
     projects = M.Project.query.find().all()
@@ -37,25 +38,33 @@ def main():
         session(p).flush()
     log.info('Restoring labels on artifacts')
     for p in projects:
-        if p.parent_id: continue
+        if p.parent_id:
+            continue
         c.project = p
         for name, cls in MappedClass._registry.iteritems():
-            if not issubclass(cls, M.Artifact): continue
-            if session(cls) is None: continue
+            if not issubclass(cls, M.Artifact):
+                continue
+            if session(cls) is None:
+                continue
             for a in cls.query.find():
                 restore_labels(a, test)
         if not test:
             M.artifact_orm_session.flush()
         M.artifact_orm_session.clear()
 
+
 def restore_labels(obj, test=True):
-    if not obj.labels: return
+    if not obj.labels:
+        return
     labels = obj.labels
     while True:
-        if not labels or labels[0] != '[': return
+        if not labels or labels[0] != '[':
+            return
         lbllen = map(len, labels)
-        if max(lbllen) != 1: return
-        if min(lbllen) != 1: return
+        if max(lbllen) != 1:
+            return
+        if min(lbllen) != 1:
+            return
         s = ''.join(labels)
         s = s.replace("u'", "'")
         s = s.replace('u"', '"')
@@ -65,13 +74,15 @@ def restore_labels(obj, test=True):
         except ValueError:
             # some weird problem with json decoding, just erase the labels
             new_labels = []
-        if not isinstance(new_labels, list): return
+        if not isinstance(new_labels, list):
+            return
         for lbl in new_labels:
-            if not isinstance(lbl, basestring): return
+            if not isinstance(lbl, basestring):
+                return
         log.info('%s: %s => %s', obj.__class__, labels, new_labels)
         labels = new_labels
         if not test:
-            log.info('...actually restoring labels') 
+            log.info('...actually restoring labels')
             obj.labels = new_labels
 
 if __name__ == '__main__':

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/002-fix-tracker-thread-subjects.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/002-fix-tracker-thread-subjects.py b/scripts/migrations/002-fix-tracker-thread-subjects.py
index 45824cd..81861a0 100644
--- a/scripts/migrations/002-fix-tracker-thread-subjects.py
+++ b/scripts/migrations/002-fix-tracker-thread-subjects.py
@@ -27,15 +27,19 @@ from forgetracker import model as TM
 
 log = logging.getLogger(__name__)
 
+
 def main():
     test = sys.argv[-1] == 'test'
     all_projects = M.Project.query.find().all()
     log.info('Fixing tracker thread subjects')
     for project in all_projects:
-        if project.parent_id: continue
+        if project.parent_id:
+            continue
         c.project = project
-        all_tickets = TM.Ticket.query.find() # will find all tickets for all trackers in this project
-        if not all_tickets.count(): continue
+        # will find all tickets for all trackers in this project
+        all_tickets = TM.Ticket.query.find()
+        if not all_tickets.count():
+            continue
         for ticket in all_tickets:
             thread = ticket.get_discussion_thread()
             thread.subject = ''

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/003-migrate_project_roles.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/003-migrate_project_roles.py b/scripts/migrations/003-migrate_project_roles.py
index 580f93f..32bcc57 100644
--- a/scripts/migrations/003-migrate_project_roles.py
+++ b/scripts/migrations/003-migrate_project_roles.py
@@ -24,7 +24,8 @@ from allura import model as M
 
 log = logging.getLogger(__name__)
 
-log.info('Moving project roles in database %s to main DB', M.Project.database_uri())
+log.info('Moving project roles in database %s to main DB',
+         M.Project.database_uri())
 for opr in M.OldProjectRole.query.find():
     pr = M.ProjectRole(**state(opr).document)
 session(opr).clear()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/004-make-attachments-polymorphic.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/004-make-attachments-polymorphic.py b/scripts/migrations/004-make-attachments-polymorphic.py
index 0c34d58..e6133ec 100644
--- a/scripts/migrations/004-make-attachments-polymorphic.py
+++ b/scripts/migrations/004-make-attachments-polymorphic.py
@@ -25,6 +25,7 @@ from forgediscussion import model as DM
 
 log = logging.getLogger(__name__)
 
+
 def main():
     db = M.project_doc_session.db
     log.info('=== Making attachments in %s polymorphic ===', db)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/005-remove_duplicate_ticket_notifications.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/005-remove_duplicate_ticket_notifications.py b/scripts/migrations/005-remove_duplicate_ticket_notifications.py
index ba6f04c..716c604 100644
--- a/scripts/migrations/005-remove_duplicate_ticket_notifications.py
+++ b/scripts/migrations/005-remove_duplicate_ticket_notifications.py
@@ -29,6 +29,8 @@ log = logging.getLogger(__name__)
 
 # Given a list of subscriptions, try to find one with a proper artifact_url, and delete the rest
 # If none of them have artifact_urls, delete them all
+
+
 def trim_subs(subs, test):
     prime = False
 
@@ -42,22 +44,25 @@ def trim_subs(subs, test):
                 print "   Found subscription with no artifact URL, deleting."
             else:
                 print "   Subscription has URL, but is a duplicate, deleting."
-            if not test: sub.delete()
+            if not test:
+                sub.delete()
 
 
 def main():
     test = sys.argv[-1] == 'test'
     title = re.compile('Ticket .*')
-    all_subscriptions = M.Mailbox.query.find(dict(artifact_title=title, type='direct')).sort([ ('artifact_title', pymongo.ASCENDING), ('user_id', pymongo.DESCENDING) ]).all()
+    all_subscriptions = M.Mailbox.query.find(dict(artifact_title=title, type='direct')).sort(
+        [('artifact_title', pymongo.ASCENDING), ('user_id', pymongo.DESCENDING)]).all()
     log.info('Fixing duplicate tracker subscriptions')
 
     for (key, group) in groupby(
-        all_subscriptions,
-        key=lambda sub:(sub.artifact_title, sub.user_id)):
+            all_subscriptions,
+            key=lambda sub: (sub.artifact_title, sub.user_id)):
         group = list(group)
         if group:
             trim_subs(group, test)
-    if not test: ThreadLocalORMSession.flush_all()
+    if not test:
+        ThreadLocalORMSession.flush_all()
 
 
 if __name__ == '__main__':

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/006-migrate-artifact-refs.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/006-migrate-artifact-refs.py b/scripts/migrations/006-migrate-artifact-refs.py
index dc9149c..aeae677 100644
--- a/scripts/migrations/006-migrate-artifact-refs.py
+++ b/scripts/migrations/006-migrate-artifact-refs.py
@@ -24,13 +24,16 @@ from allura import model as M
 log = logging.getLogger('allura.migrate-artifact-refs')
 
 # Threads have artifact references that must be migrated to the new system
+
+
 def main():
     test = sys.argv[-1] == 'test'
     log.info('Fixing artifact references in threads')
     db = M.project_doc_session.db
     for thread in db.thread.find():
         ref = thread.pop('artifact_reference', None)
-        if ref is None: continue
+        if ref is None:
+            continue
         Artifact = loads(ref['artifact_type'])
         artifact = Artifact.query.get(_id=ref['artifact_id'])
         M.ArtifactReference.from_artifact(artifact)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/007-update-acls.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/007-update-acls.py b/scripts/migrations/007-update-acls.py
index 16e28a8..0f97ee1 100644
--- a/scripts/migrations/007-update-acls.py
+++ b/scripts/migrations/007-update-acls.py
@@ -29,12 +29,13 @@ from allura.command.show_models import dfs, build_model_inheritance_graph
 log = logging.getLogger('update-acls')
 
 options = None
-optparser = OptionParser(usage='allurapaste script <ini file> -- %prog [options] [neighborhood1...]')
+optparser = OptionParser(
+    usage='allurapaste script <ini file> -- %prog [options] [neighborhood1...]')
 optparser.add_option('-t', '--test',  dest='test', action='store_true')
 
 main_db = M.main_doc_session.db
-c_neighborhood =  main_db.neighborhood
-c_project =  main_db.project
+c_neighborhood = main_db.neighborhood
+c_project = main_db.project
 c_user = main_db.user
 c_project_role = main_db.project_role
 c.project = Object(
@@ -43,16 +44,20 @@ c.project = Object(
 project_db = M.project_doc_session.db
 c_app_config = project_db.config
 
+
 def main():
     global options
     options, neighborhoods = optparser.parse_args()
     if neighborhoods:
         log.info('Updating neighborhoods: %s', neighborhoods)
-        q_neighborhoods = list(c_neighborhood.find(dict(name={'$in': neighborhoods })))
-        neighborhood_ids=[ n['_id'] for n in q_neighborhoods ]
-        q_projects = list(c_project.find(dict(neighborhood_id={'$in': neighborhood_ids})))
+        q_neighborhoods = list(
+            c_neighborhood.find(dict(name={'$in': neighborhoods})))
+        neighborhood_ids = [n['_id'] for n in q_neighborhoods]
+        q_projects = list(
+            c_project.find(dict(neighborhood_id={'$in': neighborhood_ids})))
         project_ids = list(p['_id'] for p in q_projects)
-        q_app_config = list(c_app_config.find(dict(project_id={'$in': project_ids})))
+        q_app_config = list(
+            c_app_config.find(dict(project_id={'$in': project_ids})))
         log.info('... %d neighborhoods', len(q_neighborhoods))
         log.info('... %d projects', len(q_projects))
         log.info('... %d app configs', len(q_app_config))
@@ -66,14 +71,15 @@ def main():
     log.info('Update project ACLs')
     for p in q_projects:
         update_project_acl(p)
-        if not options.test: c_project.save(p)
+        if not options.test:
+            c_project.save(p)
     # Update neighborhood acls
     log.info('====================================')
     log.info('Update neighborhood ACLs')
     for n in q_neighborhoods:
         p = c_project.find(dict(
-                neighborhood_id=n['_id'], shortname='--init--')).next()
-        update_neighborhood_acl(n,p)
+            neighborhood_id=n['_id'], shortname='--init--')).next()
+        update_neighborhood_acl(n, p)
         if not options.test:
             c_neighborhood.save(n)
             c_project.save(p)
@@ -83,7 +89,8 @@ def main():
     log.info('Update appconfig ACLs')
     for ac in q_app_config:
         simple_acl_update(ac, 'app_config')
-        if not options.test: c_app_config.save(ac)
+        if not options.test:
+            c_app_config.save(ac)
         # Update artifact acls
         log.info('====================================')
         log.info('Update artifact ACLs for %s', ac['_id'])
@@ -92,7 +99,9 @@ def main():
             for a in c_artifact.find(dict(app_config_id=ac['_id'])):
                 empty_acl = a['acl'] == []
                 simple_acl_update(a, a_cls.__mongometa__.name)
-                if not options.test and not empty_acl: c_artifact.save(a)
+                if not options.test and not empty_acl:
+                    c_artifact.save(a)
+
 
 def update_project_acl(project_doc):
     '''Convert the old dict-style ACL to a list of ALLOW ACEs. Also move the
@@ -112,7 +121,8 @@ def update_project_acl(project_doc):
     for perm, role_ids in sorted(project_doc['acl'].iteritems()):
         perm = perm_map[perm]
         for rid in role_ids:
-            if c_project_role.find(dict(_id=rid)).count() == 0: continue
+            if c_project_role.find(dict(_id=rid)).count() == 0:
+                continue
             _grant(new_acl, perm, rid)
     if options.test:
         log.info('--- update %s\n%s\n%s\n---',
@@ -121,34 +131,38 @@ def update_project_acl(project_doc):
                  pformat(map(_format_ace, new_acl)))
     project_doc['acl'] = new_acl
 
+
 def update_neighborhood_acl(neighborhood_doc, init_doc):
     '''Convert nbhd admins users to --init-- project admins'''
-    if options.test: log.info('Update nbhd %s', neighborhood_doc['name'])
+    if options.test:
+        log.info('Update nbhd %s', neighborhood_doc['name'])
     if 'acl' not in neighborhood_doc:
-        log.warning('Neighborhood %s already updated', neighborhood_doc['name'])
+        log.warning('Neighborhood %s already updated',
+                    neighborhood_doc['name'])
         return
     p = Object(init_doc)
-    p.root_project=p
+    p.root_project = p
     r_anon = _project_role(init_doc['_id'], '*anonymous')
     r_auth = _project_role(init_doc['_id'], '*authenticated')
     r_admin = _project_role(init_doc['_id'], 'Admin')
     acl = neighborhood_doc['acl']
     new_acl = list(init_doc['acl'])
-    assert acl['read'] == [None] # nbhd should be public
+    assert acl['read'] == [None]  # nbhd should be public
     for uid in acl['admin'] + acl['moderate']:
         u = c_user.find(dict(_id=uid)).next()
         if options.test:
             log.info('... grant nbhd admin to: %s', u['username'])
             continue
-        role =  _project_role(init_doc['_id'], user_id=uid)
+        role = _project_role(init_doc['_id'], user_id=uid)
         if r_admin['_id'] not in role['roles']:
             role['roles'].append(r_admin['_id'])
             c_project_role.save(role)
     _grant(new_acl, 'read', r_anon['_id'])
     _grant(new_acl, 'admin', r_admin['_id'])
     _grant(new_acl, 'register', r_admin['_id'])
-    if acl['create'] == [ ]:
-        if options.test: log.info('grant register to auth')
+    if acl['create'] == []:
+        if options.test:
+            log.info('grant register to auth')
         _grant(new_acl, 'register', r_auth['_id'])
     del neighborhood_doc['acl']
     if options.test:
@@ -157,6 +171,7 @@ def update_neighborhood_acl(neighborhood_doc, init_doc):
                  pformat(map(_format_ace, new_acl)))
     init_doc['acl'] = new_acl
 
+
 def _project_role(project_id, name=None, user_id=None):
     doc = dict(project_id=project_id)
     if name:
@@ -190,6 +205,7 @@ def simple_acl_update(doc, collection_name):
                  pformat(map(_format_ace, new_acl)))
     doc['acl'] = new_acl
 
+
 def _grant(acl, permission, role_id):
     ace = dict(
         access='ALLOW',
@@ -198,11 +214,14 @@ def _grant(acl, permission, role_id):
     if ace not in acl:
         acl.append(ace)
 
+
 def _format_ace(ace):
-    if isinstance(ace, basestring): return ace
+    if isinstance(ace, basestring):
+        return ace
     return '(%s, %s, %s)' % (
         ace['access'], ace['permission'], _format_role(ace['role_id']))
 
+
 def _format_role(rid):
     for role in c_project_role.find(dict(_id=rid)):
         if role['name']:
@@ -213,10 +232,11 @@ def _format_role(rid):
         break
     return '--invalid--'
 
+
 def _format_acd(acd):
     return dict(
         (k, map(_format_role, v))
-        for k,v in acd.iteritems())
+        for k, v in acd.iteritems())
 
 if __name__ == '__main__':
     main()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/008-remove-forumpost-subject.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/008-remove-forumpost-subject.py b/scripts/migrations/008-remove-forumpost-subject.py
index ae91511..892eb61 100644
--- a/scripts/migrations/008-remove-forumpost-subject.py
+++ b/scripts/migrations/008-remove-forumpost-subject.py
@@ -31,6 +31,7 @@ log = logging.getLogger(__name__)
 
 c_forumpost = M.project_doc_session.db.forum_post
 
+
 def main():
     test = sys.argv[-1] == 'test'
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/010-fix-home-permissions.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/010-fix-home-permissions.py b/scripts/migrations/010-fix-home-permissions.py
index f4fb852..4506a17 100644
--- a/scripts/migrations/010-fix-home-permissions.py
+++ b/scripts/migrations/010-fix-home-permissions.py
@@ -33,6 +33,7 @@ log.addHandler(handler)
 
 TEST = sys.argv[-1].lower() == 'test'
 
+
 def main():
 
     if TEST:
@@ -41,15 +42,17 @@ def main():
         log.info('Fixing permissions for all Home Wikis')
 
     for some_projects in utils.chunked_find(M.Project, {'neighborhood_id': {
-                '$nin': [ObjectId('4be2faf8898e33156f00003e'),      # /u
-                         ObjectId('4dbf2563bfc09e6362000005')]}}):  # /motorola
+        '$nin': [ObjectId('4be2faf8898e33156f00003e'),      # /u
+                 ObjectId('4dbf2563bfc09e6362000005')]}}):  # /motorola
         for project in some_projects:
             c.project = project
             home_app = project.app_instance('home')
             if isinstance(home_app, ForgeWikiApp):
-                log.info('Examining permissions in project "%s".' % project.shortname)
+                log.info('Examining permissions in project "%s".' %
+                         project.shortname)
                 root_project = project.root_project or project
-                authenticated_role = project_role(root_project, '*authenticated')
+                authenticated_role = project_role(
+                    root_project, '*authenticated')
                 member_role = project_role(root_project, 'Member')
 
                 # remove *authenticated create/update permissions
@@ -57,24 +60,32 @@ def main():
                     ((ace.role_id, ace.access, ace.permission), ace)
                     for ace in home_app.acl
                     if not (
-                        ace.role_id==authenticated_role._id and ace.access==M.ACE.ALLOW and ace.permission in ('create', 'edit', 'delete', 'unmoderated_post')
+                        ace.role_id == authenticated_role._id and ace.access == M.ACE.ALLOW and ace.permission in (
+                            'create', 'edit', 'delete', 'unmoderated_post')
                     )
                 )
                 if (member_role._id, M.ACE.ALLOW, 'update') in new_acl:
                     del new_acl[(member_role._id, M.ACE.ALLOW, 'update')]
 
                 # add member create/edit permissions
-                new_acl[(member_role._id, M.ACE.ALLOW, 'create')] = M.ACE.allow(member_role._id, 'create')
-                new_acl[(member_role._id, M.ACE.ALLOW, 'edit')] = M.ACE.allow(member_role._id, 'edit')
-                new_acl[(member_role._id, M.ACE.ALLOW, 'unmoderated_post')] = M.ACE.allow(member_role._id, 'unmoderated_post')
+                new_acl[(member_role._id, M.ACE.ALLOW, 'create')
+                        ] = M.ACE.allow(member_role._id, 'create')
+                new_acl[(member_role._id, M.ACE.ALLOW, 'edit')
+                        ] = M.ACE.allow(member_role._id, 'edit')
+                new_acl[(member_role._id, M.ACE.ALLOW, 'unmoderated_post')] = M.ACE.allow(
+                    member_role._id, 'unmoderated_post')
 
                 if TEST:
-                    log.info('...would update acl for home app in project "%s".' % project.shortname)
+                    log.info(
+                        '...would update acl for home app in project "%s".' %
+                        project.shortname)
                 else:
-                    log.info('...updating acl for home app in project "%s".' % project.shortname)
+                    log.info('...updating acl for home app in project "%s".' %
+                             project.shortname)
                     home_app.config.acl = map(dict, new_acl.values())
                     session(home_app.config).flush()
 
+
 def project_role(project, name):
     role = M.ProjectRole.query.get(project_id=project._id, name=name)
     if role is None:

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/011-fix-subroles.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/011-fix-subroles.py b/scripts/migrations/011-fix-subroles.py
index 4da0129..cb63c6b 100644
--- a/scripts/migrations/011-fix-subroles.py
+++ b/scripts/migrations/011-fix-subroles.py
@@ -38,15 +38,17 @@ from allura.lib import utils
 log = logging.getLogger('fix-subroles')
 log.addHandler(logging.StreamHandler(sys.stdout))
 
+
 def main():
     test = sys.argv[-1] == 'test'
     num_projects_examined = 0
     log.info('Examining subroles in all non-user projects.')
     n_users = M.Neighborhood.query.get(name='Users')
-    project_filter = dict(neighborhood_id={'$ne':n_users._id})
+    project_filter = dict(neighborhood_id={'$ne': n_users._id})
     for some_projects in utils.chunked_find(M.Project, project_filter):
         for project in some_projects:
-            project_name = '%s.%s' % (project.neighborhood.name, project.shortname)
+            project_name = '%s.%s' % (
+                project.neighborhood.name, project.shortname)
             project_roles = {}
             for parent, child in [('Admin', 'Developer'), ('Developer', 'Member')]:
                 parent_role = M.ProjectRole.by_name(parent, project=project)
@@ -57,31 +59,38 @@ def main():
                     break
                 if len(parent_role.roles) != 1 or parent_role.roles[0] != child_role._id:
                     if test:
-                        log.info('Would reset %s subroles for project "%s".' % (parent, project_name))
-                        log.info('- Existing %s subrole(s): %s' % (parent, parent_role.roles))
+                        log.info('Would reset %s subroles for project "%s".' %
+                                 (parent, project_name))
+                        log.info('- Existing %s subrole(s): %s' %
+                                 (parent, parent_role.roles))
                     else:
-                        log.info('Resetting %s subroles for project "%s".' % (parent, project_name))
+                        log.info('Resetting %s subroles for project "%s".' %
+                                 (parent, project_name))
                         parent_role.roles = [child_role._id]
                         ThreadLocalORMSession.flush_all()
-            if not (project_roles['Admin'] and project_roles['Developer'] \
-                and project_roles['Member']):
-                log.info('Skipping "%s": missing Admin, Developer, or Member roles' % project_name)
+            if not (project_roles['Admin'] and project_roles['Developer']
+                    and project_roles['Member']):
+                log.info(
+                    'Skipping "%s": missing Admin, Developer, or Member roles' %
+                    project_name)
                 continue
             for user in project.users():
                 pr = user.project_role(project=project)
-                if not pr.roles: continue
+                if not pr.roles:
+                    continue
                 for parent, children in [('Admin', ('Developer', 'Member')),
                                          ('Developer', ('Member',))]:
-                    if project_roles[parent]._id not in pr.roles: continue
+                    if project_roles[parent]._id not in pr.roles:
+                        continue
                     for role_name in children:
                         extra_role = project_roles[role_name]
                         if extra_role._id in pr.roles:
                             if test:
-                                log.info('Would remove %s role from user "%s" in project "%s" (already has %s role).' \
+                                log.info('Would remove %s role from user "%s" in project "%s" (already has %s role).'
                                          % (role_name, user.username, project_name, parent))
                                 pr.roles.remove(extra_role._id)
                             else:
-                                log.info('Removing %s role from user "%s" in project "%s" (already has %s role).' \
+                                log.info('Removing %s role from user "%s" in project "%s" (already has %s role).'
                                          % (role_name, user.username, project_name, parent))
                                 pr.roles.remove(extra_role._id)
                                 ThreadLocalORMSession.flush_all()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/012-uninstall-home.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/012-uninstall-home.py b/scripts/migrations/012-uninstall-home.py
index 46ebc9b..c59fdb2 100644
--- a/scripts/migrations/012-uninstall-home.py
+++ b/scripts/migrations/012-uninstall-home.py
@@ -32,6 +32,7 @@ from allura.ext.project_home import ProjectHomeApp
 log = logging.getLogger('uninstall-home')
 log.addHandler(logging.StreamHandler(sys.stdout))
 
+
 def main():
     test = sys.argv[-1] == 'test'
     log.info('Removing "home" tools')
@@ -48,7 +49,8 @@ def main():
 
                 # would we actually be able to install a wiki?
                 if M.ProjectRole.by_name('Admin') is None:
-                    log.warning('project %s may be orphaned' % project.shortname)
+                    log.warning('project %s may be orphaned' %
+                                project.shortname)
                     possibly_orphaned_projects += 1
                     continue
 
@@ -56,17 +58,21 @@ def main():
 
                 # remove the existing home tool
                 if test:
-                    log.info('would remove "home" tool from project ' + project.shortname)
+                    log.info('would remove "home" tool from project ' +
+                             project.shortname)
                 else:
-                    log.info('removing "home" tool from project ' + project.shortname)
+                    log.info('removing "home" tool from project ' +
+                             project.shortname)
                     with patch('allura.app.g.solr.delete', solr_delete):
                         project.uninstall_app('home')
 
                 # ...and put a Wiki in its place (note we only create a Wiki if we deleted the old home)
                 if test:
-                    log.info('would create Wiki "home" for project ' + project.shortname)
+                    log.info('would create Wiki "home" for project ' +
+                             project.shortname)
                 else:
-                    log.info('creating Wiki "home" for project ' + project.shortname)
+                    log.info('creating Wiki "home" for project ' +
+                             project.shortname)
                     home_title = project.homepage_title or 'Home'
                     wiki_text = project.description or ''
                     if wiki_text == 'You can edit this description in the admin page':
@@ -75,8 +81,10 @@ def main():
                     # re-number all the mounts so the new Wiki comes first
                     mounts = project.ordered_mounts()
                     with patch('forgewiki.model.wiki.Notification.post', notification_post):
-                        new_home_app = project.install_app('Wiki', 'home', 'Home')
-                    mounts = [{'ordinal':0, 'ac':new_home_app.config}] + mounts
+                        new_home_app = project.install_app(
+                            'Wiki', 'home', 'Home')
+                    mounts = [{'ordinal': 0, 'ac': new_home_app.config}] + \
+                        mounts
                     for i, mount in enumerate(mounts):
                         if 'ac' in mount:
                             mount['ac'].options['ordinal'] = i
@@ -91,11 +99,14 @@ def main():
 
                     # now let's fix the home page itself
                     log.info('updating home page to "%s"' % home_title)
-                    new_home_page = WM.Page.query.find(dict(app_config_id=new_home_app.config._id)).first()
+                    new_home_page = WM.Page.query.find(
+                        dict(app_config_id=new_home_app.config._id)).first()
                     with h.push_config(c, app=new_home_app):
                         if new_home_page is None:
                             # weird: we didn't find the existing home page
-                            log.warning('hmmm, actually creating the home page ("%s") for project "%s" from scratch' % (home_title, project.shortname))
+                            log.warning(
+                                'hmmm, actually creating the home page ("%s") for project "%s" from scratch' %
+                                (home_title, project.shortname))
                             new_home_page = WM.Page.upsert(home_title)
                             new_home_page.viewable_by = ['all']
                         new_home_page.title = home_title
@@ -106,7 +117,8 @@ def main():
                     assert new_home_page.title == home_title
                     assert new_home_page.version == 2
 
-                    # if we changed the home page name, make sure the Wiki knows that's the root page
+                    # if we changed the home page name, make sure the Wiki
+                    # knows that's the root page
                     new_home_app.root_page_name = home_title
 
                 session(project).flush()
@@ -116,10 +128,12 @@ def main():
     else:
         log.info('%s projects were updated' % affected_projects)
     if possibly_orphaned_projects:
-        log.warning('%s possibly orphaned projects found' % possibly_orphaned_projects)
+        log.warning('%s possibly orphaned projects found' %
+                    possibly_orphaned_projects)
     if not test:
         assert solr_delete.call_count == affected_projects, solr_delete.call_count
-        assert notification_post.call_count == 2 * affected_projects, notification_post.call_count
+        assert notification_post.call_count == 2 * \
+            affected_projects, notification_post.call_count
 
 if __name__ == '__main__':
     main()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/013-update-ordinals.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/013-update-ordinals.py b/scripts/migrations/013-update-ordinals.py
index 1ddef2e..f26f112 100644
--- a/scripts/migrations/013-update-ordinals.py
+++ b/scripts/migrations/013-update-ordinals.py
@@ -28,6 +28,7 @@ from allura.lib import utils
 log = logging.getLogger('update-ordinals')
 log.addHandler(logging.StreamHandler(sys.stdout))
 
+
 def main():
     test = sys.argv[-1] == 'test'
     num_projects_examined = 0
@@ -37,7 +38,8 @@ def main():
             c.project = project
             mounts = project.ordered_mounts(include_hidden=True)
 
-            # ordered_mounts() means duplicate ordinals (if any) will be next to each other
+            # ordered_mounts() means duplicate ordinals (if any) will be next
+            # to each other
             duplicates_found = False
             prev_ordinal = None
             for mount in mounts:
@@ -48,9 +50,11 @@ def main():
 
             if duplicates_found:
                 if test:
-                    log.info('Would renumber mounts for project "%s".' % project.shortname)
+                    log.info('Would renumber mounts for project "%s".' %
+                             project.shortname)
                 else:
-                    log.info('Renumbering mounts for project "%s".' % project.shortname)
+                    log.info('Renumbering mounts for project "%s".' %
+                             project.shortname)
                     for i, mount in enumerate(mounts):
                         if 'ac' in mount:
                             mount['ac'].options['ordinal'] = i
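
The hunks above boil down to two pycodestyle rules that recur throughout this patch: E302 (two blank lines before a top-level definition) and E128 (continuation lines aligned with the opening delimiter). A rough standalone sketch of the target style, illustrative only and not taken verbatim from the Allura tree:

    import logging

    log = logging.getLogger('update-ordinals')


    def report(project):
        # E302: two blank lines separate top-level definitions.
        # E128: the wrapped argument lines up under the opening paren.
        log.info('Renumbering mounts for project "%s".' %
                 project.shortname)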

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/015-add-neighborhood_id-to-blog-posts.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/015-add-neighborhood_id-to-blog-posts.py b/scripts/migrations/015-add-neighborhood_id-to-blog-posts.py
index b0fd9f5..48b0deb 100644
--- a/scripts/migrations/015-add-neighborhood_id-to-blog-posts.py
+++ b/scripts/migrations/015-add-neighborhood_id-to-blog-posts.py
@@ -26,6 +26,7 @@ from forgeblog import model as BM
 
 log = logging.getLogger(__name__)
 
+
 def main():
     broken_posts = BM.BlogPost.query.find(dict(neighborhood_id=None)).all()
     for post in broken_posts:
@@ -36,4 +37,4 @@ def main():
     ThreadLocalORMSession.close_all()
 
 if __name__ == '__main__':
-    main()
\ No newline at end of file
+    main()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/018-add-svn-checkout-url.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/018-add-svn-checkout-url.py b/scripts/migrations/018-add-svn-checkout-url.py
index 3656f0b..2a5469c 100644
--- a/scripts/migrations/018-add-svn-checkout-url.py
+++ b/scripts/migrations/018-add-svn-checkout-url.py
@@ -20,5 +20,5 @@ from ming.orm import ThreadLocalORMSession
 
 for app in M.AppConfig.query.find(dict(tool_name="svn")).all():
     if 'checkout_url' not in app.options:
-        app.options.checkout_url='trunk'
+        app.options.checkout_url = 'trunk'
     ThreadLocalORMSession.flush_all()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/020-remove-wiki-title-slashes.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/020-remove-wiki-title-slashes.py b/scripts/migrations/020-remove-wiki-title-slashes.py
index 27df1a9..34db4ce 100644
--- a/scripts/migrations/020-remove-wiki-title-slashes.py
+++ b/scripts/migrations/020-remove-wiki-title-slashes.py
@@ -25,6 +25,7 @@ from forgewiki import model as WM
 
 log = logging.getLogger(__name__)
 
+
 def main():
     c.project = None
     pages = WM.Page.query.find({'title': {'$regex': '\/'}}).all()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/022-change-anon-display-name.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/022-change-anon-display-name.py b/scripts/migrations/022-change-anon-display-name.py
index 0bae22e..dbe9911 100644
--- a/scripts/migrations/022-change-anon-display-name.py
+++ b/scripts/migrations/022-change-anon-display-name.py
@@ -18,6 +18,7 @@
 from ming.orm.ormsession import ThreadLocalORMSession
 from allura import model as M
 
+
 def main():
     u = M.User.query.get(username='*anonymous')
     u.display_name = 'Anonymous'
@@ -26,4 +27,4 @@ def main():
     ThreadLocalORMSession.close_all()
 
 if __name__ == '__main__':
-    main()
\ No newline at end of file
+    main()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/024-migrate-custom-profile-text.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/024-migrate-custom-profile-text.py b/scripts/migrations/024-migrate-custom-profile-text.py
index e02fc6b..b69b5f8 100644
--- a/scripts/migrations/024-migrate-custom-profile-text.py
+++ b/scripts/migrations/024-migrate-custom-profile-text.py
@@ -32,9 +32,10 @@ log = logging.getLogger(__name__)
 default_description = r'^\s*(?:You can edit this description in the admin page)?\s*$'
 
 default_personal_project_tmpl = ("This is the personal project of %s."
-            " This project is created automatically during user registration"
-            " as an easy place to store personal data that doesn't need its own"
-            " project such as cloned repositories.\n\n%s")
+                                 " This project is created automatically during user registration"
+                                 " as an easy place to store personal data that doesn't need its own"
+                                 " project such as cloned repositories.\n\n%s")
+
 
 def main():
     users = M.Neighborhood.query.get(name='Users')
@@ -53,10 +54,12 @@ def main():
                 try:
                     app = p.install_app('wiki')
                 except Exception as e:
-                    log.error("Unable to install wiki for user %s: %s" % (user.username, str(e)))
+                    log.error("Unable to install wiki for user %s: %s" %
+                              (user.username, str(e)))
                     continue
 
-            page = WM.Page.query.get(app_config_id=app.config._id, title='Home')
+            page = WM.Page.query.get(
+                app_config_id=app.config._id, title='Home')
             if page is None:
                 continue
 
@@ -67,9 +70,11 @@ def main():
             if "This is the personal project of" in page.text:
                 if description not in page.text:
                     page.text = "%s\n\n%s" % (page.text, description)
-                    log.info("Update wiki home page text for %s" % user.username)
+                    log.info("Update wiki home page text for %s" %
+                             user.username)
             elif "This is the default page" in page.text:
-                page.text = default_personal_project_tmpl % (user.display_name, description)
+                page.text = default_personal_project_tmpl % (
+                    user.display_name, description)
                 log.info("Update wiki home page text for %s" % user.username)
             else:
                 pass

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/025-add-is-nbhd-project.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/025-add-is-nbhd-project.py b/scripts/migrations/025-add-is-nbhd-project.py
index 8fc24f5..85bb07b 100644
--- a/scripts/migrations/025-add-is-nbhd-project.py
+++ b/scripts/migrations/025-add-is-nbhd-project.py
@@ -26,9 +26,12 @@ from allura import model as M
 
 log = logging.getLogger(__name__)
 
+
 def main():
-    M.Project.query.update({'shortname': '--init--'}, {'$set': {'is_nbhd_project': True}}, multi=True)
-    M.Project.query.update({'shortname': {'$ne': '--init--'}}, {'$set': {'is_nbhd_project': False}}, multi=True)
+    M.Project.query.update({'shortname': '--init--'},
+                           {'$set': {'is_nbhd_project': True}}, multi=True)
+    M.Project.query.update({'shortname': {'$ne': '--init--'}},
+                           {'$set': {'is_nbhd_project': False}}, multi=True)
 
 if __name__ == '__main__':
     main()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/026-install-activity-tool.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/026-install-activity-tool.py b/scripts/migrations/026-install-activity-tool.py
index e16973d..c7eb39f 100644
--- a/scripts/migrations/026-install-activity-tool.py
+++ b/scripts/migrations/026-install-activity-tool.py
@@ -24,6 +24,7 @@ from allura import model as M
 
 log = logging.getLogger(__name__)
 
+
 def main():
     for chunk in utils.chunked_find(M.Project):
         for p in chunk:

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/027-change-ticket-write-permissions.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/027-change-ticket-write-permissions.py b/scripts/migrations/027-change-ticket-write-permissions.py
index 29258d6..1e64504 100644
--- a/scripts/migrations/027-change-ticket-write-permissions.py
+++ b/scripts/migrations/027-change-ticket-write-permissions.py
@@ -36,12 +36,15 @@ def add(acl, role):
         acl.append(role)
 
 # migration script for change write permission to create + update
+
+
 def main():
     query = {'tool_name': {'$regex': '^tickets$', '$options': 'i'}}
     for chunk in utils.chunked_find(M.AppConfig, query):
         for a in chunk:
             # change 'deny write' and 'write' permission
-            role_ids = [(p.role_id, p.access) for p in a.acl if p.permission == 'write']
+            role_ids = [(p.role_id, p.access)
+                        for p in a.acl if p.permission == 'write']
             for role_id, access in role_ids:
                 if access == M.ACE.DENY:
                     add(a.acl, M.ACE.deny(role_id, 'create'))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/migrations/028-remove-svn-trees.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/028-remove-svn-trees.py b/scripts/migrations/028-remove-svn-trees.py
index 6630c3a..5b9a413 100644
--- a/scripts/migrations/028-remove-svn-trees.py
+++ b/scripts/migrations/028-remove-svn-trees.py
@@ -25,6 +25,7 @@ from forgesvn import model as SM
 
 log = logging.getLogger(__name__)
 
+
 def kill_tree(repo, commit_id, path, tree):
     '''They were arboring terrorists, I swear.'''
     M.repo.Tree.query.remove(dict(_id=tree._id))
@@ -37,13 +38,14 @@ def kill_tree(repo, commit_id, path, tree):
         else:
             print '  Missing {0}'.format((path + '/' + tree_rec.name).encode('utf8'))
 
+
 def main():
     for chunk in utils.chunked_find(SM.Repository):
         for r in chunk:
             print 'Processing {0}'.format(r)
             all_commit_ids = r._impl.all_commit_ids()
             if all_commit_ids:
-                for commit in M.repo.Commit.query.find({'_id':{'$in':all_commit_ids}}):
+                for commit in M.repo.Commit.query.find({'_id': {'$in': all_commit_ids}}):
                     if commit.tree_id and M.repo.Tree.query.get(_id=commit.tree_id):
                         kill_tree(r._impl, commit._id, '', commit.tree)
                 ThreadLocalORMSession.flush_all()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/new_ticket.py
----------------------------------------------------------------------
diff --git a/scripts/new_ticket.py b/scripts/new_ticket.py
index 33d6788..9b23328 100755
--- a/scripts/new_ticket.py
+++ b/scripts/new_ticket.py
@@ -21,13 +21,16 @@ import argparse
 import requests
 from pprint import pprint
 
+
 def get_opts():
-    parser = argparse.ArgumentParser(description='Post a new ticket using the API')
+    parser = argparse.ArgumentParser(
+        description='Post a new ticket using the API')
     parser.add_argument('project', help='Project shortname')
     parser.add_argument('mount_point', help='Tracker mount point')
     parser.add_argument('-H', '--host', default='sourceforge.net')
     opts = parser.parse_args()
-    opts.url = 'https://{}/rest/p/{}/{}/new'.format(opts.host, opts.project, opts.mount_point)
+    opts.url = 'https://{}/rest/p/{}/{}/new'.format(opts.host,
+                                                    opts.project, opts.mount_point)
     return opts
 
 opts = get_opts()
@@ -39,10 +42,10 @@ description = sys.stdin.read()
 print '-----------------------------------------------'
 
 r = requests.post(opts.url, params={
-        'access_token': access_token,
-        'ticket_form.summary': summary,
-        'ticket_form.description': description,
-    })
+    'access_token': access_token,
+    'ticket_form.summary': summary,
+    'ticket_form.description': description,
+})
 if r.status_code == 200:
     print 'Ticket created at: %s' % r.url
     pprint(r.json())
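
The requests.post hunk above adopts the hanging-indent form PEP8 allows when a call takes a literal dict: the items are indented one level and the closing "})" drops back to the indentation of the line that opened the call. Restated as a self-contained helper (the function name and parameters are placeholders, not part of the script):

    import requests


    def post_ticket(url, access_token, summary, description):
        # Hanging indent: dict items indented one level; the closing
        # "})" returns to the indentation of the opening statement.
        return requests.post(url, params={
            'access_token': access_token,
            'ticket_form.summary': summary,
            'ticket_form.description': description,
        })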

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/open_relay.py
----------------------------------------------------------------------
diff --git a/scripts/open_relay.py b/scripts/open_relay.py
index 0a392d3..ba21862 100644
--- a/scripts/open_relay.py
+++ b/scripts/open_relay.py
@@ -26,14 +26,16 @@ from ConfigParser import ConfigParser
 
 log = logging.getLogger(__name__)
 
+
 def main():
     cp = ConfigParser()
-    log.info('Read config from: %s', cp.read([os.path.join(os.environ['HOME'], '.open_relay.ini')]))
+    log.info('Read config from: %s',
+             cp.read([os.path.join(os.environ['HOME'], '.open_relay.ini')]))
     host = cp.get('open_relay', 'host')
     port = cp.getint('open_relay', 'port')
     ssl = cp.getboolean('open_relay', 'ssl')
     tls = cp.getboolean('open_relay', 'tls')
-    username=cp.get('open_relay', 'username')
+    username = cp.get('open_relay', 'username')
     password = cp.get('open_relay', 'password')
     smtp_client = MailClient(host,
                              port,
@@ -43,6 +45,7 @@ def main():
                smtp_client=smtp_client)
     asyncore.loop()
 
+
 class MailClient(object):
 
     def __init__(self, host, port, ssl, tls, username, password):
@@ -52,7 +55,8 @@ class MailClient(object):
         self._connect()
 
     def sendmail(self, mailfrom, rcpttos, data):
-        if str(mailfrom) == 'None': mailfrom = rcpttos[0]
+        if str(mailfrom) == 'None':
+            mailfrom = rcpttos[0]
         log.info('Sending mail to %s' % rcpttos)
         log.info('Sending mail from %s' % mailfrom)
         try:
@@ -71,6 +75,7 @@ class MailClient(object):
         if self.username:
             self._client.login(self.username, self.password)
 
+
 class MailServer(smtpd.SMTPServer):
 
     def __init__(self, *args, **kwargs):
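
The sendmail change above is an E701 fix: a compound statement such as "if str(mailfrom) == 'None': mailfrom = rcpttos[0]" gets its body moved onto its own indented line. A minimal sketch of the same idea (pick_sender is an invented name, not part of open_relay.py):

    def pick_sender(mailfrom, rcpttos):
        # E701: the conditional body sits on its own line rather than
        # sharing the line with the "if".
        if str(mailfrom) == 'None':
            mailfrom = rcpttos[0]
        return mailfrom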

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/perf/benchmark-scm.py
----------------------------------------------------------------------
diff --git a/scripts/perf/benchmark-scm.py b/scripts/perf/benchmark-scm.py
index 0a4e9eb..be9b93a 100755
--- a/scripts/perf/benchmark-scm.py
+++ b/scripts/perf/benchmark-scm.py
@@ -42,8 +42,10 @@ def main(opts):
         repo = hg.repository(HgUI(), opts.repo_path)
         cid = None if opts.cid == 'HEAD' else ['%s:0' % opts.cid]
         path = opts.path.strip('/')
-        filenames = repo['tip' if opts.cid == 'HEAD' else opts.cid].manifest().keys()
-        filenames = [name for name in filenames if name.startswith(('%s/' % path).lstrip('/'))]
+        filenames = repo[
+            'tip' if opts.cid == 'HEAD' else opts.cid].manifest().keys()
+        filenames = [
+            name for name in filenames if name.startswith(('%s/' % path).lstrip('/'))]
         names = set()
         for name in filenames:
             names.add(name.split('/')[0])
@@ -78,11 +80,13 @@ def impl_git_tree(repo, cid, path, names, *args):
     data = {}
     for name in names:
         #data[name] = repo.git.rev_list(cid, '--', os.path.join(path, name), max_count=1)
-        data[name] = git.Commit.iter_items(repo, cid, os.path.join(path, name), max_count=1).next().hexsha
+        data[name] = git.Commit.iter_items(
+            repo, cid, os.path.join(path, name), max_count=1).next().hexsha
     return data
 
+
 def impl_git_node(repo, cid, path, *args):
-    #return repo.git.rev_list(cid, '--', path, max_count=1)
+    # return repo.git.rev_list(cid, '--', path, max_count=1)
     return git.Commit.iter_items(repo, cid, path, max_count=1).next().hexsha
 
 
@@ -90,53 +94,62 @@ def impl_hg_tree(repo, cid, path, names, *args):
     m = cmdutil.match(repo, pats=[path], default=path)
     data = {}
     for name in names:
-        rev_iter = cmdutil.walkchangerevs(repo, m, {'rev': cid}, lambda c,f: None)
+        rev_iter = cmdutil.walkchangerevs(
+            repo, m, {'rev': cid}, lambda c, f: None)
         data[name] = rev_iter.next().hex()
     return data
 
+
 def impl_hg_node(repo, cid, path, *args):
     m = cmdutil.match(repo, pats=[path], default=path)
-    rev_iter = cmdutil.walkchangerevs(repo, m, {'rev': cid}, lambda c,f: None)
+    rev_iter = cmdutil.walkchangerevs(repo, m, {'rev': cid}, lambda c, f: None)
     return rev_iter.next().hex()
 
+
 def impl_svn_tree(repo, cid, path, names, repo_path, *args):
     infos = repo.info2(
-            'file://%s/%s' % (repo_path, path),
-            revision=cid,
-            depth=pysvn.depth.immediates)
+        'file://%s/%s' % (repo_path, path),
+        revision=cid,
+        depth=pysvn.depth.immediates)
     data = {}
     for name, info in infos[1:]:
         data[name] = info.last_changed_rev
     return data
 
+
 def impl_svn_node(repo, cid, path, names, repo_path, *args):
     logs = repo.log(
-            'file://%s/%s' % (repo_path, path),
-            revision_start=cid,
-            limit=1)
+        'file://%s/%s' % (repo_path, path),
+        revision_start=cid,
+        limit=1)
     return logs[0].revision.number
 
 
 class HgUI(ui.ui):
+
     '''Hg UI subclass that suppresses reporting of untrusted hgrc files.'''
+
     def __init__(self, *args, **kwargs):
         super(HgUI, self).__init__(*args, **kwargs)
         self._reportuntrusted = False
 
+
 def parse_opts():
-    parser = argparse.ArgumentParser(description='Benchmark getting LCD from repo tool')
+    parser = argparse.ArgumentParser(
+        description='Benchmark getting LCD from repo tool')
     parser.add_argument('--type', default='git', dest='type',
-            help='Type of repository being tested.')
+                        help='Type of repository being tested.')
     parser.add_argument('--repo-path', dest='repo_path', required=True,
-            help='Path to the repository to test against')
+                        help='Path to the repository to test against')
     parser.add_argument('--commit', default='HEAD', dest='cid',
-            help='Commit ID or revision number to test against')
+                        help='Commit ID or revision number to test against')
     parser.add_argument('--path', default='', dest='path',
-            help='Path within the repository to test against')
+                        help='Path within the repository to test against')
     parser.add_argument('--count', type=int, default=100, dest='count',
-            help='Number of times to execute')
-    parser.add_argument('--full-tree', action='store_true', default=False, dest='full_tree',
-            help='Time full tree listing instead of just the single node')
+                        help='Number of times to execute')
+    parser.add_argument(
+        '--full-tree', action='store_true', default=False, dest='full_tree',
+        help='Time full tree listing instead of just the single node')
     return parser.parse_args()
 
 if __name__ == '__main__':
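
The parse_opts() hunk above shows both continuation styles this patch uses to keep argparse declarations under the line-length limit: align the keyword arguments under the opening parenthesis when the first line fits, or wrap the entire call with a four-space hanging indent when it does not. Reusing the same options as the hunk, roughly:

    import argparse


    def parse_opts():
        parser = argparse.ArgumentParser(
            description='Benchmark getting LCD from repo tool')
        # Style 1: keyword arguments aligned under the opening parenthesis.
        parser.add_argument('--count', type=int, default=100, dest='count',
                            help='Number of times to execute')
        # Style 2: wrap the whole call with a four-space hanging indent when
        # the first line would otherwise run past 79 characters.
        parser.add_argument(
            '--full-tree', action='store_true', default=False,
            dest='full_tree',
            help='Time full tree listing instead of just the single node')
        return parser.parse_args()

Either form satisfies pycodestyle; the choice in these hunks appears to track which one fits within 79 columns.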

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/perf/call_count.py
----------------------------------------------------------------------
diff --git a/scripts/perf/call_count.py b/scripts/perf/call_count.py
index f965294..1d4ef6d 100755
--- a/scripts/perf/call_count.py
+++ b/scripts/perf/call_count.py
@@ -49,8 +49,9 @@ def parse_args():
                         help='Show call details.  Note that Timers with debug_each_call=False (like ming\'s Cursor.next) are not displayed in verbose mode (but they are counted).')
     parser.add_argument('--debug-html', action='store_true', default=False,
                         help='Save HTML responses as local files')
-    parser.add_argument('--data-file', default='call_counts.csv', type=argparse.FileType('a'),
-                        help='CSV file that is appended to')
+    parser.add_argument(
+        '--data-file', default='call_counts.csv', type=argparse.FileType('a'),
+        help='CSV file that is appended to')
     parser.add_argument('--id', default='',
                         help='An identifier for this run.  Examples:\n'
                              '`git rev-parse --short HEAD` for current hash\n'
@@ -63,9 +64,11 @@ def main(args):
     setup(test)
 
     url = generate_wiki_thread(test)
-    ThreadLocalODMSession.close_all()  # make sure ODM sessions won't get re-used
+    # make sure ODM sessions won't get re-used
+    ThreadLocalODMSession.close_all()
 
-    counts = count_page(test, url, verbose=args.verbose, debug_html=args.debug_html)
+    counts = count_page(test, url, verbose=args.verbose,
+                        debug_html=args.debug_html)
     print json.dumps(counts)
     write_csv(counts, args.id, args.data_file)
     test.tearDown()
@@ -76,7 +79,7 @@ def setup(test):
     with patch_middleware_config({'stats.sample_rate': 1,
                                   'stats.debug_line_length': 1000,
                                   }), \
-         patch('timermiddleware.log.isEnabledFor', return_value=True):  # can't set this via logging configuration since setUp() will load a logging config and then start using it before we have a good place to tweak it
+            patch('timermiddleware.log.isEnabledFor', return_value=True):  # can't set this via logging configuration since setUp() will load a logging config and then start using it before we have a good place to tweak it
         test.setUp()
 
     tmw_log = logging.getLogger('timermiddleware')
@@ -95,8 +98,8 @@ def generate_wiki_thread(test):
     thread = page.discussion_thread
     # create a few posts by a few users
     with push_config(c, user=M.User.query.get(username='test-admin'),
-                        app=app,
-                        project=project):
+                     app=app,
+                     project=project):
         thread.add_post(text='This is very helpful')
         thread.add_post(text="But it's not **super** helpful")
         with push_config(c, user=M.User.query.get(username='test-user')):
@@ -116,7 +119,8 @@ def count_page(test, url, verbose=False, debug_html=False):
         resp = test.app.get(url, extra_environ=dict(username='*anonymous'))
         print url, resp.status
         if debug_html:
-            debug_filename = 'call-{}.html'.format(''.join([random.choice(string.ascii_letters + string.digits) for n in xrange(10)]))
+            debug_filename = 'call-{}.html'.format(''.join([random.choice(string.ascii_letters + string.digits)
+                                                   for n in xrange(10)]))
             with open(debug_filename, 'w') as out:
                 out.write(resp.body)
             print debug_filename
@@ -127,7 +131,8 @@ def count_page(test, url, verbose=False, debug_html=False):
 
     assert len(stats.records) == 1
     timings = json.loads(stats.records[0].getMessage())
-    del timings['call_counts']['total']  # total is always 1, which is misleading
+    # total is always 1, which is misleading
+    del timings['call_counts']['total']
     return timings['call_counts']
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/perf/md_perf.py
----------------------------------------------------------------------
diff --git a/scripts/perf/md_perf.py b/scripts/perf/md_perf.py
index 708a296..e40cb66 100644
--- a/scripts/perf/md_perf.py
+++ b/scripts/perf/md_perf.py
@@ -69,7 +69,7 @@ DUMMYTEXT = None
 def get_artifact():
     from forgeblog import model as BM
     return BM.BlogPost.query.get(
-            slug='2013/09/watch-breaking-bad-season-5-episode-16-felina-live-streaming')
+        slug='2013/09/watch-breaking-bad-season-5-episode-16-felina-live-streaming')
 
 
 def main(opts):
@@ -81,7 +81,7 @@ def main(opts):
         'markdown_safe': lambda: markdown.Markdown(safe_mode=True),
         'markdown_escape': lambda: markdown.Markdown(safe_mode='escape'),
         'forge': lambda: g.markdown,
-        }
+    }
     md = converters[opts.converter]()
     artifact = get_artifact()
     return render(artifact, md, opts)
@@ -113,11 +113,16 @@ def render(artifact, md, opts):
 def parse_options():
     parser = argparse.ArgumentParser()
     parser.add_argument('--converter', default='markdown')
-    parser.add_argument('--profile', action='store_true', help='Run profiler and output timings')
-    parser.add_argument('--output', action='store_true', help='Print result of markdown conversion')
-    parser.add_argument('--re2', action='store_true', help='Run with re2 instead of re')
-    parser.add_argument('--compare', action='store_true', help='Run with re and re2, and compare results')
-    parser.add_argument('-n', '--n', nargs='+', type=int, help='Only convert nth post(s) in thread')
+    parser.add_argument('--profile', action='store_true',
+                        help='Run profiler and output timings')
+    parser.add_argument('--output', action='store_true',
+                        help='Print result of markdown conversion')
+    parser.add_argument('--re2', action='store_true',
+                        help='Run with re2 instead of re')
+    parser.add_argument('--compare', action='store_true',
+                        help='Run with re and re2, and compare results')
+    parser.add_argument('-n', '--n', nargs='+', type=int,
+                        help='Only convert nth post(s) in thread')
     return parser.parse_args()
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/perf/sstress.py
----------------------------------------------------------------------
diff --git a/scripts/perf/sstress.py b/scripts/perf/sstress.py
index 5e53a52..3f1a1b1 100644
--- a/scripts/perf/sstress.py
+++ b/scripts/perf/sstress.py
@@ -30,11 +30,12 @@ N = 1000
 TOADDR = 'nobody@localhost'
 SERVER = 'localhost'
 PORT = 8825
-SIZE = 10 * (2**10)
+SIZE = 10 * (2 ** 10)
 EMAIL_TEXT = 'X' * SIZE
 
+
 def main():
-    threads = [ threading.Thread(target=stress) for x in xrange(C) ]
+    threads = [threading.Thread(target=stress) for x in xrange(C)]
     begin = time.time()
     for t in threads:
         t.start()
@@ -43,11 +44,12 @@ def main():
     end = time.time()
     elapsed = end - begin
     print '%d requests completed in %f seconds' % (N, elapsed)
-    print '%f requests/second' % (N/elapsed)
+    print '%f requests/second' % (N / elapsed)
+
 
 def stress():
     server = smtplib.SMTP(SERVER, PORT)
-    for x in xrange(N/C):
+    for x in xrange(N / C):
         server.sendmail('sstress@localhost', TOADDR, EMAIL_TEXT)
 
 if __name__ == '__main__':
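
The sstress.py changes are pure whitespace fixes around operators and inside brackets (E225/E226 and friends): one space on each side of binary operators and none just inside the brackets of a list comprehension. A small illustrative sketch, with invented names:

    import threading


    def make_workers(target, n=10):
        # Spaces around binary operators; none just inside the brackets.
        size = 10 * (2 ** 10)
        per_thread = size / n
        workers = [threading.Thread(target=target) for _ in range(n)]
        return per_thread, workers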

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/perf/test_git_lcd.py
----------------------------------------------------------------------
diff --git a/scripts/perf/test_git_lcd.py b/scripts/perf/test_git_lcd.py
index aca0288..8d75e54 100644
--- a/scripts/perf/test_git_lcd.py
+++ b/scripts/perf/test_git_lcd.py
@@ -41,7 +41,7 @@ def main(repo_dir, sub_dir='', commit=None):
     git = GitImplementation(Mock(full_fs_path=repo_dir))
     commit = Mock(_id=commit or git.head)
     paths = glob(os.path.join(repo_dir, sub_dir, '*'))
-    paths = [path.replace(repo_dir+'/', '', 1) for path in paths]
+    paths = [path.replace(repo_dir + '/', '', 1) for path in paths]
     print "Timing LCDs for %s at %s" % (paths, commit._id)
     with benchmark() as timer:
         result = git.last_commit_ids(commit, paths)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/prep-scm-sandbox.py
----------------------------------------------------------------------
diff --git a/scripts/prep-scm-sandbox.py b/scripts/prep-scm-sandbox.py
index e60d7c6..414280f 100644
--- a/scripts/prep-scm-sandbox.py
+++ b/scripts/prep-scm-sandbox.py
@@ -18,16 +18,17 @@
 import os
 import string
 
-HOME=os.environ['HOME']
+HOME = os.environ['HOME']
 
-USERS=['user%.2d' % i for i in range(1, 21) ]
+USERS = ['user%.2d' % i for i in range(1, 21)]
 USERS += [
     'admin1', 'admin2',
     'dovethunder', 'dovetail', 'dovestream', 'dovetree', 'dovespangle',
-    'dovemeade', 'dovestar', 'dovebuyer', 'dovesomething', 'dovesweet', 'dovewood' ]
+    'dovemeade', 'dovestar', 'dovebuyer', 'dovesomething', 'dovesweet', 'dovewood']
 SSH_CONFIG = '%s/.ssh/config' % HOME
 LDIF_FILE = '%s/users.ldif' % HOME
-KEYFILE='%s/.ssh/allura_rsa' % HOME
+KEYFILE = '%s/.ssh/allura_rsa' % HOME
+
 
 def main():
 
@@ -40,8 +41,8 @@ def main():
             sb_host=sb_host,
             sb=sb,
             veid='%d0%.2d' % (sb_host, sb))
-        for sb_host in 5,6,7,9
-        for sb in range(99) ]
+        for sb_host in 5, 6, 7, 9
+        for sb in range(99)]
     new_lines = '\n'.join(new_lines)
     found_star = False
     with open(SSH_CONFIG, 'w') as fp:
@@ -62,9 +63,10 @@ def main():
                 user=user, pubkey=pubkey)
 
     # Update LDAP
-    assert 0 == os.system('/usr/local/sbin/ldaptool modify -v -f %s' % LDIF_FILE)
+    assert 0 == os.system('/usr/local/sbin/ldaptool modify -v -f %s' %
+                          LDIF_FILE)
 
-SSH_TMPL=string.Template('''
+SSH_TMPL = string.Template('''
 Host hg*-$veid hg*-${veid}.sb.sf.net
   Hostname 10.58.${sb_host}.${sb}
   Port 17
@@ -81,7 +83,7 @@ Host git*-$veid git*-${veid}.sb.sf.net
   IdentityFile ~/.ssh/allura_rsa
 ''')
 
-LDIF_TMPL=string.Template('''
+LDIF_TMPL = string.Template('''
 dn: cn=$user,ou=users,dc=sf,dc=net
 changetype: modify
 add: sshPublicKey

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/project-import.py
----------------------------------------------------------------------
diff --git a/scripts/project-import.py b/scripts/project-import.py
index f4ba62f..7e6257a 100644
--- a/scripts/project-import.py
+++ b/scripts/project-import.py
@@ -35,7 +35,9 @@ from allura.lib import helpers as h
 
 log = logging.getLogger(__name__)
 
+
 class TroveCategory():
+
     def __init__(self, root_type=''):
         self.root_type = root_type
 
@@ -47,29 +49,35 @@ class TroveCategory():
             cat = M.TroveCategory.query.get(fullname=cstruct)
         if not cat:
             raise col.Invalid(node,
-                    '"%s" is not a valid trove category.' % cstruct)
+                              '"%s" is not a valid trove category.' % cstruct)
         if not cat.fullpath.startswith(self.root_type):
             raise col.Invalid(node,
-                    '"%s" is not a valid "%s" trove category.' %
-                    (cstruct, self.root_type))
+                              '"%s" is not a valid "%s" trove category.' %
+                              (cstruct, self.root_type))
         return cat
 
+
 class User():
+
     def deserialize(self, node, cstruct):
         if cstruct is col.null:
             return col.null
         user = M.User.by_username(cstruct)
         if not user:
             raise col.Invalid(node,
-                    'Invalid username "%s".' % cstruct)
+                              'Invalid username "%s".' % cstruct)
         return user
 
+
 class ProjectName(object):
+
     def __init__(self, name, shortname):
         self.name = name
         self.shortname = shortname
 
+
 class ProjectNameType():
+
     def deserialize(self, node, cstruct):
         if cstruct is col.null:
             return col.null
@@ -78,18 +86,22 @@ class ProjectNameType():
         shortname = re.sub(" ", "-", shortname)
         return ProjectName(name, shortname)
 
+
 class ProjectShortnameType():
+
     def deserialize(self, node, cstruct):
         if cstruct is col.null:
             return col.null
         col.Length(min=3, max=15)(node, cstruct)
         col.Regex(r'^[A-z][-A-z0-9]{2,}$',
-            msg='Project shortname must begin with a letter, can '
-                'contain letters, numbers, and dashes, and must be '
-                '3-15 characters in length.')(node, cstruct)
+                  msg='Project shortname must begin with a letter, can '
+                  'contain letters, numbers, and dashes, and must be '
+                  '3-15 characters in length.')(node, cstruct)
         return cstruct.lower()
 
+
 class Award():
+
     def __init__(self, nbhd):
         self.nbhd = nbhd
 
@@ -97,46 +109,57 @@ class Award():
         if cstruct is col.null:
             return col.null
         award = M.Award.query.find(dict(short=cstruct,
-            created_by_neighborhood_id=self.nbhd._id)).first()
+                                        created_by_neighborhood_id=self.nbhd._id)).first()
         if not award:
             # try to look up the award by _id
             award = M.Award.query.find(dict(_id=bson.ObjectId(cstruct),
-                created_by_neighborhood_id=self.nbhd._id)).first()
+                                            created_by_neighborhood_id=self.nbhd._id)).first()
         if not award:
             raise col.Invalid(node,
-                    'Invalid award "%s".' % cstruct)
+                              'Invalid award "%s".' % cstruct)
         return award
 
+
 class TroveTopics(col.SequenceSchema):
     trove_topics = col.SchemaNode(TroveCategory("Topic"))
 
+
 class TroveLicenses(col.SequenceSchema):
     trove_license = col.SchemaNode(TroveCategory("License"))
 
+
 class TroveDatabases(col.SequenceSchema):
     trove_databases = col.SchemaNode(TroveCategory("Database Environment"))
 
+
 class TroveStatuses(col.SequenceSchema):
     trove_statuses = col.SchemaNode(TroveCategory("Development Status"))
 
+
 class TroveAudiences(col.SequenceSchema):
     trove_audience = col.SchemaNode(TroveCategory("Intended Audience"))
 
+
 class TroveOSes(col.SequenceSchema):
     trove_oses = col.SchemaNode(TroveCategory("Operating System"))
 
+
 class TroveLanguages(col.SequenceSchema):
     trove_languages = col.SchemaNode(TroveCategory("Programming Language"))
 
+
 class TroveTranslations(col.SequenceSchema):
     trove_translations = col.SchemaNode(TroveCategory("Translations"))
 
+
 class TroveUIs(col.SequenceSchema):
     trove_uis = col.SchemaNode(TroveCategory("User Interface"))
 
+
 class Labels(col.SequenceSchema):
     label = col.SchemaNode(col.Str())
 
+
 class Project(col.MappingSchema):
     name = col.SchemaNode(ProjectNameType())
     shortname = col.SchemaNode(ProjectShortnameType(), missing=None)
@@ -147,7 +170,8 @@ class Project(col.MappingSchema):
     labels = Labels(missing=[])
     external_homepage = col.SchemaNode(col.Str(), missing='')
     trove_root_databases = TroveDatabases(missing=None)
-    trove_developmentstatuses = TroveStatuses(validator=col.Length(max=6), missing=None)
+    trove_developmentstatuses = TroveStatuses(
+        validator=col.Length(max=6), missing=None)
     trove_audiences = TroveAudiences(validator=col.Length(max=6), missing=None)
     trove_licenses = TroveLicenses(validator=col.Length(max=6), missing=None)
     trove_oses = TroveOSes(missing=None)
@@ -156,6 +180,7 @@ class Project(col.MappingSchema):
     trove_natlanguages = TroveTranslations(missing=None)
     trove_environments = TroveUIs(missing=None)
 
+
 def valid_shortname(project):
     if project.shortname:
         # already validated in ProjectShortnameType validator
@@ -164,25 +189,31 @@ def valid_shortname(project):
         return True
     else:
         return 'Project shortname "%s" must be between 3 and 15 characters' \
-                % project.name.shortname
+            % project.name.shortname
+
 
 class Projects(col.SequenceSchema):
     project = Project(validator=col.Function(valid_shortname))
 
+
 class Object(object):
+
     def __init__(self, d):
         self.__dict__.update(d)
 
+
 def trove_ids(orig, new_):
-    if new_ is None: return orig
+    if new_ is None:
+        return orig
     return list(set(t._id for t in list(new_)))
 
+
 def create_project(p, nbhd, user, options):
     worker_name = multiprocessing.current_process().name
     M.session.artifact_orm_session._get().skip_mod_date = True
     shortname = p.shortname or p.name.shortname
     project = M.Project.query.get(shortname=shortname,
-            neighborhood_id=nbhd._id)
+                                  neighborhood_id=nbhd._id)
     project_template = nbhd.get_project_template()
 
     if project and not (options.update and p.shortname):
@@ -196,8 +227,8 @@ def create_project(p, nbhd, user, options):
         try:
                 project = nbhd.register_project(shortname,
                                                 p.admin,
-                                            project_name=p.name.name,
-                                            private_project=p.private)
+                                                project_name=p.name.name,
+                                                private_project=p.private)
         except Exception, e:
             log.error('[%s] %s' % (worker_name, str(e)))
             return 0
@@ -217,9 +248,9 @@ def create_project(p, nbhd, user, options):
                     tool_options[k] = string.Template(v).safe_substitute(
                         project.root_project.__dict__.get('root_project', {}))
             project.install_app(tool,
-                    mount_label=tool_config['label'],
-                    mount_point=tool_config['mount_point'],
-                    **tool_options)
+                                mount_label=tool_config['label'],
+                                mount_point=tool_config['mount_point'],
+                                **tool_options)
 
     project.summary = p.summary
     project.short_description = p.description
@@ -228,21 +259,27 @@ def create_project(p, nbhd, user, options):
     # These properties may have been populated by nbhd template defaults in
     # register_project(). Overwrite if we have data, otherwise keep defaults.
     project.labels = p.labels or project.labels
-    project.trove_root_database = trove_ids(project.trove_root_database, p.trove_root_databases)
-    project.trove_developmentstatus = trove_ids(project.trove_developmentstatus, p.trove_developmentstatuses)
-    project.trove_audience = trove_ids(project.trove_audience, p.trove_audiences)
+    project.trove_root_database = trove_ids(
+        project.trove_root_database, p.trove_root_databases)
+    project.trove_developmentstatus = trove_ids(
+        project.trove_developmentstatus, p.trove_developmentstatuses)
+    project.trove_audience = trove_ids(
+        project.trove_audience, p.trove_audiences)
     project.trove_license = trove_ids(project.trove_license, p.trove_licenses)
     project.trove_os = trove_ids(project.trove_os, p.trove_oses)
-    project.trove_language = trove_ids(project.trove_language, p.trove_languages)
+    project.trove_language = trove_ids(
+        project.trove_language, p.trove_languages)
     project.trove_topic = trove_ids(project.trove_topic, p.trove_topics)
-    project.trove_natlanguage = trove_ids(project.trove_natlanguage, p.trove_natlanguages)
-    project.trove_environment = trove_ids(project.trove_environment, p.trove_environments)
+    project.trove_natlanguage = trove_ids(
+        project.trove_natlanguage, p.trove_natlanguages)
+    project.trove_environment = trove_ids(
+        project.trove_environment, p.trove_environments)
 
     for a in p.awards:
         M.AwardGrant(app_config_id=bson.ObjectId(),
-                award_id=a._id,
-                granted_to_project_id=project._id,
-                granted_by_neighborhood_id=nbhd._id)
+                     award_id=a._id,
+                     granted_to_project_id=project._id,
+                     granted_by_neighborhood_id=nbhd._id)
     project.notifications_disabled = False
     with h.push_config(c, project=project, user=user):
         ThreadLocalORMSession.flush_all()
@@ -250,12 +287,14 @@ def create_project(p, nbhd, user, options):
     session(project).clear()
     return 0
 
+
 def create_projects(projects, nbhd, user, options):
     for p in projects:
         r = create_project(Object(p), nbhd, user, options)
         if r != 0:
             sys.exit(r)
 
+
 def main(options):
     log.addHandler(logging.StreamHandler(sys.stdout))
     log.setLevel(getattr(logging, options.log_level.upper()))
@@ -264,7 +303,8 @@ def main(options):
     nbhd = M.Neighborhood.query.get(name=options.neighborhood)
     if not nbhd:
         return 'Invalid neighborhood "%s".' % options.neighborhood
-    admin = M.User.query.get(username=config.get('sfx.api.siteadmin', 'sf-robot'))
+    admin = M.User.query.get(
+        username=config.get('sfx.api.siteadmin', 'sf-robot'))
 
     data = json.load(open(options.file, 'r'))
     project = Project()
@@ -279,36 +319,39 @@ def main(options):
     jobs = []
     for i in range(options.nprocs):
         p = multiprocessing.Process(target=create_projects,
-                args=(chunks[i], nbhd, admin, options), name='worker-' + str(i+1))
+                                    args=(chunks[i], nbhd, admin, options), name='worker-' + str(i + 1))
         jobs.append(p)
         p.start()
 
     for j in jobs:
         j.join()
-        if j.exitcode <> 0: return j.exitcode
+        if j.exitcode <> 0:
+            return j.exitcode
     return 0
 
+
 def parse_options():
     import argparse
     parser = argparse.ArgumentParser(
-            description='Import Allura project(s) from JSON file')
+        description='Import Allura project(s) from JSON file')
     parser.add_argument('file', metavar='JSON_FILE', type=str,
-            help='Path to JSON file containing project data.')
+                        help='Path to JSON file containing project data.')
     parser.add_argument('neighborhood', metavar='NEIGHBORHOOD', type=str,
-            help='Destination Neighborhood shortname.')
+                        help='Destination Neighborhood shortname.')
     parser.add_argument('--log', dest='log_level', default='INFO',
-            help='Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).')
+                        help='Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).')
     parser.add_argument('--update', dest='update', default=False,
-            action='store_true',
-            help='Update existing projects. Without this option, existing '
-                 'projects will be skipped.')
+                        action='store_true',
+                        help='Update existing projects. Without this option, existing '
+                        'projects will be skipped.')
     parser.add_argument('--ensure-tools', dest='ensure_tools', default=False,
-            action='store_true',
-            help='Check nbhd project template for default tools, and install '
-                 'them on the project(s) if not already installed.')
-    parser.add_argument('--nprocs', '-n', action='store', dest='nprocs', type=int,
-            help='Number of processes to divide the work among.',
-            default=multiprocessing.cpu_count())
+                        action='store_true',
+                        help='Check nbhd project template for default tools, and install '
+                        'them on the project(s) if not already installed.')
+    parser.add_argument(
+        '--nprocs', '-n', action='store', dest='nprocs', type=int,
+        help='Number of processes to divide the work among.',
+        default=multiprocessing.cpu_count())
     return parser.parse_args()
 
 if __name__ == '__main__':
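
Earlier in the project-import.py diff, the long trove_ids assignments are brought under the 79-character limit by breaking immediately after the call's opening parenthesis and indenting the wrapped arguments four spaces. A minimal sketch of that pattern (trove_ids below mirrors the script's helper; apply_troves is an invented wrapper):

    def trove_ids(orig, new_):
        # Same contract as the helper in the diff: keep the original ids
        # when nothing new is supplied.
        if new_ is None:
            return orig
        return list(set(t._id for t in list(new_)))


    def apply_troves(project, p):
        # E501 fix: break after the opening parenthesis and indent the
        # wrapped arguments by four spaces.
        project.trove_developmentstatus = trove_ids(
            project.trove_developmentstatus, p.trove_developmentstatuses)
        project.trove_natlanguage = trove_ids(
            project.trove_natlanguage, p.trove_natlanguages)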

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/publicize-neighborhood.py
----------------------------------------------------------------------
diff --git a/scripts/publicize-neighborhood.py b/scripts/publicize-neighborhood.py
index 68ed679..c32d384 100644
--- a/scripts/publicize-neighborhood.py
+++ b/scripts/publicize-neighborhood.py
@@ -27,6 +27,7 @@ from allura.lib import utils
 
 log = logging.getLogger(__name__)
 
+
 def main(options):
     log.addHandler(logging.StreamHandler(sys.stdout))
     log.setLevel(getattr(logging, options.log_level.upper()))
@@ -34,17 +35,19 @@ def main(options):
     nbhd = M.Neighborhood.query.get(name=options.neighborhood)
     if not nbhd:
         return 'Invalid neighborhood "%s".' % options.neighborhood
-    admin_role = M.ProjectRole.by_name('Admin', project=nbhd.neighborhood_project)
-    nbhd_admin = admin_role.users_with_role(project=nbhd.neighborhood_project)[0].user
+    admin_role = M.ProjectRole.by_name(
+        'Admin', project=nbhd.neighborhood_project)
+    nbhd_admin = admin_role.users_with_role(
+        project=nbhd.neighborhood_project)[0].user
     log.info('Making updates as neighborhood admin "%s"' % nbhd_admin.username)
 
     q = {'neighborhood_id': nbhd._id,
-            'is_nbhd_project': False, 'deleted':False}
+         'is_nbhd_project': False, 'deleted': False}
     private_count = public_count = 0
     for projects in utils.chunked_find(M.Project, q):
         for p in projects:
             role_anon = M.ProjectRole.upsert(name='*anonymous',
-                    project_id=p.root_project._id)
+                                             project_id=p.root_project._id)
             if M.ACE.allow(role_anon._id, 'read') not in p.acl:
                 if options.test:
                     log.info('Would be made public: "%s"' % p.shortname)
@@ -66,17 +69,18 @@ def main(options):
         log.info('Made public: %s' % private_count)
     return 0
 
+
 def parse_options():
     import argparse
     parser = argparse.ArgumentParser(
-            description='Make all projects in a neighborhood public.')
+        description='Make all projects in a neighborhood public.')
     parser.add_argument('neighborhood', metavar='NEIGHBORHOOD', type=str,
-            help='Neighborhood name.')
+                        help='Neighborhood name.')
     parser.add_argument('--test', dest='test', default=False,
-            action='store_true',
-            help='Run in test mode (no updates will be applied).')
+                        action='store_true',
+                        help='Run in test mode (no updates will be applied).')
     parser.add_argument('--log', dest='log_level', default='INFO',
-            help='Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).')
+                        help='Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).')
     return parser.parse_args()
 
 if __name__ == '__main__':

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/recover-user-databases.py
----------------------------------------------------------------------
diff --git a/scripts/recover-user-databases.py b/scripts/recover-user-databases.py
index f55f13b..5abf6f3 100644
--- a/scripts/recover-user-databases.py
+++ b/scripts/recover-user-databases.py
@@ -30,12 +30,15 @@ IGNORED_COLLECTIONS = [
     'config',
     'system.indexes']
 
+
 def main():
     conn = M.session.main_doc_session.bind.conn
     n = M.Neighborhood.query.get(url_prefix='/u/')
     for p in M.Project.query.find(dict(neighborhood_id=n._id)):
-        if not p.database_configured: continue
-        if not p.shortname.startswith('u/'): continue
+        if not p.database_configured:
+            continue
+        if not p.shortname.startswith('u/'):
+            continue
         log.info('Checking to see if %s is configured...', p.database)
         db = conn[p.database]
         if is_unconfigured(db):
@@ -49,10 +52,12 @@ def main():
         else:
             log.info('... it is.')
 
+
 def is_unconfigured(db):
     # Check for data in collections other than those we pre-fill with data
     for collection_name in db.collection_names():
-        if collection_name in IGNORED_COLLECTIONS: continue
+        if collection_name in IGNORED_COLLECTIONS:
+            continue
         collection = db[collection_name]
         if collection.count():
             log.info('...%s has data', collection_name)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/rethumb.py
----------------------------------------------------------------------
diff --git a/scripts/rethumb.py b/scripts/rethumb.py
index 6ee190a..cbda4d7 100644
--- a/scripts/rethumb.py
+++ b/scripts/rethumb.py
@@ -32,8 +32,8 @@ import forgetracker.model
 
 
 class RethumbCommand(base.Command):
-    min_args=1
-    max_args=2
+    min_args = 1
+    max_args = 2
     usage = '<ini file> [<project name>]'
     summary = 'Recreate thumbnails for attachment images'
     parser = base.Command.standard_parser(verbose=True)
@@ -44,23 +44,27 @@ class RethumbCommand(base.Command):
 
     def create_thumbnail(self, attachment, att_cls):
         if attachment.is_image():
-            base.log.info("Processing image attachment '%s'", attachment.filename)
+            base.log.info("Processing image attachment '%s'",
+                          attachment.filename)
             doc = state(attachment).document.deinstrumented_clone()
             del doc['_id']
             del doc['file_id']
             doc['type'] = 'thumbnail'
             count = att_cls.query.find(doc).count()
             if count == 1:
-                base.log.info("Thumbnail already exists for '%s' - skipping", attachment.filename)
+                base.log.info(
+                    "Thumbnail already exists for '%s' - skipping", attachment.filename)
                 return
             elif count > 1:
-                base.log.warning("There are %d thumbnails for '%s' - consider clearing them with --force", count, attachment.filename)
+                base.log.warning(
+                    "There are %d thumbnails for '%s' - consider clearing them with --force", count, attachment.filename)
                 return
 
             image = PIL.Image.open(attachment.rfile())
             del doc['content_type']
             del doc['filename']
-            att_cls.save_thumbnail(attachment.filename, image, attachment.content_type, att_cls.thumbnail_size, doc, square=True)
+            att_cls.save_thumbnail(attachment.filename, image,
+                                   attachment.content_type, att_cls.thumbnail_size, doc, square=True)
             base.log.info("Created thumbnail for '%s'", attachment.filename)
             self.created_thumbs += 1
 
@@ -97,20 +101,27 @@ class RethumbCommand(base.Command):
             c.project = p
 
             if self.options.force:
-                existing_thumbs += M.BaseAttachment.query.find({'type': 'thumbnail'}).count()
-                base.log.info('Removing %d current thumbnails (per --force)', existing_thumbs)
+                existing_thumbs += M.BaseAttachment.query.find({'type': 'thumbnail'}
+                                                               ).count()
+                base.log.info(
+                    'Removing %d current thumbnails (per --force)', existing_thumbs)
                 M.BaseAttachment.query.remove({'type': 'thumbnail'})
 
             # ProjectFile's live in main collection (unlike File's)
             # M.ProjectFile.query.find({'app_config_id': None, 'type': 'attachment'}).all()
 
             for app in p.app_configs:
-                base.log.info("Processing application '%s' mounted at '%s' of type '%s'", app.options['mount_label'], app.options['mount_point'], app.tool_name)
+                base.log.info(
+                    "Processing application '%s' mounted at '%s' of type '%s'",
+                    app.options['mount_label'], app.options['mount_point'], app.tool_name)
 
-                # Any application may contain DiscussionAttachment's, it has discussion_id field
-                self.process_att_of_type(M.DiscussionAttachment, {'app_config_id': app._id, 'discussion_id': {'$ne': None}})
+                # Any application may contain DiscussionAttachment's, it has
+                # discussion_id field
+                self.process_att_of_type(
+                    M.DiscussionAttachment, {'app_config_id': app._id, 'discussion_id': {'$ne': None}})
 
-                # Otherwise, we'll take attachment classes belonging to app's package
+                # Otherwise, we'll take attachment classes belonging to app's
+                # package
                 ep = iter_entry_points('allura', app.tool_name).next()
                 app_package = ep.module_name.split('.', 1)[0]
                 if app_package == 'allura':
@@ -119,14 +130,17 @@ class RethumbCommand(base.Command):
 
                 classes = package_model_map.get(app_package, [])
                 for cls in classes:
-                    self.process_att_of_type(cls, {'app_config_id': app._id, 'discussion_id': None})
+                    self.process_att_of_type(
+                        cls, {'app_config_id': app._id, 'discussion_id': None})
 
                 base.log.info('-' * 10)
 
         base.log.info('Recreated %d thumbs', self.created_thumbs)
         if self.options.force:
             if existing_thumbs != self.created_thumbs:
-                base.log.warning('There were %d thumbs before --force operation started, but %d recreated', existing_thumbs, self.created_thumbs)
+                base.log.warning(
+                    'There were %d thumbs before --force operation started, but %d recreated',
+                    existing_thumbs, self.created_thumbs)
 
         ThreadLocalORMSession.flush_all()
 


[03/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py b/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py
index 65630aa..afc6f41 100644
--- a/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py
+++ b/ForgeWiki/forgewiki/scripts/wiki_from_trac/wiki_from_trac.py
@@ -30,25 +30,38 @@ log = logging.getLogger(__name__)
 
 
 class WikiFromTrac(ScriptTask):
+
     """Import Trac Wiki to Allura Wiki"""
     @classmethod
     def parser(cls):
         parser = argparse.ArgumentParser(description='Import wiki from'
-            'Trac to allura wiki')
+                                         'Trac to allura wiki')
 
         parser.add_argument('trac_url', type=str, help='Trac URL')
-        parser.add_argument('-a', '--api-ticket', dest='api_key', help='API ticket')
-        parser.add_argument('-s', '--secret-key', dest='secret_key', help='Secret key')
-        parser.add_argument('-p', '--project', dest='project', help='Project to import to')
-        parser.add_argument('-t', '--tracker', dest='tracker', help='Tracker to import to')
-        parser.add_argument('-f', '--forum', dest='forum', help='Forum tool to import to')
-        parser.add_argument('-w', '--wiki', dest='wiki', help='Wiki tool to import to')
-        parser.add_argument('-u', '--base-url', dest='base_url', default='https://sourceforge.net', help='Base Allura (%(default)s for default)')
-        parser.add_argument('-o', dest='import_opts', default=[], action='append', help='Specify import option(s)', metavar='opt=val')
-        parser.add_argument('--user-map', dest='user_map_file', help='Map original users to SF.net users', metavar='JSON_FILE')
-        parser.add_argument('--validate', dest='validate', action='store_true', help='Validate import data')
-        parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Verbose operation')
-        parser.add_argument('-c', '--continue', dest='cont', action='store_true', help='Continue import into existing tracker')
+        parser.add_argument('-a', '--api-ticket',
+                            dest='api_key', help='API ticket')
+        parser.add_argument('-s', '--secret-key',
+                            dest='secret_key', help='Secret key')
+        parser.add_argument('-p', '--project', dest='project',
+                            help='Project to import to')
+        parser.add_argument('-t', '--tracker', dest='tracker',
+                            help='Tracker to import to')
+        parser.add_argument('-f', '--forum', dest='forum',
+                            help='Forum tool to import to')
+        parser.add_argument('-w', '--wiki', dest='wiki',
+                            help='Wiki tool to import to')
+        parser.add_argument('-u', '--base-url', dest='base_url',
+                            default='https://sourceforge.net', help='Base Allura (%(default)s for default)')
+        parser.add_argument('-o', dest='import_opts',
+                            default=[], action='append', help='Specify import option(s)', metavar='opt=val')
+        parser.add_argument('--user-map', dest='user_map_file',
+                            help='Map original users to SF.net users', metavar='JSON_FILE')
+        parser.add_argument('--validate', dest='validate',
+                            action='store_true', help='Validate import data')
+        parser.add_argument('-v', '--verbose', dest='verbose',
+                            action='store_true', help='Verbose operation')
+        parser.add_argument('-c', '--continue', dest='cont',
+                            action='store_true', help='Continue import into existing tracker')
         parser.add_argument('-C', '--converter', dest='converter',
                             default='html2text',
                             help='Converter to use on wiki text. '

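Most of the hunks in this commit apply PEP 8's continuation-line rules to calls like the add_argument() ones above. A minimal stand-alone sketch (illustrative option names, not taken from the patch) of the two wrapping styles involved:

    import argparse

    parser = argparse.ArgumentParser(description='Example importer')

    # Visual indent: wrapped arguments line up under the opening parenthesis,
    # the style applied to most call sites in this cleanup.
    parser.add_argument('-p', '--project', dest='project',
                        help='Project to import to')

    # Hanging indent: nothing after the opening parenthesis, and the wrapped
    # arguments get one extra level of indentation.
    parser.add_argument(
        '-t', '--tracker', dest='tracker',
        help='Tracker to import to')
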
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/tests/functional/test_rest.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/tests/functional/test_rest.py b/ForgeWiki/forgewiki/tests/functional/test_rest.py
index 2b5cfd5..560503e 100644
--- a/ForgeWiki/forgewiki/tests/functional/test_rest.py
+++ b/ForgeWiki/forgewiki/tests/functional/test_rest.py
@@ -42,13 +42,18 @@ class TestWikiApi(TestRestApiBase):
         r = self.app.get('/p/test/wiki/Home/')
         discussion_url = r.html.findAll('form')[2]['action'][:-4]
         content = file(__file__).read()
-        self.app.post('/wiki/Home/attach', upload_files=[('file_info', 'test_root.py', content)])
+        self.app.post('/wiki/Home/attach',
+                      upload_files=[('file_info', 'test_root.py', content)])
         r = self.app.get('/rest/p/test/wiki/Home/')
         r = json.loads(r.body)
-        assert_equal(r['attachments'][0]['url'], 'http://localhost/p/test/wiki/Home/attachment/test_root.py')
-        assert_equal(r['discussion_thread_url'], 'http://localhost/rest%s' % discussion_url)
-        assert_equal(r['discussion_thread']['_id'], discussion_url.split('/')[-2])
-        self.app.post('/wiki/Home/attach', upload_files=[('file_info', '__init__.py', content),])
+        assert_equal(r['attachments'][0]['url'],
+                     'http://localhost/p/test/wiki/Home/attachment/test_root.py')
+        assert_equal(r['discussion_thread_url'], 'http://localhost/rest%s' %
+                     discussion_url)
+        assert_equal(r['discussion_thread']['_id'],
+                     discussion_url.split('/')[-2])
+        self.app.post('/wiki/Home/attach',
+                      upload_files=[('file_info', '__init__.py', content), ])
         r = self.app.get('/rest/p/test/wiki/Home/')
         r = json.loads(r.body)
         assert_equal(len(r['attachments']), 2)

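The REST test above drives the wiki through self.app and uploads attachments in-process. A small, self-contained sketch, assuming the webtest package (whose TestApp API matches the self.app calls above) and a throwaway WSGI app standing in for Allura, showing the upload_files tuple format, (field_name, filename, content), and how a JSON body is decoded:

    import json
    from webtest import TestApp

    def fake_app(environ, start_response):
        # Stand-in application: always answers with an empty attachment list.
        start_response('200 OK', [('Content-Type', 'application/json')])
        return [b'{"attachments": []}']

    app = TestApp(fake_app)
    app.post('/wiki/Home/attach',
             upload_files=[('file_info', 'test_root.py', b'some content')])
    r = app.get('/rest/p/test/wiki/Home/')
    assert json.loads(r.body)['attachments'] == []
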
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/tests/functional/test_root.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/tests/functional/test_root.py b/ForgeWiki/forgewiki/tests/functional/test_root.py
index 0bd5d78..5b05aad 100644
--- a/ForgeWiki/forgewiki/tests/functional/test_root.py
+++ b/ForgeWiki/forgewiki/tests/functional/test_root.py
@@ -42,7 +42,9 @@ from forgewiki import model
 # CommentController methods exposed:
 #     reply, delete
 
+
 class TestRootController(TestController):
+
     def setUp(self):
         super(TestRootController, self).setUp()
         self.setup_with_tools()
@@ -57,7 +59,7 @@ class TestRootController(TestController):
         assert 'Create Page' in r
         # No 'Create Page' button if user doesn't have 'create' perm
         r = self.app.get('/wiki/tést/',
-                extra_environ=dict(username='*anonymous')).follow()
+                         extra_environ=dict(username='*anonymous')).follow()
         assert 'Create Page' not in r
 
     def test_root_markdown_syntax(self):
@@ -88,8 +90,10 @@ class TestRootController(TestController):
     @patch('allura.lib.search.search')
     def test_search(self, search):
         r = self.app.get('/wiki/search?q=test')
-        assert_in('<a href="/wiki/search?q=test&amp;sort=score+asc" class="strong">relevance</a>', r)
-        assert_in('<a href="/wiki/search?q=test&amp;sort=mod_date_dt+desc" class="">date</a>', r)
+        assert_in(
+            '<a href="/wiki/search?q=test&amp;sort=score+asc" class="strong">relevance</a>', r)
+        assert_in(
+            '<a href="/wiki/search?q=test&amp;sort=mod_date_dt+desc" class="">date</a>', r)
 
         p = M.Project.query.get(shortname='test')
         r = self.app.get('/wiki/search?q=test&sort=score+asc')
@@ -102,7 +106,7 @@ class TestRootController(TestController):
             'qf': 'title^2 text',
             'pf': 'title^2 text',
             'fq': [
-                'project_id_s:%s'  % p._id,
+                'project_id_s:%s' % p._id,
                 'mount_point_s:wiki',
                 '-deleted_b:true',
                 'type_s:("WikiPage" OR "WikiPage Snapshot")',
@@ -115,8 +119,10 @@ class TestRootController(TestController):
         }
         search.assert_called_with('test', **solr_query)
 
-        r = self.app.get('/wiki/search?q=test&search_comments=on&history=on&sort=mod_date_dt+desc')
-        solr_query['fq'][3] = 'type_s:("WikiPage" OR "WikiPage Snapshot" OR "Post")'
+        r = self.app.get(
+            '/wiki/search?q=test&search_comments=on&history=on&sort=mod_date_dt+desc')
+        solr_query['fq'][
+            3] = 'type_s:("WikiPage" OR "WikiPage Snapshot" OR "Post")'
         solr_query['fq'].remove('is_history_b:False')
         solr_query['sort'] = 'mod_date_dt desc'
         search.assert_called_with('test', **solr_query)
@@ -169,10 +175,10 @@ class TestRootController(TestController):
         response = self.app.post(
             '/wiki/foo-bar/update',
             params={
-                'title':'foo/bar',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'}).follow()
+                'title': 'foo/bar',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'}).follow()
         assert 'foo-bar' in response
         assert 'foo-bar' in response.request.url
 
@@ -180,10 +186,10 @@ class TestRootController(TestController):
         r = self.app.post(
             '/wiki/page.dot/update',
             params={
-                'title':'page.dot',
-                'text':'text1',
-                'labels':'',
-                'viewable_by-0.id':'all'}).follow()
+                'title': 'page.dot',
+                'text': 'text1',
+                'labels': '',
+                'viewable_by-0.id': 'all'}).follow()
         assert 'page.dot' in r
 
     def test_subpage_attempt(self):
@@ -191,10 +197,10 @@ class TestRootController(TestController):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'text1',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'text1',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         assert '/p/test/wiki/Home/' in self.app.get('/wiki/tést/Home/')
         self.app.get('/wiki/tést/notthere/', status=404)
 
@@ -203,41 +209,42 @@ class TestRootController(TestController):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'text1',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'text1',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'text2',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'text2',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         response = self.app.get('/wiki/tést/history')
         assert 'tést' in response
         # two revisions are shown
         assert '2 by Test Admin' in response
         assert '1 by Test Admin' in response
         # you can revert to an old revision, but not the current one
-        assert response.html.find('a',{'href':'./revert?version=1'})
-        assert not response.html.find('a',{'href':'./revert?version=2'})
-        response = self.app.get('/wiki/tést/history', extra_environ=dict(username='*anonymous'))
+        assert response.html.find('a', {'href': './revert?version=1'})
+        assert not response.html.find('a', {'href': './revert?version=2'})
+        response = self.app.get('/wiki/tést/history',
+                                extra_environ=dict(username='*anonymous'))
         # two revisions are shown
         assert '2 by Test Admin' in response
         assert '1 by Test Admin' in response
         # you cannot revert to any revision
-        assert not response.html.find('a',{'href':'./revert?version=1'})
-        assert not response.html.find('a',{'href':'./revert?version=2'})
+        assert not response.html.find('a', {'href': './revert?version=1'})
+        assert not response.html.find('a', {'href': './revert?version=2'})
 
     def test_page_diff(self):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         self.app.post('/wiki/tést/revert', params=dict(version='1'))
         response = self.app.get('/wiki/tést/')
         assert 'Subscribe' in response
@@ -322,10 +329,10 @@ class TestRootController(TestController):
         self.app.post(
             '/wiki/TEST/update',
             params={
-                'title':'TEST',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'TEST',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         response = self.app.get('/wiki/TEST/raw')
         assert 'TEST' in response
 
@@ -333,10 +340,10 @@ class TestRootController(TestController):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': '',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         response = self.app.post('/wiki/tést/revert', params=dict(version='1'))
         assert '.' in response.json['location']
         response = self.app.get('/wiki/tést/')
@@ -347,10 +354,10 @@ class TestRootController(TestController):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         response = self.app.post('/wiki/tést/revert', params=dict(version='1'))
         assert '.' in response.json['location']
         response = self.app.get('/wiki/tést/')
@@ -362,10 +369,10 @@ class TestRootController(TestController):
         response = self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         assert_equal(spam_checker.check.call_args[0][0], 'sometext')
         assert 'tést' in response
 
@@ -374,18 +381,18 @@ class TestRootController(TestController):
         response = self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'yellow,green',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': 'yellow,green',
+                'viewable_by-0.id': 'all'})
         assert 'tést' in response
         response = self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'yellow',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': 'yellow',
+                'viewable_by-0.id': 'all'})
         assert 'tést' in response
 
     def test_page_label_count(self):
@@ -413,12 +420,13 @@ class TestRootController(TestController):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         content = file(__file__).read()
-        self.app.post('/wiki/tést/attach', upload_files=[('file_info', 'test_root.py', content)])
+        self.app.post('/wiki/tést/attach',
+                      upload_files=[('file_info', 'test_root.py', content)])
         response = self.app.get('/wiki/tést/')
         assert 'test_root.py' in response
 
@@ -426,12 +434,13 @@ class TestRootController(TestController):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         content = file(__file__).read()
-        self.app.post('/wiki/tést/attach', upload_files=[('file_info', 'test1.py', content),('file_info', 'test2.py', content)])
+        self.app.post('/wiki/tést/attach',
+                      upload_files=[('file_info', 'test1.py', content), ('file_info', 'test2.py', content)])
         response = self.app.get('/wiki/tést/')
         assert 'test1.py' in response
         assert 'test2.py' in response
@@ -440,10 +449,10 @@ class TestRootController(TestController):
         self.app.post(
             '/wiki/tést/update',
             params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+                'title': 'tést',
+                'text': 'sometext',
+                'labels': '',
+                'viewable_by-0.id': 'all'})
         file_name = 'test_root.py'
         file_data = file(__file__).read()
         upload = ('file_info', file_name, file_data)
@@ -454,12 +463,13 @@ class TestRootController(TestController):
 
     def test_new_image_attachment_content(self):
         self.app.post('/wiki/TEST/update', params={
-                'title':'TEST',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'TEST',
+            'text': 'sometext',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         file_name = 'neo-icon-set-454545-256x350.png'
-        file_path = os.path.join(allura.__path__[0],'nf','allura','images',file_name)
+        file_path = os.path.join(
+            allura.__path__[0], 'nf', 'allura', 'images', file_name)
         file_data = file(file_path).read()
         upload = ('file_info', file_name, file_data)
         self.app.post('/wiki/TEST/attach', upload_files=[upload])
@@ -468,18 +478,19 @@ class TestRootController(TestController):
         filename = page.attachments[0].filename
 
         uploaded = PIL.Image.open(file_path)
-        r = self.app.get('/wiki/TEST/attachment/'+filename)
+        r = self.app.get('/wiki/TEST/attachment/' + filename)
         downloaded = PIL.Image.open(StringIO.StringIO(r.body))
         assert uploaded.size == downloaded.size
-        r = self.app.get('/wiki/TEST/attachment/'+filename+'/thumb')
+        r = self.app.get('/wiki/TEST/attachment/' + filename + '/thumb')
 
         thumbnail = PIL.Image.open(StringIO.StringIO(r.body))
-        assert thumbnail.size == (255,255)
+        assert thumbnail.size == (255, 255)
 
         # Make sure the full-size image is not embedded in the page
         r = self.app.get('/wiki/TEST/')
-        img_srcs = [ i['src'] for i in r.html.findAll('img') ]
-        assert ('/p/test/wiki/TEST/attachment/' + filename) not in img_srcs, img_srcs
+        img_srcs = [i['src'] for i in r.html.findAll('img')]
+        assert ('/p/test/wiki/TEST/attachment/' +
+                filename) not in img_srcs, img_srcs
 
     def test_sidebar_static_page(self):
         response = self.app.get('/wiki/tést/')
@@ -491,20 +502,20 @@ class TestRootController(TestController):
         assert 'Edit TEST' in response
         assert 'Related' not in response
         self.app.post('/wiki/TEST/update', params={
-                'title':'TEST',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'TEST',
+            'text': 'sometext',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         self.app.post('/wiki/aaa/update', params={
-                'title':'aaa',
-                'text':'',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'aaa',
+            'text': '',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         self.app.post('/wiki/bbb/update', params={
-                'title':'bbb',
-                'text':'',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'bbb',
+            'text': '',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
 
         h.set_context('test', 'wiki', neighborhood='Projects')
         a = model.Page.query.find(dict(title='aaa')).first()
@@ -523,56 +534,63 @@ class TestRootController(TestController):
 
     def test_show_discussion(self):
         self.app.post('/wiki/tést/update', params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'tést',
+            'text': 'sometext',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         wiki_page = self.app.get('/wiki/tést/')
-        assert wiki_page.html.find('div',{'id':'new_post_holder'})
-        options_admin = self.app.get('/admin/wiki/options', validate_chunk=True)
+        assert wiki_page.html.find('div', {'id': 'new_post_holder'})
+        options_admin = self.app.get(
+            '/admin/wiki/options', validate_chunk=True)
         assert options_admin.form['show_discussion'].checked
         options_admin.form['show_discussion'].checked = False
         options_admin.form.submit()
-        options_admin2 = self.app.get('/admin/wiki/options', validate_chunk=True)
+        options_admin2 = self.app.get(
+            '/admin/wiki/options', validate_chunk=True)
         assert not options_admin2.form['show_discussion'].checked
         wiki_page2 = self.app.get('/wiki/tést/')
-        assert not wiki_page2.html.find('div',{'id':'new_post_holder'})
+        assert not wiki_page2.html.find('div', {'id': 'new_post_holder'})
 
     def test_show_left_bar(self):
         self.app.post('/wiki/tést/update', params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'tést',
+            'text': 'sometext',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         wiki_page = self.app.get('/wiki/tést/')
-        assert wiki_page.html.find('ul',{'class':'sidebarmenu'})
-        options_admin = self.app.get('/admin/wiki/options', validate_chunk=True)
+        assert wiki_page.html.find('ul', {'class': 'sidebarmenu'})
+        options_admin = self.app.get(
+            '/admin/wiki/options', validate_chunk=True)
         assert options_admin.form['show_left_bar'].checked
         options_admin.form['show_left_bar'].checked = False
         options_admin.form.submit()
-        options_admin2 = self.app.get('/admin/wiki/options', validate_chunk=True)
+        options_admin2 = self.app.get(
+            '/admin/wiki/options', validate_chunk=True)
         assert not options_admin2.form['show_left_bar'].checked
-        wiki_page2 = self.app.get('/wiki/tést/',extra_environ=dict(username='*anonymous'))
-        assert not wiki_page2.html.find('ul',{'class':'sidebarmenu'})
+        wiki_page2 = self.app.get(
+            '/wiki/tést/', extra_environ=dict(username='*anonymous'))
+        assert not wiki_page2.html.find('ul', {'class': 'sidebarmenu'})
         wiki_page3 = self.app.get('/wiki/tést/')
-        assert not wiki_page3.html.find('ul',{'class':'sidebarmenu'})
+        assert not wiki_page3.html.find('ul', {'class': 'sidebarmenu'})
 
     def test_show_metadata(self):
         self.app.post('/wiki/tést/update', params={
-                'title':'tést',
-                'text':'sometext',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'tést',
+            'text': 'sometext',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         wiki_page = self.app.get('/wiki/tést/')
-        assert wiki_page.html.find('div',{'class':'editbox'})
-        options_admin = self.app.get('/admin/wiki/options', validate_chunk=True)
+        assert wiki_page.html.find('div', {'class': 'editbox'})
+        options_admin = self.app.get(
+            '/admin/wiki/options', validate_chunk=True)
         assert options_admin.form['show_right_bar'].checked
         options_admin.form['show_right_bar'].checked = False
         options_admin.form.submit()
-        options_admin2 = self.app.get('/admin/wiki/options', validate_chunk=True)
+        options_admin2 = self.app.get(
+            '/admin/wiki/options', validate_chunk=True)
         assert not options_admin2.form['show_right_bar'].checked
         wiki_page2 = self.app.get('/wiki/tést/')
-        assert not wiki_page2.html.find('div',{'class':'editbox'})
+        assert not wiki_page2.html.find('div', {'class': 'editbox'})
 
     def test_edit_mount_label(self):
         r = self.app.get('/admin/wiki/edit_label', validate_chunk=True)
@@ -585,15 +603,15 @@ class TestRootController(TestController):
     def test_page_links_are_colored(self):
         self.app.get('/wiki/space%20page/')
         params = {
-            'title':'space page',
-            'text':'''There is a space in the title!''',
-            'labels':'',
-            'viewable_by-0.id':'all'}
+            'title': 'space page',
+            'text': '''There is a space in the title!''',
+            'labels': '',
+            'viewable_by-0.id': 'all'}
         self.app.post('/wiki/space%20page/update', params=params)
         self.app.get('/wiki/TEST/')
         params = {
-            'title':'TEST',
-            'text':'''
+            'title': 'TEST',
+            'text': '''
 * Here is a link to [this page](TEST)
 * Here is a link to [another page](Some page which does not exist)
 * Here is a link to [space page space](space page)
@@ -605,62 +623,65 @@ class TestRootController(TestController):
 * Here is a link to [another attach](TEST/attachment/attach.txt)
 * Here is a link to [attach](TEST/attachment/test_root.py)
 ''',
-            'labels':'',
-            'viewable_by-0.id':'all'}
+            'labels': '',
+            'viewable_by-0.id': 'all'}
         self.app.post('/wiki/TEST/update', params=params)
         content = file(__file__).read()
-        self.app.post('/wiki/TEST/attach', upload_files=[('file_info', 'test_root.py', content)])
+        self.app.post('/wiki/TEST/attach',
+                      upload_files=[('file_info', 'test_root.py', content)])
         r = self.app.get('/wiki/TEST/')
         found_links = 0
         for link in r.html.findAll('a'):
             if link.contents == ['this page']:
                 assert 'notfound' not in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['another page']:
                 assert 'notfound' not in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['space page space']:
                 assert 'notfound' not in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['space page escape']:
                 assert 'notfound' not in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['[TEST]']:
                 assert 'notfound' not in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['[Some page which does not exist]']:
                 assert 'notfound' in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['[space page]']:
                 assert 'notfound' not in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['[space%20page]']:
                 assert 'notfound' not in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['another attach']:
                 assert 'notfound' in link.get('class', '')
-                found_links +=1
+                found_links += 1
             if link.contents == ['attach']:
                 assert 'notfound' not in link.get('class', '')
-                found_links +=1
+                found_links += 1
         assert found_links == 10, 'Wrong number of links found'
 
     def test_home_rename(self):
-        assert 'The resource was found at http://localhost/p/test/wiki/Home/;' in self.app.get('/p/test/wiki/')
+        assert 'The resource was found at http://localhost/p/test/wiki/Home/;' in self.app.get(
+            '/p/test/wiki/')
         req = self.app.get('/p/test/wiki/Home/edit')
         req.forms[1]['title'].value = 'new_title'
         req.forms[1].submit()
-        assert 'The resource was found at http://localhost/p/test/wiki/new_title/;' in self.app.get('/p/test/wiki/')
+        assert 'The resource was found at http://localhost/p/test/wiki/new_title/;' in self.app.get(
+            '/p/test/wiki/')
 
     @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='0')
     def test_cached_html(self):
         """Ensure cached html is not escaped."""
         html = '<p><span>My Html</span></p>'
         self.app.post('/wiki/cache/update', params={
-                'title': 'cache',
-                'text': html,
-                'labels': '',
-                'viewable_by-0.id': 'all'})
+            'title': 'cache',
+            'text': html,
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         # first request caches html, second serves from cache
         r = self.app.get('/wiki/cache/')
         r = self.app.get('/wiki/cache/')
@@ -668,15 +689,15 @@ class TestRootController(TestController):
 
     def test_page_delete(self):
         self.app.post('/wiki/aaa/update', params={
-                'title':'aaa',
-                'text':'',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'aaa',
+            'text': '',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         self.app.post('/wiki/bbb/update', params={
-                'title':'bbb',
-                'text':'',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'bbb',
+            'text': '',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         response = self.app.get('/wiki/browse_pages/')
         assert 'aaa' in response
         assert 'bbb' in response
@@ -688,21 +709,21 @@ class TestRootController(TestController):
     def test_mailto_links(self):
         self.app.get('/wiki/test_mailto/')
         params = {
-            'title':'test_mailto',
-            'text':'''
+            'title': 'test_mailto',
+            'text': '''
 * Automatic mailto #1 <darth.vader@deathstar.org>
 * Automatic mailto #2 <mailto:luke.skywalker@tatooine.org>
 * Handmaid mailto <a href="mailto:yoda@jedi.org">Email Yoda</a>
 ''',
-            'labels':'',
-            'viewable_by-0.id':'all'}
+            'labels': '',
+            'viewable_by-0.id': 'all'}
         self.app.post('/wiki/test_mailto/update', params=params)
         r = self.app.get('/wiki/test_mailto/')
         mailto_links = 0
         for link in r.html.findAll('a'):
             if link.get('href') == 'mailto:darth.vader@deathstar.org':
                 assert 'notfound' not in link.get('class', '')
-                mailto_links +=1
+                mailto_links += 1
             if link.get('href') == 'mailto:luke.skywalker@tatooine.org':
                 assert 'notfound' not in link.get('class', '')
                 mailto_links += 1

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/tests/test_app.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/tests/test_app.py b/ForgeWiki/forgewiki/tests/test_app.py
index 103def4..484a6ee 100644
--- a/ForgeWiki/forgewiki/tests/test_app.py
+++ b/ForgeWiki/forgewiki/tests/test_app.py
@@ -53,7 +53,8 @@ class TestBulkExport(object):
         page = WM.Page.upsert('Return of the Jedi')
         page.text = 'Star Wars Episode VI: Return of the Jedi'
         page.commit()
-        page = WM.Page.query.get(app_config_id=self.wiki.config._id, title='Home')
+        page = WM.Page.query.get(
+            app_config_id=self.wiki.config._id, title='Home')
         page.deleted = True
         page.commit()
 
@@ -75,9 +76,11 @@ class TestBulkExport(object):
         assert_equal(len(pages[0]['discussion_thread']['posts']), 2)
 
         assert_equal(pages[1]['title'], 'Return of the Jedi')
-        assert_equal(pages[1]['text'], 'Star Wars Episode VI: Return of the Jedi')
+        assert_equal(pages[1]['text'],
+                     'Star Wars Episode VI: Return of the Jedi')
         assert_equal(len(pages[1]['discussion_thread']['posts']), 0)
 
         assert_equal(pages[2]['title'], 'The Empire Strikes Back')
-        assert_equal(pages[2]['text'], 'Star Wars Episode V: The Empire Strikes Back')
+        assert_equal(pages[2]['text'],
+                     'Star Wars Episode V: The Empire Strikes Back')
         assert_equal(len(pages[2]['discussion_thread']['posts']), 0)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/tests/test_wiki2markdown.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/tests/test_wiki2markdown.py b/ForgeWiki/forgewiki/tests/test_wiki2markdown.py
index ff5c662..f021742 100644
--- a/ForgeWiki/forgewiki/tests/test_wiki2markdown.py
+++ b/ForgeWiki/forgewiki/tests/test_wiki2markdown.py
@@ -172,23 +172,23 @@ class TestMySQLExtractor(object):
         with open(os.path.join(self.options.dump_dir, 'pages/1/discussion.json'), 'r') as f:
             page = json.load(f)
         assert page == {
-                        'text': 'Talk for page Test 1.',
-                        'username': 'test-user',
-                        'timestamp': 1}
+            'text': 'Talk for page Test 1.',
+            'username': 'test-user',
+            'timestamp': 1}
 
         with open(os.path.join(self.options.dump_dir, 'pages/2/discussion.json'), 'r') as f:
             page = json.load(f)
         assert page == {
-                        'text': 'Talk for page Test 2.',
-                        'timestamp': 1,
-                        'username': 'test-user'}
+            'text': 'Talk for page Test 2.',
+            'timestamp': 1,
+            'username': 'test-user'}
 
         with open(os.path.join(self.options.dump_dir, 'pages/3/discussion.json'), 'r') as f:
             page = json.load(f)
         assert page == {
-                        'text': 'Talk for page Test 3.',
-                        'timestamp': 1,
-                        'username': 'test-user'}
+            'text': 'Talk for page Test 3.',
+            'timestamp': 1,
+            'username': 'test-user'}
 
 
 class TestMediawikiLoader(object):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/tests/test_wiki_roles.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/tests/test_wiki_roles.py b/ForgeWiki/forgewiki/tests/test_wiki_roles.py
index 8971ede..64b5f03 100644
--- a/ForgeWiki/forgewiki/tests/test_wiki_roles.py
+++ b/ForgeWiki/forgewiki/tests/test_wiki_roles.py
@@ -24,19 +24,23 @@ from allura import model as M
 from allura.lib import security
 from allura.tests import decorators as td
 
+
 def setUp():
     setup_basic_test()
     setup_with_tools()
 
+
 @td.with_wiki
 def setup_with_tools():
     setup_global_objects()
     g.set_app('wiki')
 
+
 def test_role_assignments():
     admin = M.User.by_username('test-admin')
     user = M.User.by_username('test-user')
     anon = M.User.anonymous()
+
     def check_access(perm):
         pred = security.has_access(c.app, perm)
         return pred(user=admin), pred(user=user), pred(user=anon)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/widgets/wiki.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/widgets/wiki.py b/ForgeWiki/forgewiki/widgets/wiki.py
index 0cd174e..7dc8b56 100644
--- a/ForgeWiki/forgewiki/widgets/wiki.py
+++ b/ForgeWiki/forgewiki/widgets/wiki.py
@@ -18,14 +18,16 @@
 import ew.jinja2_ew as ew
 from allura.lib.widgets import form_fields as ffw
 
+
 class CreatePageWidget(ffw.Lightbox):
 
     def resources(self):
-        for r in super(CreatePageWidget, self).resources(): yield r
+        for r in super(CreatePageWidget, self).resources():
+            yield r
         yield ew.JSScript('''$(function () {
             $('#lightbox_create_wiki_page form').submit(function(){
                 location.href = $('#sidebar a.add_wiki_page').attr('href') +
                     encodeURIComponent($('input[name=name]', $(this)).val().replace('/', '-')) + '/edit';
                 return false;
             });
-        });''');
+        });''')

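The widget hunk above only unrolls a one-liner, but the underlying pattern is worth naming: the subclass generator first re-yields everything from its parent's resources() and then contributes its own. A tiny sketch with illustrative class names (on Python 3 the loop could be written as yield from):

    class Base(object):
        def resources(self):
            yield 'base.css'

    class Child(Base):
        def resources(self):
            # Re-yield the parent's resources before adding our own.
            for r in super(Child, self).resources():
                yield r
            yield 'child.js'

    assert list(Child().resources()) == ['base.css', 'child.js']
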
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/forgewiki/wiki_main.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/forgewiki/wiki_main.py b/ForgeWiki/forgewiki/wiki_main.py
index e2112b6..0244f73 100644
--- a/ForgeWiki/forgewiki/wiki_main.py
+++ b/ForgeWiki/forgewiki/wiki_main.py
@@ -57,10 +57,11 @@ log = logging.getLogger(__name__)
 
 
 class W:
-    thread=w.Thread(
+    thread = w.Thread(
         page=None, limit=None, page_size=None, count=None,
         style='linear')
-    create_page_lightbox = CreatePageWidget(name='create_wiki_page', trigger='#sidebar a.add_wiki_page')
+    create_page_lightbox = CreatePageWidget(
+        name='create_wiki_page', trigger='#sidebar a.add_wiki_page')
     markdown_editor = ffw.MarkdownEdit()
     label_edit = ffw.LabelEdit()
     attachment_add = ffw.AttachmentAdd()
@@ -71,17 +72,19 @@ class W:
     page_size = ffw.PageSize()
     search_results = SearchResults()
     help_modal = SearchHelp()
-    icons={
-        24:'images/wiki_24.png',
-        32:'images/wiki_32.png',
-        48:'images/wiki_48.png'
+    icons = {
+        24: 'images/wiki_24.png',
+        32: 'images/wiki_32.png',
+        48: 'images/wiki_48.png'
     }
 
+
 class ForgeWikiApp(Application):
+
     '''This is the Wiki app for PyForge'''
     __version__ = version.__version__
-    permissions = [ 'configure', 'read', 'create', 'edit', 'delete',
-                    'unmoderated_post', 'post', 'moderate', 'admin']
+    permissions = ['configure', 'read', 'create', 'edit', 'delete',
+                   'unmoderated_post', 'post', 'moderate', 'admin']
     permissions_desc = {
         'read': 'View wiki pages.',
         'create': 'Create wiki pages.',
@@ -89,21 +92,21 @@ class ForgeWikiApp(Application):
         'delete': 'Delete wiki pages.',
         'admin': 'Set permissions. Configure options. Set wiki home page.',
     }
-    searchable=True
-    exportable=True
-    tool_label='Wiki'
-    tool_description="""
+    searchable = True
+    exportable = True
+    tool_label = 'Wiki'
+    tool_description = """
         Documentation is key to your project and the wiki tool
         helps make it easy for anyone to contribute.
     """
-    default_mount_label='Wiki'
-    default_mount_point='wiki'
-    ordinal=5
+    default_mount_label = 'Wiki'
+    default_mount_point = 'wiki'
+    ordinal = 5
     default_root_page_name = u'Home'
-    icons={
-        24:'images/wiki_24.png',
-        32:'images/wiki_32.png',
-        48:'images/wiki_48.png'
+    icons = {
+        24: 'images/wiki_24.png',
+        32: 'images/wiki_32.png',
+        48: 'images/wiki_48.png'
     }
 
     def __init__(self, project, config):
@@ -134,12 +137,14 @@ class ForgeWikiApp(Application):
             else:
                 page_name = self.default_root_page_name
             return page_name
+
         def fset(self, new_root_page_name):
             globals = WM.Globals.query.get(app_config_id=self.config._id)
             if globals is not None:
                 globals.root = new_root_page_name
             elif new_root_page_name != self.default_root_page_name:
-                globals = WM.Globals(app_config_id=self.config._id, root=new_root_page_name)
+                globals = WM.Globals(
+                    app_config_id=self.config._id, root=new_root_page_name)
             if globals is not None:
                 session(globals).flush(globals)
 
@@ -147,6 +152,7 @@ class ForgeWikiApp(Application):
     def show_discussion():
         def fget(self):
             return self.config.options.get('show_discussion', True)
+
         def fset(self, show):
             self.config.options['show_discussion'] = bool(show)
 
@@ -154,6 +160,7 @@ class ForgeWikiApp(Application):
     def show_left_bar():
         def fget(self):
             return self.config.options.get('show_left_bar', True)
+
         def fset(self, show):
             self.config.options['show_left_bar'] = bool(show)
 
@@ -161,6 +168,7 @@ class ForgeWikiApp(Application):
     def show_right_bar():
         def fget(self):
             return self.config.options.get('show_right_bar', True)
+
         def fset(self, show):
             self.config.options['show_right_bar'] = bool(show)
 
@@ -168,9 +176,9 @@ class ForgeWikiApp(Application):
         '''Apps should provide their entries to be added to the main nav
         :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
         '''
-        return [ SitemapEntry(
-                self.config.options.mount_label,
-                '.')]
+        return [SitemapEntry(
+            self.config.options.mount_label,
+            '.')]
 
     @property
     @h.exceptionless([], log)
@@ -180,10 +188,10 @@ class ForgeWikiApp(Application):
             pages = [
                 SitemapEntry(p.title, p.url())
                 for p in WM.Page.query.find(dict(
-                        app_config_id=self.config._id,
-                        deleted=False)) ]
+                    app_config_id=self.config._id,
+                    deleted=False))]
             return [
-                SitemapEntry(menu_id, '.')[SitemapEntry('Pages')[pages]] ]
+                SitemapEntry(menu_id, '.')[SitemapEntry('Pages')[pages]]]
 
     def create_common_wiki_menu(self,
                                 has_create_access,
@@ -193,42 +201,46 @@ class ForgeWikiApp(Application):
         links = []
         if has_create_access:
             links += [SitemapEntry('Create Page', create_page_url,
-                                    ui_icon=g.icons['plus'],
-                                    className=create_page_class)]
+                                   ui_icon=g.icons['plus'],
+                                   className=create_page_class)]
         if not admin_menu:
             links += [SitemapEntry(''),
-                SitemapEntry('Wiki Home', self.url, className='wiki_home')]
+                      SitemapEntry('Wiki Home', self.url, className='wiki_home')]
         links += [SitemapEntry('Browse Pages', self.url + 'browse_pages/'),
                   SitemapEntry('Browse Labels', self.url + 'browse_tags/')]
         discussion = c.app.config.discussion
-        pending_mod_count = M.Post.query.find({'discussion_id':discussion._id, 'status':'pending'}).count() if discussion else 0
+        pending_mod_count = M.Post.query.find(
+            {'discussion_id': discussion._id, 'status': 'pending'}).count() if discussion else 0
         if pending_mod_count and h.has_access(discussion, 'moderate')():
-            links.append(SitemapEntry('Moderate', discussion.url() + 'moderate', ui_icon=g.icons['pencil'],
-                small = pending_mod_count))
+            links.append(
+                SitemapEntry(
+                    'Moderate', discussion.url() + 'moderate', ui_icon=g.icons['pencil'],
+                    small=pending_mod_count))
         if not admin_menu:
             links += [SitemapEntry(''),
-                SitemapEntry('Formatting Help',self.url+'markdown_syntax/')]
+                      SitemapEntry('Formatting Help', self.url + 'markdown_syntax/')]
         return links
 
     def admin_menu(self):
         admin_url = c.project.url() + \
-                    'admin/' + \
-                    self.config.options.mount_point + '/'
+            'admin/' + \
+            self.config.options.mount_point + '/'
         links = [SitemapEntry('Set Home',
                               admin_url + 'home',
                               className='admin_modal')]
 
         if not self.show_left_bar:
             links += self.create_common_wiki_menu(True,
-                        admin_url + 'create_wiki_page',
-                        'admin_modal', admin_menu=True)
+                                                  admin_url +
+                                                  'create_wiki_page',
+                                                  'admin_modal', admin_menu=True)
         links += super(ForgeWikiApp, self).admin_menu(force_options=True)
 
         return links
 
     @h.exceptionless([], log)
     def sidebar_menu(self):
-        return self.create_common_wiki_menu(has_access(self, 'create'),c.app.url,'add_wiki_page')
+        return self.create_common_wiki_menu(has_access(self, 'create'), c.app.url, 'add_wiki_page')
 
     def install(self, project):
         'Set up any default permissions and roles here'
@@ -250,13 +262,14 @@ class ForgeWikiApp(Application):
             M.ACE.allow(role_developer, 'moderate'),
             M.ACE.allow(role_admin, 'configure'),
             M.ACE.allow(role_admin, 'admin'),
-            ]
+        ]
         root_page_name = self.default_root_page_name
         WM.Globals(app_config_id=c.app.config._id, root=root_page_name)
         self.upsert_root(root_page_name)
 
     def upsert_root(self, new_root):
-        p = WM.Page.query.get(app_config_id=self.config._id, title=new_root, deleted=False)
+        p = WM.Page.query.get(app_config_id=self.config._id,
+                              title=new_root, deleted=False)
         if p is None:
             with h.push_config(c, app=self):
                 p = WM.Page.upsert(new_root)
@@ -273,7 +286,6 @@ The wiki uses [Markdown](%s) syntax.
 """ % url
                 p.commit()
 
-
     def uninstall(self, project):
         "Remove all the tool's artifacts from the database"
         WM.WikiAttachment.query.remove(dict(app_config_id=self.config._id))
@@ -305,7 +317,7 @@ class RootController(BaseController, DispatchIndex, FeedController):
     @with_trailing_slash
     @expose()
     def index(self, **kw):
-        redirect(h.really_unicode(c.app.root_page_name).encode('utf-8')+'/')
+        redirect(h.really_unicode(c.app.root_page_name).encode('utf-8') + '/')
 
     @expose()
     def _lookup(self, pname, *remainder):
@@ -376,10 +388,11 @@ class RootController(BaseController, DispatchIndex, FeedController):
                 else:
                     pages.append(p)
         if sort == 'recent':
-            pages.sort(reverse=True, key=lambda x:(x['updated']))
+            pages.sort(reverse=True, key=lambda x: (x['updated']))
             pages = pages + uv_pages
-        return dict(pages=pages, can_delete=can_delete, show_deleted=show_deleted,
-                    limit=limit, count=count, page=pagenum)
+        return dict(
+            pages=pages, can_delete=can_delete, show_deleted=show_deleted,
+            limit=limit, count=count, page=pagenum)
 
     @with_trailing_slash
     @expose('jinja:forgewiki:templates/wiki/browse_tags.html')
@@ -451,7 +464,8 @@ class PageController(BaseController, FeedController):
             attachments=[])
 
     def get_version(self, version):
-        if not version: return self.page
+        if not version:
+            return self.page
         try:
             return self.page.get_version(version)
         except (ValueError, IndexError):
@@ -473,7 +487,7 @@ class PageController(BaseController, FeedController):
                    limit=validators.Int(if_empty=25, if_invalid=25)))
     def index(self, version=None, page=0, limit=25, **kw):
         if not self.page:
-            redirect(c.app.url+h.urlquote(self.title)+'/edit')
+            redirect(c.app.url + h.urlquote(self.title) + '/edit')
         c.thread = W.thread
         c.attachment_list = W.attachment_list
         c.subscribe_form = W.page_subscribe_form
@@ -481,14 +495,18 @@ class PageController(BaseController, FeedController):
         limit, pagenum = h.paging_sanitizer(limit, page, post_count)
         page = self.get_version(version)
         if page is None:
-            if version: redirect('.?version=%d' % (version-1))
-            else: redirect('.')
+            if version:
+                redirect('.?version=%d' % (version - 1))
+            else:
+                redirect('.')
         elif 'all' not in page.viewable_by and c.user.username not in page.viewable_by:
             raise exc.HTTPForbidden(detail="You may not view this page.")
         cur = page.version
-        if cur > 1: prev = cur-1
-        else: prev = None
-        next = cur+1
+        if cur > 1:
+            prev = cur - 1
+        else:
+            prev = None
+        next = cur + 1
         hide_left_bar = not (c.app.show_left_bar)
         return dict(
             page=page,
@@ -513,7 +531,7 @@ class PageController(BaseController, FeedController):
         c.label_edit = W.label_edit
         hide_left_bar = not c.app.show_left_bar
         return dict(page=page, page_exists=page_exists,
-            hide_left_bar=hide_left_bar)
+                    hide_left_bar=hide_left_bar)
 
     @without_trailing_slash
     @expose('json')
@@ -521,7 +539,7 @@ class PageController(BaseController, FeedController):
     def delete(self):
         require_access(self.page, 'delete')
         self.page.delete()
-        return dict(location='../'+self.page.title+'/?deleted=True')
+        return dict(location='../' + self.page.title + '/?deleted=True')
 
     @without_trailing_slash
     @expose('json')
@@ -552,8 +570,8 @@ class PageController(BaseController, FeedController):
     @without_trailing_slash
     @expose('jinja:forgewiki:templates/wiki/page_diff.html')
     @validate(dict(
-            v1=validators.Int(),
-            v2=validators.Int()))
+        v1=validators.Int(),
+        v2=validators.Int()))
     def diff(self, v1, v2, **kw):
         if not self.page:
             raise exc.HTTPNotFound
@@ -604,10 +622,10 @@ class PageController(BaseController, FeedController):
     def update(self, title=None, text=None,
                labels=None,
                viewable_by=None,
-               new_viewable_by=None,**kw):
+               new_viewable_by=None, **kw):
         activity_verb = 'created'
         if not title:
-            flash('You must provide a title for the page.','error')
+            flash('You must provide a title for the page.', 'error')
             redirect('edit')
         title = title.replace('/', '-')
         if not self.page:
@@ -619,12 +637,14 @@ class PageController(BaseController, FeedController):
             activity_verb = 'modified'
         name_conflict = None
         if self.page.title != title:
-            name_conflict = WM.Page.query.find(dict(app_config_id=c.app.config._id, title=title, deleted=False)).first()
+            name_conflict = WM.Page.query.find(
+                dict(app_config_id=c.app.config._id, title=title, deleted=False)).first()
             if name_conflict:
                 flash('There is already a page named "%s".' % title, 'error')
             else:
                 if self.page.title == c.app.root_page_name:
-                    WM.Globals.query.get(app_config_id=c.app.config._id).root = title
+                    WM.Globals.query.get(
+                        app_config_id=c.app.config._id).root = title
                 self.page.title = title
                 activity_verb = 'renamed'
         self.page.text = text
@@ -633,9 +653,10 @@ class PageController(BaseController, FeedController):
         else:
             self.page.labels = []
         self.page.commit()
-        g.spam_checker.check(text, artifact=self.page, user=c.user, content_type='wiki')
+        g.spam_checker.check(text, artifact=self.page,
+                             user=c.user, content_type='wiki')
         g.director.create_activity(c.user, activity_verb, self.page,
-                target=c.project)
+                                   target=c.project)
         if new_viewable_by:
             if new_viewable_by == 'all':
                 self.page.viewable_by.append('all')
@@ -652,7 +673,8 @@ class PageController(BaseController, FeedController):
                         user = M.User.by_username(str(u['id']))
                         if user:
                             self.page.viewable_by.remove(user.username)
-        redirect('../' + h.really_unicode(self.page.title).encode('utf-8') + ('/' if not name_conflict else '/edit'))
+        redirect('../' + h.really_unicode(self.page.title)
+                 .encode('utf-8') + ('/' if not name_conflict else '/edit'))
 
     @without_trailing_slash
     @expose()
@@ -675,14 +697,16 @@ class PageController(BaseController, FeedController):
             self.page.unsubscribe()
         redirect(request.referer)
 
+
 class WikiAttachmentController(ac.AttachmentController):
     AttachmentClass = WM.WikiAttachment
     edit_perm = 'edit'
 
+
 class WikiAttachmentsController(ac.AttachmentsController):
     AttachmentControllerClass = WikiAttachmentController
 
-MARKDOWN_EXAMPLE='''
+MARKDOWN_EXAMPLE = '''
 # First-level heading
 
 Some *emphasized* and **strong** text
@@ -691,6 +715,7 @@ Some *emphasized* and **strong** text
 
 '''
 
+
 class RootRestController(BaseController):
 
     def __init__(self):
@@ -702,7 +727,8 @@ class RootRestController(BaseController):
     @expose('json:')
     def index(self, **kw):
         page_titles = []
-        pages = WM.Page.query.find(dict(app_config_id=c.app.config._id, deleted=False))
+        pages = WM.Page.query.find(
+            dict(app_config_id=c.app.config._id, deleted=False))
         for page in pages:
             if has_access(page, 'read')():
                 page_titles.append(page.title)
@@ -786,8 +812,10 @@ class WikiAdminController(DefaultAdminController):
         self.app.root_page_name = new_home
         self.app.upsert_root(new_home)
         flash('Home updated')
-        mount_base = c.project.url()+self.app.config.options.mount_point+'/'
-        url = h.really_unicode(mount_base).encode('utf-8') + h.really_unicode(new_home).encode('utf-8')+'/'
+        mount_base = c.project.url() + \
+            self.app.config.options.mount_point + '/'
+        url = h.really_unicode(mount_base).encode('utf-8') + \
+            h.really_unicode(new_home).encode('utf-8') + '/'
         redirect(url)
 
     @without_trailing_slash
@@ -798,4 +826,4 @@ class WikiAdminController(DefaultAdminController):
         self.app.show_left_bar = show_left_bar
         self.app.show_right_bar = show_right_bar
         flash('Wiki options updated')
-        redirect(c.project.url()+'admin/tools')
+        redirect(c.project.url() + 'admin/tools')

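The show_discussion/show_left_bar/show_right_bar blocks in ForgeWikiApp above pair an fget with an fset inside a builder function; the cleanup only inserts the blank line PEP 8 expects before the nested fset. For readers unfamiliar with that idiom, here is a minimal dict-backed sketch using the plain property() built-in (not the decorator Allura actually uses, which sits outside the quoted hunks):

    class WikiOptions(object):
        """Dict-backed stand-in for an app config's options."""

        def __init__(self):
            self.options = {}

        def _get_show_discussion(self):
            return self.options.get('show_discussion', True)

        def _set_show_discussion(self, show):
            self.options['show_discussion'] = bool(show)

        show_discussion = property(_get_show_discussion, _set_show_discussion)

    w = WikiOptions()
    assert w.show_discussion is True
    w.show_discussion = 0
    assert w.show_discussion is False
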
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeWiki/setup.py
----------------------------------------------------------------------
diff --git a/ForgeWiki/setup.py b/ForgeWiki/setup.py
index dce7390..6bc7944 100644
--- a/ForgeWiki/setup.py
+++ b/ForgeWiki/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgewiki.version import __version__
 
@@ -25,7 +26,8 @@ setup(name='ForgeWiki',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/NoWarnings/nowarnings.py
----------------------------------------------------------------------
diff --git a/NoWarnings/nowarnings.py b/NoWarnings/nowarnings.py
index 27671b7..e81d745 100644
--- a/NoWarnings/nowarnings.py
+++ b/NoWarnings/nowarnings.py
@@ -18,6 +18,7 @@
 import warnings
 from nose.plugins import Plugin
 
+
 class NoWarnings(Plugin):
 
     def beforeTest(self, result):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/fuse/accessfs.py
----------------------------------------------------------------------
diff --git a/fuse/accessfs.py b/fuse/accessfs.py
index 44f1cc6..56808f8 100644
--- a/fuse/accessfs.py
+++ b/fuse/accessfs.py
@@ -40,6 +40,7 @@ logging.basicConfig()
 fuse.fuse_python_api = (0, 2)
 fuse.feature_assert('stateful_files', 'has_init')
 
+
 class check_access(object):
 
     def __init__(self, *args, **kwargs):
@@ -58,10 +59,12 @@ class check_access(object):
         return wrapper
 
     def check(self, inst, path, mode):
-        if mode is None: return
+        if mode is None:
+            return
         rc = inst.access(path, mode)
         if rc:
-            raise OSError(errno.EPERM, path,'Permission denied')
+            raise OSError(errno.EPERM, path, 'Permission denied')
+
 
 class check_and_translate(check_access):
 
@@ -69,6 +72,7 @@ class check_and_translate(check_access):
         super(check_and_translate, self).check(inst, path, mode)
         return inst._to_global(path)
 
+
 def flag2mode(flags):
     md = {os.O_RDONLY: 'r', os.O_WRONLY: 'w', os.O_RDWR: 'w+'}
     m = md[flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)]
@@ -76,6 +80,7 @@ def flag2mode(flags):
         m = m.replace('w', 'a', 1)
     return m
 
+
 class AccessFS(fuse.Fuse):
 
     def __init__(self, *args, **kw):
@@ -148,7 +153,8 @@ class AccessFS(fuse.Fuse):
         os.utime("." + path, times)
 
     def access(self, path, mode):
-        if mode & (os.R_OK|os.W_OK) == 0: return
+        if mode & (os.R_OK | os.W_OK) == 0:
+            return
         ctx = fuse.FuseGetContext()
         entry = self.perm_cache.get(ctx['uid'], path)
         if (mode & entry) != mode:
@@ -157,7 +163,7 @@ class AccessFS(fuse.Fuse):
     def _assert_access(self, path, mode):
         rc = self.access(path, mode)
         if rc:
-            raise OSError(errno.EPERM, path,'Permission denied')
+            raise OSError(errno.EPERM, path, 'Permission denied')
 
     def statfs(self):
         """
@@ -191,18 +197,19 @@ class AccessFS(fuse.Fuse):
 
     def make_file_class(self):
         class FSAccessFile(AccessFile):
-            filesystem=self
+            filesystem = self
         return FSAccessFile
 
+
 class AccessFile(fuse.FuseFileInfo):
-    direct_io=False
+    direct_io = False
     keep_cache = False
     needs_write = (
         os.O_WRONLY
         | os.O_RDWR
         | os.O_APPEND
         | os.O_CREAT
-        | os.O_TRUNC )
+        | os.O_TRUNC)
 
     def __init__(self, path, flags, *mode):
         access_mode = os.R_OK
@@ -272,9 +279,9 @@ class AccessFile(fuse.FuseFileInfo):
 
         # Convert fcntl-ish lock parameters to Python's weird
         # lockf(3)/flock(2) medley locking API...
-        op = { fcntl.F_UNLCK : fcntl.LOCK_UN,
-               fcntl.F_RDLCK : fcntl.LOCK_SH,
-               fcntl.F_WRLCK : fcntl.LOCK_EX }[kw['l_type']]
+        op = {fcntl.F_UNLCK: fcntl.LOCK_UN,
+              fcntl.F_RDLCK: fcntl.LOCK_SH,
+              fcntl.F_WRLCK: fcntl.LOCK_EX}[kw['l_type']]
         if cmd == fcntl.F_GETLK:
             return -errno.EOPNOTSUPP
         elif cmd == fcntl.F_SETLK:
@@ -287,6 +294,7 @@ class AccessFile(fuse.FuseFileInfo):
 
         fcntl.lockf(self.fd, op, kw['l_start'], kw['l_len'])
 
+
 class PermissionCache(object):
 
     def __init__(self, uid_cache, host, timeout=30, size=1024):
@@ -305,7 +313,8 @@ class PermissionCache(object):
             if elapsed > self._timeout:
                 print 'Timeout!', elapsed
                 uname = self._uid_cache.get(uid)
-                entry = self._refresh_result(uid, path, self._api_lookup(uname, path))
+                entry = self._refresh_result(
+                    uid, path, self._api_lookup(uname, path))
                 return entry
             return entry
         except KeyError:
@@ -327,20 +336,22 @@ class PermissionCache(object):
             self._host
             + '/auth/repo_permissions?'
             + urllib.urlencode(dict(
-                    repo_path=path,
-                    username=uname)))
+                repo_path=path,
+                username=uname)))
         print 'Checking access for %s at %s (%s)' % (uname, url, path)
         fp = urllib2.urlopen(url)
         result = json.load(fp)
         print result
         entry = 0
-        if result['allow_read']: entry |= os.R_OK
-        if result['allow_write']: entry |= os.W_OK
+        if result['allow_read']:
+            entry |= os.R_OK
+        if result['allow_write']:
+            entry |= os.W_OK
         return entry
 
     def _refresh_result(self, uid, path, value):
         with self._lock:
-            if (uid,path) in self._data:
+            if (uid, path) in self._data:
                 self._data[uid, path] = (value, time.time())
             else:
                 if len(self._data) >= self._size:
@@ -362,11 +373,12 @@ class PermissionCache(object):
         '''Convert paths from the form /SCM/neighborhood/project/a/b/c to
         /SCM/project.neighborhood/a/b/c
         '''
-        parts = [ p for p in path.split(os.path.sep) if p ]
+        parts = [p for p in path.split(os.path.sep) if p]
         scm, nbhd, proj, rest = parts[0], parts[1], parts[2], parts[3:]
-        parts = ['/SCM/%s.%s' % (proj, nbhd) ] + rest
+        parts = ['/SCM/%s.%s' % (proj, nbhd)] + rest
         return '/'.join(parts)
 
+
 class UnixUsernameCache(object):
 
     def __init__(self):
@@ -381,6 +393,7 @@ class UnixUsernameCache(object):
         self._cache[uid] = uname
         return uname
 
+
 def main():
 
     usage = """
@@ -389,8 +402,8 @@ Userspace nullfs-alike: mirror the filesystem tree from some point on.
 """ + fuse.Fuse.fusage
 
     server = AccessFS(version="%prog " + fuse.__version__,
-                 usage=usage,
-                 dash_s_do='setsingle')
+                      usage=usage,
+                      dash_s_do='setsingle')
 
     server.parser.add_option(mountopt="root", metavar="PATH", default='/',
                              help="mirror filesystem from under PATH [default: %default]")

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/run_tests
----------------------------------------------------------------------
diff --git a/run_tests b/run_tests
index 4180aa9..7ae6547 100755
--- a/run_tests
+++ b/run_tests
@@ -33,13 +33,14 @@ CONCURRENT_TESTS = (CPUS // CONCURRENT_SUITES) or 1
 PROC_TIMEOUT = 120
 
 ALT_PKG_PATHS = {
-        'Allura': 'allura/tests/',
-        }
+    'Allura': 'allura/tests/',
+}
 
 NOT_MULTIPROC_SAFE = [
-        'ForgeGit',
-        'ForgeSVN',
-        ]
+    'ForgeGit',
+    'ForgeSVN',
+]
+
 
 def run_one(cmd, **popen_kwargs):
     print '{} running {} {}'.format(threading.current_thread(), cmd, popen_kwargs)
@@ -114,13 +115,15 @@ def run_tests_in_parallel(options, nosetests_args):
 
     def get_pkg_path(pkg):
         return ALT_PKG_PATHS.get(pkg, '')
+
     def get_multiproc_args(pkg):
         if not use_multiproc:
             return ''
         return ('--processes={procs_per_suite} --process-timeout={proc_timeout}'.format(
-                    procs_per_suite=options.concurrent_tests,
-                    proc_timeout=PROC_TIMEOUT)
-                if pkg not in NOT_MULTIPROC_SAFE else '')
+            procs_per_suite=options.concurrent_tests,
+            proc_timeout=PROC_TIMEOUT)
+            if pkg not in NOT_MULTIPROC_SAFE else '')
+
     def get_concurrent_suites():
         if use_multiproc or '-n' in sys.argv:
             return options.concurrent_suites
@@ -141,8 +144,9 @@ def run_tests_in_parallel(options, nosetests_args):
 
 
 def parse_args():
-    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
-                                     epilog=textwrap.dedent('''
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog=textwrap.dedent('''
                                         All additional arguments are passed along to nosetests
                                           (e.g. -v --with-coverage)
                                         Note: --cover-package will be set automatically to the appropriate value'''))
@@ -152,9 +156,10 @@ def parse_args():
     parser.add_argument('-m', help='Number of tests to run concurrently in separate '
                                    'processes, per suite. Default: # CPUs / # concurrent suites',
                         dest='concurrent_tests', type=int, default=CONCURRENT_TESTS)
-    parser.add_argument('-p', help='List of packages to run tests on. Default: all',
-                        dest='packages', choices=get_packages(), default=get_packages(),
-                        nargs='+')
+    parser.add_argument(
+        '-p', help='List of packages to run tests on. Default: all',
+        dest='packages', choices=get_packages(), default=get_packages(),
+        nargs='+')
     return parser.parse_known_args()
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/add_user_to_group.py
----------------------------------------------------------------------
diff --git a/scripts/add_user_to_group.py b/scripts/add_user_to_group.py
index 1047999..aa2fe1e 100644
--- a/scripts/add_user_to_group.py
+++ b/scripts/add_user_to_group.py
@@ -39,12 +39,13 @@ Example:
 from allura import model as M
 from ming.orm import ThreadLocalORMSession
 
+
 def main(options):
     nbhd = M.Neighborhood.query.get(url_prefix=options.nbhd)
     if not nbhd:
         return "Couldn't find neighborhood with url_prefix '%s'" % options.nbhd
     project = M.Project.query.get(neighborhood_id=nbhd._id,
-            shortname=options.project)
+                                  shortname=options.project)
     if not project:
         return "Couldn't find project with shortname '%s'" % options.project
     user = M.User.by_username(options.user)
@@ -53,7 +54,8 @@ def main(options):
     project_role = M.ProjectRole.by_name(options.group, project=project)
     if not project_role:
         return "Couldn't find group (ProjectRole) with name '%s'" % options.group
-    user_roles = M.ProjectRole.by_user(user, project=project, upsert=True).roles
+    user_roles = M.ProjectRole.by_user(
+        user, project=project, upsert=True).roles
     if project_role._id not in user_roles:
         user_roles.append(project_role._id)
     ThreadLocalORMSession.flush_all()
@@ -62,14 +64,14 @@ def main(options):
 def parse_options():
     import argparse
     parser = argparse.ArgumentParser(description=__doc__,
-            formatter_class=argparse.RawDescriptionHelpFormatter)
+                                     formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('user', help='Username')
     parser.add_argument('group', help='Group (ProjectRole) name, e.g. Admin, '
-            'Member, Developer, etc.')
+                        'Member, Developer, etc.')
     parser.add_argument('project', nargs='?', default='--init--',
-            help='Project shortname. Default is --init--.')
+                        help='Project shortname. Default is --init--.')
     parser.add_argument('--nbhd', default='/p/', help='Neighborhood '
-            'url_prefix. Default is /p/.')
+                        'url_prefix. Default is /p/.')
     return parser.parse_args()
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/allura_import.py
----------------------------------------------------------------------
diff --git a/scripts/allura_import.py b/scripts/allura_import.py
index b05d524..0f2f715 100644
--- a/scripts/allura_import.py
+++ b/scripts/allura_import.py
@@ -45,22 +45,26 @@ def main():
                 if not isinstance(k, basestring) or not isinstance(v, basestring):
                     raise ValueError
         except ValueError:
-            optparser.error('--user-map should specify JSON file with format {"original_user": "sf_user", ...}')
+            optparser.error(
+                '--user-map should specify JSON file with format {"original_user": "sf_user", ...}')
         finally:
             f.close()
 
     import_options['user_map'] = user_map
 
-    cli = AlluraImportApiClient(options.base_url, options.api_key, options.secret_key, options.verbose)
+    cli = AlluraImportApiClient(
+        options.base_url, options.api_key, options.secret_key, options.verbose)
     doc_txt = open(args[0]).read()
 
     # import the tracker (if any)
     if options.tracker:
-        import_tracker(cli, options.project, options.tracker, import_options, options, doc_txt,
-                       validate=options.validate,
-                       verbose=options.verbose)
+        import_tracker(
+            cli, options.project, options.tracker, import_options, options, doc_txt,
+            validate=options.validate,
+            verbose=options.verbose)
     elif options.forum:
-        import_forum(cli, options.project, options.forum, user_map, doc_txt, validate=options.validate)
+        import_forum(cli, options.project, options.forum,
+                     user_map, doc_txt, validate=options.validate)
     elif options.wiki:
         import_wiki(cli, options.project, options.wiki, options, doc_txt)
 
@@ -79,18 +83,30 @@ def parse_options():
     optparser = OptionParser(usage='''%prog [options] <JSON dump>
 
 Import project data dump in JSON format into an Allura project.''')
-    optparser.add_option('-a', '--api-ticket', dest='api_key', help='API ticket')
-    optparser.add_option('-s', '--secret-key', dest='secret_key', help='Secret key')
-    optparser.add_option('-p', '--project', dest='project', help='Project to import to')
-    optparser.add_option('-t', '--tracker', dest='tracker', help='Tracker to import to')
-    optparser.add_option('-f', '--forum', dest='forum', help='Forum tool to import to')
-    optparser.add_option('-w', '--wiki', dest='wiki', help='Wiki tool to import to')
-    optparser.add_option('-u', '--base-url', dest='base_url', default='https://sourceforge.net', help='Base Allura URL (%default)')
-    optparser.add_option('-o', dest='import_opts', default=[], action='append', help='Specify import option(s)', metavar='opt=val')
-    optparser.add_option('--user-map', dest='user_map_file', help='Map original users to SF.net users', metavar='JSON_FILE')
-    optparser.add_option('--validate', dest='validate', action='store_true', help='Validate import data')
-    optparser.add_option('-v', '--verbose', dest='verbose', action='store_true', help='Verbose operation')
-    optparser.add_option('-c', '--continue', dest='cont', action='store_true', help='Continue import into existing tracker')
+    optparser.add_option('-a', '--api-ticket',
+                         dest='api_key', help='API ticket')
+    optparser.add_option('-s', '--secret-key',
+                         dest='secret_key', help='Secret key')
+    optparser.add_option('-p', '--project', dest='project',
+                         help='Project to import to')
+    optparser.add_option('-t', '--tracker', dest='tracker',
+                         help='Tracker to import to')
+    optparser.add_option('-f', '--forum', dest='forum',
+                         help='Forum tool to import to')
+    optparser.add_option('-w', '--wiki', dest='wiki',
+                         help='Wiki tool to import to')
+    optparser.add_option('-u', '--base-url', dest='base_url',
+                         default='https://sourceforge.net', help='Base Allura URL (%default)')
+    optparser.add_option('-o', dest='import_opts',
+                         default=[], action='append', help='Specify import option(s)', metavar='opt=val')
+    optparser.add_option('--user-map', dest='user_map_file',
+                         help='Map original users to SF.net users', metavar='JSON_FILE')
+    optparser.add_option('--validate', dest='validate',
+                         action='store_true', help='Validate import data')
+    optparser.add_option('-v', '--verbose', dest='verbose',
+                         action='store_true', help='Verbose operation')
+    optparser.add_option('-c', '--continue', dest='cont',
+                         action='store_true', help='Continue import into existing tracker')
     options, args = optparser.parse_args()
     if len(args) != 1:
         optparser.error("Wrong number of arguments")

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/changelog.py
----------------------------------------------------------------------
diff --git a/scripts/changelog.py b/scripts/changelog.py
index 2d6a40d..d138376 100755
--- a/scripts/changelog.py
+++ b/scripts/changelog.py
@@ -38,6 +38,7 @@ def main():
 def get_versions():
     return sys.argv[1], sys.argv[2], sys.argv[3]
 
+
 def get_tickets(from_ref, to_ref):
     repo = git.Repo('.')
     ticket_nums = set()
@@ -48,6 +49,7 @@ def get_tickets(from_ref, to_ref):
             ticket_nums.add(match.group(1))
     return list(ticket_nums)
 
+
 def get_ticket_summaries(tickets):
     summaries = {}
     r = requests.get(API_URL.format(' '.join(tickets)))
@@ -57,11 +59,12 @@ def get_ticket_summaries(tickets):
         summaries[ticket['ticket_num']] = ticket['summary']
     return summaries
 
+
 def print_changelog(version, summaries):
     print 'Version {version}  ({date})\n'.format(**{
-            'version': version,
-            'date': datetime.utcnow().strftime('%B %Y'),
-        })
+        'version': version,
+        'date': datetime.utcnow().strftime('%B %Y'),
+    })
     for ticket in sorted(summaries.keys()):
         print " * [#{0}] {1}".format(ticket, summaries[ticket])
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/create-allura-sitemap.py
----------------------------------------------------------------------
diff --git a/scripts/create-allura-sitemap.py b/scripts/create-allura-sitemap.py
index 13222c1..5357df7 100644
--- a/scripts/create-allura-sitemap.py
+++ b/scripts/create-allura-sitemap.py
@@ -28,11 +28,13 @@ things that would make it faster, if we need/want to.
 2. Use multiprocessing to distribute the offsets to n subprocesses.
 """
 
-import os, sys
+import os
+import sys
 from datetime import datetime
 from jinja2 import Template
 
-import pylons, webob
+import pylons
+import webob
 from pylons import tmpl_context as c
 
 from allura import model as M
@@ -67,6 +69,7 @@ SITEMAP_TEMPLATE = """\
 </urlset>
 """
 
+
 def main(options, args):
     # This script will indirectly call app.sidebar_menu() for every app in
     # every project. Some of the sidebar_menu methods expect the
@@ -86,6 +89,7 @@ def main(options, args):
 
     now = datetime.utcnow().date()
     sitemap_content_template = Template(SITEMAP_TEMPLATE)
+
     def write_sitemap(urls, file_no):
         sitemap_content = sitemap_content_template.render(dict(
             now=now, locs=urls))
@@ -101,7 +105,7 @@ def main(options, args):
             c.project = p
             try:
                 locs += [BASE_URL + s.url if s.url[0] == '/' else s.url
-                        for s in p.sitemap(excluded_tools=['git', 'hg', 'svn'])]
+                         for s in p.sitemap(excluded_tools=['git', 'hg', 'svn'])]
             except Exception, e:
                 print "Error creating sitemap for project '%s': %s" %\
                     (p.shortname, e)
@@ -120,13 +124,15 @@ def main(options, args):
     if file_count:
         sitemap_index_vars = dict(
             now=now,
-            sitemaps = [
+            sitemaps=[
                 '%s/allura_sitemap/sitemap-%d.xml' % (BASE_URL, n)
                 for n in range(file_count)])
-        sitemap_index_content = Template(INDEX_TEMPLATE).render(sitemap_index_vars)
+        sitemap_index_content = Template(
+            INDEX_TEMPLATE).render(sitemap_index_vars)
         with open(os.path.join(output_path, 'sitemap.xml'), 'w') as f:
             f.write(sitemap_index_content)
 
+
 def parse_options():
     def validate(option, opt_str, value, parser):
         parser.values.urls_per_file = min(value, MAX_SITEMAP_URLS)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/git-hooks/for-the-remote-repo/update
----------------------------------------------------------------------
diff --git a/scripts/git-hooks/for-the-remote-repo/update b/scripts/git-hooks/for-the-remote-repo/update
index 15c1c21..b2dcc6b 100755
--- a/scripts/git-hooks/for-the-remote-repo/update
+++ b/scripts/git-hooks/for-the-remote-repo/update
@@ -17,7 +17,10 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-import os, re, sys, subprocess
+import os
+import re
+import sys
+import subprocess
 
 signoff = re.compile('^Signed-off-by: ', flags=re.MULTILINE)
 parent = re.compile('^parent ', flags=re.MULTILINE)
@@ -25,7 +28,8 @@ no_commit = '0' * 40
 
 
 def run(*args):
-    p = subprocess.Popen(list(args), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    p = subprocess.Popen(list(args), stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
     p.wait()
     return p.stdout.readlines()
 
@@ -45,16 +49,17 @@ def git_user():
 
 
 def unwrap_commit_ids(git_output):
-    return [ commit_id[:-1] for commit_id in git_output ]
+    return [commit_id[:-1] for commit_id in git_output]
 
 
 def all_commits_signed_off(from_rev, to_rev):
-    commits = unwrap_commit_ids(run('git', 'rev-list', '%s..%s' % (from_rev, to_rev)))
+    commits = unwrap_commit_ids(
+        run('git', 'rev-list', '%s..%s' % (from_rev, to_rev)))
     for commit in commits:
         raw_commit = ''.join(run('git', 'cat-file', '-p', commit))
         headers, body = raw_commit.split('\n\n', 1)
         num_parents = len(parent.findall(headers))
-        if num_parents<2 and not signoff.search(body):
+        if num_parents < 2 and not signoff.search(body):
             return False
     return True
 
@@ -66,31 +71,44 @@ def deny_update(message):
 
 def main():
     ref_name = sys.argv[1]  # the branch being updated, e.g., refs/heads/master
-    old_rev = sys.argv[2]   # the pre-update commit-id of that branch (or '0'*40 if we're creating the branch)
-    new_rev = sys.argv[3]   # the post-update commit-id of that branch (or '0'*40 if we're deleting the branch)
+    # the pre-update commit-id of that branch (or '0'*40 if we're creating the
+    # branch)
+    old_rev = sys.argv[2]
+    # the post-update commit-id of that branch (or '0'*40 if we're deleting
+    # the branch)
+    new_rev = sys.argv[3]
 
     (user_name, user_prefix) = git_user()
 
     if old_rev == no_commit:
         action = 'create'
-        merge_base = unwrap_commit_ids(run('git', 'merge-base', 'master', new_rev))[0]
-            # not ideal, since you probably branched off something more specific than master
+        merge_base = unwrap_commit_ids(
+            run('git', 'merge-base', 'master', new_rev))[0]
+            # not ideal, since you probably branched off something more
+            # specific than master
     elif new_rev == no_commit:
         action = 'destroy'
     else:
         action = 'update'
-        merge_base = unwrap_commit_ids(run('git', 'merge-base', old_rev, new_rev))[0]
+        merge_base = unwrap_commit_ids(
+            run('git', 'merge-base', old_rev, new_rev))[0]
 
     if ref_name.startswith('refs/heads/%s/' % user_prefix) or ref_name.startswith('refs/heads/ffa/') or user_name == 'wolf' or user_name == 'dbrondsema':
-        pass # no restrictions
+        pass  # no restrictions
     elif ref_name.startswith('refs/heads/'):
         substitutions = (user_name, ref_name, 'refs/heads/%s/*' % user_prefix)
         if action == 'create':
-            deny_update("You (%s) may not create '%s'; you have full rights over '%s'." % substitutions)
+            deny_update(
+                "You (%s) may not create '%s'; you have full rights over '%s'." %
+                substitutions)
         elif action == 'destroy':
-            deny_update("You (%s) may not destroy '%s'; you have full rights over '%s'." % substitutions)
+            deny_update(
+                "You (%s) may not destroy '%s'; you have full rights over '%s'." %
+                substitutions)
         elif old_rev != merge_base:
-            deny_update("You (%s) may not rewind or rebase '%s'; you have full rights over '%s'." % substitutions)
+            deny_update(
+                "You (%s) may not rewind or rebase '%s'; you have full rights over '%s'." %
+                substitutions)
 
     if ref_name.startswith('refs/heads/') and action != 'destroy' and not all_commits_signed_off(merge_base, new_rev):
         deny_update('Not all commits were signed-off.')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/git-hooks/for-your-local-repo/commit-msg
----------------------------------------------------------------------
diff --git a/scripts/git-hooks/for-your-local-repo/commit-msg b/scripts/git-hooks/for-your-local-repo/commit-msg
index e65d571..96d375b 100755
--- a/scripts/git-hooks/for-your-local-repo/commit-msg
+++ b/scripts/git-hooks/for-your-local-repo/commit-msg
@@ -17,15 +17,18 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-import re, sys
+import re
+import sys
 
 signoff = re.compile('^Signed-off-by: (.*)$', flags=re.MULTILINE)
 bug = re.compile('\[(?:.*:)?#\d+\]')
 
+
 def deny_commit(message):
     print message
     sys.exit(1)
 
+
 def main():
     # argv[1] is the name of the file holding the commit message.
     # It is _not_ a commit, it has no headers.  The first line is

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/git-mr
----------------------------------------------------------------------
diff --git a/scripts/git-mr b/scripts/git-mr
index 5da0491..3e4916e 100755
--- a/scripts/git-mr
+++ b/scripts/git-mr
@@ -38,7 +38,7 @@ usage = "git mr [-a|-r] [--grep PATTERN] [upstream]"
 
 def main():
     parser = argparse.ArgumentParser(prog='git mr', usage=usage)
-    parser.add_argument('-r', action='store_true', 
+    parser.add_argument('-r', action='store_true',
                         dest='examine_remote_branches', default=False,
                         help='examine remote branches')
     parser.add_argument('-a', action='store_true',
@@ -60,49 +60,52 @@ def main():
         merged_branches = git('branch', '--merged', args.upstream)
         unmerged_branches = git('branch', '--no-merged', args.upstream)
 
-    merged_branches = [ line[2:] for line in merged_branches ]
-    unmerged_branches = [ line[2:] for line in unmerged_branches ]
+    merged_branches = [line[2:] for line in merged_branches]
+    unmerged_branches = [line[2:] for line in unmerged_branches]
     really_unmerged_branches = []
 
     if args.grep:
         filter = re.compile(args.grep[0])
-        merged_branches = [ b for b in merged_branches if filter.search(b) ]
-        unmerged_branches = [ b for b in unmerged_branches if filter.search(b) ]
+        merged_branches = [b for b in merged_branches if filter.search(b)]
+        unmerged_branches = [b for b in unmerged_branches if filter.search(b)]
 
     if merged_branches:
         print('Branches contained by %s:' % args.upstream)
         for branch in merged_branches:
-            print('  '+colorize('green', branch))
+            print('  ' + colorize('green', branch))
 
     if unmerged_branches:
         header_printed = False
         for branch in unmerged_branches:
-            commits = ''.join(git('cherry', args.upstream, branch, strip_eol=False))
+            commits = ''.join(
+                git('cherry', args.upstream, branch, strip_eol=False))
             if commits.find('+') == -1:
                 if not header_printed:
                     print('Branches probably merged to %s:' % args.upstream)
                     header_printed = True
-                print('  '+colorize('green', branch))
+                print('  ' + colorize('green', branch))
             else:
-                really_unmerged_branches += [ branch ]
+                really_unmerged_branches += [branch]
 
         if really_unmerged_branches:
             print('Branches probably not merged to %s:' % args.upstream)
             for branch in really_unmerged_branches:
-                print('  '+colorize('red', branch))
+                print('  ' + colorize('red', branch))
+
 
 def git(*args, **kw):
-    if len(args)==1 and isinstance(args[0], basestring):
+    if len(args) == 1 and isinstance(args[0], basestring):
         argv = shlex.split(args[0])
     else:
         argv = list(args)
     if argv[0] != 'git':
         argv.insert(0, 'git')
-    p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    p = subprocess.Popen(argv, stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
     rc = p.wait()
     output = p.stdout.readlines()
     if kw.get('strip_eol', True):
-        output = [ line.rstrip('\n') for line in output ]
+        output = [line.rstrip('\n') for line in output]
     return output
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/scripts/import_trove_categories.py
----------------------------------------------------------------------
diff --git a/scripts/import_trove_categories.py b/scripts/import_trove_categories.py
index 86d2063..123a56c 100644
--- a/scripts/import_trove_categories.py
+++ b/scripts/import_trove_categories.py
@@ -28,15 +28,18 @@ from sfx.model import tables as T
 
 log = logging.getLogger(__name__)
 
+
 def main():
     sfx.middleware.configure_databases(h.config_with_prefix(config, 'sfx.'))
-    topic_trove = T.trove_cat.select(T.trove_cat.c.shortname=='topic').execute().fetchone()
+    topic_trove = T.trove_cat.select(
+        T.trove_cat.c.shortname == 'topic').execute().fetchone()
     M.ProjectCategory.query.remove()
     for t in T.trove_cat.select(
-        T.trove_cat.c.parent==topic_trove.trove_cat_id).execute():
-        parent = M.ProjectCategory(name=t.shortname, label=t.fullname, description=t.description)
+            T.trove_cat.c.parent == topic_trove.trove_cat_id).execute():
+        parent = M.ProjectCategory(
+            name=t.shortname, label=t.fullname, description=t.description)
         for tt in T.trove_cat.select(
-            T.trove_cat.c.parent==t.trove_cat_id).execute():
+                T.trove_cat.c.parent == t.trove_cat_id).execute():
             M.ProjectCategory(parent_id=parent._id,
                               name=tt.shortname, label=tt.fullname, description=tt.description)
     session(M.ProjectCategory).flush()


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/spam/test_mollom.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/spam/test_mollom.py b/Allura/allura/tests/unit/spam/test_mollom.py
index ac7441d..931f8a3 100644
--- a/Allura/allura/tests/unit/spam/test_mollom.py
+++ b/Allura/allura/tests/unit/spam/test_mollom.py
@@ -28,19 +28,21 @@ from allura.lib.spam.mollomfilter import MOLLOM_AVAILABLE, MollomSpamFilter
 
 @unittest.skipIf(not MOLLOM_AVAILABLE, "Mollom not available")
 class TestMollom(unittest.TestCase):
+
     @mock.patch('allura.lib.spam.mollomfilter.Mollom')
     def setUp(self, mollom_lib):
         self.mollom = MollomSpamFilter({})
+
         def side_effect(*args, **kw):
             # side effect to test that data being sent to
             # mollom can be successfully urlencoded
             urllib.urlencode(kw.get('data', {}))
             return dict(spam=2)
         self.mollom.service.checkContent = mock.Mock(side_effect=side_effect,
-                return_value=dict(spam=2))
+                                                     return_value=dict(spam=2))
         self.fake_artifact = mock.Mock(**{'url.return_value': 'artifact url'})
         self.fake_user = mock.Mock(display_name=u'Søme User',
-                email_addresses=['user@domain'])
+                                   email_addresses=['user@domain'])
         self.fake_headers = dict(
             REMOTE_ADDR='fallback ip',
             X_FORWARDED_FOR='some ip',
@@ -58,19 +60,22 @@ class TestMollom(unittest.TestCase):
     def test_check(self, request, c):
         request.headers = self.fake_headers
         c.user = None
-        self.mollom.check(self.content, artifact = self.artifact)
-        self.mollom.service.checkContent.assert_called_once_with(**self.expected_data)
+        self.mollom.check(self.content, artifact=self.artifact)
+        self.mollom.service.checkContent.assert_called_once_with(
+            **self.expected_data)
 
     @mock.patch('allura.lib.spam.mollomfilter.c')
     @mock.patch('allura.lib.spam.mollomfilter.request')
     def test_check_with_user(self, request, c):
         request.headers = self.fake_headers
         c.user = None
-        self.mollom.check(self.content, user=self.fake_user, artifact=self.artifact)
+        self.mollom.check(self.content, user=self.fake_user,
+                          artifact=self.artifact)
         expected_data = self.expected_data
         expected_data.update(authorName=u'Søme User'.encode('utf8'),
-                authorMail='user@domain')
-        self.mollom.service.checkContent.assert_called_once_with(**self.expected_data)
+                             authorMail='user@domain')
+        self.mollom.service.checkContent.assert_called_once_with(
+            **self.expected_data)
 
     @mock.patch('allura.lib.spam.mollomfilter.c')
     @mock.patch('allura.lib.spam.mollomfilter.request')
@@ -80,8 +85,9 @@ class TestMollom(unittest.TestCase):
         self.mollom.check(self.content, artifact=self.artifact)
         expected_data = self.expected_data
         expected_data.update(authorName=u'Søme User'.encode('utf8'),
-                authorMail='user@domain')
-        self.mollom.service.checkContent.assert_called_once_with(**self.expected_data)
+                             authorMail='user@domain')
+        self.mollom.service.checkContent.assert_called_once_with(
+            **self.expected_data)
 
     @mock.patch('allura.lib.spam.mollomfilter.c')
     @mock.patch('allura.lib.spam.mollomfilter.request')
@@ -92,8 +98,10 @@ class TestMollom(unittest.TestCase):
         request.remote_addr = self.fake_headers['REMOTE_ADDR']
         c.user = None
         self.mollom.check(self.content, artifact=self.artifact)
-        self.mollom.service.checkContent.assert_called_once_with(**self.expected_data)
+        self.mollom.service.checkContent.assert_called_once_with(
+            **self.expected_data)
 
     def test_submit_spam(self):
         self.mollom.submit_spam('test', artifact=self.artifact)
-        assert self.mollom.service.sendFeedback.call_args[0] == ('test_id', 'spam'), self.mollom.service.sendFeedback.call_args[0]
+        assert self.mollom.service.sendFeedback.call_args[0] == (
+            'test_id', 'spam'), self.mollom.service.sendFeedback.call_args[0]

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/spam/test_spam_filter.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/spam/test_spam_filter.py b/Allura/allura/tests/unit/spam/test_spam_filter.py
index 5255503..1d96c7f 100644
--- a/Allura/allura/tests/unit/spam/test_spam_filter.py
+++ b/Allura/allura/tests/unit/spam/test_spam_filter.py
@@ -24,12 +24,14 @@ from allura.lib.spam import SpamFilter
 
 
 class MockFilter(SpamFilter):
+
     def check(*args, **kw):
         raise Exception("test exception")
         return True
 
 
 class TestSpamFilter(unittest.TestCase):
+
     def test_check(self):
         # default no-op impl always returns False
         self.assertFalse(SpamFilter({}).check('foo'))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_app.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_app.py b/Allura/allura/tests/unit/test_app.py
index da36170..adbfc40 100644
--- a/Allura/allura/tests/unit/test_app.py
+++ b/Allura/allura/tests/unit/test_app.py
@@ -111,4 +111,3 @@ def install_app():
     app = Application(project, app_config)
     app.install(project)
     return app
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_helpers/test_ago.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_helpers/test_ago.py b/Allura/allura/tests/unit/test_helpers/test_ago.py
index 240469f..5e09a2c 100644
--- a/Allura/allura/tests/unit/test_helpers/test_ago.py
+++ b/Allura/allura/tests/unit/test_helpers/test_ago.py
@@ -22,6 +22,7 @@ from allura.lib import helpers
 
 
 class TestAgo:
+
     def setUp(self):
         self.start_time = datetime(2010, 1, 1, 0, 0, 0)
 
@@ -45,9 +46,9 @@ class TestAgo:
         self.assertTimeSince('2 days ago', 2010, 1, 2, 13, 0, 0)
 
     def test_that_months_are_rounded(self):
-        self.assertTimeSince('2010-01-01', 2010,2,8,0,0,0)
-        self.assertTimeSince('2010-01-01', 2010,2,9,0,0,0)
-        self.assertTimeSince('2010-01-01', 2010,2,20,0,0,0)
+        self.assertTimeSince('2010-01-01', 2010, 2, 8, 0, 0, 0)
+        self.assertTimeSince('2010-01-01', 2010, 2, 9, 0, 0, 0)
+        self.assertTimeSince('2010-01-01', 2010, 2, 20, 0, 0, 0)
 
     def test_that_years_are_rounded(self):
         self.assertTimeSince('2010-01-01', 2011, 6, 1, 0, 0, 0)
@@ -61,4 +62,3 @@ class TestAgo:
         with patch('allura.lib.helpers.datetime') as datetime_class:
             datetime_class.utcnow.return_value = end_time
             return helpers.ago(self.start_time)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_helpers/test_set_context.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_helpers/test_set_context.py b/Allura/allura/tests/unit/test_helpers/test_set_context.py
index 84df78e..7784f74 100644
--- a/Allura/allura/tests/unit/test_helpers/test_set_context.py
+++ b/Allura/allura/tests/unit/test_helpers/test_set_context.py
@@ -24,12 +24,13 @@ from allura.lib.exceptions import NoSuchProjectError, NoSuchNeighborhoodError
 from allura.tests.unit import WithDatabase
 from allura.tests.unit import patches
 from allura.tests.unit.factories import (create_project,
-                                          create_app_config,
-                                          create_neighborhood)
+                                         create_app_config,
+                                         create_neighborhood)
 from allura.model.project import Neighborhood
 
 
 class TestWhenProjectIsFoundAndAppIsNot(WithDatabase):
+
     def setUp(self):
         super(TestWhenProjectIsFoundAndAppIsNot, self).setUp()
         self.myproject = create_project('myproject')
@@ -43,6 +44,7 @@ class TestWhenProjectIsFoundAndAppIsNot(WithDatabase):
 
 
 class TestWhenProjectIsFoundInNeighborhood(WithDatabase):
+
     def setUp(self):
         super(TestWhenProjectIsFoundInNeighborhood, self).setUp()
         self.myproject = create_project('myproject')
@@ -62,7 +64,8 @@ class TestWhenAppIsFoundByID(WithDatabase):
         super(TestWhenAppIsFoundByID, self).setUp()
         self.myproject = create_project('myproject')
         self.app_config = create_app_config(self.myproject, 'my_mounted_app')
-        set_context('myproject', app_config_id=self.app_config._id, neighborhood=self.myproject.neighborhood)
+        set_context('myproject', app_config_id=self.app_config._id,
+                    neighborhood=self.myproject.neighborhood)
 
     def test_that_it_sets_the_app(self):
         assert c.app is self.fake_app
@@ -78,7 +81,8 @@ class TestWhenAppIsFoundByMountPoint(WithDatabase):
         super(TestWhenAppIsFoundByMountPoint, self).setUp()
         self.myproject = create_project('myproject')
         self.app_config = create_app_config(self.myproject, 'my_mounted_app')
-        set_context('myproject', mount_point='my_mounted_app', neighborhood=self.myproject.neighborhood)
+        set_context('myproject', mount_point='my_mounted_app',
+                    neighborhood=self.myproject.neighborhood)
 
     def test_that_it_sets_the_app(self):
         assert c.app is self.fake_app
@@ -104,6 +108,7 @@ class TestWhenProjectIsNotFound(WithDatabase):
                       ObjectId(),
                       neighborhood=None)
 
+
 class TestWhenNeighborhoodIsNotFound(WithDatabase):
 
     def test_that_it_raises_an_exception(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_mixins.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_mixins.py b/Allura/allura/tests/unit/test_mixins.py
index a6c508f..cdc3aaa 100644
--- a/Allura/allura/tests/unit/test_mixins.py
+++ b/Allura/allura/tests/unit/test_mixins.py
@@ -56,7 +56,7 @@ class TestVotableArtifact(object):
         vote.vote_down(self.user2)
         assert vote.votes_down == 2
         assert vote.votes_down_users == [self.user1.username,
-                                        self.user2.username]
+                                         self.user2.username]
 
         vote.vote_down(self.user1)  # unvote user1
         assert vote.votes_down == 1
@@ -84,4 +84,4 @@ class TestVotableArtifact(object):
         assert vote.__json__() == {'votes_up': 0, 'votes_down': 1}
 
         vote.vote_up(self.user2)
-        assert vote.__json__() == {'votes_up': 1, 'votes_down': 1}
\ No newline at end of file
+        assert vote.__json__() == {'votes_up': 1, 'votes_down': 1}

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_package_path_loader.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_package_path_loader.py b/Allura/allura/tests/unit/test_package_path_loader.py
index a5055ac..61bd887 100644
--- a/Allura/allura/tests/unit/test_package_path_loader.py
+++ b/Allura/allura/tests/unit/test_package_path_loader.py
@@ -37,31 +37,31 @@ class TestPackagePathLoader(TestCase):
         ]
         for ep in eps:
             ep.name = ep.ep_name
-        resource_filename.side_effect = lambda m, r: 'path:'+m
+        resource_filename.side_effect = lambda m, r: 'path:' + m
 
         paths = PackagePathLoader()._load_paths()
 
         assert_equal(paths, [
-                ['site-theme', None],
-                ['ep0', 'path:eps.ep0'],
-                ['ep1', 'path:eps.ep1'],
-                ['ep2', 'path:eps.ep2'],
-                ['allura', '/'],
-            ])
+            ['site-theme', None],
+            ['ep0', 'path:eps.ep0'],
+            ['ep1', 'path:eps.ep1'],
+            ['ep2', 'path:eps.ep2'],
+            ['allura', '/'],
+        ])
         assert_equal(type(paths[0]), list)
         assert_equal(resource_filename.call_args_list, [
-                mock.call('eps.ep0', ''),
-                mock.call('eps.ep1', ''),
-                mock.call('eps.ep2', ''),
-            ])
+            mock.call('eps.ep0', ''),
+            mock.call('eps.ep1', ''),
+            mock.call('eps.ep2', ''),
+        ])
 
     @mock.patch('pkg_resources.iter_entry_points')
     def test_load_rules(self, iter_entry_points):
         eps = iter_entry_points.return_value.__iter__.return_value = [
-                mock.Mock(ep_name='ep0', rules=[('>', 'allura')]),
-                mock.Mock(ep_name='ep1', rules=[('=', 'allura')]),
-                mock.Mock(ep_name='ep2', rules=[('<', 'allura')]),
-            ]
+            mock.Mock(ep_name='ep0', rules=[('>', 'allura')]),
+            mock.Mock(ep_name='ep1', rules=[('=', 'allura')]),
+            mock.Mock(ep_name='ep2', rules=[('<', 'allura')]),
+        ]
         for ep in eps:
             ep.name = ep.ep_name
             ep.load.return_value.template_path_rules = ep.rules
@@ -72,8 +72,8 @@ class TestPackagePathLoader(TestCase):
         assert_equal(replacement_rules, {'allura': 'ep1'})
 
         eps = iter_entry_points.return_value.__iter__.return_value = [
-                mock.Mock(ep_name='ep0', rules=[('?', 'allura')]),
-            ]
+            mock.Mock(ep_name='ep0', rules=[('?', 'allura')]),
+        ]
         for ep in eps:
             ep.name = ep.ep_name
             ep.load.return_value.template_path_rules = ep.rules
@@ -84,63 +84,63 @@ class TestPackagePathLoader(TestCase):
         ppl._replace_signpost = mock.Mock()
         paths = [
                 ['site-theme', None],
-                ['ep0', '/ep0'],
-                ['ep1', '/ep1'],
-                ['ep2', '/ep2'],
-                ['allura', '/'],
-            ]
+            ['ep0', '/ep0'],
+            ['ep1', '/ep1'],
+            ['ep2', '/ep2'],
+            ['allura', '/'],
+        ]
         rules = {
-                'allura': 'ep2',
-                'site-theme': 'ep1',
-                'foo': 'ep1',
-                'ep0': 'bar',
-            }
+            'allura': 'ep2',
+            'site-theme': 'ep1',
+            'foo': 'ep1',
+            'ep0': 'bar',
+        }
 
         ppl._replace_signposts(paths, rules)
 
         assert_equal(paths, [
-                ['site-theme', '/ep1'],
-                ['ep0', '/ep0'],
-                ['allura', '/ep2'],
-            ]);
+            ['site-theme', '/ep1'],
+            ['ep0', '/ep0'],
+            ['allura', '/ep2'],
+        ])
 
     def test_sort_paths(self):
         paths = [
                 ['site-theme', None],
-                ['ep0', '/ep0'],
-                ['ep1', '/ep1'],
-                ['ep2', '/ep2'],
-                ['ep3', '/ep3'],
-                ['allura', '/'],
-            ]
+            ['ep0', '/ep0'],
+            ['ep1', '/ep1'],
+            ['ep2', '/ep2'],
+            ['ep3', '/ep3'],
+            ['allura', '/'],
+        ]
         rules = [
-                ('allura', 'ep0'),
-                ('ep3', 'ep1'),
-                ('ep2', 'ep1'),
-                ('ep4', 'ep1'),  # rules referencing missing paths
-                ('ep2', 'ep5'),
-            ]
+            ('allura', 'ep0'),
+            ('ep3', 'ep1'),
+            ('ep2', 'ep1'),
+            ('ep4', 'ep1'),  # rules referencing missing paths
+            ('ep2', 'ep5'),
+        ]
 
         PackagePathLoader()._sort_paths(paths, rules)
 
         assert_equal(paths, [
-                ['site-theme', None],
-                ['ep2', '/ep2'],
-                ['ep3', '/ep3'],
-                ['ep1', '/ep1'],
-                ['allura', '/'],
-                ['ep0', '/ep0'],
-            ])
+            ['site-theme', None],
+            ['ep2', '/ep2'],
+            ['ep3', '/ep3'],
+            ['ep1', '/ep1'],
+            ['allura', '/'],
+            ['ep0', '/ep0'],
+        ])
 
     def test_init_paths(self):
-        paths =  [
-                ['root', '/'],
-                ['none', None],
-                ['tail', '/tail'],
-            ]
+        paths = [
+            ['root', '/'],
+            ['none', None],
+            ['tail', '/tail'],
+        ]
         ppl = PackagePathLoader()
         ppl._load_paths = mock.Mock(return_value=paths)
-        ppl._load_rules = mock.Mock(return_value=('order_rules','repl_rules'))
+        ppl._load_rules = mock.Mock(return_value=('order_rules', 'repl_rules'))
         ppl._replace_signposts = mock.Mock()
         ppl._sort_paths = mock.Mock()
 
@@ -177,27 +177,33 @@ class TestPackagePathLoader(TestCase):
         output = ppl.get_source('env', 'allura.ext.admin:templates/audit.html')
 
         assert_equal(output, 'fs_load')
-        fs_loader().get_source.assert_called_once_with('env', 'override/allura/ext/admin/templates/audit.html')
+        fs_loader().get_source.assert_called_once_with(
+            'env', 'override/allura/ext/admin/templates/audit.html')
 
         fs_loader().get_source.reset_mock()
-        fs_loader().get_source.side_effect = [jinja2.TemplateNotFound('test'), 'fs_load']
+        fs_loader().get_source.side_effect = [
+            jinja2.TemplateNotFound('test'), 'fs_load']
 
         with mock.patch('pkg_resources.resource_filename') as rf:
             rf.return_value = 'resource'
             # no override, ':' in template
-            output = ppl.get_source('env', 'allura.ext.admin:templates/audit.html')
-            rf.assert_called_once_with('allura.ext.admin', 'templates/audit.html')
+            output = ppl.get_source(
+                'env', 'allura.ext.admin:templates/audit.html')
+            rf.assert_called_once_with(
+                'allura.ext.admin', 'templates/audit.html')
 
         assert_equal(output, 'fs_load')
         assert_equal(fs_loader().get_source.call_count, 2)
         fs_loader().get_source.assert_called_with('env', 'resource')
 
         fs_loader().get_source.reset_mock()
-        fs_loader().get_source.side_effect = [jinja2.TemplateNotFound('test'), 'fs_load']
+        fs_loader().get_source.side_effect = [
+            jinja2.TemplateNotFound('test'), 'fs_load']
 
         # no override, ':' not in template
         output = ppl.get_source('env', 'templates/audit.html')
 
         assert_equal(output, 'fs_load')
         assert_equal(fs_loader().get_source.call_count, 2)
-        fs_loader().get_source.assert_called_with('env', 'templates/audit.html')
+        fs_loader().get_source.assert_called_with(
+            'env', 'templates/audit.html')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_project.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_project.py b/Allura/allura/tests/unit/test_project.py
index 396fdae..b4108e0 100644
--- a/Allura/allura/tests/unit/test_project.py
+++ b/Allura/allura/tests/unit/test_project.py
@@ -23,6 +23,7 @@ from allura.app import SitemapEntry
 
 
 class TestProject(unittest.TestCase):
+
     def test_grouped_navbar_entries(self):
         p = M.Project()
         sitemap_entries = [
@@ -32,7 +33,8 @@ class TestProject(unittest.TestCase):
             SitemapEntry('subproject', url='subproject url'),
             SitemapEntry('features', url='features url', tool_name='Tickets'),
             SitemapEntry('help', url='help url', tool_name='Discussion'),
-            SitemapEntry('support reqs', url='support url', tool_name='Tickets'),
+            SitemapEntry('support reqs', url='support url',
+                         tool_name='Tickets'),
         ]
         p.url = Mock(return_value='proj_url/')
         p.sitemap = Mock(return_value=sitemap_entries)
@@ -57,7 +59,8 @@ class TestProject(unittest.TestCase):
             SitemapEntry('subproject', url='subproject url'),
             SitemapEntry('features', url='features url', tool_name='Tickets'),
             SitemapEntry('help', url='help url', tool_name='Discussion'),
-            SitemapEntry('support reqs', url='support url', tool_name='Tickets'),
+            SitemapEntry('support reqs', url='support url',
+                         tool_name='Tickets'),
         ]
         p.url = Mock(return_value='proj_url/')
         p.sitemap = Mock(return_value=sitemap_entries)
@@ -80,5 +83,6 @@ class TestProject(unittest.TestCase):
         self.assertIsNone(p.social_account('Twitter'))
 
         p.set_social_account('Twitter', 'http://twitter.com/allura')
-        self.assertEqual(p.social_account('Twitter').accounturl, 'http://twitter.com/allura')
-        self.assertEqual(p.twitter_handle, 'http://twitter.com/allura')
\ No newline at end of file
+        self.assertEqual(p.social_account('Twitter')
+                         .accounturl, 'http://twitter.com/allura')
+        self.assertEqual(p.twitter_handle, 'http://twitter.com/allura')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_repo.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_repo.py b/Allura/allura/tests/unit/test_repo.py
index c730d59..567b6cc 100644
--- a/Allura/allura/tests/unit/test_repo.py
+++ b/Allura/allura/tests/unit/test_repo.py
@@ -27,6 +27,7 @@ from allura.controllers.repository import topo_sort
 from allura.model.repository import zipdir, prefix_paths_union
 from alluratest.controller import setup_unit_test
 
+
 class TestCommitRunBuilder(unittest.TestCase):
 
     def setUp(self):
@@ -34,10 +35,10 @@ class TestCommitRunBuilder(unittest.TestCase):
         commits = [
             M.repo.CommitDoc.make(dict(
                 _id=str(i)))
-            for i in range(10) ]
-        for p,c in zip(commits, commits[1:]):
-            p.child_ids = [ c._id ]
-            c.parent_ids = [ p._id ]
+            for i in range(10)]
+        for p, c in zip(commits, commits[1:]):
+            p.child_ids = [c._id]
+            c.parent_ids = [p._id]
         for ci in commits:
             ci.m.save()
         self.commits = commits
@@ -73,7 +74,9 @@ class TestCommitRunBuilder(unittest.TestCase):
             crb.cleanup()
         self.assertEqual(M.repo.CommitRunDoc.m.count(), 1)
 
+
 class TestTopoSort(unittest.TestCase):
+
     def test_commit_dates_out_of_order(self):
         """Commits should be sorted by their parent/child relationships,
         regardless of the date on the commit.
@@ -102,7 +105,7 @@ class TestTopoSort(unittest.TestCase):
             'dev':        datetime.datetime(2012, 6, 1)}
         result = topo_sort(children, parents, dates, head_ids)
         self.assertEqual(list(result), ['dev', 'dev@{1}', 'master',
-            'master@{1}', 'master@{2}', 'master@{3}'])
+                                        'master@{1}', 'master@{2}', 'master@{3}'])
 
 
 def tree(name, id, trees=None, blobs=None):
@@ -142,6 +145,7 @@ class TestTree(unittest.TestCase):
 
 
 class TestBlob(unittest.TestCase):
+
     def test_context_no_create(self):
         blob = M.repo.Blob(Mock(), Mock(), Mock())
         blob.path = Mock(return_value='path')
@@ -208,6 +212,7 @@ class TestBlob(unittest.TestCase):
 
 
 class TestCommit(unittest.TestCase):
+
     def test_activity_extras(self):
         commit = M.repo.Commit()
         commit.shorthand_id = MagicMock(return_value='abcdef')
@@ -269,7 +274,8 @@ class TestCommit(unittest.TestCase):
         tree = commit.get_tree()
         commit.repo.compute_tree_new.assert_called_once_with(commit)
         assert not tree_get.called
-        c.model_cache.get.assert_called_once_with(M.repo.Tree, dict(_id='tree'))
+        c.model_cache.get.assert_called_once_with(
+            M.repo.Tree, dict(_id='tree'))
         _tree.set_context.assert_called_once_with(commit)
         assert_equal(tree, _tree)
 
@@ -279,7 +285,8 @@ class TestCommit(unittest.TestCase):
         tree = commit.get_tree()
         assert not commit.repo.compute_tree_new.called
         assert not tree_get.called
-        c.model_cache.get.assert_called_once_with(M.repo.Tree, dict(_id='tree2'))
+        c.model_cache.get.assert_called_once_with(
+            M.repo.Tree, dict(_id='tree2'))
         _tree.set_context.assert_called_once_with(commit)
         assert_equal(tree, _tree)
 
@@ -288,11 +295,13 @@ class TestCommit(unittest.TestCase):
         c.model_cache.get.return_value = None
         tree_get.return_value = _tree
         tree = commit.get_tree()
-        c.model_cache.get.assert_called_once_with(M.repo.Tree, dict(_id='tree2'))
+        c.model_cache.get.assert_called_once_with(
+            M.repo.Tree, dict(_id='tree2'))
         commit.repo.compute_tree_new.assert_called_once_with(commit)
         assert_equal(commit.tree_id, 'tree')
         tree_get.assert_called_once_with(_id='tree')
-        c.model_cache.set.assert_called_once_with(M.repo.Tree, dict(_id='tree'), _tree)
+        c.model_cache.set.assert_called_once_with(
+            M.repo.Tree, dict(_id='tree'), _tree)
         _tree.set_context.assert_called_once_with(commit)
         assert_equal(tree, _tree)
 
@@ -304,6 +313,7 @@ class TestCommit(unittest.TestCase):
 
 
 class TestZipDir(unittest.TestCase):
+
     @patch('allura.model.repository.Popen')
     @patch('allura.model.repository.tg')
     def test_popen_called(self, tg, popen):
@@ -314,14 +324,16 @@ class TestZipDir(unittest.TestCase):
         src = '/fake/path/to/repo'
         zipfile = '/fake/zip/file.tmp'
         zipdir(src, zipfile)
-        popen.assert_called_once_with(['/bin/zip', '-y', '-q', '-r', zipfile, 'repo'],
-                cwd='/fake/path/to', stdout=PIPE, stderr=PIPE)
+        popen.assert_called_once_with(
+            ['/bin/zip', '-y', '-q', '-r', zipfile, 'repo'],
+            cwd='/fake/path/to', stdout=PIPE, stderr=PIPE)
         popen.reset_mock()
         src = '/fake/path/to/repo/'
         zipdir(src, zipfile, exclude='file.txt')
         popen.assert_called_once_with(
-                ['/bin/zip', '-y', '-q', '-r', zipfile, 'repo', '-x', 'file.txt'],
-                cwd='/fake/path/to', stdout=PIPE, stderr=PIPE)
+            ['/bin/zip', '-y', '-q', '-r',
+             zipfile, 'repo', '-x', 'file.txt'],
+            cwd='/fake/path/to', stdout=PIPE, stderr=PIPE)
 
     @patch('allura.model.repository.Popen')
     @patch('allura.model.repository.tg')
@@ -335,14 +347,15 @@ class TestZipDir(unittest.TestCase):
             zipdir(src, zipfile)
         emsg = str(cm.exception)
         self.assertTrue(
-                "Command: "
-                "['/bin/zip', '-y', '-q', '-r', '/fake/zip/file.tmp', 'repo'] "
-                "returned non-zero exit code 1" in emsg)
+            "Command: "
+            "['/bin/zip', '-y', '-q', '-r', '/fake/zip/file.tmp', 'repo'] "
+            "returned non-zero exit code 1" in emsg)
         self.assertTrue("STDOUT: 1" in emsg)
         self.assertTrue("STDERR: 2" in emsg)
 
 
 class TestPrefixPathsUnion(unittest.TestCase):
+
     def test_disjoint(self):
         a = set(['a1', 'a2', 'a3'])
         b = set(['b1', 'b1/foo', 'b2'])

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_session.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_session.py b/Allura/allura/tests/unit/test_session.py
index 5b5449e..b079004 100644
--- a/Allura/allura/tests/unit/test_session.py
+++ b/Allura/allura/tests/unit/test_session.py
@@ -38,6 +38,7 @@ def test_extensions_cm():
 
 
 class TestBatchIndexer(TestCase):
+
     def setUp(self):
         session = mock.Mock()
         self.extcls = BatchIndexer
@@ -74,7 +75,8 @@ class TestBatchIndexer(TestCase):
         self.extcls.to_delete = del_index_ids
         self.extcls.to_add = set([4, 5, 6])
         self.ext.flush()
-        index_tasks.del_artifacts.post.assert_called_once_with(list(del_index_ids))
+        index_tasks.del_artifacts.post.assert_called_once_with(
+            list(del_index_ids))
         index_tasks.add_artifacts.post.assert_called_once_with([4, 5, 6])
         self.assertEqual(self.ext.to_delete, set())
         self.assertEqual(self.ext.to_add, set())
@@ -110,7 +112,7 @@ class TestBatchIndexer(TestCase):
         def on_post(chunk):
             if len(chunk) > 1:
                 raise pymongo.errors.InvalidDocument(
-                        "BSON document too large (16906035 bytes) - the connected server supports BSON document sizes up to 16777216 bytes.")
+                    "BSON document too large (16906035 bytes) - the connected server supports BSON document sizes up to 16777216 bytes.")
         index_tasks.add_artifacts.post.side_effect = on_post
         self.ext._post(index_tasks.add_artifacts, range(5))
         expected = [
@@ -124,7 +126,8 @@ class TestBatchIndexer(TestCase):
             mock.call([3]),
             mock.call([4])
         ]
-        self.assertEqual(expected, index_tasks.add_artifacts.post.call_args_list)
+        self.assertEqual(
+            expected, index_tasks.add_artifacts.post.call_args_list)
 
     @mock.patch('allura.tasks.index_tasks')
     def test__post_other_error(self, index_tasks):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_sitemapentry.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_sitemapentry.py b/Allura/allura/tests/unit/test_sitemapentry.py
index f6e9317..bdd6399 100644
--- a/Allura/allura/tests/unit/test_sitemapentry.py
+++ b/Allura/allura/tests/unit/test_sitemapentry.py
@@ -22,6 +22,7 @@ from allura.app import SitemapEntry
 
 
 class TestSitemapEntry(unittest.TestCase):
+
     def test_matches_url(self):
         request = Mock(upath_info='/p/project/tool/artifact')
         s1 = SitemapEntry('tool', url='/p/project/tool')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_solr.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_solr.py b/Allura/allura/tests/unit/test_solr.py
index 2bc5cdb..02da952 100644
--- a/Allura/allura/tests/unit/test_solr.py
+++ b/Allura/allura/tests/unit/test_solr.py
@@ -27,6 +27,7 @@ from alluratest.controller import setup_basic_test
 from allura.lib.solr import Solr
 from allura.lib.search import solarize, search_app
 
+
 class TestSolr(unittest.TestCase):
 
     @mock.patch('allura.lib.solr.pysolr')
@@ -39,7 +40,8 @@ class TestSolr(unittest.TestCase):
 
         pysolr.reset_mock()
         solr = Solr(servers, 'server3', commit=False, commitWithin='10000')
-        calls = [mock.call('server1'), mock.call('server2'), mock.call('server3')]
+        calls = [mock.call('server1'), mock.call('server2'),
+                 mock.call('server3')]
         pysolr.Solr.assert_has_calls(calls)
         assert_equal(len(solr.push_pool), 2)
 
@@ -53,7 +55,7 @@ class TestSolr(unittest.TestCase):
         pysolr.reset_mock()
         solr.add('bar', somekw='value')
         calls = [mock.call('bar', commit=False,
-            commitWithin='10000', somekw='value')] * 2
+                           commitWithin='10000', somekw='value')] * 2
         pysolr.Solr().add.assert_has_calls(calls)
 
     @mock.patch('allura.lib.solr.pysolr')
@@ -115,7 +117,8 @@ class TestSolarize(unittest.TestCase):
         obj.index.return_value = {'text': '<script>alert(1)</script>'}
         assert_equal(solarize(obj), {'text': ''})
 
-        obj.index.return_value = {'text': '&lt;script&gt;alert(1)&lt;/script&gt;'}
+        obj.index.return_value = {'text':
+                                  '&lt;script&gt;alert(1)&lt;/script&gt;'}
         assert_equal(solarize(obj), {'text': '<script>alert(1)</script>'})
 
 
@@ -156,15 +159,17 @@ class TestSearch_app(unittest.TestCase):
         req.path = '/test/wiki/search'
         url_fn.side_effect = ['the-score-url', 'the-date-url']
         results = mock.Mock(hits=2, docs=[
-                {'id': 123, 'type_s':'WikiPage Snapshot', 'url_s':'/test/wiki/Foo', 'version_i':2},
-                {'id': 321, 'type_s':'Post'},
-            ], highlighting={
-                123: dict(title='some #ALLURA-HIGHLIGHT-START#Foo#ALLURA-HIGHLIGHT-END# stuff',
-                         text='scary <script>alert(1)</script> bar'),
-                321: dict(title='blah blah',
-                         text='less scary but still dangerous &lt;script&gt;alert(1)&lt;/script&gt; '
-                              'blah #ALLURA-HIGHLIGHT-START#bar#ALLURA-HIGHLIGHT-END# foo foo'),
-            },
+            {'id': 123, 'type_s': 'WikiPage Snapshot',
+             'url_s': '/test/wiki/Foo', 'version_i': 2},
+            {'id': 321, 'type_s': 'Post'},
+        ], highlighting={
+            123: dict(
+                title='some #ALLURA-HIGHLIGHT-START#Foo#ALLURA-HIGHLIGHT-END# stuff',
+                text='scary <script>alert(1)</script> bar'),
+            321: dict(title='blah blah',
+                      text='less scary but still dangerous &lt;script&gt;alert(1)&lt;/script&gt; '
+                      'blah #ALLURA-HIGHLIGHT-START#bar#ALLURA-HIGHLIGHT-END# foo foo'),
+        },
         )
         results.__iter__ = lambda self: iter(results.docs)
         solr_search.return_value = results
@@ -190,11 +195,11 @@ class TestSearch_app(unittest.TestCase):
                 'title_match': Markup('some <strong>Foo</strong> stuff'),
                 # HTML in the solr plaintext results get escaped
                 'text_match': Markup('scary &lt;script&gt;alert(1)&lt;/script&gt; bar'),
-                }, {
+            }, {
                 'id': 321,
                 'type_s': 'Post',
                 'title_match': Markup('blah blah'),
                 # highlighting in text
                 'text_match': Markup('less scary but still dangerous &amp;lt;script&amp;gt;alert(1)&amp;lt;/script&amp;gt; blah <strong>bar</strong> foo foo'),
-                }]
+            }]
         ))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/websetup/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/allura/websetup/__init__.py b/Allura/allura/websetup/__init__.py
index e05e639..37b1586 100644
--- a/Allura/allura/websetup/__init__.py
+++ b/Allura/allura/websetup/__init__.py
@@ -30,6 +30,7 @@ log = logging.getLogger(__name__)
 from schema import setup_schema
 import bootstrap
 
+
 def setup_app(command, conf, vars):
     """Place any commands to setup allura here"""
     load_environment(conf.global_conf, conf.local_conf)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/websetup/bootstrap.py
----------------------------------------------------------------------
diff --git a/Allura/allura/websetup/bootstrap.py b/Allura/allura/websetup/bootstrap.py
index 9d09927..252bfc9 100644
--- a/Allura/allura/websetup/bootstrap.py
+++ b/Allura/allura/websetup/bootstrap.py
@@ -45,20 +45,25 @@ from forgewiki import model as WM
 
 log = logging.getLogger(__name__)
 
+
 def cache_test_data():
     log.info('Saving data to cache in .test-data')
     if os.path.exists('.test-data'):
         shutil.rmtree('.test-data')
-    os.system('mongodump -h 127.0.0.1:27018 -o .test-data > mongodump.log 2>&1')
+    os.system(
+        'mongodump -h 127.0.0.1:27018 -o .test-data > mongodump.log 2>&1')
+
 
 def restore_test_data():
     if os.path.exists('.test-data'):
         log.info('Restoring data from cache in .test-data')
-        rc = os.system('mongorestore -h 127.0.0.1:27018 --dir .test-data > mongorestore.log 2>&1')
+        rc = os.system(
+            'mongorestore -h 127.0.0.1:27018 --dir .test-data > mongorestore.log 2>&1')
         return rc == 0
     else:
         return False
 
+
 def bootstrap(command, conf, vars):
     """Place any commands to setup allura here"""
     # are we being called by the test suite?
@@ -66,6 +71,7 @@ def bootstrap(command, conf, vars):
 
     # if this is a test_run, skip user project creation to save time
     make_user_projects = not test_run
+
     def make_user(*args, **kw):
         kw.update(make_project=make_user_projects)
         return create_user(*args, **kw)
@@ -79,11 +85,11 @@ def bootstrap(command, conf, vars):
     ThreadLocalORMSession.close_all()
     c.queued_messages = defaultdict(list)
     c.user = c.project = c.app = None
-    database=conf.get('db_prefix', '') + 'project:test'
+    database = conf.get('db_prefix', '') + 'project:test'
     wipe_database()
     try:
         g.solr.delete(q='*:*')
-    except: # pragma no cover
+    except:  # pragma no cover
         log.error('SOLR server is %s', g.solr_server)
         log.error('Error clearing solr index')
     if asbool(conf.get('cache_test_data')):
@@ -102,31 +108,34 @@ def bootstrap(command, conf, vars):
     root = create_user('Root', make_project=False)
 
     n_projects = M.Neighborhood(name='Projects', url_prefix='/p/',
-                                features=dict(private_projects = True,
-                                              max_projects = None,
-                                              css = 'none',
-                                              google_analytics = False))
+                                features=dict(private_projects=True,
+                                              max_projects=None,
+                                              css='none',
+                                              google_analytics=False))
     n_users = M.Neighborhood(name='Users', url_prefix='/u/',
                              shortname_prefix='u/',
                              anchored_tools='profile:Profile,userstats:Statistics',
-                             features=dict(private_projects = True,
-                                           max_projects = None,
-                                           css = 'none',
-                                           google_analytics = False))
-    n_adobe = M.Neighborhood(name='Adobe', url_prefix='/adobe/', project_list_url='/adobe/',
-                             features=dict(private_projects = True,
-                                           max_projects = None,
-                                           css = 'custom',
-                                           google_analytics = True))
+                             features=dict(private_projects=True,
+                                           max_projects=None,
+                                           css='none',
+                                           google_analytics=False))
+    n_adobe = M.Neighborhood(
+        name='Adobe', url_prefix='/adobe/', project_list_url='/adobe/',
+        features=dict(private_projects=True,
+                      max_projects=None,
+                      css='custom',
+                      google_analytics=True))
     assert tg.config['auth.method'] == 'local'
     project_reg = plugin.ProjectRegistrationProvider.get()
-    p_projects = project_reg.register_neighborhood_project(n_projects, [root], allow_register=True)
+    p_projects = project_reg.register_neighborhood_project(
+        n_projects, [root], allow_register=True)
     p_users = project_reg.register_neighborhood_project(n_users, [root])
     p_adobe = project_reg.register_neighborhood_project(n_adobe, [root])
 
     def set_nbhd_wiki_content(nbhd_proj, content):
         wiki = nbhd_proj.app_instance('wiki')
-        page = WM.Page.query.get(app_config_id=wiki.config._id, title=wiki.root_page_name)
+        page = WM.Page.query.get(
+            app_config_id=wiki.config._id, title=wiki.root_page_name)
         page.text = content
 
     set_nbhd_wiki_content(p_projects, dedent('''
@@ -159,7 +168,8 @@ def bootstrap(command, conf, vars):
 
     # add the adobe icon
     file_name = 'adobe_icon.png'
-    file_path = os.path.join(allura.__path__[0],'public','nf','images',file_name)
+    file_path = os.path.join(
+        allura.__path__[0], 'public', 'nf', 'images', file_name)
     M.NeighborhoodFile.from_path(file_path, neighborhood_id=n_adobe._id)
 
     # Add some test users
@@ -170,14 +180,18 @@ def bootstrap(command, conf, vars):
     cat1 = M.ProjectCategory(name='clustering', label='Clustering')
 
     cat2 = M.ProjectCategory(name='communications', label='Communications')
-    cat2_1 = M.ProjectCategory(name='synchronization', label='Synchronization', parent_id=cat2._id)
-    cat2_2 = M.ProjectCategory(name='streaming', label='Streaming', parent_id=cat2._id)
+    cat2_1 = M.ProjectCategory(
+        name='synchronization', label='Synchronization', parent_id=cat2._id)
+    cat2_2 = M.ProjectCategory(
+        name='streaming', label='Streaming', parent_id=cat2._id)
     cat2_3 = M.ProjectCategory(name='fax', label='Fax', parent_id=cat2._id)
     cat2_4 = M.ProjectCategory(name='bbs', label='BBS', parent_id=cat2._id)
 
     cat3 = M.ProjectCategory(name='database', label='Database')
-    cat3_1 = M.ProjectCategory(name='front_ends', label='Front-Ends', parent_id=cat3._id)
-    cat3_2 = M.ProjectCategory(name='engines_servers', label='Engines/Servers', parent_id=cat3._id)
+    cat3_1 = M.ProjectCategory(
+        name='front_ends', label='Front-Ends', parent_id=cat3._id)
+    cat3_2 = M.ProjectCategory(
+        name='engines_servers', label='Engines/Servers', parent_id=cat3._id)
 
     log.info('Registering "regular users" (non-root) and default projects')
     # since this runs a lot for tests, separate test and default users and
@@ -190,7 +204,8 @@ def bootstrap(command, conf, vars):
         u_admin.claim_address('test-admin@users.localhost')
     else:
         u_admin = make_user('Admin 1', username='admin1')
-        # Admin1 is almost root, with admin access for Users and Projects neighborhoods
+        # Admin1 is almost root, with admin access for Users and Projects
+        # neighborhoods
         p_projects.add_user(u_admin, ['Admin'])
         p_users.add_user(u_admin, ['Admin'])
 
@@ -200,11 +215,12 @@ def bootstrap(command, conf, vars):
     p_adobe.add_user(u_admin, ['Admin'])
     p0 = n_projects.register_project('test', u_admin, 'Test Project')
     p1 = n_projects.register_project('test2', u_admin, 'Test 2')
-    p0._extra_tool_status = [ 'alpha', 'beta' ]
+    p0._extra_tool_status = ['alpha', 'beta']
 
-    sess = session(M.Neighborhood) # all the sessions are the same
+    sess = session(M.Neighborhood)  # all the sessions are the same
     for x in (n_adobe, n_projects, n_users, p_projects, p_users, p_adobe):
-        # Ming doesn't detect substructural changes in newly created objects (vs loaded from DB)
+        # Ming doesn't detect substructural changes in newly created objects
+        # (vs loaded from DB)
         state(x).status = 'dirty'
         # TODO: Hope that Ming can be improved to at least avoid stuff below
         sess.flush(x)
@@ -214,7 +230,7 @@ def bootstrap(command, conf, vars):
     if asbool(conf.get('load_test_data')):
         if asbool(conf.get('cache_test_data')):
             cache_test_data()
-    else: # pragma no cover
+    else:  # pragma no cover
         # regular first-time setup
         p0.add_user(u_admin, ['Admin'])
         log.info('Registering initial apps')
@@ -224,7 +240,8 @@ def bootstrap(command, conf, vars):
                     continue
                 p0.install_app(ep_name)
 
-    # reload our p0 project so that p0.app_configs is accurate with all the newly installed apps
+    # reload our p0 project so that p0.app_configs is accurate with all the
+    # newly installed apps
     ThreadLocalORMSession.flush_all()
     ThreadLocalORMSession.close_all()
     p0 = M.Project.query.get(_id=p0._id)
@@ -232,13 +249,14 @@ def bootstrap(command, conf, vars):
     with h.push_config(c, user=u_admin):
         sub.install_app('wiki')
 
-
     ThreadLocalORMSession.flush_all()
     ThreadLocalORMSession.close_all()
 
+
 def wipe_database():
     conn = M.main_doc_session.bind.conn
-    create_trove_categories = CreateTroveCategoriesCommand('create_trove_categories')
+    create_trove_categories = CreateTroveCategoriesCommand(
+        'create_trove_categories')
     index = EnsureIndexCommand('ensure_index')
     if isinstance(conn, mim.Connection):
         clear_all_database_tables()
@@ -246,11 +264,13 @@ def wipe_database():
             db = conn[db]
     else:
         for database in conn.database_names():
-            if database not in ( 'allura', 'pyforge', 'project-data'): continue
+            if database not in ('allura', 'pyforge', 'project-data'):
+                continue
             log.info('Wiping database %s', database)
             db = conn[database]
             for coll in db.collection_names():
-                if coll.startswith('system.'): continue
+                if coll.startswith('system.'):
+                    continue
                 log.info('Dropping collection %s:%s', database, coll)
                 try:
                     db.drop_collection(coll)
@@ -260,7 +280,6 @@ def wipe_database():
     index.run([''])
 
 
-
 def clear_all_database_tables():
     conn = M.main_doc_session.bind.conn
     for db in conn.database_names():
@@ -282,6 +301,7 @@ def create_user(display_name, username=None, password='foo', make_project=False)
 
 
 class DBSession(Session):
+
     '''Simple session that takes a pymongo connection and a database name'''
 
     def __init__(self, db):
@@ -294,17 +314,20 @@ class DBSession(Session):
     def _impl(self, cls):
         return self.db[cls.__mongometa__.name]
 
-def pm(etype, value, tb): # pragma no cover
-    import pdb, traceback
+
+def pm(etype, value, tb):  # pragma no cover
+    import pdb
+    import traceback
     try:
-        from IPython.ipapi import make_session; make_session()
+        from IPython.ipapi import make_session
+        make_session()
         from IPython.Debugger import Pdb
         sys.stderr.write('Entering post-mortem IPDB shell\n')
         p = Pdb(color_scheme='Linux')
         p.reset()
         p.setup(None, tb)
         p.print_stack_trace()
-        sys.stderr.write('%s: %s\n' % ( etype, value))
+        sys.stderr.write('%s: %s\n' % (etype, value))
         p.cmdloop()
         p.forget()
         # p.interaction(None, tb)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/websetup/schema.py
----------------------------------------------------------------------
diff --git a/Allura/allura/websetup/schema.py b/Allura/allura/websetup/schema.py
index cdfdf42..d25128e 100644
--- a/Allura/allura/websetup/schema.py
+++ b/Allura/allura/websetup/schema.py
@@ -30,6 +30,7 @@ from paste.registry import Registry
 log = logging.getLogger(__name__)
 REGISTRY = Registry()
 
+
 def setup_schema(command, conf, vars):
     """Place any commands to setup allura here"""
     import ming
@@ -45,4 +46,6 @@ def setup_schema(command, conf, vars):
     # Nothing to do
     log.info('setup_schema called')
 
-class EmptyClass(object): pass
+
+class EmptyClass(object):
+    pass

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/docs/conf.py
----------------------------------------------------------------------
diff --git a/Allura/docs/conf.py b/Allura/docs/conf.py
index 7b20c15..9b8e40a 100644
--- a/Allura/docs/conf.py
+++ b/Allura/docs/conf.py
@@ -28,18 +28,20 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys, os
+import sys
+import os
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.append(os.path.abspath('.'))
+# sys.path.append(os.path.abspath('.'))
 
-# -- General configuration -----------------------------------------------------
+# -- General configuration -----------------------------------------------
 
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig']
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
+              'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig']
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
@@ -104,7 +106,7 @@ pygments_style = 'sphinx'
 #modindex_common_prefix = []
 
 
-# -- Options for HTML output ---------------------------------------------------
+# -- Options for HTML output ---------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  Major themes that come with
 # Sphinx are currently 'default' and 'sphinxdoc'.
@@ -180,7 +182,7 @@ html_show_sourcelink = False
 htmlhelp_basename = 'alluradoc'
 
 
-# -- Options for LaTeX output --------------------------------------------------
+# -- Options for LaTeX output --------------------------------------------
 
 # The paper size ('letter' or 'a4').
 #latex_paper_size = 'letter'
@@ -191,8 +193,8 @@ htmlhelp_basename = 'alluradoc'
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'allura.tex', u'allura Documentation',
-   u'Mark Ramm, Wolf, Rick Copeland, Jonathan Beard', 'manual'),
+    ('index', 'allura.tex', u'allura Documentation',
+     u'Mark Ramm, Wolf, Rick Copeland, Jonathan Beard', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/ez_setup/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/ez_setup/__init__.py b/Allura/ez_setup/__init__.py
index f036aae..b8e23b3 100644
--- a/Allura/ez_setup/__init__.py
+++ b/Allura/ez_setup/__init__.py
@@ -33,7 +33,7 @@ This file can also be run as a script to install or upgrade setuptools.
 """
 import sys
 DEFAULT_VERSION = "0.6c7"
-DEFAULT_URL     = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
+DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
 
 md5_data = {
     'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
@@ -65,7 +65,9 @@ md5_data = {
     'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
 }
 
-import sys, os
+import sys
+import os
+
 
 def _validate_md5(egg_name, data):
     if egg_name in md5_data:
@@ -99,18 +101,20 @@ def use_setuptools(
         import setuptools
         if setuptools.__version__ == '0.0.1':
             print >>sys.stderr, (
-            "You have an obsolete version of setuptools installed.  Please\n"
-            "remove it from your system entirely before rerunning this script."
+                "You have an obsolete version of setuptools installed.  Please\n"
+                "remove it from your system entirely before rerunning this script."
             )
             sys.exit(2)
     except ImportError:
-        egg = download_setuptools(version, download_base, to_dir, download_delay)
+        egg = download_setuptools(
+            version, download_base, to_dir, download_delay)
         sys.path.insert(0, egg)
-        import setuptools; setuptools.bootstrap_install_from = egg
+        import setuptools
+        setuptools.bootstrap_install_from = egg
 
     import pkg_resources
     try:
-        pkg_resources.require("setuptools>="+version)
+        pkg_resources.require("setuptools>=" + version)
 
     except pkg_resources.VersionConflict, e:
         # XXX could we install in a subprocess here?
@@ -121,9 +125,10 @@ def use_setuptools(
         ) % (version, e.args[0])
         sys.exit(2)
 
+
 def download_setuptools(
     version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
-    delay = 15
+    delay=15
 ):
     """Download setuptools from a specified location and return its filename
 
@@ -132,8 +137,9 @@ def download_setuptools(
     with a '/'). `to_dir` is the directory where the egg will be downloaded.
     `delay` is the number of seconds to pause before an actual download attempt.
     """
-    import urllib2, shutil
-    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
+    import urllib2
+    import shutil
+    egg_name = "setuptools-%s-py%s.egg" % (version, sys.version[:3])
     url = download_base + egg_name
     saveto = os.path.join(to_dir, egg_name)
     src = dst = None
@@ -155,19 +161,25 @@ I will start the download in %d seconds.
 
 and place it in this directory before rerunning this script.)
 ---------------------------------------------------------------------------""",
-                    version, download_base, delay, url
-                ); from time import sleep; sleep(delay)
+                         version, download_base, delay, url
+                         )
+                from time import sleep
+                sleep(delay)
             log.warn("Downloading %s", url)
             src = urllib2.urlopen(url)
             # Read/write all in one block, so we don't create a corrupt file
             # if the download is interrupted.
             data = _validate_md5(egg_name, src.read())
-            dst = open(saveto,"wb"); dst.write(data)
+            dst = open(saveto, "wb")
+            dst.write(data)
         finally:
-            if src: src.close()
-            if dst: dst.close()
+            if src:
+                src.close()
+            if dst:
+                dst.close()
     return os.path.realpath(saveto)
 
+
 def main(argv, version=DEFAULT_VERSION):
     """Install or upgrade setuptools and EasyInstall"""
 
@@ -177,9 +189,9 @@ def main(argv, version=DEFAULT_VERSION):
         egg = None
         try:
             egg = download_setuptools(version, delay=0)
-            sys.path.insert(0,egg)
+            sys.path.insert(0, egg)
             from setuptools.command.easy_install import main
-            return main(list(argv)+[egg])   # we're done here
+            return main(list(argv) + [egg])   # we're done here
         finally:
             if egg and os.path.exists(egg):
                 os.unlink(egg)
@@ -188,7 +200,7 @@ def main(argv, version=DEFAULT_VERSION):
             # tell the user to uninstall obsolete version
             use_setuptools(version)
 
-    req = "setuptools>="+version
+    req = "setuptools>=" + version
     import pkg_resources
     try:
         pkg_resources.require(req)
@@ -197,18 +209,17 @@ def main(argv, version=DEFAULT_VERSION):
             from setuptools.command.easy_install import main
         except ImportError:
             from easy_install import main
-        main(list(argv)+[download_setuptools(delay=0)])
-        sys.exit(0) # try to force an exit
+        main(list(argv) + [download_setuptools(delay=0)])
+        sys.exit(0)  # try to force an exit
     else:
         if argv:
             from setuptools.command.easy_install import main
             main(argv)
         else:
-            print "Setuptools version",version,"or greater has been installed."
+            print "Setuptools version", version, "or greater has been installed."
             print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
 
 
-
 def update_md5(filenames):
     """Update our built-in md5 registry"""
 
@@ -217,7 +228,7 @@ def update_md5(filenames):
 
     for name in filenames:
         base = os.path.basename(name)
-        f = open(name,'rb')
+        f = open(name, 'rb')
         md5_data[base] = md5(f.read()).hexdigest()
         f.close()
 
@@ -227,7 +238,9 @@ def update_md5(filenames):
 
     import inspect
     srcfile = inspect.getsourcefile(sys.modules[__name__])
-    f = open(srcfile, 'rb'); src = f.read(); f.close()
+    f = open(srcfile, 'rb')
+    src = f.read()
+    f.close()
 
     match = re.search("\nmd5_data = {\n([^}]+)}", src)
     if not match:
@@ -235,13 +248,13 @@ def update_md5(filenames):
         sys.exit(2)
 
     src = src[:match.start(1)] + repl + src[match.end(1):]
-    f = open(srcfile,'w')
+    f = open(srcfile, 'w')
     f.write(src)
     f.close()
 
 
-if __name__=='__main__':
-    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
+if __name__ == '__main__':
+    if len(sys.argv) > 2 and sys.argv[1] == '--md5update':
         update_md5(sys.argv[2:])
     else:
         main(sys.argv[1:])

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/ldap-setup.py
----------------------------------------------------------------------
diff --git a/Allura/ldap-setup.py b/Allura/ldap-setup.py
index 3f1c090..4f58342 100644
--- a/Allura/ldap-setup.py
+++ b/Allura/ldap-setup.py
@@ -30,6 +30,7 @@ log = logging.getLogger('ldap-setup')
 
 config = ConfigParser()
 
+
 def main():
     config.read('.setup-scm-cache')
     if not config.has_section('scm'):
@@ -55,10 +56,12 @@ def main():
     os.chmod('/etc/ldap.secret', 0400)
     if get_value('add frontend ldif', 'y') == 'y':
         with tempfile(frontend_ldif, locals()) as name:
-            run('ldapadd -c -x -D cn=admin,%s -W -f %s -y /etc/ldap.secret' % (suffix, name))
+            run('ldapadd -c -x -D cn=admin,%s -W -f %s -y /etc/ldap.secret' %
+                (suffix, name))
     if get_value('add initial user/group', 'y') == 'y':
         with tempfile(initial_user_ldif, locals()) as name:
-            run('ldapadd -c -x -D cn=admin,%s -W -f %s -y /etc/ldap.secret' % (suffix, name))
+            run('ldapadd -c -x -D cn=admin,%s -W -f %s -y /etc/ldap.secret' %
+                (suffix, name))
     if get_value('setup ldap auth', 'y') == 'y':
         run('apt-get install libnss-ldap')
         run('dpkg-reconfigure ldap-auth-config')
@@ -76,18 +79,21 @@ def main():
         with open('/usr/share/ldapscripts/runtime.debian', 'w') as fp:
             fp.write(ldapscripts_debian)
 
+
 def get_value(key, default):
     try:
         default = config.get('scm', key)
     except NoOptionError:
         pass
     value = raw_input('%s? [%s]' % (key, default))
-    if not value: value = default
+    if not value:
+        value = default
     config.set('scm', key, value)
     with open('.setup-scm-cache', 'w') as fp:
         config.write(fp)
     return value
 
+
 def run(command):
     rc = os.system(command)
     if rc != 0:
@@ -95,6 +101,7 @@ def run(command):
     assert rc == 0
     return rc
 
+
 @contextmanager
 def tempfile(template, values):
     fd, name = mkstemp()
@@ -103,7 +110,7 @@ def tempfile(template, values):
     yield name
     os.remove(name)
 
-backend_ldif=string.Template('''
+backend_ldif = string.Template('''
 # Load dynamic backend modules
 dn: cn=module,cn=config
 objectClass: olcModuleList
@@ -134,7 +141,7 @@ olcAccess: to * by dn="cn=admin,$suffix" write by * read
 
 ''')
 
-frontend_ldif=string.Template('''
+frontend_ldif = string.Template('''
 # Create top-level object in domain
 dn: $suffix
 objectClass: top
@@ -167,7 +174,7 @@ objectClass: organizationalUnit
 ou: groups
 ''')
 
-initial_user_ldif=string.Template('''
+initial_user_ldif = string.Template('''
 dn: uid=john,ou=people,$suffix
 objectClass: inetOrgPerson
 objectClass: posixAccount
@@ -205,7 +212,7 @@ cn: example
 gidNumber: 10000
 ''')
 
-open_ldap_config=string.Template('''
+open_ldap_config = string.Template('''
 [open_ldap]
 nss_passwd=passwd: files ldap
 nss_group=group: files ldap
@@ -230,7 +237,7 @@ pam_session=session    required     pam_limits.so
         session    optional     pam_ldap.so
 ''')
 
-ldapscripts_conf=string.Template('''
+ldapscripts_conf = string.Template('''
 SERVER=127.0.0.1
 BINDDN='cn=admin,$suffix'
 BINDPWDFILE="/etc/ldapscripts/ldapscripts.passwd"
@@ -244,7 +251,7 @@ MIDSTART=10000
 ''')
 
 
-ldapscripts_debian='''
+ldapscripts_debian = '''
 ### Allura-customized
 ### This file predefine some ldapscripts variables for Debian boxes.
 #

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/ldap-userconfig.py
----------------------------------------------------------------------
diff --git a/Allura/ldap-userconfig.py b/Allura/ldap-userconfig.py
index ae796f8..bc36c1f 100644
--- a/Allura/ldap-userconfig.py
+++ b/Allura/ldap-userconfig.py
@@ -22,11 +22,13 @@ import sys
 import pwd
 import grp
 
+
 def main():
     command = sys.argv[1]
     uname = sys.argv[2]
     eval(command)(uname, *sys.argv[3:])
 
+
 def init(uname):
     home = os.path.join('/home', uname)
     ssh = os.path.join(home, '.ssh')
@@ -39,6 +41,7 @@ def init(uname):
     os.chown(home, u.pw_uid, g.gr_gid)
     os.chown(ssh, u.pw_uid, g.gr_gid)
 
+
 def upload(uname, pubkey):
     keyfile = os.path.join('/home', uname, '.ssh', 'authorized_keys')
     u = pwd.getpwnam(uname)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/setup.py
----------------------------------------------------------------------
diff --git a/Allura/setup.py b/Allura/setup.py
index 4f3cf84..f4ea03a 100644
--- a/Allura/setup.py
+++ b/Allura/setup.py
@@ -26,7 +26,7 @@ except ImportError:
 
 exec open('allura/version.py').read()
 
-PROJECT_DESCRIPTION='''
+PROJECT_DESCRIPTION = '''
 Allura is an open source implementation of a software "forge", a web site
 that manages source code repositories, bug reports, discussions, mailing
 lists, wiki pages, blogs and more for any number of individual projects.
@@ -43,7 +43,7 @@ setup(
     platforms=[
         'Linux',
         'MacOS X',
-        ],
+    ],
     classifiers=[
         'Development Status :: 4 - Beta',
         'Environment :: Web Environment',
@@ -53,7 +53,7 @@ setup(
         'Programming Language :: Python :: 2.6',
         'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
         'License :: OSI Approved :: Apache Software License',
-        ],
+    ],
     install_requires=[
         "TurboGears2",
         "pypeline",
@@ -72,7 +72,7 @@ setup(
         "feedparser >= 5.0.1",
         "oauth2 >= 1.2.0",
         "Ming >= 0.2.2dev-20110930",
-        ],
+    ],
     setup_requires=["PasteScript >= 1.7"],
     paster_plugins=['PasteScript', 'Pylons', 'TurboGears2', 'Ming'],
     packages=find_packages(exclude=['ez_setup']),
@@ -85,11 +85,11 @@ setup(
                              'templates/**.xml',
                              'templates/**.txt',
                              'public/*/*/*/*/*',
-                            ]},
+                             ]},
     message_extractors={'allura': [
-            ('**.py', 'python', None),
-            ('templates/**.html', 'genshi', None),
-            ('public/**', 'ignore', None)]},
+        ('**.py', 'python', None),
+        ('templates/**.html', 'genshi', None),
+        ('public/**', 'ignore', None)]},
 
     entry_points="""
     [paste.app_factory]

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/test-light.py
----------------------------------------------------------------------
diff --git a/Allura/test-light.py b/Allura/test-light.py
index f28eb3d..95e9f92 100644
--- a/Allura/test-light.py
+++ b/Allura/test-light.py
@@ -24,11 +24,14 @@ from allura.model.repo import CommitDoc, TreeDoc, TreesDoc, DiffInfoDoc
 from allura.model.repo import LastCommitDoc, CommitRunDoc
 from allura.model.repo_refresh import refresh_repo
 
+
 def main():
     if len(sys.argv) > 1:
         h.set_context('test')
-        c.project.install_app('Git', 'code', 'Code', init_from_url='/home/rick446/src/forge')
-        c.project.install_app('Hg', 'code2', 'Code2', init_from_url='/home/rick446/src/Kajiki')
+        c.project.install_app('Git', 'code', 'Code',
+                              init_from_url='/home/rick446/src/forge')
+        c.project.install_app('Hg', 'code2', 'Code2',
+                              init_from_url='/home/rick446/src/Kajiki')
     CommitDoc.m.remove({})
     TreeDoc.m.remove({})
     TreesDoc.m.remove({})

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/AlluraTest/alluratest/controller.py
----------------------------------------------------------------------
diff --git a/AlluraTest/alluratest/controller.py b/AlluraTest/alluratest/controller.py
index 0017c48..ca75ae5 100644
--- a/AlluraTest/alluratest/controller.py
+++ b/AlluraTest/alluratest/controller.py
@@ -86,7 +86,8 @@ def setup_functional_test(config=None, app_name=DFL_APP_NAME):
     wsgiapp = loadapp('config:%s#%s' % (config, app_name),
                       relative_to=conf_dir)
     return wsgiapp
-setup_functional_test.__test__ = False  # sometimes __test__ above isn't sufficient
+# sometimes __test__ above isn't sufficient
+setup_functional_test.__test__ = False
 
 
 def setup_unit_test():
@@ -96,10 +97,11 @@ def setup_unit_test():
     except:
         pass
     REGISTRY.prepare()
-    REGISTRY.register(ew.widget_context, ew.core.WidgetContext('http', ew.ResourceManager()))
+    REGISTRY.register(ew.widget_context,
+                      ew.core.WidgetContext('http', ew.ResourceManager()))
     REGISTRY.register(g, Globals())
     REGISTRY.register(c, mock.Mock())
-    REGISTRY.register(url, lambda:None)
+    REGISTRY.register(url, lambda: None)
     REGISTRY.register(response, Response())
     REGISTRY.register(session, beaker.session.SessionObject({}))
     REGISTRY.register(allura.credentials, allura.lib.security.Credentials())
@@ -123,7 +125,8 @@ class TestController(object):
 
     def setUp(self):
         """Method called by nose before running each test"""
-        self.app = ValidatingTestApp(setup_functional_test(app_name=self.application_under_test))
+        self.app = ValidatingTestApp(
+            setup_functional_test(app_name=self.application_under_test))
         if self.validate_skip:
             self.app.validate_skip = self.validate_skip
         if asbool(tg.config.get('smtp.mock')):
@@ -164,8 +167,9 @@ class TestRestApiBase(TestController):
 
         return self._token_cache[username]
 
-    def _api_getpost(self, method, path, api_key=None, api_timestamp=None, api_signature=None,
-                 wrap_args=None, user='test-admin', status=None, **params):
+    def _api_getpost(
+            self, method, path, api_key=None, api_timestamp=None, api_signature=None,
+            wrap_args=None, user='test-admin', status=None, **params):
         '''
         If you need to use one of the method kwargs as a URL parameter,
         pass params={...} as a dict instead of **kwargs
@@ -177,13 +181,16 @@ class TestRestApiBase(TestController):
         if status is None:
             status = [200, 201, 301, 302, 400, 403, 404]
         params = variabledecode.variable_encode(params, add_repetitions=False)
-        if api_key: params['api_key'] = api_key
-        if api_timestamp: params['api_timestamp'] = api_timestamp
-        if api_signature: params['api_signature'] = api_signature
+        if api_key:
+            params['api_key'] = api_key
+        if api_timestamp:
+            params['api_timestamp'] = api_timestamp
+        if api_signature:
+            params['api_signature'] = api_signature
 
         params = self.token(user).sign_request(path, params)
 
-        fn = self.app.post if method=='POST' else self.app.get
+        fn = self.app.post if method == 'POST' else self.app.get
 
         response = fn(
             str(path),
@@ -194,10 +201,12 @@ class TestRestApiBase(TestController):
         else:
             return response
 
-    def api_get(self, path, api_key=None, api_timestamp=None, api_signature=None,
-                 wrap_args=None, user='test-admin', status=None, **params):
+    def api_get(
+            self, path, api_key=None, api_timestamp=None, api_signature=None,
+            wrap_args=None, user='test-admin', status=None, **params):
         return self._api_getpost('GET', path, api_key, api_timestamp, api_signature, wrap_args, user, status, **params)
 
-    def api_post(self, path, api_key=None, api_timestamp=None, api_signature=None,
-                 wrap_args=None, user='test-admin', status=None, **params):
+    def api_post(
+            self, path, api_key=None, api_timestamp=None, api_signature=None,
+            wrap_args=None, user='test-admin', status=None, **params):
         return self._api_getpost('POST', path, api_key, api_timestamp, api_signature, wrap_args, user, status, **params)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/AlluraTest/alluratest/test_syntax.py
----------------------------------------------------------------------
diff --git a/AlluraTest/alluratest/test_syntax.py b/AlluraTest/alluratest/test_syntax.py
index b72052c..a4c03ee 100644
--- a/AlluraTest/alluratest/test_syntax.py
+++ b/AlluraTest/alluratest/test_syntax.py
@@ -22,6 +22,7 @@ import sys
 
 toplevel_dir = os.path.abspath(os.path.dirname(__file__) + "/../..")
 
+
 def run(cmd):
     proc = Popen(cmd, shell=True, cwd=toplevel_dir, stdout=PIPE, stderr=PIPE)
     # must capture & reprint stdount, so that nosetests can capture it
@@ -34,11 +35,14 @@ find_py = "find Allura Forge* -name '*.py'"
 
 # a recipe from itertools doc
 from itertools import izip_longest
+
+
 def grouper(n, iterable, fillvalue=None):
     "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
     args = [iter(iterable)] * n
     return izip_longest(fillvalue=fillvalue, *args)
 
+
 def test_pyflakes():
     # skip some that aren't critical errors
     skips = [
@@ -47,7 +51,8 @@ def test_pyflakes():
         'assigned to but never used',
         '__version__',
     ]
-    proc = Popen(find_py, shell=True, cwd=toplevel_dir, stdout=PIPE, stderr=PIPE)
+    proc = Popen(find_py, shell=True, cwd=toplevel_dir,
+                 stdout=PIPE, stderr=PIPE)
     (find_stdout, stderr) = proc.communicate()
     sys.stderr.write(stderr)
     assert proc.returncode == 0, proc.returncode
@@ -57,26 +62,30 @@ def test_pyflakes():
     all_files = [f for f in find_stdout.split('\n')
                  if '/migrations/' not in f and f.strip()]
     for files in grouper(20, all_files, fillvalue=''):
-        cmd = "pyflakes " + ' '.join(files) + " | grep -v '" + "' | grep -v '".join(skips) + "'"
-        #print 'Command was: %s' % cmd
+        cmd = "pyflakes " + \
+            ' '.join(files) + " | grep -v '" + \
+            "' | grep -v '".join(skips) + "'"
+        # print 'Command was: %s' % cmd
         retval = run(cmd)
         if retval != 1:
             print
-            #print 'Command was: %s' % cmd
+            # print 'Command was: %s' % cmd
             print 'Returned %s' % retval
             error = True
 
     if error:
         raise Exception('pyflakes failure, see stdout')
 
+
 def test_no_local_tz_functions():
-    if run(find_py + " | xargs grep '\.now(' ") not in [1,123]:
+    if run(find_py + " | xargs grep '\.now(' ") not in [1, 123]:
         raise Exception("These should use .utcnow()")
-    if run(find_py + " | xargs grep '\.fromtimestamp(' ") not in [1,123]:
+    if run(find_py + " | xargs grep '\.fromtimestamp(' ") not in [1, 123]:
         raise Exception("These should use .utcfromtimestamp()")
-    if run(find_py + " | xargs grep 'mktime(' ") not in [1,123]:
+    if run(find_py + " | xargs grep 'mktime(' ") not in [1, 123]:
         raise Exception("These should use calendar.timegm()")
 
+
 def test_no_prints():
     skips = [
         '/tests/',
@@ -92,6 +101,7 @@ def test_no_prints():
     if run(find_py + " | grep -v '" + "' | grep -v '".join(skips) + "' | xargs grep -v '^ *#' | grep 'print ' | grep -E -v '(pprint|#pragma: ?printok)' ") != 1:
         raise Exception("These should use logging instead of print")
 
+
 def test_no_tabs():
-    if run(find_py + " | xargs grep '	' ") not in [1,123]:
+    if run(find_py + " | xargs grep '	' ") not in [1, 123]:
         raise Exception('These should not use tab chars')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/AlluraTest/alluratest/validation.py
----------------------------------------------------------------------
diff --git a/AlluraTest/alluratest/validation.py b/AlluraTest/alluratest/validation.py
index 9fbdd84..6742594 100644
--- a/AlluraTest/alluratest/validation.py
+++ b/AlluraTest/alluratest/validation.py
@@ -49,7 +49,9 @@ from allura.lib import utils
 
 log = logging.getLogger(__name__)
 
+
 class Config(object):
+
     "Config to encapsulate flexible/complex test enabled/disabled rules."
     _instance = None
 
@@ -68,7 +70,8 @@ class Config(object):
         if not self.ini_config:
             from . import controller
             import ConfigParser
-            conf = ConfigParser.ConfigParser({'validate_html5': 'false', 'validate_inlinejs': 'false'})
+            conf = ConfigParser.ConfigParser(
+                {'validate_html5': 'false', 'validate_inlinejs': 'false'})
             conf.read(controller.get_config_file())
             self.ini_config = conf
         return self.ini_config
@@ -88,7 +91,8 @@ class Config(object):
         elif env_var is not None:
             return val_type in env_var.split(',')
 
-        enabled = self.test_ini.getboolean('validation', 'validate_' + val_type)
+        enabled = self.test_ini.getboolean(
+            'validation', 'validate_' + val_type)
         return enabled
 
     def fail_on_validation(self, val_type):
@@ -99,7 +103,8 @@ class Config(object):
 
 
 def report_validation_error(val_name, filename, message):
-    message = '%s Validation errors (%s):\n%s\n' % (val_name, filename, message)
+    message = '%s Validation errors (%s):\n%s\n' % (
+        val_name, filename, message)
     if Config.instance().fail_on_validation(val_name):
         ok_(False, message)
     else:
@@ -147,7 +152,7 @@ def validate_html5(html_or_response):
     else:
         html = html_or_response
     register_openers()
-    params = [("out","text"),("content",html)]
+    params = [("out", "text"), ("content", html)]
     datagen, headers = multipart_encode(params)
     request = urllib2.Request("http://html5.validator.nu/", datagen, headers)
     count = 3
@@ -162,7 +167,7 @@ def validate_html5(html_or_response):
                 sys.stderr.write('WARNING: ' + resp + '\n')
                 break
 
-    resp = resp.replace('“','"').replace('”','"').replace('–','-')
+    resp = resp.replace('“', '"').replace('”', '"').replace('–', '-')
 
     ignored_errors = [
         'Required attributes missing on element "object"',
@@ -175,7 +180,7 @@ def validate_html5(html_or_response):
 
     if 'Error:' in resp:
         fname = dump_to_file('html5-', html)
-        message = resp.decode('ascii','ignore')
+        message = resp.decode('ascii', 'ignore')
         report_validation_error('html5', fname, message)
 
 
@@ -207,8 +212,10 @@ def validate_js(html_or_response):
     basedir = path.dirname(path.abspath(__file__))
     jslint_dir = basedir + '/../jslint'
     fname = dump_to_file('jslint-', html)
-    cmd = 'java -jar ' + jslint_dir + '/js.jar '+ jslint_dir +'/jslint.js ' + fname
-    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    cmd = 'java -jar ' + jslint_dir + '/js.jar ' + \
+        jslint_dir + '/jslint.js ' + fname
+    p = subprocess.Popen(cmd, shell=True,
+                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     stdout, stderr = p.communicate(html)
     if stdout.startswith('jslint: No problems found'):
         os.unlink(fname)
@@ -241,6 +248,7 @@ class AntiSpamTestApp(TestApp):
             kwargs['params'] = params
         return super(AntiSpamTestApp, self).post(*args, **kwargs)
 
+
 class PostParamCheckingTestApp(AntiSpamTestApp):
 
     def _validate_params(self, params, method):
@@ -251,9 +259,12 @@ class PostParamCheckingTestApp(AntiSpamTestApp):
             params = params.items()
         for k, v in params:
             if not isinstance(k, basestring):
-                raise TypeError('%s key %s is %s, not str' % (method, k, type(k)))
+                raise TypeError('%s key %s is %s, not str' %
+                                (method, k, type(k)))
             if not isinstance(v, (basestring, webtest.app.File)):
-                raise TypeError('%s key %s has value %s of type %s, not str. ' % (method, k, v, type(v)))
+                raise TypeError(
+                    '%s key %s has value %s of type %s, not str. ' %
+                    (method, k, v, type(v)))
 
     def get(self, *args, **kwargs):
         self._validate_params(kwargs.get('params'), 'get')
@@ -263,6 +274,7 @@ class PostParamCheckingTestApp(AntiSpamTestApp):
         self._validate_params(kwargs.get('params'), 'post')
         return super(PostParamCheckingTestApp, self).post(*args, **kwargs)
 
+
 class ValidatingTestApp(PostParamCheckingTestApp):
 
     # Subclasses may set this to True to skip validation altogether
@@ -286,7 +298,7 @@ class ValidatingTestApp(PostParamCheckingTestApp):
             pass
         elif content_type.startswith('application/json'):
             validate_json(content)
-        elif content_type.startswith(('application/x-javascript','application/javascript', 'text/javascript')):
+        elif content_type.startswith(('application/x-javascript', 'application/javascript', 'text/javascript')):
             validate_js(content)
         elif content_type.startswith('application/xml'):
             import feedparser

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/AlluraTest/setup.py
----------------------------------------------------------------------
diff --git a/AlluraTest/setup.py b/AlluraTest/setup.py
index 421cd39..9958a75 100644
--- a/AlluraTest/setup.py
+++ b/AlluraTest/setup.py
@@ -16,14 +16,16 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 setup(name='AlluraTest',
       version='0.1',
       description="Allura testing support",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',
@@ -33,7 +35,7 @@ setup(name='AlluraTest',
       include_package_data=True,
       zip_safe=False,
       install_requires=[
-        "poster",
+          "poster",
           # -*- Extra requirements: -*-
       ]
       )

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeActivity/forgeactivity/config/resources.py
----------------------------------------------------------------------
diff --git a/ForgeActivity/forgeactivity/config/resources.py b/ForgeActivity/forgeactivity/config/resources.py
index 11c100d..5d98bd3 100644
--- a/ForgeActivity/forgeactivity/config/resources.py
+++ b/ForgeActivity/forgeactivity/config/resources.py
@@ -17,6 +17,7 @@
 
 import pkg_resources
 
+
 def register_ew_resources(manager):
     manager.register_directory(
         'activity_js', pkg_resources.resource_filename('forgeactivity', 'widgets/resources/js'))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeActivity/forgeactivity/main.py
----------------------------------------------------------------------
diff --git a/ForgeActivity/forgeactivity/main.py b/ForgeActivity/forgeactivity/main.py
index 3a60190..9188b66 100644
--- a/ForgeActivity/forgeactivity/main.py
+++ b/ForgeActivity/forgeactivity/main.py
@@ -38,6 +38,7 @@ log = logging.getLogger(__name__)
 
 
 class ForgeActivityApp(Application):
+
     """Project Activity page for projects."""
     __version__ = version.__version__
     default_mount_point = 'activity'
@@ -49,19 +50,22 @@ class ForgeActivityApp(Application):
         self.root = ForgeActivityController(self)
         self.api_root = ForgeActivityRestController(self)
 
-    def admin_menu(self): # pragma no cover
+    def admin_menu(self):  # pragma no cover
         return []
 
     def install(self, project):
-        pass # pragma no cover
+        pass  # pragma no cover
 
     def uninstall(self, project):
-        pass # pragma no cover
+        pass  # pragma no cover
+
 
 class W:
     follow_toggle = FollowToggle()
 
+
 class ForgeActivityController(BaseController):
+
     def __init__(self, app, *args, **kw):
         super(ForgeActivityController, self).__init__(*args, **kw)
         self.app = app
@@ -77,7 +81,8 @@ class ForgeActivityController(BaseController):
 
     def _get_activities_data(self, **kw):
         activity_enabled = config.get('activitystream.enabled', False)
-        activity_enabled = request.cookies.get('activitystream.enabled', activity_enabled)
+        activity_enabled = request.cookies.get(
+            'activitystream.enabled', activity_enabled)
         activity_enabled = asbool(activity_enabled)
         if not activity_enabled:
             raise exc.HTTPNotFound()
@@ -92,8 +97,8 @@ class ForgeActivityController(BaseController):
 
         following = g.director.is_connected(c.user, followee)
         timeline = g.director.get_timeline(followee, page=kw.get('page', 0),
-                limit=kw.get('limit', 100), actor_only=actor_only,
-                filter_func=perm_check(c.user))
+                                           limit=kw.get('limit', 100), actor_only=actor_only,
+                                           filter_func=perm_check(c.user))
         return dict(followee=followee, following=following, timeline=timeline)
 
     @expose('jinja:forgeactivity:templates/index.html')
@@ -108,11 +113,11 @@ class ForgeActivityController(BaseController):
         response.headers['Content-Type'] = ''
         response.content_type = 'application/xml'
         d = {
-                'title': 'Activity for %s' % data['followee'].shortname,
-                'link': h.absurl(self.app.url),
-                'description': 'Recent activity for %s' % data['followee'].shortname,
-                'language': u'en',
-            }
+            'title': 'Activity for %s' % data['followee'].shortname,
+            'link': h.absurl(self.app.url),
+            'description': 'Recent activity for %s' % data['followee'].shortname,
+            'language': u'en',
+        }
         if request.environ['PATH_INFO'].endswith('.atom'):
             feed = FG.Atom1Feed(**d)
         else:
@@ -121,23 +126,24 @@ class ForgeActivityController(BaseController):
             url = h.absurl(t.obj.activity_url.encode('utf-8'))
             feed.add_item(title=u'%s %s %s%s' % (
                                 t.actor.activity_name,
-                                t.verb,
-                                t.obj.activity_name,
-                                ' on %s' % t.target.activity_name if t.target.activity_name else '',
-                            ),
-                          link=url,
-                          pubdate=t.published,
-                          description=t.obj.activity_extras.get('summary'),
-                          unique_id=url,
-                          author_name=t.actor.activity_name,
-                          author_link=h.absurl(t.actor.activity_url))
+                t.verb,
+                t.obj.activity_name,
+                ' on %s' % t.target.activity_name if t.target.activity_name else '',
+            ),
+                link=url,
+                pubdate=t.published,
+                description=t.obj.activity_extras.get('summary'),
+                unique_id=url,
+                author_name=t.actor.activity_name,
+                author_link=h.absurl(t.actor.activity_url))
         return feed.writeString('utf-8')
 
     @expose('json:')
     @validate(W.follow_toggle)
     def follow(self, follow, **kw):
         activity_enabled = config.get('activitystream.enabled', False)
-        activity_enabled = request.cookies.get('activitystream.enabled', activity_enabled)
+        activity_enabled = request.cookies.get(
+            'activitystream.enabled', activity_enabled)
         activity_enabled = asbool(activity_enabled)
         if not activity_enabled:
             raise exc.HTTPNotFound()
@@ -167,6 +173,7 @@ class ForgeActivityController(BaseController):
 
 
 class ForgeActivityRestController(BaseController):
+
     def __init__(self, app, *args, **kw):
         super(ForgeActivityRestController, self).__init__(*args, **kw)
         self.app = app
@@ -175,17 +182,17 @@ class ForgeActivityRestController(BaseController):
     def index(self, **kw):
         data = self.app.root._get_activities_data(**kw)
         return {
-                'following': data['following'],
-                'followee': {
-                    'activity_name': data['followee'].shortname,
-                    'activity_url': data['followee'].url(),
-                    'activity_extras': {},
-                },
-                'timeline': [{
-                        'published': '%s UTC' % a.published,
-                        'actor': a.actor._deinstrument(),
-                        'verb': a.verb,
-                        'obj': a.obj._deinstrument(),
-                        'target': a.target._deinstrument(),
-                    } for a in data['timeline']],
-            }
+            'following': data['following'],
+            'followee': {
+                'activity_name': data['followee'].shortname,
+                'activity_url': data['followee'].url(),
+                'activity_extras': {},
+            },
+            'timeline': [{
+                'published': '%s UTC' % a.published,
+                'actor': a.actor._deinstrument(),
+                'verb': a.verb,
+                'obj': a.obj._deinstrument(),
+                'target': a.target._deinstrument(),
+            } for a in data['timeline']],
+        }
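
For readers following the REST change above: index() returns a plain dict that the framework serializes to JSON. A rough, hand-written sketch of that shape follows; all values are invented for illustration, and the real actor/obj/target entries are de-instrumented activity documents rather than the flat dicts shown here.

    # Illustrative only -- field values are made up; the real
    # actor/obj/target values come from _deinstrument()ed documents.
    example_response = {
        'following': True,
        'followee': {
            'activity_name': 'test-project',
            'activity_url': '/p/test-project/',
            'activity_extras': {},
        },
        'timeline': [{
            'published': '2013-12-31 12:00:00 UTC',
            'actor': {'activity_name': 'admin1'},
            'verb': 'posted',
            'obj': {'activity_name': 'a comment'},
            'target': {'activity_name': 'Ticket #1'},
        }],
    }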


[27/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/gravatar.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/gravatar.py b/Allura/allura/lib/gravatar.py
index be8ad3c..0e03be2 100644
--- a/Allura/allura/lib/gravatar.py
+++ b/Allura/allura/lib/gravatar.py
@@ -15,9 +15,12 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-import re, urllib, hashlib
+import re
+import urllib
+import hashlib
+
+_wrapped_email = re.compile(r'.*<(.+)>')
 
-_wrapped_email=re.compile(r'.*<(.+)>')
 
 def id(email):
     """Turn an email address into a Gravatar id as per <http://gravatar.com/site/implement/url>
@@ -33,6 +36,7 @@ def id(email):
         email = match.group(1)
     return hashlib.md5(email.strip().lower().encode('utf8')).hexdigest()
 
+
 def url(email=None, gravatar_id=None, **kw):
     """Build a complete gravatar URL with our favorite defaults.
 
@@ -73,8 +77,10 @@ def url(email=None, gravatar_id=None, **kw):
     assert gravatar_id or email
     if gravatar_id is None:
         gravatar_id = id(email)
-    if 'r' not in kw and 'rating' not in kw: kw['r'] = 'pg'
+    if 'r' not in kw and 'rating' not in kw:
+        kw['r'] = 'pg'
     return ('https://secure.gravatar.com/avatar/%s?%s' % (gravatar_id, urllib.urlencode(kw)))
 
+
 def for_user(user):
     return url(user.get_pref('email_address'))
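
As a quick reference for the gravatar helpers above, here is a standalone sketch of what id() and url() compute. The sample address and the size parameter are invented, and the real module also accepts a 'rating' keyword in place of 'r'.

    # Standalone sketch mirroring allura.lib.gravatar; not part of the diff.
    import hashlib
    import urllib

    def gravatar_id(email):
        # Gravatar ids are the md5 hex digest of the trimmed, lower-cased address.
        return hashlib.md5(email.strip().lower().encode('utf8')).hexdigest()

    def gravatar_url(email, **kw):
        kw.setdefault('r', 'pg')  # same default rating as the helper
        return 'https://secure.gravatar.com/avatar/%s?%s' % (
            gravatar_id(email), urllib.urlencode(kw))

    print gravatar_url('Someone@Example.com', s=48)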

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/helpers.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/helpers.py b/Allura/allura/lib/helpers.py
index 0b4e2dc..ca1d386 100644
--- a/Allura/allura/lib/helpers.py
+++ b/Allura/allura/lib/helpers.py
@@ -67,8 +67,10 @@ re_project_name = re.compile(r'^[a-z][-a-z0-9]{2,14}$')
 # validates tool mount point names
 re_tool_mount_point = re.compile(r'^[a-z][-a-z0-9]{0,62}$')
 re_tool_mount_point_fragment = re.compile(r'[a-z][-a-z0-9]*')
-re_relaxed_tool_mount_point = re.compile(r'^[a-zA-Z0-9][-a-zA-Z0-9_\.\+]{0,62}$')
-re_relaxed_tool_mount_point_fragment = re.compile(r'[a-zA-Z0-9][-a-zA-Z0-9_\.\+]*')
+re_relaxed_tool_mount_point = re.compile(
+    r'^[a-zA-Z0-9][-a-zA-Z0-9_\.\+]{0,62}$')
+re_relaxed_tool_mount_point_fragment = re.compile(
+    r'[a-zA-Z0-9][-a-zA-Z0-9_\.\+]*')
 
 re_clean_vardec_key = re.compile(r'''\A
 ( # first part
@@ -101,6 +103,7 @@ re_angle_bracket_open = re.compile('<')
 re_angle_bracket_close = re.compile('>')
 md_chars_matcher_all = re.compile(r"([`\*_{}\[\]\(\)#!\\\.+-])")
 
+
 def make_safe_path_portion(ustr, relaxed=True):
     """Return an ascii representation of ``ustr`` that conforms to mount point
     naming :attr:`rules <re_tool_mount_point_fragment>`.
@@ -113,7 +116,7 @@ def make_safe_path_portion(ustr, relaxed=True):
 
     """
     regex = (re_relaxed_tool_mount_point_fragment if relaxed else
-                re_tool_mount_point_fragment)
+             re_tool_mount_point_fragment)
     ustr = really_unicode(ustr)
     s = ustr.encode('latin1', 'ignore')
     s = AsciiDammit.asciiDammit(s)
@@ -123,26 +126,31 @@ def make_safe_path_portion(ustr, relaxed=True):
     s = s.replace('--', '-')
     return s
 
+
 def monkeypatch(*objs):
     def patchem(func):
         for obj in objs:
             setattr(obj, func.__name__, func)
     return patchem
 
+
 def urlquote(url, safe="/"):
     try:
         return urllib.quote(str(url), safe=safe)
     except UnicodeEncodeError:
         return urllib.quote(url.encode('utf-8'), safe=safe)
 
+
 def urlquoteplus(url, safe=""):
     try:
         return urllib.quote_plus(str(url), safe=safe)
     except UnicodeEncodeError:
         return urllib.quote_plus(url.encode('utf-8'), safe=safe)
 
+
 def _attempt_encodings(s, encodings):
-    if s is None: return u''
+    if s is None:
+        return u''
     for enc in encodings:
         try:
             if enc is None:
@@ -154,6 +162,7 @@ def _attempt_encodings(s, encodings):
     # Return the repr of the str -- should always be safe
     return unicode(repr(str(s)))[1:-1]
 
+
 def really_unicode(s):
     # Try to guess the encoding
     def encodings():
@@ -164,6 +173,7 @@ def really_unicode(s):
         yield 'latin-1'
     return _attempt_encodings(s, encodings())
 
+
 def find_user(email=None, username=None, display_name=None):
     from allura import model as M
     user = None
@@ -175,6 +185,7 @@ def find_user(email=None, username=None, display_name=None):
         user = M.User.by_display_name(display_name)
     return user
 
+
 def find_project(url_path):
     from allura import model as M
     for n in M.Neighborhood.query.find():
@@ -182,45 +193,55 @@ def find_project(url_path):
             break
     else:
         return None, url_path
-    project_part = n.shortname_prefix + url_path[len(n.url_prefix):] # easily off-by-one, might be better to join together everything but url_prefix
+    # easily off-by-one, might be better to join together everything but
+    # url_prefix
+    project_part = n.shortname_prefix + url_path[len(n.url_prefix):]
     parts = project_part.split('/')
     length = len(parts)
     while length:
         shortname = '/'.join(parts[:length])
         p = M.Project.query.get(shortname=shortname, deleted=False,
                                 neighborhood_id=n._id)
-        if p: return p, parts[length:]
+        if p:
+            return p, parts[length:]
         length -= 1
     return None, url_path.split('/')
 
+
 def make_neighborhoods(ids):
     return _make_xs('Neighborhood', ids)
 
+
 def make_projects(ids):
     return _make_xs('Project', ids)
 
+
 def make_users(ids):
     return _make_xs('User', ids)
 
+
 def make_roles(ids):
     return _make_xs('ProjectRole', ids)
 
+
 def _make_xs(X, ids):
     from allura import model as M
     X = getattr(M, X)
     ids = list(ids)
     results = dict(
         (r._id, r)
-        for r in X.query.find(dict(_id={'$in':ids})))
+        for r in X.query.find(dict(_id={'$in': ids})))
     result = (results.get(i) for i in ids)
     return (r for r in result if r is not None)
 
+
 def make_app_admin_only(app):
     from allura.model.auth import ProjectRole
     admin_role = ProjectRole.by_name('Admin', app.project)
     for ace in [ace for ace in app.acl if ace.role_id != admin_role._id]:
         app.acl.remove(ace)
 
+
 @contextmanager
 def push_config(obj, **kw):
     saved_attrs = {}
@@ -239,12 +260,14 @@ def push_config(obj, **kw):
         for k in new_attrs:
             delattr(obj, k)
 
+
 def sharded_path(name, num_parts=2):
     parts = [
         name[:i + 1]
-        for i in range(num_parts) ]
+        for i in range(num_parts)]
     return '/'.join(parts)
 
+
 def set_context(project_shortname_or_id, mount_point=None, app_config_id=None, neighborhood=None):
     from allura import model
     try:
@@ -258,19 +281,22 @@ def set_context(project_shortname_or_id, mount_point=None, app_config_id=None, n
             n = model.Neighborhood.query.get(name=neighborhood)
             if n is None:
                 try:
-                    n = model.Neighborhood.query.get(_id=ObjectId(str(neighborhood)))
+                    n = model.Neighborhood.query.get(
+                        _id=ObjectId(str(neighborhood)))
                 except InvalidId:
                     pass
             if n is None:
-                raise exc.NoSuchNeighborhoodError("Couldn't find neighborhood %s" %
-                                      repr(neighborhood))
+                raise exc.NoSuchNeighborhoodError(
+                    "Couldn't find neighborhood %s" %
+                    repr(neighborhood))
             neighborhood = n
 
-        query = dict(shortname=project_shortname_or_id, neighborhood_id=neighborhood._id)
+        query = dict(shortname=project_shortname_or_id,
+                     neighborhood_id=neighborhood._id)
         p = model.Project.query.get(**query)
     if p is None:
         raise exc.NoSuchProjectError("Couldn't find project %s nbhd %s" %
-                                 (project_shortname_or_id, neighborhood))
+                                     (project_shortname_or_id, neighborhood))
     c.project = p
 
     if app_config_id is None:
@@ -281,6 +307,7 @@ def set_context(project_shortname_or_id, mount_point=None, app_config_id=None, n
         app_config = model.AppConfig.query.get(_id=app_config_id)
         c.app = p.app_instance(app_config)
 
+
 @contextmanager
 def push_context(project_id, mount_point=None, app_config_id=None, neighborhood=None):
     project = getattr(c, 'project', ())
@@ -298,6 +325,7 @@ def push_context(project_id, mount_point=None, app_config_id=None, neighborhood=
         else:
             c.app = app
 
+
 def encode_keys(d):
     '''Encodes the unicode keys of d, making the result
     a valid kwargs argument'''
@@ -305,29 +333,34 @@ def encode_keys(d):
         (k.encode('utf-8'), v)
         for k, v in d.iteritems())
 
+
 def vardec(fun):
     def vardec_hook(remainder, params):
         new_params = variable_decode(dict(
-                (k, v) for k, v in params.items()
-                if re_clean_vardec_key.match(k)))
+            (k, v) for k, v in params.items()
+            if re_clean_vardec_key.match(k)))
         params.update(new_params)
     before_validate(vardec_hook)(fun)
     return fun
 
+
 def nonce(length=4):
     return sha1(ObjectId().binary + os.urandom(10)).hexdigest()[:length]
 
+
 def cryptographic_nonce(length=40):
     hex_format = '%.2x' * length
     return hex_format % tuple(map(ord, os.urandom(length)))
 
+
 def ago(start_time, show_date_after=7):
     """
     Return time since starting time as a rounded, human readable string.
     E.g., "3 hours ago"
     """
 
-    if start_time is None: return 'unknown'
+    if start_time is None:
+        return 'unknown'
     granularities = ['century', 'decade', 'year', 'month', 'day', 'hour',
                      'minute']
     end_time = datetime.utcnow()
@@ -343,15 +376,18 @@ def ago(start_time, show_date_after=7):
             break
     return ago + ' ago'
 
+
 def ago_ts(timestamp):
     return ago(datetime.utcfromtimestamp(timestamp))
 
+
 def ago_string(s):
     try:
         return ago(parse(s, ignoretz=True))
     except (ValueError, AttributeError):
         return 'unknown'
 
+
 class DateTimeConverter(FancyValidator):
 
     def _to_python(self, value, state):
@@ -363,7 +399,6 @@ class DateTimeConverter(FancyValidator):
             else:
                 raise
 
-
     def _from_python(self, value, state):
         return value.isoformat()
 
@@ -403,13 +438,14 @@ def diff_text(t1, t2, differ=None):
     result = []
     for tag, i1, i2, j1, j2 in differ.get_opcodes():
         if tag in ('delete', 'replace'):
-            result += [ '<del>' ] + t1_words[i1:i2] + [ '</del>' ]
+            result += ['<del>'] + t1_words[i1:i2] + ['</del>']
         if tag in ('insert', 'replace'):
-            result += [ '<ins>' ] + t2_words[j1:j2] + [ '</ins>' ]
+            result += ['<ins>'] + t2_words[j1:j2] + ['</ins>']
         if tag == 'equal':
             result += t1_words[i1:i2]
     return ' '.join(result).replace('\n', '<br/>\n')
 
+
 def gen_message_id(_id=None):
     if not _id:
         _id = nonce(40)
@@ -424,14 +460,18 @@ def gen_message_id(_id=None):
     return '%s@%s.sourceforge.net' % (
         addr, '.'.join(reversed(parts)))
 
+
 class ProxiedAttrMeta(type):
+
     def __init__(cls, name, bases, dct):
         for v in dct.itervalues():
             if isinstance(v, attrproxy):
                 v.cls = cls
 
+
 class attrproxy(object):
     cls = None
+
     def __init__(self, *attrs):
         self.attrs = attrs
 
@@ -448,12 +488,14 @@ class attrproxy(object):
 
     def __getattr__(self, name):
         if self.cls is None:
-            return promised_attrproxy(lambda:self.cls, name)
+            return promised_attrproxy(lambda: self.cls, name)
         return getattr(
             attrproxy(self.cls, *self.attrs),
             name)
 
+
 class promised_attrproxy(attrproxy):
+
     def __init__(self, promise, *attrs):
         super(promised_attrproxy, self).__init__(*attrs)
         self._promise = promise
@@ -465,14 +507,19 @@ class promised_attrproxy(attrproxy):
         cls = self._promise()
         return getattr(cls, name)
 
+
 class proxy(object):
+
     def __init__(self, obj):
         self._obj = obj
+
     def __getattr__(self, name):
         return getattr(self._obj, name)
+
     def __call__(self, *args, **kwargs):
         return self._obj(*args, **kwargs)
 
+
 def render_genshi_plaintext(template_name, **template_vars):
     assert os.path.exists(template_name)
     fd = open(template_name)
@@ -482,11 +529,12 @@ def render_genshi_plaintext(template_name, **template_vars):
         fd.close()
     filepath = os.path.dirname(template_name)
     tt = genshi.template.NewTextTemplate(tpl_text,
-            filepath=filepath, filename=template_name)
+                                         filepath=filepath, filename=template_name)
     stream = tt.generate(**template_vars)
     return stream.render(encoding='utf-8').decode('utf-8')
 
-site_url = None # cannot set it just yet since tg.config is empty
+site_url = None  # cannot set it just yet since tg.config is empty
+
 
 def full_url(url):
     """Make absolute URL from the relative one.
@@ -494,7 +542,8 @@ def full_url(url):
     global site_url
     if site_url is None:
         # XXX: add a separate tg option instead of re-using openid.realm
-        site_url = tg.config.get('openid.realm', 'https://newforge.sf.geek.net/')
+        site_url = tg.config.get(
+            'openid.realm', 'https://newforge.sf.geek.net/')
         site_url = site_url.replace('https:', 'http:')
         if not site_url.endswith('/'):
             site_url += '/'
@@ -502,26 +551,30 @@ def full_url(url):
         url = url[1:]
     return site_url + url
 
+
 @tg.expose(content_type='text/plain')
 def json_validation_error(controller, **kwargs):
     result = dict(status='Validation Error',
-                errors=c.validation_exception.unpack_errors(),
-                value=c.validation_exception.value,
-                params=kwargs)
+                  errors=c.validation_exception.unpack_errors(),
+                  value=c.validation_exception.value,
+                  params=kwargs)
     response.status = 400
     return json.dumps(result, indent=2)
 
+
 def pop_user_notifications(user=None):
     from allura import model as M
     if user is None:
         user = c.user
     mbox = M.Mailbox.query.get(user_id=user._id, is_flash=True)
     if mbox:
-        notifications = M.Notification.query.find(dict(_id={'$in':mbox.queue}))
+        notifications = M.Notification.query.find(
+            dict(_id={'$in': mbox.queue}))
         mbox.queue = []
         mbox.queue_empty = True
         for n in notifications:
-            M.Notification.query.remove({'_id': n._id}) # clean it up so it doesn't hang around
+            # clean it up so it doesn't hang around
+            M.Notification.query.remove({'_id': n._id})
             yield n
 
 
@@ -533,11 +586,12 @@ def config_with_prefix(d, prefix):
     return dict((k[plen:], v) for k, v in d.iteritems()
                 if k.startswith(prefix))
 
+
 @contextmanager
 def twophase_transaction(*engines):
     connections = [
         e.contextual_connect()
-        for e in engines ]
+        for e in engines]
     txns = []
     to_rollback = []
     try:
@@ -557,6 +611,7 @@ def twophase_transaction(*engines):
             txn.rollback()
         raise
 
+
 class log_action(object):
     extra_proto = dict(
         action=None,
@@ -617,7 +672,8 @@ class log_action(object):
                 result['username'] = '*system'
             try:
                 result['url'] = request.url
-                ip_address = request.headers.get('X_FORWARDED_FOR', request.remote_addr)
+                ip_address = request.headers.get(
+                    'X_FORWARDED_FOR', request.remote_addr)
                 if ip_address is not None:
                     ip_address = ip_address.split(',')[0].strip()
                     result['ip_address'] = ip_address
@@ -627,9 +683,11 @@ class log_action(object):
                 pass
             return result
         except:
-            self._logger.warning('Error logging to rtstats, some info may be missing', exc_info=True)
+            self._logger.warning(
+                'Error logging to rtstats, some info may be missing', exc_info=True)
             return result
 
+
 def paging_sanitizer(limit, page, total_count, zero_based_pages=True):
     """Return limit, page - both converted to int and constrained to
     valid ranges based on total_count.
@@ -646,7 +704,9 @@ def paging_sanitizer(limit, page, total_count, zero_based_pages=True):
 def _add_inline_line_numbers_to_text(text):
     markup_text = '<div class="codehilite"><pre>'
     for line_num, line in enumerate(text.splitlines(), 1):
-        markup_text = markup_text + '<span id="l%s" class="code_block"><span class="lineno">%s</span> %s</span>' % (line_num, line_num, line)
+        markup_text = markup_text + \
+            '<span id="l%s" class="code_block"><span class="lineno">%s</span> %s</span>' % (
+                line_num, line_num, line)
     markup_text = markup_text + '</pre></div>'
     return markup_text
 
@@ -662,16 +722,21 @@ def _add_table_line_numbers_to_text(text):
         return '\n'.join(map(_prepend_whitespaces, range(start, max_num), [max_num] * l))
 
     lines = text.splitlines(True)
-    linenumbers = '<td class="linenos"><div class="linenodiv"><pre>' + _len_to_str_column(len(lines)) + '</pre></div></td>'
-    markup_text = '<table class="codehilitetable"><tbody><tr>' + linenumbers + '<td class="code"><div class="codehilite"><pre>'
+    linenumbers = '<td class="linenos"><div class="linenodiv"><pre>' + \
+        _len_to_str_column(len(lines)) + '</pre></div></td>'
+    markup_text = '<table class="codehilitetable"><tbody><tr>' + \
+        linenumbers + '<td class="code"><div class="codehilite"><pre>'
     for line_num, line in enumerate(lines, 1):
-        markup_text = markup_text + '<span id="l%s" class="code_block">%s</span>' % (line_num, line)
+        markup_text = markup_text + \
+            '<span id="l%s" class="code_block">%s</span>' % (line_num, line)
     markup_text = markup_text + '</pre></div></td></tr></tbody></table>'
     return markup_text
 
 
 INLINE = 'inline'
 TABLE = 'table'
+
+
 def render_any_markup(name, text, code_mode=False, linenumbers_style=TABLE):
     """
     renders markdown using allura enhancements if file is in markdown format
@@ -698,6 +763,8 @@ def render_any_markup(name, text, code_mode=False, linenumbers_style=TABLE):
 # copied from jinja2 dev
 # latest release, 2.6, implements this incorrectly
 # can remove and use jinja2 implementation after upgrading to 2.7
+
+
 def do_filesizeformat(value, binary=False):
     """Format the value like a 'human-readable' file size (i.e. 13 kB,
 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
@@ -763,6 +830,7 @@ def datetimeformat(value, format='%Y-%m-%d %H:%M:%S'):
 @contextmanager
 def log_output(log):
     class Writer(object):
+
         def __init__(self, func):
             self.func = func
             self.closed = False
@@ -783,6 +851,7 @@ def log_output(log):
         sys.stdout = _stdout
         sys.stderr = _stderr
 
+
 def topological_sort(items, partial_order):
     """Perform topological sort.
        items is a list of items to be sorted.
@@ -793,7 +862,8 @@ def topological_sort(items, partial_order):
 
        Modified from: http://www.bitformation.com/art/python_toposort.html
     """
-    # Original topological sort code written by Ofer Faigon (www.bitformation.com) and used with permission
+    # Original topological sort code written by Ofer Faigon
+    # (www.bitformation.com) and used with permission
 
     def add_arc(graph, fromnode, tonode):
         """Add an arc to a graph. Can create multiple arcs.
@@ -814,8 +884,8 @@ def topological_sort(items, partial_order):
     # Note that our representation does not contain reference loops to
     # cause GC problems even when the represented graph contains loops,
     # because we keep the node names rather than references to the nodes.
-    graph = defaultdict(lambda:[0])
-    for a,b in partial_order:
+    graph = defaultdict(lambda: [0])
+    for a, b in partial_order:
         add_arc(graph, a, b)
 
     # Step 2 - find all roots (nodes with zero incoming arcs).
@@ -923,6 +993,7 @@ def null_contextmanager(*args, **kw):
 
 
 class exceptionless(object):
+
     '''Decorator making the decorated function return 'error_result' on any
     exceptions rather than propagating exceptions up the stack
     '''
@@ -933,13 +1004,15 @@ class exceptionless(object):
 
     def __call__(self, fun):
         fname = 'exceptionless(%s)' % fun.__name__
+
         def inner(*args, **kwargs):
             try:
                 return fun(*args, **kwargs)
             except Exception as e:
                 if self.log:
-                    self.log.exception('Error calling %s(args=%s, kwargs=%s): %s',
-                            fname, args, kwargs, str(e))
+                    self.log.exception(
+                        'Error calling %s(args=%s, kwargs=%s): %s',
+                        fname, args, kwargs, str(e))
                 return self.error_result
         inner.__name__ = fname
         return inner
@@ -961,7 +1034,7 @@ def urlopen(url, retries=3, codes=(408,), timeout=None):
             return urllib2.urlopen(url, timeout=timeout)
         except (urllib2.HTTPError, socket.timeout) as e:
             if attempts < retries and (isinstance(e, socket.timeout) or
-                    e.code in codes):
+                                       e.code in codes):
                 attempts += 1
                 continue
             else:
@@ -971,7 +1044,9 @@ def urlopen(url, retries=3, codes=(408,), timeout=None):
                     url_string = url
                 if timeout is None:
                     timeout = socket.getdefaulttimeout()
-                log.exception('Failed after %s retries on url with a timeout of %s: %s: %s', attempts, timeout, url_string, e)
+                log.exception(
+                    'Failed after %s retries on url with a timeout of %s: %s: %s',
+                    attempts, timeout, url_string, e)
                 raise e
 
 
@@ -1014,9 +1089,11 @@ def iter_entry_points(group, *a, **kw):
 
     """
     def active_eps():
-        disabled = aslist(tg.config.get('disable_entry_points.' + group), sep=',')
+        disabled = aslist(
+            tg.config.get('disable_entry_points.' + group), sep=',')
         return [ep for ep in pkg_resources.iter_entry_points(group, *a, **kw)
                 if ep.name not in disabled]
+
     def unique_eps(entry_points):
         by_name = defaultdict(list)
         for ep in entry_points:
@@ -1027,6 +1104,7 @@ def iter_entry_points(group, *a, **kw):
                 yield eps[0]
             else:
                 yield subclass(eps)
+
     def subclass(entry_points):
         loaded = dict((ep, ep.load()) for ep in entry_points)
         for ep, cls in loaded.iteritems():
@@ -1035,7 +1113,7 @@ def iter_entry_points(group, *a, **kw):
             if all([issubclass(cls, other) for other in others]):
                 return ep
         raise ImportError('Ambiguous [allura] entry points detected. ' +
-                'Multiple entry points with name "%s".' % entry_points[0].name)
+                          'Multiple entry points with name "%s".' % entry_points[0].name)
     return iter(unique_eps(active_eps()) if group == 'allura' else active_eps())
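
The helpers.py hunks above touch topological_sort(items, partial_order), whose docstring describes a list of items plus (before, after) pairs. Below is a minimal standalone equivalent (Kahn's algorithm) showing the intended behaviour; it is an illustration, not the helper's actual code, and returning None on a cycle is an assumed contract.

    # Minimal toposort in the spirit of helpers.topological_sort; for
    # illustration only.  A cycle yields None (an assumed contract).
    from collections import defaultdict, deque

    def toposort(items, partial_order):
        incoming = defaultdict(int)   # node -> count of incoming arcs
        children = defaultdict(list)  # node -> nodes that depend on it
        for a, b in partial_order:
            incoming[b] += 1
            children[a].append(b)
        queue = deque(i for i in items if incoming[i] == 0)
        result = []
        while queue:
            node = queue.popleft()
            result.append(node)
            for child in children[node]:
                incoming[child] -= 1
                if incoming[child] == 0:
                    queue.append(child)
        return result if len(result) == len(items) else None

    print toposort(['a', 'b', 'c'], [('a', 'b'), ('b', 'c')])  # ['a', 'b', 'c']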
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/macro.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/macro.py b/Allura/allura/lib/macro.py
index d4c974e..3f827ed 100644
--- a/Allura/allura/lib/macro.py
+++ b/Allura/allura/lib/macro.py
@@ -35,6 +35,8 @@ from . import security
 log = logging.getLogger(__name__)
 
 _macros = {}
+
+
 class macro(object):
 
     def __init__(self, context=None):
@@ -44,6 +46,7 @@ class macro(object):
         _macros[func.__name__] = (func, self._context)
         return func
 
+
 class parse(object):
 
     def __init__(self, context):
@@ -54,10 +57,13 @@ class parse(object):
             if s.startswith('quote '):
                 return '[[' + s[len('quote '):] + ']]'
             try:
-                parts = [ unicode(x, 'utf-8') for x in shlex.split(s.encode('utf-8')) ]
-                if not parts: return '[[' + s + ']]'
+                parts = [unicode(x, 'utf-8')
+                         for x in shlex.split(s.encode('utf-8'))]
+                if not parts:
+                    return '[[' + s + ']]'
                 macro = self._lookup_macro(parts[0])
-                if not macro: return  '[[' + s + ']]'
+                if not macro:
+                    return '[[' + s + ']]'
                 for t in parts[1:]:
                     if '=' not in t:
                         return '[-%s: missing =-]' % ' '.join(parts)
@@ -81,6 +87,7 @@ class parse(object):
         else:
             return None
 
+
 @macro('neighborhood-wiki')
 def neighborhood_feeds(tool_name, max_number=5, sort='pubdate'):
     from allura import model as M
@@ -91,17 +98,18 @@ def neighborhood_feeds(tool_name, max_number=5, sort='pubdate'):
             neighborhood_id=c.project.neighborhood._id))
     feed = feed.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
     output = ((dict(
-                href=item.link,
-                title=item.title,
-                author=item.author_name,
-                ago=h.ago(item.pubdate),
-                description=g.markdown.cached_convert(item, 'description')))
+        href=item.link,
+        title=item.title,
+        author=item.author_name,
+        ago=h.ago(item.pubdate),
+        description=g.markdown.cached_convert(item, 'description')))
         for item in feed)
     feeds = NeighborhoodFeeds(feeds=output)
     g.resource_manager.register(feeds)
     response = feeds.display(feeds=output)
     return response
 
+
 @macro('neighborhood-wiki')
 def neighborhood_blog_posts(max_number=5, sort='timestamp', summary=False):
     from forgeblog import model as BM
@@ -111,47 +119,50 @@ def neighborhood_blog_posts(max_number=5, sort='timestamp', summary=False):
         state='published'))
     posts = posts.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
     output = ((dict(
-                href=post.url(),
-                title=post.title,
-                author=post.author().display_name,
-                ago=h.ago(post.timestamp),
-                description=summary and '&nbsp;' or g.markdown.cached_convert(post, 'text')))
+        href=post.url(),
+        title=post.title,
+        author=post.author().display_name,
+        ago=h.ago(post.timestamp),
+        description=summary and '&nbsp;' or g.markdown.cached_convert(post, 'text')))
         for post in posts if post.app and
-                             security.has_access(post, 'read', project=post.app.project)() and
-                             security.has_access(post.app.project, 'read', project=post.app.project)())
+        security.has_access(post, 'read', project=post.app.project)() and
+        security.has_access(post.app.project, 'read', project=post.app.project)())
 
     posts = BlogPosts(posts=output)
     g.resource_manager.register(posts)
     response = posts.display(posts=output)
     return response
 
+
 @macro()
 def project_blog_posts(max_number=5, sort='timestamp', summary=False, mount_point=None):
     from forgeblog import model as BM
     from allura.lib.widgets.macros import BlogPosts
     app_config_ids = []
     for conf in c.project.app_configs:
-        if conf.tool_name.lower() == 'blog' and (mount_point is None or conf.options.mount_point==mount_point):
+        if conf.tool_name.lower() == 'blog' and (mount_point is None or conf.options.mount_point == mount_point):
             app_config_ids.append(conf._id)
     posts = BM.BlogPost.query.find({
         'app_config_id': {'$in': app_config_ids},
-        'state':'published',
+        'state': 'published',
     })
     posts = posts.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
     output = ((dict(
-                href=post.url(),
-                title=post.title,
-                author=post.author().display_name,
-                ago=h.ago(post.timestamp),
-                description=summary and '&nbsp;' or g.markdown.cached_convert(post, 'text')))
+        href=post.url(),
+        title=post.title,
+        author=post.author().display_name,
+        ago=h.ago(post.timestamp),
+        description=summary and '&nbsp;' or g.markdown.cached_convert(post, 'text')))
         for post in posts if security.has_access(post, 'read', project=post.app.project)() and
-                             security.has_access(post.app.project, 'read', project=post.app.project)())
+        security.has_access(post.app.project, 'read', project=post.app.project)())
     posts = BlogPosts(posts=output)
     g.resource_manager.register(posts)
     response = posts.display(posts=output)
     return response
 
-def get_projects_for_macro(category=None, display_mode='grid', sort='last_updated',
+
+def get_projects_for_macro(
+        category=None, display_mode='grid', sort='last_updated',
         show_total=False, limit=100, labels='', award='', private=False,
         columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
         grid_view_tools='',
@@ -178,9 +189,9 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
             short=award)).first()
         if aw:
             ids = [grant.granted_to_project_id for grant in
-                M.AwardGrant.query.find(dict(
-                    granted_by_neighborhood_id=c.project.neighborhood_id,
-                    award_id=aw._id))]
+                   M.AwardGrant.query.find(dict(
+                       granted_by_neighborhood_id=c.project.neighborhood_id,
+                       award_id=aw._id))]
             if '_id' in q:
                 ids = list(set(q['_id']['$in']).intersection(ids))
             q['_id'] = {'$in': ids}
@@ -203,7 +214,7 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
         # Can't filter these with a mongo query directly - have to iterate
         # through and check the ACL of each project.
         for chunk in utils.chunked_find(M.Project, q, sort_key=sort_key,
-                sort_dir=sort_dir):
+                                        sort_dir=sort_dir):
             projects.extend([p for p in chunk if p.private])
         total = len(projects)
         if sort == 'random':
@@ -225,7 +236,7 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
             docs = list(collection.find(q, {'_id': 1}))
             if docs:
                 ids = [doc['_id'] for doc in
-                        random.sample(docs, min(limit, len(docs)))]
+                       random.sample(docs, min(limit, len(docs)))]
                 if '_id' in q:
                     ids = list(set(q['_id']['$in']).intersection(ids))
                 q['_id'] = {'$in': ids}
@@ -233,7 +244,7 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
                 random.shuffle(projects)
         else:
             projects = M.Project.query.find(q).limit(limit).sort(sort_key,
-                sort_dir).all()
+                                                                 sort_dir).all()
 
     pl = ProjectList()
     g.resource_manager.register(pl)
@@ -249,27 +260,29 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
                 if h.has_access(p, 'read')():
                     total = total + 1
         response = '<p class="macro_projects_total">%s Projects</p>%s' % \
-                (total, response)
+            (total, response)
     return response
 
 
 @macro('neighborhood-wiki')
 def projects(category=None, display_mode='grid', sort='last_updated',
-        show_total=False, limit=100, labels='', award='', private=False,
-        columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
-        grid_view_tools=''):
+             show_total=False, limit=100, labels='', award='', private=False,
+             columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
+             grid_view_tools=''):
     initial_q = dict(neighborhood_id=c.project.neighborhood_id)
-    return get_projects_for_macro(category=category, display_mode=display_mode, sort=sort,
-                   show_total=show_total, limit=limit, labels=labels, award=award, private=private,
-                   columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
-                   show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
-                   initial_q=initial_q)
+    return get_projects_for_macro(
+        category=category, display_mode=display_mode, sort=sort,
+        show_total=show_total, limit=limit, labels=labels, award=award, private=private,
+        columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
+        show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
+        initial_q=initial_q)
+
 
 @macro('userproject-wiki')
 def my_projects(category=None, display_mode='grid', sort='last_updated',
-        show_total=False, limit=100, labels='', award='', private=False,
-        columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
-        grid_view_tools=''):
+                show_total=False, limit=100, labels='', award='', private=False,
+                columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
+                grid_view_tools=''):
 
     myproj_user = c.project.user_project_of
     if myproj_user is None:
@@ -280,11 +293,13 @@ def my_projects(category=None, display_mode='grid', sort='last_updated',
         ids.append(p._id)
 
     initial_q = dict(_id={'$in': ids})
-    return get_projects_for_macro(category=category, display_mode=display_mode, sort=sort,
-                   show_total=show_total, limit=limit, labels=labels, award=award, private=private,
-                   columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
-                   show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
-                   initial_q=initial_q)
+    return get_projects_for_macro(
+        category=category, display_mode=display_mode, sort=sort,
+        show_total=show_total, limit=limit, labels=labels, award=award, private=private,
+        columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
+        show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
+        initial_q=initial_q)
+
 
 @macro()
 def project_screenshots():
@@ -294,6 +309,7 @@ def project_screenshots():
     response = ps.display(project=c.project)
     return response
 
+
 @macro()
 def gittip_button(username):
     from allura.lib.widgets.macros import GittipButton
@@ -302,7 +318,10 @@ def gittip_button(username):
     response = button.display(username=username)
     return response
 
-# FIXME: this is SourceForge specific - need to provide a way for macros to come from other packages
+# FIXME: this is SourceForge specific - need to provide a way for macros
+# to come from other packages
+
+
 @macro()
 def download_button():
     from allura.lib.widgets.macros import DownloadButton
@@ -311,7 +330,8 @@ def download_button():
         res_mgr = g.resource_manager
     except TypeError:
         # e.g. "TypeError: No object (name: widget_context) has been registered for this thread"
-        # this is an ugly way to check to see if we're outside of a web request and avoid errors
+        # this is an ugly way to check to see if we're outside of a web request
+        # and avoid errors
         return '[[download_button]]'
     else:
         res_mgr.register(button)
@@ -341,6 +361,7 @@ def include(ref=None, **kw):
     response = sb.display(artifact=artifact, attrs=kw)
     return response
 
+
 @macro()
 def img(src=None, **kw):
     attrs = ('%s="%s"' % t for t in kw.iteritems())
@@ -351,19 +372,21 @@ def img(src=None, **kw):
     else:
         return '<img src="./attachment/%s" %s/>' % (src, ' '.join(attrs))
 
+
 @macro()
 def project_admins():
     admins = c.project.users_with_role('Admin')
     from allura.lib.widgets.macros import ProjectAdmins
     output = ((dict(
-            url=user.url(),
-            name=user.display_name))
+        url=user.url(),
+        name=user.display_name))
         for user in admins)
     users = ProjectAdmins(users=output)
     g.resource_manager.register(users)
     response = users.display(users=output)
     return response
 
+
 @macro()
 def members(limit=20):
     from allura.lib.widgets.macros import Members
@@ -371,10 +394,10 @@ def members(limit=20):
     admins = set(c.project.users_with_role('Admin'))
     members = sorted(c.project.users(), key=attrgetter('display_name'))
     output = [dict(
-            url=user.url(),
-            name=user.display_name,
-            admin=' (admin)' if user in admins else '',
-            )
+        url=user.url(),
+        name=user.display_name,
+        admin=' (admin)' if user in admins else '',
+    )
         for user in members[:limit]]
 
     over_limit = len(members) > limit
@@ -383,10 +406,12 @@ def members(limit=20):
     response = users.display(users=output, over_limit=over_limit)
     return response
 
+
 @macro()
 def embed(url=None):
     consumer = oembed.OEmbedConsumer()
-    endpoint = oembed.OEmbedEndpoint('http://www.youtube.com/oembed', ['http://*.youtube.com/*', 'https://*.youtube.com/*'])
+    endpoint = oembed.OEmbedEndpoint(
+        'http://www.youtube.com/oembed', ['http://*.youtube.com/*', 'https://*.youtube.com/*'])
     consumer.addEndpoint(endpoint)
     try:
         return jinja2.Markup('<div class="grid-20">%s</div>' % consumer.embed(url)['html'])
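
The macro.py changes above all revolve around the @macro() decorator and the module-level _macros registry. A stripped-down sketch of that registration and dispatch pattern follows; the sample 'members' body is invented, and the real parser additionally shlex-splits the [[name key=value]] arguments before dispatching.

    # Stripped-down version of the registration pattern in allura.lib.macro.
    _macros = {}

    class macro(object):
        def __init__(self, context=None):
            self._context = context

        def __call__(self, func):
            # Register the wrapped function under its own name.
            _macros[func.__name__] = (func, self._context)
            return func

    @macro()
    def members(limit=20):
        return 'first %s members' % limit

    func, context = _macros['members']
    print func(limit=5)  # first 5 members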

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/mail_util.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/mail_util.py b/Allura/allura/lib/mail_util.py
index d7d0b4b..5ec98da 100644
--- a/Allura/allura/lib/mail_util.py
+++ b/Allura/allura/lib/mail_util.py
@@ -38,14 +38,16 @@ RE_MESSAGE_ID = re.compile(r'<(?:[^>]*/)?([^>]*)>')
 config = ConfigProxy(
     common_suffix='forgemail.domain',
     return_path='forgemail.return_path')
-EMAIL_VALIDATOR=fev.Email(not_empty=True)
+EMAIL_VALIDATOR = fev.Email(not_empty=True)
+
 
 def Header(text, *more_text):
     '''Helper to make sure we encode headers properly'''
     if isinstance(text, header.Header):
         return text
     # email.header.Header handles str vs unicode differently
-    # see http://docs.python.org/library/email.header.html#email.header.Header.append
+    # see
+    # http://docs.python.org/library/email.header.html#email.header.Header.append
     if type(text) != unicode:
         raise TypeError('This must be unicode: %r' % text)
     head = header.Header(text)
@@ -55,6 +57,7 @@ def Header(text, *more_text):
         head.append(m)
     return head
 
+
 def AddrHeader(fromaddr):
     '''Accepts any of:
         Header() instance
@@ -63,9 +66,9 @@ def AddrHeader(fromaddr):
     '''
     if isinstance(fromaddr, basestring) and ' <' in fromaddr:
         name, addr = fromaddr.rsplit(' <', 1)
-        addr = '<' + addr # restore the char we just split off
+        addr = '<' + addr  # restore the char we just split off
         addrheader = Header(name, addr)
-        if str(addrheader).startswith('=?'): # encoding escape chars
+        if str(addrheader).startswith('=?'):  # encoding escape chars
             # then quoting the name is no longer necessary
             name = name.strip('"')
             addrheader = Header(name, addr)
@@ -111,6 +114,7 @@ def parse_address(addr):
             raise exc.AddressException, 'Unknown tool: ' + domain
     return userpart, project, app
 
+
 def parse_message(data):
     # Parse the email to its constituent parts
     parser = email.feedparser.FeedParser()
@@ -149,10 +153,12 @@ def parse_message(data):
             result['payload'] = result['payload'].decode(charset)
     return result
 
+
 def identify_sender(peer, email_address, headers, msg):
     from allura import model as M
     # Dumb ID -- just look for email address claimed by a particular user
-    addr = M.EmailAddress.query.get(_id=M.EmailAddress.canonical(email_address))
+    addr = M.EmailAddress.query.get(
+        _id=M.EmailAddress.canonical(email_address))
     if addr and addr.claimed_by_user_id:
         return addr.claimed_by_user()
     from_address = headers.get('From', '').strip()
@@ -163,12 +169,14 @@ def identify_sender(peer, email_address, headers, msg):
         return addr.claimed_by_user()
     return M.User.anonymous()
 
+
 def encode_email_part(content, content_type):
     try:
         return MIMEText(content.encode('ascii'), content_type, 'ascii')
     except:
         return MIMEText(content.encode('utf-8'), content_type, 'utf-8')
 
+
 def make_multipart_message(*parts):
     msg = MIMEMultipart('related')
     msg.preamble = 'This is a multi-part message in MIME format.'
@@ -178,18 +186,24 @@ def make_multipart_message(*parts):
         alt.attach(part)
     return msg
 
+
 def _parse_message_id(msgid):
-    if msgid is None: return []
-    return [ mo.group(1)
-             for mo in RE_MESSAGE_ID.finditer(msgid) ]
+    if msgid is None:
+        return []
+    return [mo.group(1)
+            for mo in RE_MESSAGE_ID.finditer(msgid)]
+
 
 def _parse_smtp_addr(addr):
     addr = str(addr)
     addrs = _parse_message_id(addr)
-    if addrs and addrs[0]: return addrs[0]
-    if '@' in addr: return addr
+    if addrs and addrs[0]:
+        return addrs[0]
+    if '@' in addr:
+        return addr
     return u'noreply@in.sf.net'
 
+
 def isvalid(addr):
     '''return True if addr is a (possibly) valid email address, false
     otherwise'''
@@ -199,13 +213,15 @@ def isvalid(addr):
     except fev.Invalid:
         return False
 
+
 class SMTPClient(object):
 
     def __init__(self):
         self._client = None
 
-    def sendmail(self, addrs, fromaddr, reply_to, subject, message_id, in_reply_to, message,
-                 sender=None, references=None, cc=None, to=None):
+    def sendmail(
+            self, addrs, fromaddr, reply_to, subject, message_id, in_reply_to, message,
+            sender=None, references=None, cc=None, to=None):
         if not addrs:
             return
         if to:
@@ -232,7 +248,7 @@ class SMTPClient(object):
             message['References'] = Header(*references)
         content = message.as_string()
         smtp_addrs = map(_parse_smtp_addr, addrs)
-        smtp_addrs = [ a for a in smtp_addrs if isvalid(a) ]
+        smtp_addrs = [a for a in smtp_addrs if isvalid(a)]
         if not smtp_addrs:
             log.warning('No valid addrs in %s, so not sending mail',
                         map(unicode, addrs))
@@ -263,7 +279,8 @@ class SMTPClient(object):
                 timeout=float(tg.config.get('smtp_timeout', 10)),
             )
         if tg.config.get('smtp_user', None):
-            smtp_client.login(tg.config['smtp_user'], tg.config['smtp_password'])
+            smtp_client.login(tg.config['smtp_user'],
+                              tg.config['smtp_password'])
         if asbool(tg.config.get('smtp_tls', False)):
             smtp_client.starttls()
         self._client = smtp_client
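
One small, self-contained piece of the mail_util.py hunks above is the message-id handling: RE_MESSAGE_ID pulls the bare id out of one or more <...> tokens, dropping any path-style prefix. A standalone sketch, with an invented sample id:

    # Mirrors _parse_message_id / RE_MESSAGE_ID from the diff above.
    import re

    RE_MESSAGE_ID = re.compile(r'<(?:[^>]*/)?([^>]*)>')

    def parse_message_id(msgid):
        if msgid is None:
            return []
        return [mo.group(1) for mo in RE_MESSAGE_ID.finditer(msgid)]

    print parse_message_id('<forum/some-topic/abc123@example.com>')
    # ['abc123@example.com']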

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/markdown_extensions.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/markdown_extensions.py b/Allura/allura/lib/markdown_extensions.py
index 8a959b9..a7af428 100644
--- a/Allura/allura/lib/markdown_extensions.py
+++ b/Allura/allura/lib/markdown_extensions.py
@@ -31,15 +31,16 @@ from allura.lib.utils import ForgeHTMLSanitizer
 
 log = logging.getLogger(__name__)
 
-PLAINTEXT_BLOCK_RE = re.compile( \
+PLAINTEXT_BLOCK_RE = re.compile(
     r'(?P<bplain>\[plain\])(?P<code>.*?)(?P<eplain>\[\/plain\])',
-    re.MULTILINE|re.DOTALL
-    )
+    re.MULTILINE | re.DOTALL
+)
 
 MACRO_PATTERN = r'\[\[([^\]\[]+)\]\]'
 
 
 class CommitMessageExtension(markdown.Extension):
+
     """Markdown extension for processing commit messages.
 
     People don't expect their commit messages to be parsed as Markdown. This
@@ -61,6 +62,7 @@ class CommitMessageExtension(markdown.Extension):
     the :class:`PatternReplacingProcessor` preprocessor.
 
     """
+
     def __init__(self, app):
         markdown.Extension.__init__(self)
         self.app = app
@@ -75,13 +77,13 @@ class CommitMessageExtension(markdown.Extension):
         # remove all inlinepattern processors except short refs and links
         md.inlinePatterns.clear()
         md.inlinePatterns["link"] = markdown.inlinepatterns.LinkPattern(
-                markdown.inlinepatterns.LINK_RE, md)
+            markdown.inlinepatterns.LINK_RE, md)
         md.inlinePatterns['short_reference'] = ForgeLinkPattern(
-                markdown.inlinepatterns.SHORT_REF_RE, md, ext=self)
+            markdown.inlinepatterns.SHORT_REF_RE, md, ext=self)
         # remove all default block processors except for paragraph
         md.parser.blockprocessors.clear()
         md.parser.blockprocessors['paragraph'] = \
-                markdown.blockprocessors.ParagraphProcessor(md.parser)
+            markdown.blockprocessors.ParagraphProcessor(md.parser)
         # wrap artifact link text in square brackets
         self.forge_link_tree_processor = ForgeLinkTreeProcessor(md)
         md.treeprocessors['links'] = self.forge_link_tree_processor
@@ -96,6 +98,7 @@ class CommitMessageExtension(markdown.Extension):
 
 
 class Pattern(object):
+
     """Base class for regex patterns used by the :class:`PatternReplacingProcessor`.
 
     Subclasses must define :attr:`pattern` (a compiled regex), and
@@ -116,6 +119,7 @@ class Pattern(object):
 
 
 class TracRef1(Pattern):
+
     """Replaces Trac-style short refs with links. Example patterns::
 
         #100 (ticket 100)
@@ -128,12 +132,13 @@ class TracRef1(Pattern):
         shortlink = M.Shortlink.lookup(match.group(1))
         if shortlink and not getattr(shortlink.ref.artifact, 'deleted', False):
             return '[{ref}]({url})'.format(
-                    ref=match.group(1),
-                    url=shortlink.url)
+                ref=match.group(1),
+                url=shortlink.url)
         return match.group()
 
 
 class TracRef2(Pattern):
+
     """Replaces Trac-style short refs with links. Example patterns::
 
         ticket:100
@@ -141,22 +146,23 @@ class TracRef2(Pattern):
 
     """
     pattern = re.compile(
-            Pattern.BEGIN + r'((comment:(\d+):)?(ticket:)(\d+))' + Pattern.END)
+        Pattern.BEGIN + r'((comment:(\d+):)?(ticket:)(\d+))' + Pattern.END)
 
     def repl(self, match):
         shortlink = M.Shortlink.lookup('#' + match.group(6))
         if shortlink and not getattr(shortlink.ref.artifact, 'deleted', False):
             url = shortlink.url
             if match.group(4):
-                slug = self.get_comment_slug(shortlink.ref.artifact, match.group(4))
+                slug = self.get_comment_slug(
+                    shortlink.ref.artifact, match.group(4))
                 slug = '#' + slug if slug else ''
                 url = url + slug
 
             return '{front}[{ref}]({url}){back}'.format(
-                    front=match.group(1),
-                    ref=match.group(2),
-                    url=url,
-                    back=match.group(7))
+                front=match.group(1),
+                ref=match.group(2),
+                url=url,
+                back=match.group(7))
         return match.group()
 
     def get_comment_slug(self, ticket, comment_num):
@@ -173,10 +179,11 @@ class TracRef2(Pattern):
             status={'$in': ['ok', 'pending']})).sort('timestamp')
 
         if comment_num <= comments.count():
-            return comments.all()[comment_num-1].slug
+            return comments.all()[comment_num - 1].slug
 
 
 class TracRef3(Pattern):
+
     """Replaces Trac-style short refs with links. Example patterns::
 
         source:trunk/server/file.c@123#L456 (rev 123, lineno 456)
@@ -185,7 +192,7 @@ class TracRef3(Pattern):
 
     """
     pattern = re.compile(
-            Pattern.BEGIN + r'((source:)([^@#\s]+)(@(\w+))?(#L(\d+))?)' + Pattern.END)
+        Pattern.BEGIN + r'((source:)([^@#\s]+)(@(\w+))?(#L(\d+))?)' + Pattern.END)
 
     def __init__(self, app):
         super(Pattern, self).__init__()
@@ -195,22 +202,23 @@ class TracRef3(Pattern):
         if not self.app:
             return match.group()
         file, rev, lineno = (
-                match.group(4),
-                match.group(6) or 'HEAD',
-                '#l' + match.group(8) if match.group(8) else '')
+            match.group(4),
+            match.group(6) or 'HEAD',
+            '#l' + match.group(8) if match.group(8) else '')
         url = '{app_url}{rev}/tree/{file}{lineno}'.format(
-                app_url=self.app.url,
-                rev=rev,
-                file=file,
-                lineno=lineno)
+            app_url=self.app.url,
+            rev=rev,
+            file=file,
+            lineno=lineno)
         return '{front}[{ref}]({url}){back}'.format(
-                front=match.group(1),
-                ref=match.group(2),
-                url=url,
-                back=match.group(9))
+            front=match.group(1),
+            ref=match.group(2),
+            url=url,
+            back=match.group(9))
 
 
 class PatternReplacingProcessor(markdown.preprocessors.Preprocessor):
+
     """A Markdown preprocessor that searches the source lines for patterns and
     replaces matches with alternate text.
 
@@ -238,23 +246,33 @@ class ForgeExtension(markdown.Extension):
 
     def extendMarkdown(self, md, md_globals):
         md.registerExtension(self)
-        # allow markdown within e.g. <div markdown>...</div>  More info at: https://github.com/waylan/Python-Markdown/issues/52
+        # allow markdown within e.g. <div markdown>...</div>  More info at:
+        # https://github.com/waylan/Python-Markdown/issues/52
         md.preprocessors['html_block'].markdown_in_raw = True
         md.preprocessors['fenced-code'] = FencedCodeProcessor()
-        md.preprocessors.add('plain_text_block', PlainTextPreprocessor(md), "_begin")
-        md.preprocessors.add('macro_include', ForgeMacroIncludePreprocessor(md), '_end')
-        # this has to be before the 'escape' processor, otherwise weird placeholders are inserted for escaped chars within urls, and then the autolink can't match the whole url
-        md.inlinePatterns.add('autolink_without_brackets', AutolinkPattern(r'(http(?:s?)://[a-zA-Z0-9./\-\\_%?&=+#;~:!]+)', md), '<escape')
+        md.preprocessors.add('plain_text_block',
+                             PlainTextPreprocessor(md), "_begin")
+        md.preprocessors.add(
+            'macro_include', ForgeMacroIncludePreprocessor(md), '_end')
+        # this has to be before the 'escape' processor, otherwise weird
+        # placeholders are inserted for escaped chars within urls, and then the
+        # autolink can't match the whole url
+        md.inlinePatterns.add('autolink_without_brackets', AutolinkPattern(
+            r'(http(?:s?)://[a-zA-Z0-9./\-\\_%?&=+#;~:!]+)', md), '<escape')
         # replace the link pattern with our extended version
-        md.inlinePatterns['link'] = ForgeLinkPattern(markdown.inlinepatterns.LINK_RE, md, ext=self)
-        md.inlinePatterns['short_reference'] = ForgeLinkPattern(markdown.inlinepatterns.SHORT_REF_RE, md, ext=self)
+        md.inlinePatterns['link'] = ForgeLinkPattern(
+            markdown.inlinepatterns.LINK_RE, md, ext=self)
+        md.inlinePatterns['short_reference'] = ForgeLinkPattern(
+            markdown.inlinepatterns.SHORT_REF_RE, md, ext=self)
         # macro must be processed before links
-        md.inlinePatterns.add('macro', ForgeMacroPattern(MACRO_PATTERN, md, ext=self), '<link')
+        md.inlinePatterns.add(
+            'macro', ForgeMacroPattern(MACRO_PATTERN, md, ext=self), '<link')
         self.forge_link_tree_processor = ForgeLinkTreeProcessor(md)
         md.treeprocessors['links'] = self.forge_link_tree_processor
         # Sanitize HTML
         md.postprocessors['sanitize_html'] = HTMLSanitizer()
-        # Rewrite all relative links that don't start with . to have a '../' prefix
+        # Rewrite all relative links that don't start with . to have a '../'
+        # prefix
         md.postprocessors['rewrite_relative_links'] = RelativeLinkRewriter(
             make_absolute=self._is_email)
         # Put a class around markdown content for custom css
@@ -334,6 +352,7 @@ class ForgeLinkPattern(markdown.inlinepatterns.LinkPattern):
 
 
 class PlainTextPreprocessor(markdown.preprocessors.Preprocessor):
+
     '''
     This was used earlier for [plain] tags that the Blog tool's rss importer
     created, before html2text did good escaping of all special markdown chars.
@@ -347,7 +366,8 @@ class PlainTextPreprocessor(markdown.preprocessors.Preprocessor):
             for m in res:
                 code = self._escape(m.group('code'))
                 placeholder = self.markdown.htmlStash.store(code, safe=True)
-                text = '%s%s%s'% (text[:m.start()], placeholder, text[m.end():])
+                text = '%s%s%s' % (
+                    text[:m.start()], placeholder, text[m.end():])
                 break
             else:
                 break
@@ -393,6 +413,7 @@ class ForgeMacroPattern(markdown.inlinepatterns.Pattern):
 
 
 class ForgeLinkTreeProcessor(markdown.treeprocessors.Treeprocessor):
+
     '''Wraps artifact links with []'''
 
     def __init__(self, parent):
@@ -448,7 +469,8 @@ class RelativeLinkRewriter(markdown.postprocessors.Postprocessor):
 
     def _rewrite(self, tag, attr):
         val = tag.get(attr)
-        if val is None: return
+        if val is None:
+            return
         if ' ' in val:
             # Don't urllib.quote to avoid possible double-quoting
             # just make sure no spaces
@@ -458,18 +480,22 @@ class RelativeLinkRewriter(markdown.postprocessors.Postprocessor):
             if 'sf.net' in val or 'sourceforge.net' in val:
                 return
             else:
-                tag['rel']='nofollow'
+                tag['rel'] = 'nofollow'
                 return
-        if val.startswith('/'): return
-        if val.startswith('.'): return
-        if val.startswith('mailto:'): return
-        if val.startswith('#'): return
+        if val.startswith('/'):
+            return
+        if val.startswith('.'):
+            return
+        if val.startswith('mailto:'):
+            return
+        if val.startswith('#'):
+            return
         tag[attr] = '../' + val
 
     def _rewrite_abs(self, tag, attr):
         self._rewrite(tag, attr)
         val = tag.get(attr)
-        val = urljoin(config.get('base_url', 'http://sourceforge.net/'),val)
+        val = urljoin(config.get('base_url', 'http://sourceforge.net/'), val)
         tag[attr] = val
 
 
@@ -478,7 +504,7 @@ class HTMLSanitizer(markdown.postprocessors.Postprocessor):
     def run(self, text):
         try:
             p = ForgeHTMLSanitizer('utf-8')
-        except TypeError: # $@%## pre-released versions from SOG
+        except TypeError:  # $@%## pre-released versions from SOG
             p = ForgeHTMLSanitizer('utf-8', '')
         p.feed(text.encode('utf-8'))
         return unicode(p.output(), 'utf-8')
@@ -487,7 +513,8 @@ class HTMLSanitizer(markdown.postprocessors.Postprocessor):
 class AutolinkPattern(markdown.inlinepatterns.Pattern):
 
     def __init__(self, pattern, markdown_instance=None):
-        markdown.inlinepatterns.Pattern.__init__(self, pattern, markdown_instance)
+        markdown.inlinepatterns.Pattern.__init__(
+            self, pattern, markdown_instance)
         # override the complete regex, requiring the preceding text (.*?) to end
         # with whitespace or beginning of line "\s|^"
         self.compiled_re = re.compile("^(.*?\s|^)%s(.*?)$" % pattern,
@@ -497,7 +524,8 @@ class AutolinkPattern(markdown.inlinepatterns.Pattern):
         old_link = mo.group(2)
         result = markdown.util.etree.Element('a')
         result.text = old_link
-        # since this is run before the builtin 'escape' processor, we have to do our own unescaping
+        # since this is run before the builtin 'escape' processor, we have to
+        # do our own unescaping
         for char in markdown.Markdown.ESCAPED_CHARS:
             old_link = old_link.replace('\\' + char, char)
         result.set('href', old_link)
@@ -505,6 +533,7 @@ class AutolinkPattern(markdown.inlinepatterns.Pattern):
 
 
 class ForgeMacroIncludePreprocessor(markdown.preprocessors.Preprocessor):
+
     '''Join include statements to prevent extra <br>'s inserted by nl2br extension.
 
     Converts:

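For orientation, the inline-pattern registration idiom being re-wrapped above works roughly as follows when used on its own. This is an illustrative sketch against the python-markdown 2.x API this module relies on; BareUrlPattern and BareUrlExtension are made-up names, not part of the commit.

    import markdown
    from markdown.extensions import Extension
    from markdown.inlinepatterns import Pattern
    from markdown.util import etree


    class BareUrlPattern(Pattern):
        # simplified stand-in for AutolinkPattern: wrap bare URLs in <a> tags
        def handleMatch(self, mo):
            url = mo.group(2)
            el = etree.Element('a')
            el.set('href', url)
            el.text = url
            return el


    class BareUrlExtension(Extension):
        def extendMarkdown(self, md, md_globals):
            # '<escape' places the pattern before the built-in escape step,
            # the same position hint used for 'autolink_without_brackets'
            md.inlinePatterns.add(
                'bare_url', BareUrlPattern(r'(https?://\S+)', md), '<escape')


    html = markdown.Markdown(extensions=[BareUrlExtension()]).convert(
        'see https://allura.apache.org for details')
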
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/oid_helper.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/oid_helper.py b/Allura/allura/lib/oid_helper.py
index bf16dd8..8e41a1f 100644
--- a/Allura/allura/lib/oid_helper.py
+++ b/Allura/allura/lib/oid_helper.py
@@ -30,8 +30,9 @@ log = logging.getLogger(__name__)
 from openid import oidutil
 oidutil.log = log.info
 
+
 def verify_oid(oid_url, failure_redirect=None, return_to=None,
-                  **kw):
+               **kw):
     '''Step 1 of OID verification -- redirect to provider site'''
     log.info('Trying to login via %s', oid_url)
     realm = config.get('openid.realm', 'http://localhost:8080/')
@@ -43,7 +44,7 @@ def verify_oid(oid_url, failure_redirect=None, return_to=None,
         log.exception('Error in openid login')
         flash(str(ex[0]), 'error')
         redirect(failure_redirect)
-    if req is None: # pragma no cover
+    if req is None:  # pragma no cover
         flash('No openid services found for <code>%s</code>' % oid_url,
               'error')
         redirect(failure_redirect)
@@ -54,7 +55,8 @@ def verify_oid(oid_url, failure_redirect=None, return_to=None,
         session.save()
         redirect(redirect_url)
     else:
-        return dict(kw, form=req.formMarkup(realm, return_to=return_to))    
+        return dict(kw, form=req.formMarkup(realm, return_to=return_to))
+
 
 def process_oid(failure_redirect=None):
     oidconsumer = consumer.Consumer(g.oid_session(), g.oid_store)
@@ -84,7 +86,7 @@ def process_oid(failure_redirect=None):
             # way their account with you is not compromised if their
             # i-name registration expires and is bought by someone else.
             message += ("  This is an i-name, and its persistent ID is %s"
-                        % info.endpoint.canonicalID )
+                        % info.endpoint.canonicalID)
         flash(message, 'info')
     elif info.status == consumer.CANCEL:
         # cancelled
@@ -109,5 +111,6 @@ def process_oid(failure_redirect=None):
         flash(message, 'error')
         redirect(failure_redirect)
     session.save()
-    oid_obj = M.OpenId.upsert(info.identity_url, display_identifier=display_identifier)
+    oid_obj = M.OpenId.upsert(
+        info.identity_url, display_identifier=display_identifier)
     return oid_obj

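In outline, the two helpers patched above wrap python-openid's consumer flow. A simplified sketch, using the same Consumer, g.oid_session() and g.oid_store objects seen in the diff; begin(), redirectURL() and complete() are the standard python-openid calls, and all error handling and session bookkeeping is omitted:

    from openid.consumer import consumer
    from pylons import app_globals as g


    def begin_login(oid_url, realm, return_to):
        # step 1: discover the provider and build the URL to redirect to
        oidconsumer = consumer.Consumer(g.oid_session(), g.oid_store)
        req = oidconsumer.begin(oid_url)
        return req.redirectURL(realm, return_to)


    def finish_login(request_params, current_url):
        # step 2: the provider redirects back; verify its response
        oidconsumer = consumer.Consumer(g.oid_session(), g.oid_store)
        info = oidconsumer.complete(request_params, current_url)
        if info.status == consumer.SUCCESS:
            return info.identity_url
        return None
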
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/package_path_loader.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/package_path_loader.py b/Allura/allura/lib/package_path_loader.py
index 8272acf..9e6548a 100644
--- a/Allura/allura/lib/package_path_loader.py
+++ b/Allura/allura/lib/package_path_loader.py
@@ -132,20 +132,21 @@ from allura.lib.helpers import topological_sort, iter_entry_points
 
 
 class PackagePathLoader(jinja2.BaseLoader):
+
     def __init__(self, override_entrypoint='allura.theme.override',
-                default_paths=None,
-                override_root='override',
-                ):
+                 default_paths=None,
+                 override_root='override',
+                 ):
         '''
         Set up initial values... defaults are for Allura.
         '''
         # TODO: How does one handle project-theme?
         if default_paths is None:
             default_paths = [
-                    #['project-theme', None],
-                    ['site-theme', None],
-                    ['allura', '/'],
-                ]
+                #['project-theme', None],
+                ['site-theme', None],
+                ['allura', '/'],
+            ]
 
         self.override_entrypoint = override_entrypoint
         self.default_paths = default_paths
@@ -161,9 +162,9 @@ class PackagePathLoader(jinja2.BaseLoader):
         """
         paths = self.default_paths[:]  # copy default_paths
         paths[-1:0] = [  # insert all eps just before last item, by default
-                [ep.name, pkg_resources.resource_filename(ep.module_name, "")]
-                for ep in iter_entry_points(self.override_entrypoint)
-            ]
+            [ep.name, pkg_resources.resource_filename(ep.module_name, "")]
+            for ep in iter_entry_points(self.override_entrypoint)
+        ]
         return paths
 
     def _load_rules(self):
@@ -213,7 +214,8 @@ class PackagePathLoader(jinja2.BaseLoader):
         path 'a' should come before path 'b'.
         """
         names = [p[0] for p in paths]
-        # filter rules that reference non-existent paths to prevent "loops" in the graph
+        # filter rules that reference non-existent paths to prevent "loops" in
+        # the graph
         rules = [r for r in rules if r[0] in names and r[1] in names]
         ordered_paths = topological_sort(names, rules)
         if ordered_paths is None:

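The reindented block above feeds path names and "a before b" rules into topological_sort. Its contract is roughly the generic sketch below; this is not Allura's helper, just an illustration of ordering names under before/after rules, where a cycle yields None:

    def order_paths(names, rules):
        # Kahn's algorithm: names are nodes, each rule (a, b) means "a before b"
        successors = dict((n, set()) for n in names)
        indegree = dict((n, 0) for n in names)
        for before, after in rules:
            if after not in successors[before]:
                successors[before].add(after)
                indegree[after] += 1
        ready = [n for n in names if indegree[n] == 0]
        ordered = []
        while ready:
            n = ready.pop(0)
            ordered.append(n)
            for m in successors[n]:
                indegree[m] -= 1
                if indegree[m] == 0:
                    ready.append(m)
        # a leftover node means the rules are cyclic
        return ordered if len(ordered) == len(names) else None

    order_paths(['site-theme', 'allura'], [('site-theme', 'allura')])
    # -> ['site-theme', 'allura']
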
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/patches.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/patches.py b/Allura/allura/lib/patches.py
index 804c947..19f91b3 100644
--- a/Allura/allura/lib/patches.py
+++ b/Allura/allura/lib/patches.py
@@ -26,6 +26,7 @@ import simplejson
 
 from allura.lib import helpers as h
 
+
 def apply():
     old_lookup_template_engine = tg.decorators.Decoration.lookup_template_engine
 
@@ -67,7 +68,7 @@ def apply():
         '''Monkey-patched to use 301 redirects for SEO'''
         response_type = getattr(request, 'response_type', None)
         if (request.method == 'GET' and request.path.endswith('/')
-                and not response_type and len(request.params)==0):
+                and not response_type and len(request.params) == 0):
             raise webob.exc.HTTPMovedPermanently(location=request.url[:-1])
         return func(*args, **kwargs)
 
@@ -77,18 +78,17 @@ def apply():
         '''Monkey-patched to use 301 redirects for SEO'''
         response_type = getattr(request, 'response_type', None)
         if (request.method == 'GET' and not(request.path.endswith('/'))
-                and not response_type and len(request.params)==0):
-            raise webob.exc.HTTPMovedPermanently(location=request.url+'/')
+                and not response_type and len(request.params) == 0):
+            raise webob.exc.HTTPMovedPermanently(location=request.url + '/')
         return func(*args, **kwargs)
 
-
     # http://blog.watchfire.com/wfblog/2011/10/json-based-xss-exploitation.html
     # change < to its unicode escape when rendering JSON out of turbogears
     # This is to avoid IE9 and earlier, which don't know the json content type
     # and may attempt to render JSON data as HTML if the URL ends in .html
-    
     original_tg_jsonify_GenericJSON_encode = tg.jsonify.GenericJSON.encode
-    escape_pattern_with_lt = re.compile(simplejson.encoder.ESCAPE.pattern.rstrip(']') + '<' + ']')
+    escape_pattern_with_lt = re.compile(
+        simplejson.encoder.ESCAPE.pattern.rstrip(']') + '<' + ']')
 
     @h.monkeypatch(tg.jsonify.GenericJSON)
     def encode(self, o):
@@ -96,8 +96,8 @@ def apply():
         # encode_basestring_ascii() and encode_basestring_ascii may likely be c-compiled
         # and thus not monkeypatchable
         with h.push_config(self, ensure_ascii=False), \
-             h.push_config(simplejson.encoder, ESCAPE=escape_pattern_with_lt), \
-             mock.patch.dict(simplejson.encoder.ESCAPE_DCT, {'<': r'\u003C'}):
+                h.push_config(simplejson.encoder, ESCAPE=escape_pattern_with_lt), \
+                mock.patch.dict(simplejson.encoder.ESCAPE_DCT, {'<': r'\u003C'}):
             return original_tg_jsonify_GenericJSON_encode(self, o)
 
 
@@ -106,11 +106,13 @@ def apply():
 # over and over
 old_controller_call = tg.controllers.DecoratedController._call
 
+
 def newrelic():
     @h.monkeypatch(tg.controllers.DecoratedController,
                    tg.controllers.decoratedcontroller.DecoratedController)
     def _call(self, controller, *args, **kwargs):
         '''Set NewRelic transaction name to actual controller name'''
         import newrelic.agent
-        newrelic.agent.set_transaction_name(newrelic.agent.callable_name(controller))
+        newrelic.agent.set_transaction_name(
+            newrelic.agent.callable_name(controller))
         return old_controller_call(self, controller, *args, **kwargs)

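The JSON hardening in the hunk above amounts to emitting \u003C in place of a literal '<' so that a JSON body cannot be rendered as HTML by IE9 and earlier. A standalone illustration with the plain json module (the patch itself does this inside TurboGears' encoder):

    import json

    payload = {'html': '<script>alert(1)</script>'}
    safe = json.dumps(payload).replace('<', '\\u003C')
    assert '<' not in safe                # nothing HTML-ish left in the body
    assert json.loads(safe) == payload    # the escape decodes to the same data
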
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/plugin.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/plugin.py b/Allura/allura/lib/plugin.py
index e809fed..809d895 100644
--- a/Allura/allura/lib/plugin.py
+++ b/Allura/allura/lib/plugin.py
@@ -54,7 +54,9 @@ from paste.deploy.converters import asbool
 
 log = logging.getLogger(__name__)
 
+
 class AuthenticationProvider(object):
+
     '''
     An interface to provide authentication services for Allura.
 
@@ -115,7 +117,8 @@ class AuthenticationProvider(object):
 
     def login(self, user=None):
         try:
-            if user is None: user = self._login()
+            if user is None:
+                user = self._login()
             self.session['userid'] = user._id
             self.session.save()
             g.zarkov_event('login', user=user)
@@ -210,7 +213,9 @@ class AuthenticationProvider(object):
         '''
         raise NotImplementedError, 'user_registration_date'
 
+
 class LocalAuthenticationProvider(AuthenticationProvider):
+
     '''
     Stores user passwords on the User model, in mongo.  Uses per-user salt and
     SHA-256 encryption.
@@ -232,11 +237,14 @@ class LocalAuthenticationProvider(AuthenticationProvider):
         return user
 
     def _validate_password(self, user, password):
-        if user is None: return False
-        if not user.password: return False
-        salt = str(user.password[6:6+user.SALT_LEN])
+        if user is None:
+            return False
+        if not user.password:
+            return False
+        salt = str(user.password[6:6 + user.SALT_LEN])
         check = self._encode_password(password, salt)
-        if check != user.password: return False
+        if check != user.password:
+            return False
         return True
 
     def by_username(self, username):
@@ -273,7 +281,9 @@ class LocalAuthenticationProvider(AuthenticationProvider):
             return user._id.generation_time
         return datetime.utcnow()
 
+
 class LdapAuthenticationProvider(AuthenticationProvider):
+
     def register_user(self, user_doc):
         from allura import model as M
         password = user_doc['password'].encode('utf-8')
@@ -289,7 +299,7 @@ class LdapAuthenticationProvider(AuthenticationProvider):
             ldif_u = modlist.addModlist(dict(
                 uid=uname,
                 userPassword=password,
-                objectClass=['account', 'posixAccount' ],
+                objectClass=['account', 'posixAccount'],
                 cn=display_name,
                 uidNumber=uid,
                 gidNumber='10001',
@@ -307,7 +317,8 @@ class LdapAuthenticationProvider(AuthenticationProvider):
             if asbool(config.get('auth.ldap.use_schroot', True)):
                 argv = ('schroot -d / -c %s -u root /ldap-userconfig.py init %s' % (
                     config['auth.ldap.schroot_name'], user_doc['username'])).split()
-                p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                p = subprocess.Popen(
+                    argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                 rc = p.wait()
                 if rc != 0:
                     log.error('Error creating home directory for %s',
@@ -321,8 +332,9 @@ class LdapAuthenticationProvider(AuthenticationProvider):
                 raise NotImplemented, 'SSH keys are not supported'
 
             argv = ('schroot -d / -c %s -u root /ldap-userconfig.py upload %s' % (
-                config['auth.ldap.schroot_name'], username)).split() + [ pubkey ]
-            p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                config['auth.ldap.schroot_name'], username)).split() + [pubkey]
+            p = subprocess.Popen(
+                argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
             rc = p.wait()
             if rc != 0:
                 errmsg = p.stdout.read()
@@ -339,15 +351,18 @@ class LdapAuthenticationProvider(AuthenticationProvider):
             dn = 'uid=%s,%s' % (user.username, config['auth.ldap.suffix'])
             con = ldap.initialize(config['auth.ldap.server'])
             con.bind_s(dn, old_password.encode('utf-8'))
-            con.modify_s(dn, [(ldap.MOD_REPLACE, 'userPassword', new_password.encode('utf-8'))])
+            con.modify_s(
+                dn, [(ldap.MOD_REPLACE, 'userPassword', new_password.encode('utf-8'))])
             con.unbind_s()
         except ldap.INVALID_CREDENTIALS:
             raise exc.HTTPUnauthorized()
 
     def _login(self):
         from allura import model as M
-        user = M.User.query.get(username=self.request.params['username'], disabled=False)
-        if user is None: raise exc.HTTPUnauthorized()
+        user = M.User.query.get(
+            username=self.request.params['username'], disabled=False)
+        if user is None:
+            raise exc.HTTPUnauthorized()
         try:
             dn = 'uid=%s,%s' % (user.username, config['auth.ldap.suffix'])
             con = ldap.initialize(config['auth.ldap.server'])
@@ -369,7 +384,9 @@ class LdapAuthenticationProvider(AuthenticationProvider):
             return user._id.generation_time
         return datetime.utcnow()
 
+
 class ProjectRegistrationProvider(object):
+
     '''
     Project registration services for Allura.  This is a full implementation
     and the default.  Extend this class with your own if you need to add more
@@ -392,7 +409,8 @@ class ProjectRegistrationProvider(object):
     def __init__(self):
         from allura.lib.widgets import forms
         self.add_project_widget = forms.NeighborhoodAddProjectForm
-        self.shortname_validator = forms.NeighborhoodProjectShortNameValidator()
+        self.shortname_validator = forms.NeighborhoodProjectShortNameValidator(
+        )
 
     @classmethod
     def get(cls):
@@ -423,7 +441,8 @@ class ProjectRegistrationProvider(object):
         rate_limits = json.loads(config.get('project.rate_limits', '{}'))
         for rate, count in rate_limits.items():
             user_age = now - user._id.generation_time
-            user_age = (user_age.microseconds + (user_age.seconds + user_age.days * 24 * 3600) * 10**6) / 10**6
+            user_age = (user_age.microseconds +
+                        (user_age.seconds + user_age.days * 24 * 3600) * 10 ** 6) / 10 ** 6
             if user_age < int(rate) and project_count >= count:
                 raise forge_exc.ProjectRatelimitError()
 
@@ -432,14 +451,15 @@ class ProjectRegistrationProvider(object):
         shortname = '--init--'
         name = 'Home Project for %s' % neighborhood.name
         p = M.Project(neighborhood_id=neighborhood._id,
-                    shortname=shortname,
-                    name=name,
-                    short_description='',
-                    description=('You can edit this description in the admin page'),
-                    homepage_title = '# ' + name,
-                    last_updated = datetime.utcnow(),
-                    is_nbhd_project=True,
-                    is_root=True)
+                      shortname=shortname,
+                      name=name,
+                      short_description='',
+                      description=(
+                          'You can edit this description in the admin page'),
+                      homepage_title = '# ' + name,
+                      last_updated = datetime.utcnow(),
+                      is_nbhd_project=True,
+                      is_root=True)
         try:
             p.configure_project(
                 users=users,
@@ -461,7 +481,8 @@ class ProjectRegistrationProvider(object):
         '''Register a new project in the neighborhood.  The given user will
         become the project's superuser.
         '''
-        self.validate_project(neighborhood, shortname, project_name, user, user_project, private_project)
+        self.validate_project(neighborhood, shortname,
+                              project_name, user, user_project, private_project)
         return self._create_project(neighborhood, shortname, project_name, user, user_project, private_project, apps)
 
     def validate_project(self, neighborhood, shortname, project_name, user, user_project, private_project):
@@ -472,16 +493,18 @@ class ProjectRegistrationProvider(object):
 
         # Check for private project rights
         if neighborhood.features['private_projects'] == False and private_project:
-            raise ValueError("You can't create private projects for %s neighborhood" % neighborhood.name)
+            raise ValueError(
+                "You can't create private projects for %s neighborhood" %
+                neighborhood.name)
 
         # Check for project limit creation
         nb_max_projects = neighborhood.get_max_projects()
         if nb_max_projects is not None:
             count = M.Project.query.find(dict(
-                    neighborhood_id=neighborhood._id,
-                    deleted=False,
-                    is_nbhd_project=False,
-                    )).count()
+                neighborhood_id=neighborhood._id,
+                deleted=False,
+                is_nbhd_project=False,
+            )).count()
             if count >= nb_max_projects:
                 log.exception('Error registering project %s' % project_name)
                 raise forge_exc.ProjectOverlimitError()
@@ -492,11 +515,14 @@ class ProjectRegistrationProvider(object):
             check_shortname = shortname.replace('u/', '', 1)
         else:
             check_shortname = shortname
-        self.shortname_validator.to_python(check_shortname, neighborhood=neighborhood)
+        self.shortname_validator.to_python(
+            check_shortname, neighborhood=neighborhood)
 
-        p = M.Project.query.get(shortname=shortname, neighborhood_id=neighborhood._id)
+        p = M.Project.query.get(
+            shortname=shortname, neighborhood_id=neighborhood._id)
         if p:
-            raise forge_exc.ProjectConflict('%s already exists in nbhd %s' % (shortname, neighborhood._id))
+            raise forge_exc.ProjectConflict(
+                '%s already exists in nbhd %s' % (shortname, neighborhood._id))
 
     def _create_project(self, neighborhood, shortname, project_name, user, user_project, private_project, apps):
         '''
@@ -507,18 +533,20 @@ class ProjectRegistrationProvider(object):
 
         project_template = neighborhood.get_project_template()
         p = M.Project(neighborhood_id=neighborhood._id,
-                    shortname=shortname,
-                    name=project_name,
-                    short_description='',
-                    description=('You can edit this description in the admin page'),
-                    homepage_title=shortname,
-                    last_updated = datetime.utcnow(),
-                    is_nbhd_project=False,
-                    is_root=True)
+                      shortname=shortname,
+                      name=project_name,
+                      short_description='',
+                      description=(
+                          'You can edit this description in the admin page'),
+                      homepage_title=shortname,
+                      last_updated = datetime.utcnow(),
+                      is_nbhd_project=False,
+                      is_root=True)
         p.configure_project(
             users=[user],
             is_user_project=user_project,
-            is_private_project=private_project or project_template.get('private', False),
+            is_private_project=private_project or project_template.get(
+                'private', False),
             apps=apps or [] if 'tools' in project_template else None)
 
         # Setup defaults from neighborhood project template if applicable
@@ -527,22 +555,25 @@ class ProjectRegistrationProvider(object):
             for obj in project_template['groups']:
                 name = obj.get('name')
                 permissions = set(obj.get('permissions', [])) & \
-                              set(p.permissions)
+                    set(p.permissions)
                 usernames = obj.get('usernames', [])
                 # Must provide a group name
-                if not name: continue
+                if not name:
+                    continue
                 # If the group already exists, we'll add users to it,
                 # but we won't change permissions on the group
                 group = M.ProjectRole.by_name(name, project=p)
                 if not group:
                     # If creating a new group, *must* specify permissions
-                    if not permissions: continue
+                    if not permissions:
+                        continue
                     group = M.ProjectRole(project_id=p._id, name=name)
                     p.acl += [M.ACE.allow(group._id, perm)
-                            for perm in permissions]
+                              for perm in permissions]
                 for username in usernames:
                     guser = M.User.by_username(username)
-                    if not (guser and guser._id): continue
+                    if not (guser and guser._id):
+                        continue
                     pr = M.ProjectRole.by_user(guser, project=p, upsert=True)
                     if group._id not in pr.roles:
                         pr.roles.append(group._id)
@@ -553,19 +584,20 @@ class ProjectRegistrationProvider(object):
                 for k, v in tool_options.iteritems():
                     if isinstance(v, basestring):
                         tool_options[k] = \
-                                string.Template(v).safe_substitute(
-                                    p.__dict__.get('root_project', {}))
+                            string.Template(v).safe_substitute(
+                                p.__dict__.get('root_project', {}))
                 if p.app_instance(tool) is None:
                     app = p.install_app(tool,
-                        mount_label=tool_config['label'],
-                        mount_point=tool_config['mount_point'],
-                        ordinal=i + offset,
-                    **tool_options)
+                                        mount_label=tool_config['label'],
+                                        mount_point=tool_config['mount_point'],
+                                        ordinal=i + offset,
+                                        **tool_options)
                     if tool == 'wiki':
                         from forgewiki import model as WM
                         text = tool_config.get('home_text',
-                            '[[members limit=20]]\n[[download_button]]')
-                        WM.Page.query.get(app_config_id=app.config._id).text = text
+                                               '[[members limit=20]]\n[[download_button]]')
+                        WM.Page.query.get(
+                            app_config_id=app.config._id).text = text
 
         if 'tool_order' in project_template:
             for i, tool in enumerate(project_template['tool_order']):
@@ -576,9 +608,11 @@ class ProjectRegistrationProvider(object):
             for trove_type in project_template['trove_cats'].keys():
                 troves = getattr(p, 'trove_%s' % trove_type)
                 for trove_id in project_template['trove_cats'][trove_type]:
-                    troves.append(M.TroveCategory.query.get(trove_cat_id=trove_id)._id)
+                    troves.append(
+                        M.TroveCategory.query.get(trove_cat_id=trove_id)._id)
         if 'icon' in project_template:
-            icon_file = StringIO(urlopen(project_template['icon']['url']).read())
+            icon_file = StringIO(
+                urlopen(project_template['icon']['url']).read())
             M.ProjectFile.save_image(
                 project_template['icon']['filename'], icon_file,
                 square=True, thumbnail_size=(48, 48),
@@ -592,14 +626,15 @@ class ProjectRegistrationProvider(object):
             home_app = p.app_instance('wiki')
             home_page = WM.Page.query.get(app_config_id=home_app.config._id)
             home_page.text = ("This is the personal project of %s."
-            " This project is created automatically during user registration"
-            " as an easy place to store personal data that doesn't need its own"
-            " project such as cloned repositories.") % user.display_name
+                              " This project is created automatically during user registration"
+                              " as an easy place to store personal data that doesn't need its own"
+                              " project such as cloned repositories.") % user.display_name
 
         # clear the RoleCache for the user so this project will
         # be picked up by user.my_projects()
         g.credentials.clear_user(user._id, None)  # unnamed roles for this user
-        g.credentials.clear_user(user._id, p._id)  # named roles for this project + user
+        # named roles for this project + user
+        g.credentials.clear_user(user._id, p._id)
         with h.push_config(c, project=p, user=user):
             ThreadLocalORMSession.flush_all()
             # have to add user to context, since this may occur inside auth code
@@ -611,13 +646,14 @@ class ProjectRegistrationProvider(object):
         from allura import model as M
         assert h.re_project_name.match(name), 'Invalid subproject shortname'
         shortname = project.shortname + '/' + name
-        ordinal = int(project.ordered_mounts(include_hidden=True)[-1]['ordinal']) + 1
+        ordinal = int(project.ordered_mounts(include_hidden=True)
+                      [-1]['ordinal']) + 1
         sp = M.Project(
             parent_id=project._id,
             neighborhood_id=project.neighborhood_id,
             shortname=shortname,
             name=project_name or name,
-            last_updated = datetime.utcnow(),
+            last_updated=datetime.utcnow(),
             is_root=False,
             ordinal=ordinal,
         )
@@ -644,7 +680,9 @@ class ProjectRegistrationProvider(object):
            It should be overridden for your specific envirnoment'''
         return None
 
+
 class ThemeProvider(object):
+
     '''
     Theme information for Allura.  This is a full implementation
     and the default.  Extend this class with your own if you need to add more
@@ -881,7 +919,7 @@ class ThemeProvider(object):
             return None
         cookie = request.cookies.get('site-notification', '').split('-')
         if len(cookie) == 3 and cookie[0] == str(note._id):
-            views = asint(cookie[1])+1
+            views = asint(cookie[1]) + 1
             closed = asbool(cookie[2])
         else:
             views = 1
@@ -889,15 +927,18 @@ class ThemeProvider(object):
         if closed or note.impressions > 0 and views > note.impressions:
             return None
         response.set_cookie(
-                'site-notification',
-                '-'.join(map(str, [note._id, views, closed])),
-                max_age=timedelta(days=365))
+            'site-notification',
+            '-'.join(map(str, [note._id, views, closed])),
+            max_age=timedelta(days=365))
         return note
 
+
 class LocalProjectRegistrationProvider(ProjectRegistrationProvider):
     pass
 
+
 class UserPreferencesProvider(object):
+
     '''
     An interface for user preferences, like display_name and email_address
 
@@ -937,7 +978,9 @@ class UserPreferencesProvider(object):
         '''
         raise NotImplementedError, 'find_by_display_name'
 
+
 class LocalUserPreferencesProvider(UserPreferencesProvider):
+
     '''
     The default UserPreferencesProvider, storing preferences on the User object
     in mongo.
@@ -959,11 +1002,12 @@ class LocalUserPreferencesProvider(UserPreferencesProvider):
         from allura import model as M
         name_regex = re.compile('(?i)%s' % re.escape(name))
         users = M.User.query.find(dict(
-                display_name=name_regex)).sort('username').all()
+            display_name=name_regex)).sort('username').all()
         return users
 
 
 class AdminExtension(object):
+
     """
     A base class for extending the admin areas in Allura.
 
@@ -994,7 +1038,9 @@ class AdminExtension(object):
         """
         pass
 
+
 class ImportIdConverter(object):
+
     '''
     An interface to convert to and from import_id values for indexing,
     searching, or displaying.
@@ -1021,7 +1067,7 @@ class ImportIdConverter(object):
 
     def expand(self, source_id, app_instance):
         import_id = {
-                'source_id': source_id,
-            }
+            'source_id': source_id,
+        }
         import_id.update(app_instance.config.options.get('import_id', {}))
         return import_id

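As an aside, the user_age arithmetic reflowed in the rate-limit check above is a hand-rolled equivalent of timedelta.total_seconds() truncated to whole seconds, for example:

    from datetime import timedelta

    age = timedelta(days=2, seconds=30, microseconds=500000)
    # // matches the integer division Python 2 performs in the original expression
    manual = (age.microseconds +
              (age.seconds + age.days * 24 * 3600) * 10 ** 6) // 10 ** 6
    assert manual == int(age.total_seconds()) == 172830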

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/forge/tracker.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/forge/tracker.py b/ForgeImporters/forgeimporters/forge/tracker.py
index 0fe3fea..c06db73 100644
--- a/ForgeImporters/forgeimporters/forge/tracker.py
+++ b/ForgeImporters/forgeimporters/forge/tracker.py
@@ -24,15 +24,15 @@ from pylons import app_globals as g
 from ming.orm import session, ThreadLocalORMSession
 
 from tg import (
-        expose,
-        flash,
-        redirect,
-        validate,
-        )
+    expose,
+    flash,
+    redirect,
+    validate,
+)
 from tg.decorators import (
-        with_trailing_slash,
-        without_trailing_slash,
-        )
+    with_trailing_slash,
+    without_trailing_slash,
+)
 
 from allura.controllers import BaseController
 from allura.lib import helpers as h
@@ -44,12 +44,12 @@ from allura import model as M
 from forgetracker.tracker_main import ForgeTrackerApp
 from forgetracker import model as TM
 from forgeimporters.base import (
-        ToolImporter,
-        ToolImportForm,
-        File,
-        get_importer_upload_path,
-        save_importer_upload,
-        )
+    ToolImporter,
+    ToolImportForm,
+    File,
+    get_importer_upload_path,
+    save_importer_upload,
+)
 
 
 class ForgeTrackerImportForm(ToolImportForm):
@@ -57,6 +57,7 @@ class ForgeTrackerImportForm(ToolImportForm):
 
 
 class ForgeTrackerImportController(BaseController):
+
     def __init__(self):
         self.importer = ForgeTrackerImporter()
 
@@ -68,7 +69,7 @@ class ForgeTrackerImportController(BaseController):
     @expose('jinja:forgeimporters.forge:templates/tracker/index.html')
     def index(self, **kw):
         return dict(importer=self.importer,
-                target_app=self.target_app)
+                    target_app=self.target_app)
 
     @without_trailing_slash
     @expose()
@@ -76,16 +77,18 @@ class ForgeTrackerImportController(BaseController):
     @validate(ForgeTrackerImportForm(ForgeTrackerApp), error_handler=index)
     def create(self, tickets_json, mount_point, mount_label, **kw):
         if self.importer.enforce_limit(c.project):
-            save_importer_upload(c.project, 'tickets.json', json.dumps(tickets_json))
+            save_importer_upload(
+                c.project, 'tickets.json', json.dumps(tickets_json))
             self.importer.post(
-                    mount_point=mount_point,
-                    mount_label=mount_label,
-                )
+                mount_point=mount_point,
+                mount_label=mount_label,
+            )
             flash('Ticket import has begun. Your new tracker will be available '
-                    'when the import is complete.')
+                  'when the import is complete.')
             redirect(c.project.url() + 'admin/')
         else:
-            flash('There are too many imports pending at this time.  Please wait and try again.', 'error')
+            flash(
+                'There are too many imports pending at this time.  Please wait and try again.', 'error')
         redirect(c.project.url() + 'admin/')
 
 
@@ -106,7 +109,7 @@ class ForgeTrackerImporter(ToolImporter):
             return json.load(fp)
 
     def import_tool(self, project, user, mount_point=None,
-            mount_label=None, **kw):
+                    mount_label=None, **kw):
         import_id_converter = ImportIdConverter.get()
         tracker_json = self._load_json(project)
         tracker_json['tracker_config']['options'].pop('ordinal', None)
@@ -114,14 +117,16 @@ class ForgeTrackerImporter(ToolImporter):
         tracker_json['tracker_config']['options'].pop('mount_label', None)
         tracker_json['tracker_config']['options'].pop('import_id', None)
         app = project.install_app('tickets', mount_point, mount_label,
-                import_id={
-                        'source': self.source,
-                        'app_config_id': tracker_json['tracker_config']['_id'],
-                    },
-                open_status_names=tracker_json['open_status_names'],
-                closed_status_names=tracker_json['closed_status_names'],
-                **tracker_json['tracker_config']['options']
-            )
+                                  import_id={
+                                      'source': self.source,
+                                      'app_config_id': tracker_json['tracker_config']['_id'],
+                                  },
+                                  open_status_names=tracker_json[
+                                      'open_status_names'],
+                                  closed_status_names=tracker_json[
+                                      'closed_status_names'],
+                                  **tracker_json['tracker_config']['options']
+                                  )
         ThreadLocalORMSession.flush_all()
         try:
             M.session.artifact_orm_session._get().skip_mod_date = True
@@ -129,42 +134,51 @@ class ForgeTrackerImporter(ToolImporter):
                 reporter = self.get_user(ticket_json['reported_by'])
                 owner = self.get_user(ticket_json['assigned_to'])
                 with h.push_config(c, user=reporter, app=app):
-                    self.max_ticket_num = max(ticket_json['ticket_num'], self.max_ticket_num)
+                    self.max_ticket_num = max(
+                        ticket_json['ticket_num'], self.max_ticket_num)
                     ticket = TM.Ticket(
-                            app_config_id=app.config._id,
-                            import_id=import_id_converter.expand(ticket_json['ticket_num'], app),
-                            description=self.annotate(
-                                self.annotate(
-                                    ticket_json['description'],
-                                    owner, ticket_json['assigned_to'], label=' owned'),
-                                reporter, ticket_json['reported_by'], label=' created'),
-                            created_date=dateutil.parser.parse(ticket_json['created_date']),
-                            mod_date=dateutil.parser.parse(ticket_json['mod_date']),
-                            ticket_num=ticket_json['ticket_num'],
-                            summary=ticket_json['summary'],
-                            custom_fields=ticket_json['custom_fields'],
-                            status=ticket_json['status'],
-                            labels=ticket_json['labels'],
-                            votes_down=ticket_json['votes_down'],
-                            votes_up=ticket_json['votes_up'],
-                            votes=ticket_json['votes_up'] - ticket_json['votes_down'],
-                            assigned_to_id=owner._id,
-                        )
-                    ticket.private = ticket_json['private']  # trigger the private property
-                    self.process_comments(ticket, ticket_json['discussion_thread']['posts'])
+                        app_config_id=app.config._id,
+                        import_id=import_id_converter.expand(
+                            ticket_json['ticket_num'], app),
+                        description=self.annotate(
+                            self.annotate(
+                                ticket_json['description'],
+                                owner, ticket_json[
+                                    'assigned_to'], label=' owned'),
+                            reporter, ticket_json[
+                                'reported_by'], label=' created'),
+                        created_date=dateutil.parser.parse(
+                            ticket_json['created_date']),
+                        mod_date=dateutil.parser.parse(
+                            ticket_json['mod_date']),
+                        ticket_num=ticket_json['ticket_num'],
+                        summary=ticket_json['summary'],
+                        custom_fields=ticket_json['custom_fields'],
+                        status=ticket_json['status'],
+                        labels=ticket_json['labels'],
+                        votes_down=ticket_json['votes_down'],
+                        votes_up=ticket_json['votes_up'],
+                        votes=ticket_json['votes_up'] -
+                        ticket_json['votes_down'],
+                        assigned_to_id=owner._id,
+                    )
+                    # trigger the private property
+                    ticket.private = ticket_json['private']
+                    self.process_comments(
+                        ticket, ticket_json['discussion_thread']['posts'])
                     session(ticket).flush(ticket)
                     session(ticket).expunge(ticket)
             app.globals.custom_fields = tracker_json['custom_fields']
             self.process_bins(app, tracker_json['saved_bins'])
             app.globals.last_ticket_num = self.max_ticket_num
             M.AuditLog.log(
-                    'import tool %s from exported Allura JSON' % (
-                            app.config.options.mount_point,
-                        ),
-                    project=project,
-                    user=user,
-                    url=app.url,
-                )
+                'import tool %s from exported Allura JSON' % (
+                    app.config.options.mount_point,
+                ),
+                project=project,
+                user=user,
+                url=app.url,
+            )
             g.post_event('project_updated')
             app.globals.invalidate_bin_counts()
             ThreadLocalORMSession.flush_all()
@@ -193,11 +207,15 @@ class ForgeTrackerImporter(ToolImporter):
             user = self.get_user(comment_json['author'])
             with h.push_config(c, user=user):
                 p = ticket.discussion_thread.add_post(
-                        text = self.annotate(comment_json['text'], user, comment_json['author']),
-                        ignore_security = True,
-                        timestamp = dateutil.parser.parse(comment_json['timestamp']),
-                    )
-                p.add_multiple_attachments([File(a['url']) for a in comment_json['attachments']])
+                    text=self.annotate(
+                        comment_json[
+                            'text'], user, comment_json['author']),
+                    ignore_security=True,
+                    timestamp=dateutil.parser.parse(
+                        comment_json['timestamp']),
+                )
+                p.add_multiple_attachments([File(a['url'])
+                                           for a in comment_json['attachments']])
 
     def process_bins(self, app, bins):
         TM.Bin.query.remove({'app_config_id': app.config._id})

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/__init__.py b/ForgeImporters/forgeimporters/github/__init__.py
index 58afba3..2d4e85c 100644
--- a/ForgeImporters/forgeimporters/github/__init__.py
+++ b/ForgeImporters/forgeimporters/github/__init__.py
@@ -34,10 +34,10 @@ log = logging.getLogger(__name__)
 
 class GitHubProjectExtractor(base.ProjectExtractor):
     PAGE_MAP = {
-            'project_info': 'https://api.github.com/repos/{project_name}',
-            'issues': 'https://api.github.com/repos/{project_name}/issues',
-            'wiki_url': 'https://github.com/{project_name}.wiki',
-        }
+        'project_info': 'https://api.github.com/repos/{project_name}',
+        'issues': 'https://api.github.com/repos/{project_name}/issues',
+        'wiki_url': 'https://github.com/{project_name}.wiki',
+    }
     POSSIBLE_STATES = ('opened', 'closed')
     SUPPORTED_ISSUE_EVENTS = ('closed', 'reopened', 'assigned')
     NEXT_PAGE_URL_RE = re.compile(r'<([^>]*)>; rel="next"')
@@ -66,7 +66,8 @@ class GitHubProjectExtractor(base.ProjectExtractor):
 
     def urlopen(self, url, **kw):
         try:
-            resp = super(GitHubProjectExtractor, self).urlopen(self.add_token(url), **kw)
+            resp = super(GitHubProjectExtractor, self).urlopen(
+                self.add_token(url), **kw)
         except urllib2.HTTPError as e:
             # GitHub will return 403 if rate limit exceeded.
             # We're checking for limit on every request below, but we still
@@ -95,10 +96,12 @@ class GitHubProjectExtractor(base.ProjectExtractor):
         return json.loads(page.read().decode('utf8')), next_page_url
 
     def get_page(self, page_name_or_url, **kw):
-        page = super(GitHubProjectExtractor, self).get_page(page_name_or_url, **kw)
+        page = super(GitHubProjectExtractor, self).get_page(
+            page_name_or_url, **kw)
         page, next_page_url = page
         while next_page_url:
-            p = super(GitHubProjectExtractor, self).get_page(next_page_url, **kw)
+            p = super(GitHubProjectExtractor,
+                      self).get_page(next_page_url, **kw)
             p, next_page_url = p
             page += p
         self.page = page
@@ -114,7 +117,8 @@ class GitHubProjectExtractor(base.ProjectExtractor):
         return self.get_page('project_info').get('clone_url')
 
     def iter_issues(self):
-        # github api doesn't allow getting closed and opened tickets in one query
+        # github api doesn't allow getting closed and opened tickets in one
+        # query
         issues = []
         url = self.get_page_url('issues') + '?state={state}'
         for state in self.POSSIBLE_STATES:
@@ -144,6 +148,7 @@ class GitHubProjectExtractor(base.ProjectExtractor):
 
 
 class GitHubOAuthMixin(object):
+
     '''Support for github oauth web application flow.'''
 
     def oauth_begin(self):
@@ -155,8 +160,10 @@ class GitHubOAuthMixin(object):
             return  # token already exists, nothing to do
         redirect_uri = request.url.rstrip('/') + '/oauth_callback'
         oauth = OAuth2Session(client_id, redirect_uri=redirect_uri)
-        auth_url, state = oauth.authorization_url('https://github.com/login/oauth/authorize')
-        session['github.oauth.state'] = state  # Used in callback to prevent CSRF
+        auth_url, state = oauth.authorization_url(
+            'https://github.com/login/oauth/authorize')
+        # Used in callback to prevent CSRF
+        session['github.oauth.state'] = state
         session['github.oauth.redirect'] = request.url
         session.save()
         redirect(auth_url)
@@ -168,11 +175,13 @@ class GitHubOAuthMixin(object):
         secret = config.get('github_importer.client_secret')
         if not client_id or not secret:
             return  # GitHub app is not configured
-        oauth = OAuth2Session(client_id, state=session.get('github.oauth.state'))
+        oauth = OAuth2Session(
+            client_id, state=session.get('github.oauth.state'))
         token = oauth.fetch_token(
             'https://github.com/login/oauth/access_token',
             client_secret=secret,
             authorization_response=request.url
         )
-        c.user.set_tool_data('GitHubProjectImport', token=token['access_token'])
+        c.user.set_tool_data('GitHubProjectImport',
+                             token=token['access_token'])
         redirect(session.get('github.oauth.redirect', '/'))

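The reflowed get_page()/urlopen() methods above walk GitHub's paginated API by following the Link header. Schematically, the loop looks like this; the regex is the one from the diff, while everything else is an illustrative stand-in rather than the extractor's real interface:

    import re
    import urllib2

    NEXT_PAGE_URL_RE = re.compile(r'<([^>]*)>; rel="next"')


    def iter_pages(url):
        # yield each raw page body, following rel="next" links until exhausted
        while url:
            resp = urllib2.urlopen(url)
            yield resp.read()
            m = NEXT_PAGE_URL_RE.search(resp.info().getheader('Link') or '')
            url = m.group(1) if m else None
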
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/code.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/code.py b/ForgeImporters/forgeimporters/github/code.py
index 6fdd0b4..83095ff 100644
--- a/ForgeImporters/forgeimporters/github/code.py
+++ b/ForgeImporters/forgeimporters/github/code.py
@@ -19,15 +19,15 @@ from pylons import tmpl_context as c
 from pylons import app_globals as g
 from formencode import validators as fev
 from tg import (
-        expose,
-        flash,
-        redirect,
-        validate,
-        )
+    expose,
+    flash,
+    redirect,
+    validate,
+)
 from tg.decorators import (
-        with_trailing_slash,
-        without_trailing_slash,
-        )
+    with_trailing_slash,
+    without_trailing_slash,
+)
 
 from allura.lib.decorators import require_post
 from allura.lib import helpers as h
@@ -37,9 +37,9 @@ from allura import model as M
 from forgegit.git_main import ForgeGitApp
 
 from forgeimporters.base import (
-        ToolImporter,
-        ToolImportForm,
-        )
+    ToolImporter,
+    ToolImportForm,
+)
 from forgeimporters.github import GitHubProjectExtractor, GitHubOAuthMixin
 
 
@@ -49,6 +49,7 @@ class GitHubRepoImportForm(ToolImportForm):
 
 
 class GitHubRepoImportController(BaseController, GitHubOAuthMixin):
+
     def __init__(self):
         self.importer = GitHubRepoImporter()
 
@@ -61,7 +62,7 @@ class GitHubRepoImportController(BaseController, GitHubOAuthMixin):
     def index(self, **kw):
         self.oauth_begin()
         return dict(importer=self.importer,
-                target_app=self.target_app)
+                    target_app=self.target_app)
 
     @without_trailing_slash
     @expose()
@@ -70,14 +71,15 @@ class GitHubRepoImportController(BaseController, GitHubOAuthMixin):
     def create(self, gh_project_name, gh_user_name, mount_point, mount_label, **kw):
         if self.importer.enforce_limit(c.project):
             self.importer.post(
-                    project_name=gh_project_name,
-                    user_name=gh_user_name,
-                    mount_point=mount_point,
-                    mount_label=mount_label)
+                project_name=gh_project_name,
+                user_name=gh_user_name,
+                mount_point=mount_point,
+                mount_label=mount_label)
             flash('Repo import has begun. Your new repo will be available '
-                    'when the import is complete.')
+                  'when the import is complete.')
         else:
-            flash('There are too many imports pending at this time.  Please wait and try again.', 'error')
+            flash(
+                'There are too many imports pending at this time.  Please wait and try again.', 'error')
         redirect(c.project.url() + 'admin/')
 
 
@@ -89,7 +91,7 @@ class GitHubRepoImporter(ToolImporter):
     tool_description = 'Import your repo from GitHub'
 
     def import_tool(self, project, user, project_name=None, mount_point=None,
-            mount_label=None, user_name=None, **kw):
+                    mount_label=None, user_name=None, **kw):
         """ Import a GitHub repo into a new Git Allura tool.
 
         """
@@ -107,9 +109,9 @@ class GitHubRepoImporter(ToolImporter):
             }
         )
         M.AuditLog.log(
-                'import tool %s from %s on %s' % (
-                    app.config.options.mount_point,
-                    project_name, self.source,
-                ), project=project, user=user, url=app.url)
+            'import tool %s from %s on %s' % (
+                app.config.options.mount_point,
+                project_name, self.source,
+            ), project=project, user=user, url=app.url)
         g.post_event('project_updated')
         return app

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/project.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/project.py b/ForgeImporters/forgeimporters/github/project.py
index 2462617..9c8e549 100644
--- a/ForgeImporters/forgeimporters/github/project.py
+++ b/ForgeImporters/forgeimporters/github/project.py
@@ -32,12 +32,14 @@ from . import GitHubOAuthMixin
 
 log = logging.getLogger(__name__)
 
+
 class GitHubProjectForm(base.ProjectImportForm):
     project_name = fev.Regex(r'^[a-zA-Z0-9-_.]+$',
-            not_empty=True,
-            messages={
-                'invalid': 'Valid symbols are: letters, numbers, dashes, underscores and periods',
-            })
+                             not_empty=True,
+                             messages={
+                                 'invalid': 'Valid symbols are: letters, numbers, dashes, underscores and periods',
+                             })
+
 
 class GitHubProjectImporter(base.ProjectImporter, GitHubOAuthMixin):
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/tests/test_code.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/tests/test_code.py b/ForgeImporters/forgeimporters/github/tests/test_code.py
index 7c85058..8a6f95b 100644
--- a/ForgeImporters/forgeimporters/github/tests/test_code.py
+++ b/ForgeImporters/forgeimporters/github/tests/test_code.py
@@ -26,7 +26,8 @@ from forgeimporters.github.code import GitHubRepoImporter
 from forgeimporters.github import GitHubOAuthMixin
 
 
-# important to be distinct from 'test' which ForgeGit uses, so that the tests can run in parallel and not clobber each other
+# important to be distinct from 'test' which ForgeGit uses, so that the
+# tests can run in parallel and not clobber each other
 test_project_with_repo = 'test2'
 with_git = with_tool(test_project_with_repo, 'git', 'src', 'git')
 
@@ -48,7 +49,8 @@ class TestGitHubRepoImporter(TestCase):
         app = p.install_app.return_value
         app.config.options.mount_point = 'code'
         app.url = 'foo'
-        GitHubRepoImporter().import_tool(p, u, project_name='project_name', user_name='testuser')
+        GitHubRepoImporter().import_tool(
+            p, u, project_name='project_name', user_name='testuser')
         p.install_app.assert_called_once_with(
             'Git',
             mount_point='code',
@@ -56,8 +58,8 @@ class TestGitHubRepoImporter(TestCase):
             init_from_url='http://remote/clone/url/',
             import_id={'source': 'GitHub', 'project_name': 'testuser/project_name'})
         M.AuditLog.log.assert_called_once_with(
-                'import tool code from testuser/project_name on GitHub',
-                project=p, user=u, url='foo')
+            'import tool code from testuser/project_name on GitHub',
+            project=p, user=u, url='foo')
         g.post_event.assert_called_once_with('project_updated')
 
 
@@ -65,7 +67,8 @@ class TestGitHubImportController(TestController, TestCase):
 
     @with_git
     def test_index(self):
-        r = self.app.get('/p/{}/admin/ext/import/github-repo/'.format(test_project_with_repo))
+        r = self.app.get(
+            '/p/{}/admin/ext/import/github-repo/'.format(test_project_with_repo))
         self.assertIsNotNone(r.html.find(attrs=dict(name="gh_user_name")))
         self.assertIsNotNone(r.html.find(attrs=dict(name="gh_project_name")))
         self.assertIsNotNone(r.html.find(attrs=dict(name="mount_label")))
@@ -75,18 +78,23 @@ class TestGitHubImportController(TestController, TestCase):
     @patch('forgeimporters.base.import_tool')
     def test_create(self, import_tool):
         params = dict(
-                gh_user_name='spooky',
-                gh_project_name='poop',
-                mount_label='mylabel',
-                mount_point='mymount',
-                )
-        r = self.app.post('/p/{}/admin/ext/import/github-repo/create'.format(test_project_with_repo),
-                params,
-                status=302)
-        self.assertEqual(r.location, 'http://localhost/p/{}/admin/'.format(test_project_with_repo))
-        self.assertEqual(u'mymount', import_tool.post.call_args[1]['mount_point'])
-        self.assertEqual(u'mylabel', import_tool.post.call_args[1]['mount_label'])
-        self.assertEqual(u'poop', import_tool.post.call_args[1]['project_name'])
+            gh_user_name='spooky',
+            gh_project_name='poop',
+            mount_label='mylabel',
+            mount_point='mymount',
+        )
+        r = self.app.post(
+            '/p/{}/admin/ext/import/github-repo/create'.format(test_project_with_repo),
+            params,
+            status=302)
+        self.assertEqual(
+            r.location, 'http://localhost/p/{}/admin/'.format(test_project_with_repo))
+        self.assertEqual(
+            u'mymount', import_tool.post.call_args[1]['mount_point'])
+        self.assertEqual(
+            u'mylabel', import_tool.post.call_args[1]['mount_label'])
+        self.assertEqual(
+            u'poop', import_tool.post.call_args[1]['project_name'])
         self.assertEqual(u'spooky', import_tool.post.call_args[1]['user_name'])
 
     @with_git
@@ -96,19 +104,21 @@ class TestGitHubImportController(TestController, TestCase):
         project.set_tool_data('GitHubRepoImporter', pending=1)
         ThreadLocalORMSession.flush_all()
         params = dict(
-                gh_user_name='spooky',
-                gh_project_name='poop',
-                mount_label='mylabel',
-                mount_point='mymount',
-                )
-        r = self.app.post('/p/{}/admin/ext/import/github-repo/create'.format(test_project_with_repo),
-                params,
-                status=302).follow()
+            gh_user_name='spooky',
+            gh_project_name='poop',
+            mount_label='mylabel',
+            mount_point='mymount',
+        )
+        r = self.app.post(
+            '/p/{}/admin/ext/import/github-repo/create'.format(test_project_with_repo),
+            params,
+            status=302).follow()
         self.assertIn('Please wait and try again', r)
         self.assertEqual(import_tool.post.call_count, 0)
 
     @with_git
     @patch.object(GitHubOAuthMixin, 'oauth_begin')
     def test_oauth(self, oauth_begin):
-        r = self.app.get('/p/{}/admin/ext/import/github-repo/'.format(test_project_with_repo))
+        r = self.app.get(
+            '/p/{}/admin/ext/import/github-repo/'.format(test_project_with_repo))
         oauth_begin.assert_called_once()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/tests/test_tracker.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/tests/test_tracker.py b/ForgeImporters/forgeimporters/github/tests/test_tracker.py
index d63b4d6..c494453 100644
--- a/ForgeImporters/forgeimporters/github/tests/test_tracker.py
+++ b/ForgeImporters/forgeimporters/github/tests/test_tracker.py
@@ -25,9 +25,11 @@ from allura import model as M
 
 from forgeimporters.github import GitHubOAuthMixin
 
-# important to be distinct from 'test' which ForgeTracker uses, so that the tests can run in parallel and not clobber each other
+# important to be distinct from 'test' which ForgeTracker uses, so that
+# the tests can run in parallel and not clobber each other
 test_project_with_tracker = 'test2'
-with_tracker = with_tool(test_project_with_tracker, 'tickets', 'spooky-issues', 'tickets')
+with_tracker = with_tool(test_project_with_tracker,
+                         'tickets', 'spooky-issues', 'tickets')
 
 
 class TestGitHubTrackerImportController(TestController, TestCase):
@@ -51,10 +53,14 @@ class TestGitHubTrackerImportController(TestController, TestCase):
             mount_point='issues',
             mount_label='Issues')
         r = self.app.post(self.url + 'create', params, status=302)
-        self.assertEqual(r.location, 'http://localhost/p/%s/admin/' % test_project_with_tracker)
-        self.assertEqual(u'Issues', import_tool.post.call_args[1]['mount_label'])
-        self.assertEqual(u'issues', import_tool.post.call_args[1]['mount_point'])
-        self.assertEqual(u'mulder', import_tool.post.call_args[1]['project_name'])
+        self.assertEqual(r.location, 'http://localhost/p/%s/admin/' %
+                         test_project_with_tracker)
+        self.assertEqual(
+            u'Issues', import_tool.post.call_args[1]['mount_label'])
+        self.assertEqual(
+            u'issues', import_tool.post.call_args[1]['mount_point'])
+        self.assertEqual(
+            u'mulder', import_tool.post.call_args[1]['project_name'])
         self.assertEqual(u'spooky', import_tool.post.call_args[1]['user_name'])
 
     @with_tracker

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/tests/test_wiki.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/tests/test_wiki.py b/ForgeImporters/forgeimporters/github/tests/test_wiki.py
index 70ea74d..29e9757 100644
--- a/ForgeImporters/forgeimporters/github/tests/test_wiki.py
+++ b/ForgeImporters/forgeimporters/github/tests/test_wiki.py
@@ -32,7 +32,8 @@ from forgeimporters.github.utils import GitHubMarkdownConverter
 from forgeimporters.github import GitHubOAuthMixin
 
 
-# important to be distinct from 'test' which ForgeWiki uses, so that the tests can run in parallel and not clobber each other
+# important to be distinct from 'test' which ForgeWiki uses, so that the
+# tests can run in parallel and not clobber each other
 test_project_with_wiki = 'test2'
 with_wiki = with_tool(test_project_with_wiki, 'wiki', 'w', 'wiki')
 
@@ -44,7 +45,6 @@ class TestGitHubWikiImporter(TestCase):
         project.get_tool_data.side_effect = lambda *args: gh_proj_name
         return project
 
-
     @patch('forgeimporters.github.wiki.M')
     @patch('forgeimporters.github.wiki.ThreadLocalORMSession')
     @patch('forgeimporters.github.wiki.g')
@@ -57,7 +57,8 @@ class TestGitHubWikiImporter(TestCase):
             app = p.install_app.return_value
             app.config.options.mount_point = 'wiki'
             app.url = 'foo'
-            GitHubWikiImporter().import_tool(p, u, project_name='project_name', user_name='testuser')
+            GitHubWikiImporter().import_tool(
+                p, u, project_name='project_name', user_name='testuser')
             p.install_app.assert_called_once_with(
                 'Wiki',
                 mount_point='wiki',
@@ -96,7 +97,8 @@ class TestGitHubWikiImporter(TestCase):
         self.commit2 = Mock()
         blobs = [self.blob1, self.blob2, self.blob3]
         self.commit2.tree.blobs = blobs
-        self.commit2.tree.__contains__ = lambda _, item: item in [self.blob1.name, self.blob2.name, self.blob3.name]
+        self.commit2.tree.__contains__ = lambda _, item: item in [
+            self.blob1.name, self.blob2.name, self.blob3.name]
         self.commit2.tree.traverse.return_value = blobs
         self.commit2.committed_date = 1256291446
 
@@ -144,7 +146,8 @@ class TestGitHubWikiImporter(TestCase):
         with patch('forgeimporters.github.wiki.rmtree'):
             path.return_value = 'temp_path'
             GitHubWikiImporter().import_pages('wiki_url')
-            repo.clone_from.assert_called_with('wiki_url', to_path='temp_path', bare=True)
+            repo.clone_from.assert_called_with(
+                'wiki_url', to_path='temp_path', bare=True)
 
     @patch('forgeimporters.github.wiki.git.Repo._clone')
     @patch('forgeimporters.github.wiki.GitHubWikiImporter._with_history')
@@ -178,7 +181,8 @@ class TestGitHubWikiImporter(TestCase):
         importer.rewrite_links = Mock(return_value='')
         importer._with_history(self.commit2)
         assert_equal(upsert.call_args_list, [call('Home')])
-        assert_equal(render.call_args_list, [call('Home.rst', u'# test message')])
+        assert_equal(render.call_args_list,
+                     [call('Home.rst', u'# test message')])
 
     @skipif(module_not_available('html2text'))
     @patch('forgeimporters.github.wiki.WM.Page.upsert')
@@ -230,12 +234,15 @@ class TestGitHubWikiImporter(TestCase):
         assert_equal(f(u'[[Pagê Nâme]]'), u'[Pagê Nâme]')
         # Github always converts spaces and slashes in links to hyphens,
         # to lookup page in the filesystem. During import we're converting
-        # all hyphens in page name to spaces, but still supporting both link formats.
+        # all hyphens in page name to spaces, but still supporting both link
+        # formats.
         assert_equal(f(u'[[Page With Spaces]]'), u'[Page With Spaces]')
         assert_equal(f(u'[[Page-With-Spaces]]'), u'[Page With Spaces]')
         assert_equal(f(u'[[Page / 1]]'), u'[Page   1]')
-        assert_equal(f(u'[[Title|Page With Spaces]]'), u'[Title](Page With Spaces)')
-        assert_equal(f(u'[[Title|Page-With-Spaces]]'), u'[Title](Page With Spaces)')
+        assert_equal(f(u'[[Title|Page With Spaces]]'),
+                     u'[Title](Page With Spaces)')
+        assert_equal(f(u'[[Title|Page-With-Spaces]]'),
+                     u'[Title](Page With Spaces)')
         assert_equal(f(u'[[go here|Page / 1]]'), u'[go here](Page   1)')
 
     def test_convert_gollum_page_links_escaped(self):
@@ -245,21 +252,25 @@ class TestGitHubWikiImporter(TestCase):
         assert_equal(f(u"'[[Page With Spaces]]"), u'[[Page With Spaces]]')
         assert_equal(f(u"'[[Page-With-Spaces]]"), u'[[Page-With-Spaces]]')
         assert_equal(f(u"'[[Page / 1]]"), u'[[Page / 1]]')
-        assert_equal(f(u"'[[Title|Page With Spaces]]"), u'[[Title|Page With Spaces]]')
-        assert_equal(f(u"'[[Title|Page-With-Spaces]]"), u'[[Title|Page-With-Spaces]]')
+        assert_equal(f(u"'[[Title|Page With Spaces]]"),
+                     u'[[Title|Page With Spaces]]')
+        assert_equal(f(u"'[[Title|Page-With-Spaces]]"),
+                     u'[[Title|Page-With-Spaces]]')
         assert_equal(f(u"'[[go here|Page / 1]]"), u'[[go here|Page / 1]]')
 
     def test_convert_gollum_external_links(self):
         f = GitHubWikiImporter().convert_gollum_tags
         assert_equal(f(u'[[http://sf.net]]'), u'<http://sf.net>')
         assert_equal(f(u'[[https://sf.net]]'), u'<https://sf.net>')
-        assert_equal(f(u'[[SourceForge|http://sf.net]]'), u'[SourceForge](http://sf.net)')
+        assert_equal(f(u'[[SourceForge|http://sf.net]]'),
+                     u'[SourceForge](http://sf.net)')
 
     def test_convert_gollum_external_links_escaped(self):
         f = GitHubWikiImporter().convert_gollum_tags
         assert_equal(f(u"'[[http://sf.net]]"), u'[[http://sf.net]]')
         assert_equal(f(u"'[[https://sf.net]]"), u'[[https://sf.net]]')
-        assert_equal(f(u"'[[SourceForge|http://sf.net]]"), u'[[SourceForge|http://sf.net]]')
+        assert_equal(f(u"'[[SourceForge|http://sf.net]]"),
+                     u'[[SourceForge|http://sf.net]]')
 
     def test_convert_gollum_toc(self):
         f = GitHubWikiImporter().convert_gollum_tags
@@ -292,7 +303,8 @@ Our website is <http://sf.net>.
         importer.github_wiki_url = 'https://github.com/a/b/wiki'
         importer.app = Mock()
         importer.app.url = '/p/test/wiki/'
-        importer.github_markdown_converter = GitHubMarkdownConverter('user', 'proj')
+        importer.github_markdown_converter = GitHubMarkdownConverter(
+            'user', 'proj')
         f = importer.convert_markup
         source = u'''Look at [[this page|Some Page]]
 
@@ -363,16 +375,26 @@ Our website is [[http://sf.net]].
         f = GitHubWikiImporter().rewrite_links
         prefix = 'https://github/a/b/wiki'
         new = '/p/test/wiki/'
-        assert_equal(f(u'<a href="https://github/a/b/wiki/Test Page">Test Page</a>', prefix, new),
-                     u'<a href="/p/test/wiki/Test Page">Test Page</a>')
-        assert_equal(f(u'<a href="https://github/a/b/wiki/Test-Page">Test-Page</a>', prefix, new),
-                     u'<a href="/p/test/wiki/Test Page">Test Page</a>')
-        assert_equal(f(u'<a href="https://github/a/b/issues/1" class="1"></a>', prefix, new),
-                     u'<a href="https://github/a/b/issues/1" class="1"></a>')
-        assert_equal(f(u'<a href="https://github/a/b/wiki/Test Page">https://github/a/b/wiki/Test Page</a>', prefix, new),
-                     u'<a href="/p/test/wiki/Test Page">/p/test/wiki/Test Page</a>')
-        assert_equal(f(u'<a href="https://github/a/b/wiki/Test Page">Test <b>Page</b></a>', prefix, new),
-                     u'<a href="/p/test/wiki/Test Page">Test <b>Page</b></a>')
+        assert_equal(
+            f(u'<a href="https://github/a/b/wiki/Test Page">Test Page</a>',
+              prefix, new),
+            u'<a href="/p/test/wiki/Test Page">Test Page</a>')
+        assert_equal(
+            f(u'<a href="https://github/a/b/wiki/Test-Page">Test-Page</a>',
+              prefix, new),
+            u'<a href="/p/test/wiki/Test Page">Test Page</a>')
+        assert_equal(
+            f(u'<a href="https://github/a/b/issues/1" class="1"></a>',
+              prefix, new),
+            u'<a href="https://github/a/b/issues/1" class="1"></a>')
+        assert_equal(
+            f(u'<a href="https://github/a/b/wiki/Test Page">https://github/a/b/wiki/Test Page</a>',
+              prefix, new),
+            u'<a href="/p/test/wiki/Test Page">/p/test/wiki/Test Page</a>')
+        assert_equal(
+            f(u'<a href="https://github/a/b/wiki/Test Page">Test <b>Page</b></a>',
+              prefix, new),
+            u'<a href="/p/test/wiki/Test Page">Test <b>Page</b></a>')
 
     @skipif(module_not_available('html2text'))
     def test_convert_markup_with_mediawiki2markdown(self):
@@ -489,7 +511,8 @@ some text and **[Tips n' Tricks]**
         importer.app.url = '/p/test/wiki/'
         f = importer.convert_markup
         source = u'*[[this checklist|Troubleshooting]]*'
-        assert_equal(f(source, 't.textile').strip(), u'**[this checklist](Troubleshooting)**')
+        assert_equal(f(source, 't.textile').strip(),
+                     u'**[this checklist](Troubleshooting)**')
 
     @without_module('html2text')
     def test_convert_textile_special_tag_without_html2text(self):
@@ -514,7 +537,8 @@ class TestGitHubWikiImportController(TestController, TestCase):
         self.assertIsNotNone(r.html.find(attrs=dict(name='gh_project_name')))
         self.assertIsNotNone(r.html.find(attrs=dict(name='mount_label')))
         self.assertIsNotNone(r.html.find(attrs=dict(name='mount_point')))
-        self.assertIsNotNone(r.html.find(attrs=dict(name='tool_option', value='import_history')))
+        self.assertIsNotNone(
+            r.html.find(attrs=dict(name='tool_option', value='import_history')))
 
     @with_wiki
     @patch('forgeimporters.base.import_tool')
@@ -526,7 +550,8 @@ class TestGitHubWikiImportController(TestController, TestCase):
             mount_label='GitHub Wiki',
             tool_option='import_history')
         r = self.app.post(self.url + 'create', params, status=302)
-        self.assertEqual(r.location, 'http://localhost/p/%s/admin/' % test_project_with_wiki)
+        self.assertEqual(r.location, 'http://localhost/p/%s/admin/' %
+                         test_project_with_wiki)
         args = import_tool.post.call_args[1]
         self.assertEqual(u'GitHub Wiki', args['mount_label'])
         self.assertEqual(u'gh-wiki', args['mount_point'])
@@ -537,7 +562,8 @@ class TestGitHubWikiImportController(TestController, TestCase):
         # without history
         params.pop('tool_option')
         r = self.app.post(self.url + 'create', params, status=302)
-        self.assertEqual(r.location, 'http://localhost/p/%s/admin/' % test_project_with_wiki)
+        self.assertEqual(r.location, 'http://localhost/p/%s/admin/' %
+                         test_project_with_wiki)
         args = import_tool.post.call_args[1]
         self.assertEqual(u'GitHub Wiki', args['mount_label'])
         self.assertEqual(u'gh-wiki', args['mount_point'])

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/tracker.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/tracker.py b/ForgeImporters/forgeimporters/github/tracker.py
index db16c83..3af1517 100644
--- a/ForgeImporters/forgeimporters/github/tracker.py
+++ b/ForgeImporters/forgeimporters/github/tracker.py
@@ -27,15 +27,15 @@ except ImportError:
 
 from formencode import validators as fev
 from tg import (
-        expose,
-        validate,
-        flash,
-        redirect
-        )
+    expose,
+    validate,
+    flash,
+    redirect
+)
 from tg.decorators import (
-        with_trailing_slash,
-        without_trailing_slash
-        )
+    with_trailing_slash,
+    without_trailing_slash
+)
 
 from allura import model as M
 from allura.controllers import BaseController
@@ -85,14 +85,15 @@ class GitHubTrackerImportController(BaseController, GitHubOAuthMixin):
     def create(self, gh_project_name, gh_user_name, mount_point, mount_label, **kw):
         if self.importer.enforce_limit(c.project):
             self.importer.post(
-                    project_name=gh_project_name,
-                    user_name=gh_user_name,
-                    mount_point=mount_point,
-                    mount_label=mount_label)
+                project_name=gh_project_name,
+                user_name=gh_user_name,
+                mount_point=mount_point,
+                mount_label=mount_label)
             flash('Ticket import has begun. Your new tracker will be available '
-                    'when the import is complete.')
+                  'when the import is complete.')
         else:
-            flash('There are too many imports pending at this time.  Please wait and try again.', 'error')
+            flash(
+                'There are too many imports pending at this time.  Please wait and try again.', 'error')
         redirect(c.project.url() + 'admin/')
 
 
@@ -105,18 +106,18 @@ class GitHubTrackerImporter(ToolImporter):
     open_milestones = set()
 
     def import_tool(self, project, user, project_name, mount_point=None,
-            mount_label=None, **kw):
+                    mount_label=None, **kw):
         import_id_converter = ImportIdConverter.get()
         project_name = '%s/%s' % (kw['user_name'], project_name)
         app = project.install_app('tickets', mount_point, mount_label,
-                EnableVoting=False,
-                open_status_names='open',
-                closed_status_names='closed',
-                import_id={
-                    'source': self.source,
-                    'project_name': project_name,
-                }
-            )
+                                  EnableVoting=False,
+                                  open_status_names='open',
+                                  closed_status_names='closed',
+                                  import_id={
+                                      'source': self.source,
+                                      'project_name': project_name,
+                                  }
+                                  )
         self.github_markdown_converter = GitHubMarkdownConverter(
             kw['user_name'], project_name)
         ThreadLocalORMSession.flush_all()
@@ -142,10 +143,10 @@ class GitHubTrackerImporter(ToolImporter):
                 app.globals.last_ticket_num = self.max_ticket_num
                 ThreadLocalORMSession.flush_all()
             M.AuditLog.log(
-                    'import tool %s from %s on %s' % (
-                        app.config.options.mount_point,
-                        project_name, self.source),
-                    project=project, user=user, url=app.url)
+                'import tool %s from %s on %s' % (
+                    app.config.options.mount_point,
+                    project_name, self.source),
+                project=project, user=user, url=app.url)
             g.post_event('project_updated')
             app.globals.invalidate_bin_counts()
             return app
@@ -165,35 +166,36 @@ class GitHubTrackerImporter(ToolImporter):
         ticket.mod_date = self.parse_datetime(issue['updated_at'])
         if issue['assignee']:
             owner_line = '*Originally owned by:* {}\n'.format(
-                    self.get_user_link(issue['assignee']['login']))
+                self.get_user_link(issue['assignee']['login']))
         else:
             owner_line = ''
         # body processing happens here
         body, attachments = self._get_attachments(extractor, issue['body'])
         ticket.add_multiple_attachments(attachments)
         ticket.description = (
-                u'*Originally created by:* {creator}\n'
-                u'{owner}'
-                u'\n'
-                u'{body}').format(
-                    creator=self.get_user_link(issue['user']['login']),
-                    owner=owner_line,
-                    body=self.github_markdown_converter.convert(body),
-                )
+            u'*Originally created by:* {creator}\n'
+            u'{owner}'
+            u'\n'
+            u'{body}').format(
+            creator=self.get_user_link(issue['user']['login']),
+            owner=owner_line,
+            body=self.github_markdown_converter.convert(body),
+        )
         ticket.labels = [label['name'] for label in issue['labels']]
 
     def process_comments(self, extractor, ticket, issue):
         for comment in extractor.iter_comments(issue):
-            body, attachments = self._get_attachments(extractor, comment['body'])
+            body, attachments = self._get_attachments(
+                extractor, comment['body'])
             if comment['user']:
                 posted_by = u'*Originally posted by:* {}\n\n'.format(
                     self.get_user_link(comment['user']['login']))
                 body = posted_by + body
             p = ticket.discussion_thread.add_post(
-                    text = self.github_markdown_converter.convert(body),
-                    ignore_security = True,
-                    timestamp = self.parse_datetime(comment['created_at']),
-                )
+                text=self.github_markdown_converter.convert(body),
+                ignore_security=True,
+                timestamp=self.parse_datetime(comment['created_at']),
+            )
             p.add_multiple_attachments(attachments)
 
     def process_events(self, extractor, ticket, issue):
@@ -201,22 +203,22 @@ class GitHubTrackerImporter(ToolImporter):
             prefix = text = ''
             if event['event'] in ('reopened', 'closed'):
                 prefix = '*Ticket changed by:* {}\n\n'.format(
-                        self.get_user_link(event['actor']['login']))
+                    self.get_user_link(event['actor']['login']))
             if event['event'] == 'reopened':
                 text = '- **status**: closed --> open'
             elif event['event'] == 'closed':
                 text = '- **status**: open --> closed'
             elif event['event'] == 'assigned':
                 text = '- **assigned_to**: {}'.format(
-                        self.get_user_link(event['actor']['login']))
+                    self.get_user_link(event['actor']['login']))
 
             text = prefix + text
             if not text:
                 continue
             ticket.discussion_thread.add_post(
-                text = text,
-                ignore_security = True,
-                timestamp = self.parse_datetime(event['created_at'])
+                text=text,
+                ignore_security=True,
+                timestamp=self.parse_datetime(event['created_at'])
             )
 
     def process_milestones(self, ticket, issue):
@@ -265,11 +267,13 @@ class GitHubTrackerImporter(ToolImporter):
             attachments.append(Attachment(
                 extractor,
                 match.group(1),  # url
-                'attach{}.{}'.format(i + 1, match.group(2)) # extension
+                'attach{}.{}'.format(i + 1, match.group(2))  # extension
             ))
         return (body, attachments)
 
+
 class Attachment(object):
+
     def __init__(self, extractor, url, filename):
         self.url = url
         self.filename = filename

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/utils.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/utils.py b/ForgeImporters/forgeimporters/github/utils.py
index 0fb650b..3f00310 100644
--- a/ForgeImporters/forgeimporters/github/utils.py
+++ b/ForgeImporters/forgeimporters/github/utils.py
@@ -38,7 +38,7 @@ class GitHubMarkdownConverter(object):
             nextline = False
             for p in self.code_patterns:
                 if line.startswith(p):
-                    prev_line = lines[i-1].strip() if (i-1) >= 0 else ''
+                    prev_line = lines[i - 1].strip() if (i - 1) >= 0 else ''
                     if len(prev_line) > 0 and not in_block:
                         new_lines.append('')
                     if p == '```':
@@ -62,10 +62,12 @@ class GitHubMarkdownConverter(object):
                 if inline_matches:
                     # need to not handle inline blocks as a text
                     for i, m in enumerate(inline_matches):
-                        line = line.replace(m, '<inline_block>%s</inline_block>' % i)
+                        line = line.replace(
+                            m, '<inline_block>%s</inline_block>' % i)
                     line = self._handle_non_code(line)
                     for i, m in enumerate(inline_matches):
-                        line = line.replace('<inline_block>%s</inline_block>' % i, m)
+                        line = line.replace(
+                            '<inline_block>%s</inline_block>' % i, m)
                     new_lines.append(line)
                 else:
                     new_lines.append(self._handle_non_code(line))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/github/wiki.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/github/wiki.py b/ForgeImporters/forgeimporters/github/wiki.py
index 33801df..74789d2 100644
--- a/ForgeImporters/forgeimporters/github/wiki.py
+++ b/ForgeImporters/forgeimporters/github/wiki.py
@@ -28,28 +28,28 @@ from pylons import tmpl_context as c
 from ming.orm import ThreadLocalORMSession
 from formencode import validators as fev
 from tg import (
-        expose,
-        validate,
-        flash,
-        redirect,
-        )
+    expose,
+    validate,
+    flash,
+    redirect,
+)
 from tg.decorators import (
-        with_trailing_slash,
-        without_trailing_slash,
-        )
+    with_trailing_slash,
+    without_trailing_slash,
+)
 
 from allura.controllers import BaseController
 from allura.lib import helpers as h
 from allura.lib import utils
 from allura.lib.plugin import ImportIdConverter
 from allura.lib.decorators import (
-        require_post,
-        )
+    require_post,
+)
 from allura import model as M
 from forgeimporters.base import (
-        ToolImporter,
-        ToolImportForm,
-        )
+    ToolImporter,
+    ToolImportForm,
+)
 from forgeimporters.github import GitHubProjectExtractor, GitHubOAuthMixin
 from forgeimporters.github.utils import GitHubMarkdownConverter
 from forgewiki import model as WM
@@ -105,7 +105,8 @@ class GitHubWikiImportController(BaseController, GitHubOAuthMixin):
             flash('Wiki import has begun. Your new wiki will be available '
                   'when the import is complete.')
         else:
-            flash('There are too many imports pending at this time.  Please wait and try again.', 'error')
+            flash(
+                'There are too many imports pending at this time.  Please wait and try again.', 'error')
         redirect(c.project.url() + 'admin/')
 
 
@@ -120,22 +121,24 @@ class GitHubWikiImporter(ToolImporter):
     mediawiki_exts = ['.wiki', '.mediawiki']
     markdown_exts = utils.MARKDOWN_EXTENSIONS
     textile_exts = ['.textile']
-    # List of supported formats https://github.com/gollum/gollum/wiki#page-files
+    # List of supported formats
+    # https://github.com/gollum/gollum/wiki#page-files
     supported_formats = [
-            '.asciidoc',
-            '.creole',
-            '.org',
-            '.pod',
-            '.rdoc',
-            '.rest.txt',
-            '.rst.txt',
-            '.rest',
-            '.rst',
+        '.asciidoc',
+        '.creole',
+        '.org',
+        '.pod',
+        '.rdoc',
+        '.rest.txt',
+        '.rst.txt',
+        '.rest',
+        '.rst',
     ] + mediawiki_exts + markdown_exts + textile_exts
     available_pages = []
 
-    def import_tool(self, project, user, project_name=None, mount_point=None, mount_label=None, user_name=None,
-                    tool_option=None, **kw):
+    def import_tool(
+            self, project, user, project_name=None, mount_point=None, mount_label=None, user_name=None,
+            tool_option=None, **kw):
         """ Import a GitHub wiki into a new Wiki Allura tool.
 
         """
@@ -144,7 +147,8 @@ class GitHubWikiImporter(ToolImporter):
         if not extractor.has_wiki():
             return
 
-        self.github_wiki_url = extractor.get_page_url('wiki_url').replace('.wiki', '/wiki')
+        self.github_wiki_url = extractor.get_page_url(
+            'wiki_url').replace('.wiki', '/wiki')
         self.app = project.install_app(
             "Wiki",
             mount_point=mount_point or 'wiki',
@@ -161,7 +165,8 @@ class GitHubWikiImporter(ToolImporter):
         try:
             M.session.artifact_orm_session._get().skip_mod_date = True
             with h.push_config(c, app=self.app):
-                self.import_pages(extractor.get_page_url('wiki_url'), history=with_history)
+                self.import_pages(
+                    extractor.get_page_url('wiki_url'), history=with_history)
             ThreadLocalORMSession.flush_all()
             M.AuditLog.log(
                 'import tool %s from %s on %s' % (
@@ -183,7 +188,7 @@ class GitHubWikiImporter(ToolImporter):
         pages = [blob.name for blob in commit.tree.traverse()]
         pages = map(os.path.splitext, pages)
         pages = [self._convert_page_name(name) for name, ext in pages
-                if ext in self.supported_formats]
+                 if ext in self.supported_formats]
         self.available_pages = pages
 
     def _without_history(self, commit):
@@ -196,7 +201,8 @@ class GitHubWikiImporter(ToolImporter):
             self._set_available_pages(commit)
             renamed_to = None
             if '=>' in filename:
-                # File renamed. Stats contains entry like 'Page.md => NewPage.md'
+                # File renamed. Stats contains entry like 'Page.md =>
+                # NewPage.md'
                 filename, renamed_to = filename.split(' => ')
             if renamed_to and renamed_to in commit.tree:
                 text = commit.tree[renamed_to].data_stream.read()
@@ -209,7 +215,8 @@ class GitHubWikiImporter(ToolImporter):
 
     def _make_page(self, text, filename, commit, renamed_to=None):
         orig_name = self._format_supported(filename)
-        renamed_orig_name = self._format_supported(renamed_to) if renamed_to else None
+        renamed_orig_name = self._format_supported(
+            renamed_to) if renamed_to else None
         if not orig_name:
             return
         if renamed_to and not renamed_orig_name:
@@ -220,13 +227,16 @@ class GitHubWikiImporter(ToolImporter):
         wiki_page.viewable_by = ['all']
         if renamed_orig_name and renamed_to in commit.tree:
             wiki_page.title = self._convert_page_name(renamed_orig_name)
-            wiki_page.text = self.convert_markup(h.really_unicode(text), renamed_to)
+            wiki_page.text = self.convert_markup(
+                h.really_unicode(text), renamed_to)
         elif filename in commit.tree:
-            wiki_page.text = self.convert_markup(h.really_unicode(text), filename)
+            wiki_page.text = self.convert_markup(
+                h.really_unicode(text), filename)
         else:
             wiki_page.delete()
         import_id_name = renamed_orig_name if renamed_orig_name else orig_name
-        wiki_page.import_id = ImportIdConverter.get().expand(import_id_name, self.app)
+        wiki_page.import_id = ImportIdConverter.get().expand(
+            import_id_name, self.app)
         wiki_page.commit()
         return wiki_page
 
@@ -290,12 +300,14 @@ class GitHubWikiImporter(ToolImporter):
                 if not new_prefix.endswith('/'):
                     new_prefix += '/'
                 _re = re.compile(r'%s(\S*)' % prefix)
+
                 def repl(m):
                     return new_prefix + self._convert_page_name(m.group(1))
                 text = _re.sub(repl, text)
             else:
                 text = h.render_any_markup(filename, text)
-                text = self.rewrite_links(text, self.github_wiki_url, self.app.url)
+                text = self.rewrite_links(
+                    text, self.github_wiki_url, self.app.url)
             return text
         elif ext and ext in self.textile_exts:
             text = self._prepare_textile_text(text)
@@ -306,8 +318,10 @@ class GitHubWikiImporter(ToolImporter):
                 text = html2text.html2text(text)
                 text = self.convert_gollum_tags(text)
             text = text.replace('<notextile>', '').replace('</notextile>', '')
-            text = text.replace('&#60;notextile&#62;', '').replace('&#60;/notextile&#62;', '')
-            text = text.replace('&lt;notextile&gt;', '').replace('&lt;/notextile&gt;', '')
+            text = text.replace('&#60;notextile&#62;', '').replace(
+                '&#60;/notextile&#62;', '')
+            text = text.replace('&lt;notextile&gt;', '').replace(
+                '&lt;/notextile&gt;', '')
             return text
         else:
             text = h.render_any_markup(filename, text)
@@ -372,7 +386,8 @@ class GitHubWikiImporter(ToolImporter):
         # E.g. if you have two pages: a.md and A.md both [[a]] and [[A]] will refer a.md.
         # We're emulating this behavior using list of all available pages
         try:
-            idx = map(lambda p: p.lower(), self.available_pages).index(page.lower())
+            idx = map(lambda p: p.lower(),
+                      self.available_pages).index(page.lower())
         except ValueError:
             idx = None
         if idx is not None:
@@ -409,6 +424,6 @@ class GitHubWikiImporter(ToolImporter):
 
         # to convert gollum tags properly used <notextile> tag,
         # so these tags will not be affected by converter
-        text = text.replace('[[', '<notextile>[[').replace(']]', ']]</notextile>')
+        text = text.replace(
+            '[[', '<notextile>[[').replace(']]', ']]</notextile>')
         return text
-
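
One of the wiki.py hunks above rewraps the comment explaining that Gollum page links are case-insensitive (with both a.md and A.md present, [[a]] and [[A]] resolve to a.md), which the importer emulates with a lowercased scan of available_pages. A small self-contained sketch of that lookup follows, under the assumption that the first lowercased match wins and an unknown name is kept verbatim; the resolve_page name is illustrative.

    def resolve_page(name, available_pages):
        # Case-insensitive lookup: first page whose lowercased title matches.
        lowered = [p.lower() for p in available_pages]
        try:
            return available_pages[lowered.index(name.lower())]
        except ValueError:
            return name  # no matching page yet; keep the link text as written

    pages = ['Home', 'a', 'A']
    assert resolve_page('A', pages) == 'a'          # earlier 'a' shadows 'A'
    assert resolve_page('home', pages) == 'Home'
    assert resolve_page('Missing', pages) == 'Missing'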

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/google/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/google/__init__.py b/ForgeImporters/forgeimporters/google/__init__.py
index 87990d0..e2ff4df 100644
--- a/ForgeImporters/forgeimporters/google/__init__.py
+++ b/ForgeImporters/forgeimporters/google/__init__.py
@@ -36,6 +36,7 @@ from forgeimporters.base import File
 
 log = logging.getLogger(__name__)
 
+
 def _as_text(node, chunks=None):
     """
     Similar to node.text, but preserves whitespace around tags,
@@ -52,6 +53,7 @@ def _as_text(node, chunks=None):
             _as_text(n, chunks)
     return ''.join(chunks)
 
+
 def _as_markdown(tag, project_name):
     fragments = []
     for fragment in tag:
@@ -60,7 +62,8 @@ def _as_markdown(tag, project_name):
             qs = parse_qs(href.query)
             gc_link = not href.netloc or href.netloc == 'code.google.com'
             path_parts = href.path.split('/')
-            target_project = path_parts[2] if gc_link and len(path_parts) >= 3 else ''
+            target_project = path_parts[
+                2] if gc_link and len(path_parts) >= 3 else ''
             internal_link = target_project == project_name
             if gc_link and internal_link and 'id' in qs:
                 # rewrite issue 123 project-internal issue links
@@ -69,29 +72,37 @@ def _as_markdown(tag, project_name):
                 # rewrite r123 project-internal revision links
                 fragment = '[r%s]' % qs['r'][0]
             elif gc_link:
-                # preserve GC-internal links (probably issue PROJECT:123 inter-project issue links)
+                # preserve GC-internal links (probably issue PROJECT:123
+                # inter-project issue links)
                 fragment = '[%s](%s)' % (
-                        h.plain2markdown(fragment.text, preserve_multiple_spaces=True, has_html_entities=True),
-                        urljoin('https://code.google.com/p/%s/issues/' % project_name, fragment['href']),
-                    )
+                    h.plain2markdown(
+                        fragment.text, preserve_multiple_spaces=True, has_html_entities=True),
+                    urljoin('https://code.google.com/p/%s/issues/' %
+                            project_name, fragment['href']),
+                )
             else:
                 # convert all other links to Markdown syntax
                 fragment = '[%s](%s)' % (fragment.text, fragment['href'])
         elif getattr(fragment, 'name', None) == 'i':
-            # preserve styling of "(No comment was entered for this change.)" messages
-            fragment = '*%s*' % h.plain2markdown(fragment.text, preserve_multiple_spaces=True, has_html_entities=True)
+            # preserve styling of "(No comment was entered for this change.)"
+            # messages
+            fragment = '*%s*' % h.plain2markdown(fragment.text,
+                                                 preserve_multiple_spaces=True, has_html_entities=True)
         elif getattr(fragment, 'name', None) == 'b':
             # preserve styling of issue template
-            fragment = '**%s**' % h.plain2markdown(fragment.text, preserve_multiple_spaces=True, has_html_entities=True)
+            fragment = '**%s**' % h.plain2markdown(fragment.text,
+                                                   preserve_multiple_spaces=True, has_html_entities=True)
         elif getattr(fragment, 'name', None) == 'br':
             # preserve forced line-breaks
             fragment = '\n'
         else:
             # convert all others to plain MD
-            fragment = h.plain2markdown(unicode(fragment), preserve_multiple_spaces=True, has_html_entities=True)
+            fragment = h.plain2markdown(
+                unicode(fragment), preserve_multiple_spaces=True, has_html_entities=True)
         fragments.append(fragment)
     return ''.join(fragments).strip()
 
+
 def csv_parser(page):
     lines = page.readlines()
     if not lines:
@@ -107,9 +118,9 @@ def csv_parser(page):
 
 class GoogleCodeProjectNameValidator(fev.FancyValidator):
     not_empty = True
-    messages={
-            'invalid': 'Please enter a project URL, or a project name containing only letters, numbers, and dashes.',
-        }
+    messages = {
+        'invalid': 'Please enter a project URL, or a project name containing only letters, numbers, and dashes.',
+    }
 
     def _to_python(self, value, state=None):
         url = urlparse(value.strip())
@@ -127,30 +138,31 @@ class GoogleCodeProjectExtractor(ProjectExtractor):
     RE_REPO_TYPE = re.compile(r'(svn|hg|git)')
 
     PAGE_MAP = {
-            'project_info': BASE_URL + '/p/{project_name}/',
-            'source_browse': BASE_URL + '/p/{project_name}/source/browse/',
-            'issues_csv': BASE_URL + '/p/{project_name}/issues/csv?can=1&colspec=ID&sort=ID&start={start}',
-            'issue': BASE_URL + '/p/{project_name}/issues/detail?id={issue_id}',
-        }
-
-    LICENSE_MAP = defaultdict(lambda:'Other/Proprietary License', {
-            'Apache License 2.0': 'Apache License V2.0',
-            'Artistic License/GPL': 'Artistic License',
-            'Eclipse Public License 1.0': 'Eclipse Public License',
-            'GNU GPL v2': 'GNU General Public License version 2.0 (GPLv2)',
-            'GNU GPL v3': 'GNU General Public License version 3.0 (GPLv3)',
-            'GNU Lesser GPL': 'GNU Library or Lesser General Public License version 2.0 (LGPLv2)',
-            'MIT License': 'MIT License',
-            'Mozilla Public License 1.1': 'Mozilla Public License 1.1 (MPL 1.1)',
-            'New BSD License': 'BSD License',
-            'Other Open Source': 'Open Software License',
-        })
+        'project_info': BASE_URL + '/p/{project_name}/',
+        'source_browse': BASE_URL + '/p/{project_name}/source/browse/',
+        'issues_csv': BASE_URL + '/p/{project_name}/issues/csv?can=1&colspec=ID&sort=ID&start={start}',
+        'issue': BASE_URL + '/p/{project_name}/issues/detail?id={issue_id}',
+    }
+
+    LICENSE_MAP = defaultdict(lambda: 'Other/Proprietary License', {
+        'Apache License 2.0': 'Apache License V2.0',
+        'Artistic License/GPL': 'Artistic License',
+        'Eclipse Public License 1.0': 'Eclipse Public License',
+        'GNU GPL v2': 'GNU General Public License version 2.0 (GPLv2)',
+        'GNU GPL v3': 'GNU General Public License version 3.0 (GPLv3)',
+        'GNU Lesser GPL': 'GNU Library or Lesser General Public License version 2.0 (LGPLv2)',
+        'MIT License': 'MIT License',
+        'Mozilla Public License 1.1': 'Mozilla Public License 1.1 (MPL 1.1)',
+        'New BSD License': 'BSD License',
+        'Other Open Source': 'Open Software License',
+    })
 
     DEFAULT_ICON = 'http://www.gstatic.com/codesite/ph/images/defaultlogo.png'
 
     def get_short_description(self, project):
         page = self.get_page('project_info')
-        project.short_description = page.find(itemprop='description').text.strip()
+        project.short_description = page.find(
+            itemprop='description').text.strip()
 
     def get_icon(self, project):
         page = self.get_page('project_info')
@@ -165,12 +177,13 @@ class GoogleCodeProjectExtractor(ProjectExtractor):
             filetype = 'image/png'
         M.ProjectFile.save_image(
             icon_name, icon.file, filetype,
-            square=True, thumbnail_size=(48,48),
+            square=True, thumbnail_size=(48, 48),
             thumbnail_meta={'project_id': project._id, 'category': 'icon'})
 
     def get_license(self, project):
         page = self.get_page('project_info')
-        license = page.find(text='Code license').findNext().find('a').text.strip()
+        license = page.find(text='Code license').findNext().find(
+            'a').text.strip()
         trove = M.TroveCategory.query.get(fullname=self.LICENSE_MAP[license])
         project.trove_license.append(trove._id)
 
@@ -179,7 +192,7 @@ class GoogleCodeProjectExtractor(ProjectExtractor):
         repo_type = page.find(id="crumb_root")
         if not repo_type:
             raise Exception("Couldn't detect repo type: no #crumb_root in "
-                    "{0}".format(self.url))
+                            "{0}".format(self.url))
         re_match = self.RE_REPO_TYPE.match(repo_type.text.lower())
         if re_match:
             return re_match.group(0)
@@ -200,13 +213,14 @@ class GoogleCodeProjectExtractor(ProjectExtractor):
                     yield (int(issue_id), cls(project_name, 'issue', issue_id=issue_id))
                 except HTTPError as e:
                     if e.code == 404:
-                        log.warn('Unable to load GC issue: %s #%s: %s: %s', project_name, issue_id, e, e.url)
+                        log.warn('Unable to load GC issue: %s #%s: %s: %s',
+                                 project_name, issue_id, e, e.url)
                         continue
                     else:
                         raise
             # get any new issues that were created while importing
             # (jumping back a few in case some were deleted and new ones added)
-            new_ids = extractor.get_issue_ids(start=len(issue_ids)-10)
+            new_ids = extractor.get_issue_ids(start=len(issue_ids) - 10)
             issue_ids = new_ids - issue_ids
 
     def get_issue_ids(self, start=0):
@@ -223,7 +237,8 @@ class GoogleCodeProjectExtractor(ProjectExtractor):
         return issue_ids
 
     def get_issue_summary(self):
-        text = self.page.find(id='issueheader').findAll('td', limit=2)[1].span.text.strip()
+        text = self.page.find(id='issueheader').findAll(
+            'td', limit=2)[1].span.text.strip()
         bs = BeautifulSoup(text, convertEntities=BeautifulSoup.HTML_ENTITIES)
         return bs.text
 
@@ -246,14 +261,16 @@ class GoogleCodeProjectExtractor(ProjectExtractor):
         return UserLink(a)
 
     def get_issue_status(self):
-        tag = self.page.find(id='issuemeta').find('th', text=re.compile('Status:')).findNext().span
+        tag = self.page.find(id='issuemeta').find(
+            'th', text=re.compile('Status:')).findNext().span
         if tag:
             return tag.text.strip()
         else:
             return ''
 
     def get_issue_owner(self):
-        tag = self.page.find(id='issuemeta').find('th', text=re.compile('Owner:')).findNext().find(True, 'userlink')
+        tag = self.page.find(id='issuemeta').find(
+            'th', text=re.compile('Owner:')).findNext().find(True, 'userlink')
         if tag:
             return UserLink(tag)
         else:
@@ -277,20 +294,24 @@ class GoogleCodeProjectExtractor(ProjectExtractor):
         for comment in self.page.findAll('div', 'issuecomment'):
             yield Comment(comment, self.project_name)
 
+
 class UserLink(object):
+
     def __init__(self, tag):
         self.name = tag.text.strip()
         if tag.get('href'):
-            self.url = urljoin(GoogleCodeProjectExtractor.BASE_URL, tag.get('href'))
+            self.url = urljoin(
+                GoogleCodeProjectExtractor.BASE_URL, tag.get('href'))
         else:
             self.url = None
 
     def __str__(self):
         if self.url:
-            return '[{name}]({url})'.format(name = self.name, url = self.url)
+            return '[{name}]({url})'.format(name=self.name, url=self.url)
         else:
             return self.name
 
+
 def _get_attachments(tag):
     attachment_links = tag.find('div', 'attachments')
     if attachment_links:
@@ -307,9 +328,12 @@ def _get_attachments(tag):
     else:
         return []
 
+
 class Comment(object):
+
     def __init__(self, tag, project_name):
-        self.author = UserLink(tag.find('span', 'author').find(True, 'userlink'))
+        self.author = UserLink(
+            tag.find('span', 'author').find(True, 'userlink'))
         self.created_date = tag.find('span', 'date').get('title')
         self.body = _as_markdown(tag.find('pre'), project_name)
         self._get_updates(tag)
@@ -318,28 +342,30 @@ class Comment(object):
     def _get_updates(self, tag):
         _updates = tag.find('div', 'updates')
         self.updates = {
-                b.text: b.nextSibling.strip()
-                for b in _updates.findAll('b')} if _updates else {}
+            b.text: b.nextSibling.strip()
+            for b in _updates.findAll('b')} if _updates else {}
 
     @property
     def annotated_text(self):
         text = (
-                u'*Originally posted by:* {author}\n'
-                u'\n'
-                u'{body}\n'
-                u'\n'
-                u'{updates}'
-            ).format(
-                author=self.author,
-                body=self.body,
-                updates='\n'.join(
-                        '**%s** %s' % (k,v)
-                        for k,v in self.updates.items()
-                    ),
-            )
+            u'*Originally posted by:* {author}\n'
+            u'\n'
+            u'{body}\n'
+            u'\n'
+            u'{updates}'
+        ).format(
+            author=self.author,
+            body=self.body,
+            updates='\n'.join(
+                '**%s** %s' % (k, v)
+                for k, v in self.updates.items()
+            ),
+        )
         return text
 
+
 class Attachment(File):
+
     def __init__(self, url):
         url = urljoin(GoogleCodeProjectExtractor.BASE_URL, url)
         filename = parse_qs(urlparse(url).query)['name'][0]
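
The google/__init__.py hunks above rewrap the _as_markdown branch that rewrites project-internal Google Code links: code.google.com URLs with ?id=NNN (issues) or ?r=NNN (revisions) for the same project become short references, while other links fall through to plain Markdown conversion. A rough sketch of that decision is below; the [#NNN] issue shorthand is assumed (only the revision format appears explicitly in this hunk) and rewrite_gc_link is a hypothetical helper.

    try:
        from urllib.parse import urlparse, parse_qs   # Python 3
    except ImportError:
        from urlparse import urlparse, parse_qs       # Python 2, as in Allura

    def rewrite_gc_link(href, project_name):
        parsed = urlparse(href)
        qs = parse_qs(parsed.query)
        parts = parsed.path.split('/')
        gc_link = not parsed.netloc or parsed.netloc == 'code.google.com'
        target = parts[2] if gc_link and len(parts) >= 3 else ''
        internal = gc_link and target == project_name
        if internal and 'id' in qs:
            return '[#%s]' % qs['id'][0]   # assumed Allura ticket shorthand
        if internal and 'r' in qs:
            return '[r%s]' % qs['r'][0]    # revision shorthand from the hunk
        return None  # caller falls back to normal [text](href) conversion

    assert rewrite_gc_link('/p/myproj/issues/detail?id=123', 'myproj') == '[#123]'
    assert rewrite_gc_link('/p/other/issues/detail?id=9', 'myproj') is None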

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/google/code.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/google/code.py b/ForgeImporters/forgeimporters/google/code.py
index bcb379e..8181c87 100644
--- a/ForgeImporters/forgeimporters/google/code.py
+++ b/ForgeImporters/forgeimporters/google/code.py
@@ -23,15 +23,15 @@ from formencode import validators as fev
 from pylons import tmpl_context as c
 from pylons import app_globals as g
 from tg import (
-        expose,
-        flash,
-        redirect,
-        validate,
-        )
+    expose,
+    flash,
+    redirect,
+    validate,
+)
 from tg.decorators import (
-        with_trailing_slash,
-        without_trailing_slash,
-        )
+    with_trailing_slash,
+    without_trailing_slash,
+)
 
 from allura.controllers import BaseController
 from allura.lib import validators as v
@@ -39,8 +39,8 @@ from allura.lib.decorators import require_post
 from allura import model as M
 
 from forgeimporters.base import (
-        ToolImporter,
-        )
+    ToolImporter,
+)
 from forgeimporters.google import GoogleCodeProjectExtractor
 from forgeimporters.google import GoogleCodeProjectNameValidator
 
@@ -96,7 +96,8 @@ class GoogleRepoImportForm(fe.schema.Schema):
         gc_project_name = value['gc_project_name']
         mount_point = value['mount_point']
         try:
-            repo_type = GoogleCodeProjectExtractor(gc_project_name).get_repo_type()
+            repo_type = GoogleCodeProjectExtractor(
+                gc_project_name).get_repo_type()
         except urllib2.HTTPError as e:
             if e.code == 404:
                 msg = 'No such project'
@@ -108,13 +109,15 @@ class GoogleRepoImportForm(fe.schema.Schema):
             raise
         tool_class = REPO_APPS[repo_type]
         try:
-            value['mount_point'] = v.MountPointValidator(tool_class).to_python(mount_point)
+            value['mount_point'] = v.MountPointValidator(
+                tool_class).to_python(mount_point)
         except fe.Invalid as e:
             raise fe.Invalid('mount_point:' + str(e), value, state)
         return value
 
 
 class GoogleRepoImportController(BaseController):
+
     def __init__(self):
         self.importer = GoogleRepoImporter()
 
@@ -126,7 +129,7 @@ class GoogleRepoImportController(BaseController):
     @expose('jinja:forgeimporters.google:templates/code/index.html')
     def index(self, **kw):
         return dict(importer=self.importer,
-                target_app=self.target_app)
+                    target_app=self.target_app)
 
     @without_trailing_slash
     @expose()
@@ -135,13 +138,14 @@ class GoogleRepoImportController(BaseController):
     def create(self, gc_project_name, mount_point, mount_label, **kw):
         if self.importer.enforce_limit(c.project):
             self.importer.post(
-                    project_name=gc_project_name,
-                    mount_point=mount_point,
-                    mount_label=mount_label)
+                project_name=gc_project_name,
+                mount_point=mount_point,
+                mount_label=mount_label)
             flash('Repo import has begun. Your new repo will be available '
-                    'when the import is complete.')
+                  'when the import is complete.')
         else:
-            flash('There are too many imports pending at this time.  Please wait and try again.', 'error')
+            flash(
+                'There are too many imports pending at this time.  Please wait and try again.', 'error')
         redirect(c.project.url() + 'admin/')
 
 
@@ -153,7 +157,7 @@ class GoogleRepoImporter(ToolImporter):
     tool_description = 'Import your primary SVN, Git, or Hg repo from Google Code'
 
     def import_tool(self, project, user, project_name=None, mount_point=None,
-            mount_label=None, **kw):
+                    mount_label=None, **kw):
         """ Import a Google Code repo into a new SVN, Git, or Hg Allura tool.
 
         """
@@ -161,19 +165,19 @@ class GoogleRepoImporter(ToolImporter):
         repo_type = extractor.get_repo_type()
         repo_url = get_repo_url(project_name, repo_type)
         app = project.install_app(
-                REPO_ENTRY_POINTS[repo_type],
-                mount_point=mount_point or 'code',
-                mount_label=mount_label or 'Code',
-                init_from_url=repo_url,
-                import_id={
-                        'source': self.source,
-                        'project_name': project_name,
-                    }
-            )
+            REPO_ENTRY_POINTS[repo_type],
+            mount_point=mount_point or 'code',
+            mount_label=mount_label or 'Code',
+            init_from_url=repo_url,
+            import_id={
+                'source': self.source,
+                'project_name': project_name,
+            }
+        )
         M.AuditLog.log(
-                'import tool %s from %s on %s' % (
-                    app.config.options.mount_point,
-                    project_name, self.source,
-                ), project=project, user=user, url=app.url)
+            'import tool %s from %s on %s' % (
+                app.config.options.mount_point,
+                project_name, self.source,
+            ), project=project, user=user, url=app.url)
         g.post_event('project_updated')
         return app

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/google/project.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/google/project.py b/ForgeImporters/forgeimporters/google/project.py
index 4bd6181..bb74c80 100644
--- a/ForgeImporters/forgeimporters/google/project.py
+++ b/ForgeImporters/forgeimporters/google/project.py
@@ -37,6 +37,7 @@ class GoogleCodeProjectForm(base.ProjectImportForm):
 
 
 class GoogleCodeProjectImporter(base.ProjectImporter):
+
     """
     Project importer for Google Code.
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/google/tests/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/google/tests/__init__.py b/ForgeImporters/forgeimporters/google/tests/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/google/tests/__init__.py
+++ b/ForgeImporters/forgeimporters/google/tests/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/google/tests/test_code.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/google/tests/test_code.py b/ForgeImporters/forgeimporters/google/tests/test_code.py
index 7c79e36..e8f6e35 100644
--- a/ForgeImporters/forgeimporters/google/tests/test_code.py
+++ b/ForgeImporters/forgeimporters/google/tests/test_code.py
@@ -24,15 +24,16 @@ from allura.tests.decorators import with_tool
 from allura import model as M
 
 
-# important to be distinct from 'test' which ForgeSVN uses, so that the tests can run in parallel and not clobber each other
+# important to be distinct from 'test' which ForgeSVN uses, so that the
+# tests can run in parallel and not clobber each other
 test_project_with_repo = 'test2'
 
 
 from forgeimporters.google.code import (
-        get_repo_url,
-        GoogleRepoImporter,
-        GoogleRepoImportController,
-        )
+    get_repo_url,
+    GoogleRepoImporter,
+    GoogleRepoImportController,
+)
 
 
 class TestGetRepoUrl(TestCase):
@@ -72,24 +73,25 @@ class TestGoogleRepoImporter(TestCase):
         GoogleRepoImporter().import_tool(p, u, project_name='project_name')
         get_repo_url.assert_called_once_with('project_name', 'git')
         p.install_app.assert_called_once_with('Git',
-                mount_point='code',
-                mount_label='Code',
-                init_from_url='http://remote/clone/url/',
-                import_id={
-                        'source': 'Google Code',
-                        'project_name': 'project_name',
-                    },
-            )
+                                              mount_point='code',
+                                              mount_label='Code',
+                                              init_from_url='http://remote/clone/url/',
+                                              import_id={
+                                                  'source': 'Google Code',
+                                                  'project_name': 'project_name',
+                                              },
+                                              )
         M.AuditLog.log.assert_called_once_with(
-                'import tool code from project_name on Google Code',
-                project=p, user=u, url='foo')
+            'import tool code from project_name on Google Code',
+            project=p, user=u, url='foo')
         g.post_event.assert_called_once_with('project_updated')
 
 
 class TestGoogleRepoImportController(TestController, TestCase):
 
     def test_index(self):
-        r = self.app.get('/p/{}/admin/ext/import/google-code-repo/'.format(test_project_with_repo))
+        r = self.app.get(
+            '/p/{}/admin/ext/import/google-code-repo/'.format(test_project_with_repo))
         self.assertIsNotNone(r.html.find(attrs=dict(name="gc_project_name")))
         self.assertIsNotNone(r.html.find(attrs=dict(name="mount_label")))
         self.assertIsNotNone(r.html.find(attrs=dict(name="mount_point")))
@@ -99,16 +101,21 @@ class TestGoogleRepoImportController(TestController, TestCase):
     def test_create(self, import_tool, extractor):
         extractor.return_value.get_repo_type.return_value = 'git'
         params = dict(gc_project_name='poop',
-                mount_label='mylabel',
-                mount_point='mymount',
-                )
-        r = self.app.post('/p/{}/admin/ext/import/google-code-repo/create'.format(test_project_with_repo),
-                params,
-                status=302)
-        self.assertEqual(r.location, 'http://localhost/p/{}/admin/'.format(test_project_with_repo))
-        self.assertEqual(u'mymount', import_tool.post.call_args[1]['mount_point'])
-        self.assertEqual(u'mylabel', import_tool.post.call_args[1]['mount_label'])
-        self.assertEqual(u'poop', import_tool.post.call_args[1]['project_name'])
+                      mount_label='mylabel',
+                      mount_point='mymount',
+                      )
+        r = self.app.post(
+            '/p/{}/admin/ext/import/google-code-repo/create'.format(test_project_with_repo),
+            params,
+            status=302)
+        self.assertEqual(
+            r.location, 'http://localhost/p/{}/admin/'.format(test_project_with_repo))
+        self.assertEqual(
+            u'mymount', import_tool.post.call_args[1]['mount_point'])
+        self.assertEqual(
+            u'mylabel', import_tool.post.call_args[1]['mount_label'])
+        self.assertEqual(
+            u'poop', import_tool.post.call_args[1]['project_name'])
 
     @patch('forgeimporters.google.code.GoogleCodeProjectExtractor')
     @patch('forgeimporters.base.import_tool')
@@ -118,11 +125,12 @@ class TestGoogleRepoImportController(TestController, TestCase):
         project.set_tool_data('GoogleRepoImporter', pending=1)
         ThreadLocalORMSession.flush_all()
         params = dict(gc_project_name='poop',
-                mount_label='mylabel',
-                mount_point='mymount',
-                )
-        r = self.app.post('/p/{}/admin/ext/import/google-code-repo/create'.format(test_project_with_repo),
-                params,
-                status=302).follow()
+                      mount_label='mylabel',
+                      mount_point='mymount',
+                      )
+        r = self.app.post(
+            '/p/{}/admin/ext/import/google-code-repo/create'.format(test_project_with_repo),
+            params,
+            status=302).follow()
         self.assertIn('Please wait and try again', r)
         self.assertEqual(import_tool.post.call_count, 0)


[28/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/ext/admin/admin_main.py
----------------------------------------------------------------------
diff --git a/Allura/allura/ext/admin/admin_main.py b/Allura/allura/ext/admin/admin_main.py
index f60bd39..607c151 100644
--- a/Allura/allura/ext/admin/admin_main.py
+++ b/Allura/allura/ext/admin/admin_main.py
@@ -52,13 +52,16 @@ from allura.lib.widgets.project_list import ProjectScreenshots
 
 log = logging.getLogger(__name__)
 
+
 class W:
     markdown_editor = ffw.MarkdownEdit()
     label_edit = ffw.LabelEdit()
-    mount_delete = ffw.Lightbox(name='mount_delete',trigger='a.mount_delete')
-    admin_modal = ffw.Lightbox(name='admin_modal',trigger='a.admin_modal')
-    install_modal = ffw.Lightbox(name='install_modal',trigger='a.install_trig')
-    explain_export_modal = ffw.Lightbox(name='explain_export',trigger='#why_export')
+    mount_delete = ffw.Lightbox(name='mount_delete', trigger='a.mount_delete')
+    admin_modal = ffw.Lightbox(name='admin_modal', trigger='a.admin_modal')
+    install_modal = ffw.Lightbox(
+        name='install_modal', trigger='a.install_trig')
+    explain_export_modal = ffw.Lightbox(
+        name='explain_export', trigger='#why_export')
     group_card = aw.GroupCard()
     permission_card = aw.PermissionCard()
     group_settings = aw.GroupSettings()
@@ -67,11 +70,11 @@ class W:
     screenshot_list = ProjectScreenshots()
     metadata_admin = aw.MetadataAdmin()
     audit = aw.AuditLog()
-    page_list=ffw.PageList()
-
+    page_list = ffw.PageList()
 
 
 class AdminApp(Application):
+
     '''This is the admin app.  It is pretty much required for
     a functioning allura project.
     '''
@@ -79,10 +82,10 @@ class AdminApp(Application):
     _installable_tools = None
     max_instances = 0
     tool_label = 'admin'
-    icons={
-        24:'images/admin_24.png',
-        32:'images/admin_32.png',
-        48:'images/admin_48.png'
+    icons = {
+        24: 'images/admin_24.png',
+        32: 'images/admin_32.png',
+        48: 'images/admin_48.png'
     }
     exportable = True
 
@@ -91,8 +94,9 @@ class AdminApp(Application):
         self.root = ProjectAdminController()
         self.api_root = ProjectAdminRestController()
         self.admin = AdminAppAdminController(self)
-        self.templates = pkg_resources.resource_filename('allura.ext.admin', 'templates')
-        self.sitemap = [ SitemapEntry('Admin','.')]
+        self.templates = pkg_resources.resource_filename(
+            'allura.ext.admin', 'templates')
+        self.sitemap = [SitemapEntry('Admin', '.')]
 
     def is_visible_to(self, user):
         '''Whether the user can view the app.'''
@@ -106,10 +110,11 @@ class AdminApp(Application):
             app = App(project, cfg)
             if app.installable:
                 tools.append(dict(name=name, app=App))
-            session(cfg).expunge(cfg)  # prevent from saving temporary config to db
+            # prevent from saving temporary config to db
+            session(cfg).expunge(cfg)
         tools.sort(key=lambda t: (t['app'].status_int(), t['app'].ordinal))
         return [t for t in tools
-            if t['app'].status in project.allowed_tool_status]
+                if t['app'].status in project.allowed_tool_status]
 
     @staticmethod
     def exportable_tools_for(project):
@@ -128,37 +133,40 @@ class AdminApp(Application):
     @h.exceptionless([], log)
     def sidebar_menu(self):
         links = []
-        admin_url = c.project.url()+'admin/'
-
+        admin_url = c.project.url() + 'admin/'
 
         if c.project.is_nbhd_project:
-            links.append(SitemapEntry('Add Project', c.project.url()+'add_project', ui_icon=g.icons['plus']))
-            nbhd_admin_url = c.project.neighborhood.url()+'_admin/'
+            links.append(SitemapEntry('Add Project', c.project.url()
+                         + 'add_project', ui_icon=g.icons['plus']))
+            nbhd_admin_url = c.project.neighborhood.url() + '_admin/'
             links = links + [
-                     SitemapEntry('Neighborhood'),
-                     SitemapEntry('Overview', nbhd_admin_url+'overview'),
-                     SitemapEntry('Awards', nbhd_admin_url+'accolades')]
+                SitemapEntry('Neighborhood'),
+                SitemapEntry('Overview', nbhd_admin_url + 'overview'),
+                SitemapEntry('Awards', nbhd_admin_url + 'accolades')]
         else:
-            links += [SitemapEntry('Metadata', admin_url+'overview'),]
+            links += [SitemapEntry('Metadata', admin_url + 'overview'), ]
             if c.project.neighborhood.name != "Users":
                 links += [
-                    SitemapEntry('Screenshots', admin_url+'screenshots'),
-                    SitemapEntry('Categorization', admin_url+'trove')
+                    SitemapEntry('Screenshots', admin_url + 'screenshots'),
+                    SitemapEntry('Categorization', admin_url + 'trove')
                 ]
-        links.append(SitemapEntry('Tools', admin_url+'tools'))
+        links.append(SitemapEntry('Tools', admin_url + 'tools'))
         if asbool(config.get('bulk_export_enabled', True)):
             links.append(SitemapEntry('Export', admin_url + 'export'))
         if c.project.is_root and has_access(c.project, 'admin')():
-            links.append(SitemapEntry('User Permissions', admin_url+'groups/'))
+            links.append(
+                SitemapEntry('User Permissions', admin_url + 'groups/'))
         if not c.project.is_root and has_access(c.project, 'admin')():
-            links.append(SitemapEntry('Permissions', admin_url+'permissions/'))
+            links.append(
+                SitemapEntry('Permissions', admin_url + 'permissions/'))
         if len(c.project.neighborhood_invitations):
-            links.append(SitemapEntry('Invitation(s)', admin_url+'invitations'))
-        links.append(SitemapEntry('Audit Trail', admin_url+ 'audit/'))
+            links.append(
+                SitemapEntry('Invitation(s)', admin_url + 'invitations'))
+        links.append(SitemapEntry('Audit Trail', admin_url + 'audit/'))
         if c.project.is_nbhd_project:
-            links.append(SitemapEntry('Statistics', nbhd_admin_url+ 'stats/'))
+            links.append(SitemapEntry('Statistics', nbhd_admin_url + 'stats/'))
             links.append(None)
-            links.append(SitemapEntry('Help', nbhd_admin_url+ 'help/'))
+            links.append(SitemapEntry('Help', nbhd_admin_url + 'help/'))
 
         for ep_name in sorted(g.entry_points['admin'].keys()):
             admin_extension = g.entry_points['admin'][ep_name]
@@ -245,10 +253,13 @@ class ProjectAdminController(BaseController):
     @expose('jinja:allura.ext.admin:templates/project_trove.html')
     def trove(self):
         c.label_edit = W.label_edit
-        base_troves = M.TroveCategory.query.find(dict(trove_parent_id=0)).sort('fullname').all()
-        topic_trove = M.TroveCategory.query.get(trove_parent_id=0,shortname='topic')
-        license_trove = M.TroveCategory.query.get(trove_parent_id=0,shortname='license')
-        return dict(base_troves=base_troves,license_trove=license_trove,topic_trove=topic_trove)
+        base_troves = M.TroveCategory.query.find(
+            dict(trove_parent_id=0)).sort('fullname').all()
+        topic_trove = M.TroveCategory.query.get(
+            trove_parent_id=0, shortname='topic')
+        license_trove = M.TroveCategory.query.get(
+            trove_parent_id=0, shortname='license')
+        return dict(base_troves=base_troves, license_trove=license_trove, topic_trove=topic_trove)
 
     @without_trailing_slash
     @expose('jinja:allura.ext.admin:templates/project_tools.html')
@@ -262,7 +273,8 @@ class ProjectAdminController(BaseController):
         return dict(
             mounts=mounts,
             installable_tools=AdminApp.installable_tools_for(c.project),
-            roles=M.ProjectRole.query.find(dict(project_id=c.project.root_project._id)).sort('_id').all(),
+            roles=M.ProjectRole.query.find(
+                dict(project_id=c.project.root_project._id)).sort('_id').all(),
             categories=M.ProjectCategory.query.find(dict(parent_id=None)).sort('label').all())
 
     @expose()
@@ -272,7 +284,8 @@ class ProjectAdminController(BaseController):
             grouping_threshold = int(grouping_threshold)
             if grouping_threshold < 1:
                 raise ValueError('Invalid threshold')
-            c.project.set_tool_data('allura', grouping_threshold=grouping_threshold)
+            c.project.set_tool_data(
+                'allura', grouping_threshold=grouping_threshold)
         except ValueError as e:
             flash('Invalid threshold', 'error')
         redirect('tools')
@@ -353,22 +366,26 @@ class ProjectAdminController(BaseController):
             c.project.removal = removal
             c.project.removal_changed_date = datetime.utcnow()
         if 'delete_icon' in kw:
-            M.ProjectFile.query.remove(dict(project_id=c.project._id, category='icon'))
+            M.ProjectFile.query.remove(
+                dict(project_id=c.project._id, category='icon'))
             M.AuditLog.log('remove project icon')
             h.log_action(log, 'remove project icon').info('')
             g.post_event('project_updated')
             redirect('overview')
         elif 'delete' in kw:
-            allow_project_delete = asbool(config.get('allow_project_delete', True))
+            allow_project_delete = asbool(
+                config.get('allow_project_delete', True))
             if allow_project_delete or not c.project.is_root:
                 M.AuditLog.log('delete project')
                 h.log_action(log, 'delete project').info('')
-                plugin.ProjectRegistrationProvider.get().delete_project(c.project, c.user)
+                plugin.ProjectRegistrationProvider.get().delete_project(
+                    c.project, c.user)
             redirect('overview')
         elif 'undelete' in kw:
             h.log_action(log, 'undelete project').info('')
             M.AuditLog.log('undelete project')
-            plugin.ProjectRegistrationProvider.get().undelete_project(c.project, c.user)
+            plugin.ProjectRegistrationProvider.get().undelete_project(
+                c.project, c.user)
             redirect('overview')
         if name != c.project.name:
             h.log_action(log, 'change project name').info('')
@@ -389,7 +406,8 @@ class ProjectAdminController(BaseController):
             c.project.category_id = category
         if external_homepage != c.project.external_homepage:
             h.log_action(log, 'change external home page').info('')
-            M.AuditLog.log('change external home page to %s', external_homepage)
+            M.AuditLog.log('change external home page to %s',
+                           external_homepage)
             c.project.external_homepage = external_homepage
         if support_page != c.project.support_page:
             h.log_action(log, 'change project support page').info('')
@@ -398,31 +416,37 @@ class ProjectAdminController(BaseController):
         old_twitter = c.project.social_account('Twitter')
         if not old_twitter or twitter_handle != old_twitter.accounturl:
             h.log_action(log, 'change project twitter handle').info('')
-            M.AuditLog.log('change project twitter handle to %s', twitter_handle)
+            M.AuditLog.log('change project twitter handle to %s',
+                           twitter_handle)
             c.project.set_social_account('Twitter', twitter_handle)
         old_facebook = c.project.social_account('Facebook')
         if not old_facebook or facebook_page != old_facebook.accounturl:
             if not facebook_page or 'facebook.com' in urlparse(facebook_page).netloc:
                 h.log_action(log, 'change project facebook page').info('')
-                M.AuditLog.log('change project facebook page to %s', facebook_page)
+                M.AuditLog.log(
+                    'change project facebook page to %s', facebook_page)
                 c.project.set_social_account('Facebook', facebook_page)
         if support_page_url != c.project.support_page_url:
             h.log_action(log, 'change project support page url').info('')
-            M.AuditLog.log('change project support page url to %s', support_page_url)
+            M.AuditLog.log('change project support page url to %s',
+                           support_page_url)
             c.project.support_page_url = support_page_url
         if moved_to_url != c.project.moved_to_url:
             h.log_action(log, 'change project moved to url').info('')
             M.AuditLog.log('change project moved to url to %s', moved_to_url)
             c.project.moved_to_url = moved_to_url
         if export_controlled != c.project.export_controlled:
-            h.log_action(log, 'change project export controlled status').info('')
-            M.AuditLog.log('change project export controlled status to %s', export_controlled)
+            h.log_action(
+                log, 'change project export controlled status').info('')
+            M.AuditLog.log(
+                'change project export controlled status to %s', export_controlled)
             c.project.export_controlled = not not export_controlled
             if not export_controlled:
                 export_control_type = None
         if export_control_type != c.project.export_control_type:
             h.log_action(log, 'change project export control type').info('')
-            M.AuditLog.log('change project export control type to %s', export_control_type)
+            M.AuditLog.log('change project export control type to %s',
+                           export_control_type)
             c.project.export_control_type = export_control_type
         if tracking_id != c.project.tracking_id:
             h.log_action(log, 'change project tracking ID').info('')
@@ -431,21 +455,22 @@ class ProjectAdminController(BaseController):
 
         if icon is not None and icon != '':
             if c.project.icon:
-                M.ProjectFile.remove(dict(project_id=c.project._id, category='icon'))
+                M.ProjectFile.remove(
+                    dict(project_id=c.project._id, category='icon'))
             M.AuditLog.log('update project icon')
             M.ProjectFile.save_image(
                 icon.filename, icon.file, content_type=icon.type,
-                square=True, thumbnail_size=(48,48),
-                thumbnail_meta=dict(project_id=c.project._id,category='icon'))
+                square=True, thumbnail_size=(48, 48),
+                thumbnail_meta=dict(project_id=c.project._id, category='icon'))
         g.post_event('project_updated')
         flash('Saved', 'success')
         redirect('overview')
 
     def _add_trove(self, type, new_trove):
-        current_troves = getattr(c.project,'trove_%s'%type)
+        current_troves = getattr(c.project, 'trove_%s' % type)
         trove_obj = M.TroveCategory.query.get(trove_cat_id=int(new_trove))
         error_msg = None
-        if type in ['license','audience','developmentstatus','language'] and len(current_troves) >= 6:
+        if type in ['license', 'audience', 'developmentstatus', 'language'] and len(current_troves) >= 6:
             error_msg = 'You may not have more than 6 of this category.'
         elif type in ['topic'] and len(current_troves) >= 3:
             error_msg = 'You may not have more than 3 of this category.'
@@ -453,7 +478,8 @@ class ProjectAdminController(BaseController):
             if trove_obj._id not in current_troves:
                 current_troves.append(trove_obj._id)
                 M.AuditLog.log('add trove %s: %s', type, trove_obj.fullpath)
-                ThreadLocalORMSession.flush_all()  # just in case the event handling is super fast
+                # just in case the event handling is super fast
+                ThreadLocalORMSession.flush_all()
                 c.project.last_updated = datetime.utcnow()
                 g.post_event('project_updated')
             else:
@@ -465,7 +491,7 @@ class ProjectAdminController(BaseController):
     def add_trove_js(self, type, new_trove, **kw):
         require_access(c.project, 'update')
         trove_obj, error_msg = self._add_trove(type, new_trove)
-        return dict(trove_full_path = trove_obj.fullpath, trove_cat_id = trove_obj.trove_cat_id, error_msg=error_msg)
+        return dict(trove_full_path=trove_obj.fullpath, trove_cat_id=trove_obj.trove_cat_id, error_msg=error_msg)
 
     @expose()
     @require_post()
@@ -473,7 +499,7 @@ class ProjectAdminController(BaseController):
         require_access(c.project, 'update')
         trove_obj, error_msg = self._add_trove(type, new_trove)
         if error_msg:
-            flash(error_msg,'error')
+            flash(error_msg, 'error')
         redirect('trove')
 
     @expose()
@@ -481,11 +507,12 @@ class ProjectAdminController(BaseController):
     def delete_trove(self, type, trove, **kw):
         require_access(c.project, 'update')
         trove_obj = M.TroveCategory.query.get(trove_cat_id=int(trove))
-        current_troves = getattr(c.project,'trove_%s'%type)
+        current_troves = getattr(c.project, 'trove_%s' % type)
         if trove_obj is not None and trove_obj._id in current_troves:
             M.AuditLog.log('remove trove %s: %s', type, trove_obj.fullpath)
             current_troves.remove(trove_obj._id)
-            ThreadLocalORMSession.flush_all()  # just in case the event handling is super fast
+            # just in case the event handling is super fast
+            ThreadLocalORMSession.flush_all()
             c.project.last_updated = datetime.utcnow()
             g.post_event('project_updated')
         redirect('trove')
@@ -497,7 +524,8 @@ class ProjectAdminController(BaseController):
         require_access(c.project, 'update')
         screenshots = c.project.get_screenshots()
         if len(screenshots) >= 6:
-            flash('You may not have more than 6 screenshots per project.','error')
+            flash('You may not have more than 6 screenshots per project.',
+                  'error')
         elif screenshot is not None and screenshot != '':
             M.AuditLog.log('add screenshot')
             sort = 1 + max([ss.sort or 0 for ss in screenshots] or [0])
@@ -509,8 +537,8 @@ class ProjectAdminController(BaseController):
                     category='screenshot',
                     caption=caption,
                     sort=sort),
-                square=True, thumbnail_size=(150,150),
-                thumbnail_meta=dict(project_id=c.project._id,category='screenshot_thumb'))
+                square=True, thumbnail_size=(150, 150),
+                thumbnail_meta=dict(project_id=c.project._id, category='screenshot_thumb'))
             g.post_event('project_updated')
         redirect('screenshots')
 
@@ -536,7 +564,8 @@ class ProjectAdminController(BaseController):
         require_access(c.project, 'update')
         if id is not None and id != '':
             M.AuditLog.log('remove screenshot')
-            M.ProjectFile.query.remove(dict(project_id=c.project._id, _id=ObjectId(id)))
+            M.ProjectFile.query.remove(
+                dict(project_id=c.project._id, _id=ObjectId(id)))
             g.post_event('project_updated')
         redirect('screenshots')
 
@@ -545,7 +574,8 @@ class ProjectAdminController(BaseController):
     def edit_screenshot(self, id=None, caption=None, **kw):
         require_access(c.project, 'update')
         if id is not None and id != '':
-            M.ProjectFile.query.get(project_id=c.project._id, _id=ObjectId(id)).caption=caption
+            M.ProjectFile.query.get(
+                project_id=c.project._id, _id=ObjectId(id)).caption = caption
             g.post_event('project_updated')
         redirect('screenshots')
 
@@ -578,12 +608,15 @@ class ProjectAdminController(BaseController):
                 p.ordinal = int(sp['ordinal'])
         if tools:
             for p in tools:
-                c.project.app_config(p['mount_point']).options.ordinal = int(p['ordinal'])
+                c.project.app_config(
+                    p['mount_point']).options.ordinal = int(p['ordinal'])
         redirect('tools')
 
     def _update_mounts(self, subproject=None, tool=None, new=None, **kw):
-        if subproject is None: subproject = []
-        if tool is None: tool = []
+        if subproject is None:
+            subproject = []
+        if tool is None:
+            tool = []
         for sp in subproject:
             p = M.Project.query.get(shortname=sp['shortname'],
                                     neighborhood_id=c.project.neighborhood_id)
@@ -594,7 +627,8 @@ class ProjectAdminController(BaseController):
                     'delete subproject %s', sp['shortname'],
                     meta=dict(name=sp['shortname']))
                 p.removal = 'deleted'
-                plugin.ProjectRegistrationProvider.get().delete_project(p, c.user)
+                plugin.ProjectRegistrationProvider.get().delete_project(
+                    p, c.user)
             elif not new:
                 M.AuditLog.log('update subproject %s', sp['shortname'])
                 p.name = sp['name']
@@ -620,7 +654,7 @@ class ProjectAdminController(BaseController):
                 M.AuditLog.log('create subproject %s', mount_point)
                 h.log_action(log, 'create subproject').info(
                     'create subproject %s', mount_point,
-                    meta=dict(mount_point=mount_point,name=new['mount_label']))
+                    meta=dict(mount_point=mount_point, name=new['mount_label']))
                 sp = c.project.new_subproject(mount_point)
                 sp.name = new['mount_label']
                 sp.ordinal = int(new['ordinal'])
@@ -635,7 +669,8 @@ class ProjectAdminController(BaseController):
                 h.log_action(log, 'install tool').info(
                     'install tool %s', mount_point,
                     meta=dict(tool_type=ep_name, mount_point=mount_point, mount_label=new['mount_label']))
-                c.project.install_app(ep_name, mount_point, mount_label=new['mount_label'], ordinal=new['ordinal'])
+                c.project.install_app(
+                    ep_name, mount_point, mount_label=new['mount_label'], ordinal=new['ordinal'])
         g.post_event('project_updated')
 
     @h.vardec
@@ -660,7 +695,8 @@ class ProjectAdminController(BaseController):
                 flash(str(e), 'error')
                 redirect('.')
             else:
-                flash('Export scheduled.  You will receive an email with download instructions when complete.', 'ok')
+                flash(
+                    'Export scheduled.  You will receive an email with download instructions when complete.', 'ok')
                 redirect('export')
 
         exportable_tools = AdminApp.exportable_tools_for(c.project)
@@ -671,6 +707,7 @@ class ProjectAdminController(BaseController):
 
 
 class ProjectAdminRestController(BaseController):
+
     """
     Exposes RESTful API for project admin actions.
     """
@@ -703,8 +740,9 @@ class ProjectAdminRestController(BaseController):
         if not asbool(config.get('bulk_export_enabled', True)):
             raise exc.HTTPNotFound()
         if not tools:
-            raise exc.HTTPBadRequest('Must give at least one tool mount point to export')
-        tools = aslist(tools,',')
+            raise exc.HTTPBadRequest(
+                'Must give at least one tool mount point to export')
+        tools = aslist(tools, ',')
         exportable_tools = AdminApp.exportable_tools_for(c.project)
         allowed = set(t.options.mount_point for t in exportable_tools)
         if not set(tools).issubset(allowed):
@@ -717,9 +755,9 @@ class ProjectAdminRestController(BaseController):
         filename = c.project.bulk_export_filename()
         export_tasks.bulk_export.post(tools, filename, send_email=send_email)
         return {
-                'status': 'in progress',
-                'filename': filename,
-            }
+            'status': 'in progress',
+            'filename': filename,
+        }
 
     @expose('json:')
     def export_status(self, **kw):
@@ -781,18 +819,19 @@ class ProjectAdminRestController(BaseController):
         if order is None:
             order = 'last'
         mounts = [{
-                'ordinal': ac.options.ordinal,
-                'label': ac.options.mount_label,
-                'mount': ac.options.mount_point,
-                'type': ac.tool_name.lower(),
-            } for ac in c.project.app_configs]
-        subs = {p.shortname: p for p in M.Project.query.find({'parent_id': c.project._id})}
+            'ordinal': ac.options.ordinal,
+            'label': ac.options.mount_label,
+            'mount': ac.options.mount_point,
+            'type': ac.tool_name.lower(),
+        } for ac in c.project.app_configs]
+        subs = {p.shortname:
+                p for p in M.Project.query.find({'parent_id': c.project._id})}
         for sub in subs.values():
             mounts.append({
-                    'ordinal': sub.ordinal,
-                    'mount': sub.shortname,
-                    'type': 'sub-project',
-                })
+                'ordinal': sub.ordinal,
+                'mount': sub.shortname,
+                'type': 'sub-project',
+            })
         mounts.sort(key=itemgetter('ordinal'))
         if order == 'first':
             ordinal = 0
@@ -831,7 +870,7 @@ class ProjectAdminRestController(BaseController):
         return {'success': True,
                 'info': 'Tool %s with mount_point %s and mount_label %s was created.'
                         % (tool, mount_point, mount_label)
-        }
+                }
 
 
 class PermissionsController(BaseController):
@@ -857,18 +896,20 @@ class PermissionsController(BaseController):
             new_group_ids = args.get('new', [])
             group_ids = args.get('value', [])
             if isinstance(new_group_ids, basestring):
-                new_group_ids = [ new_group_ids ]
+                new_group_ids = [new_group_ids]
             if isinstance(group_ids, basestring):
-                group_ids = [ group_ids ]
+                group_ids = [group_ids]
             # make sure the admin group has the admin permission
             if perm == 'admin':
                 if c.project.is_root:
                     pid = c.project._id
                 else:
                     pid = c.project.parent_id
-                admin_group_id = str(M.ProjectRole.query.get(project_id=pid, name='Admin')._id)
+                admin_group_id = str(
+                    M.ProjectRole.query.get(project_id=pid, name='Admin')._id)
                 if admin_group_id not in group_ids + new_group_ids:
-                    flash('You cannot remove the admin group from the admin permission.','warning')
+                    flash(
+                        'You cannot remove the admin group from the admin permission.', 'warning')
                     group_ids.append(admin_group_id)
             permissions[perm] = []
             role_ids = map(ObjectId, group_ids + new_group_ids)
@@ -876,23 +917,24 @@ class PermissionsController(BaseController):
         c.project.acl = []
         for perm, role_ids in permissions.iteritems():
             role_names = lambda ids: ','.join(sorted(
-                    pr.name for pr in M.ProjectRole.query.find(dict(_id={'$in':ids}))))
+                pr.name for pr in M.ProjectRole.query.find(dict(_id={'$in': ids}))))
             old_role_ids = old_permissions.get(perm, [])
             if old_role_ids != role_ids:
                 M.AuditLog.log('updated "%s" permissions: "%s" => "%s"',
-                               perm,role_names(old_role_ids), role_names(role_ids))
+                               perm, role_names(old_role_ids), role_names(role_ids))
             c.project.acl += [M.ACE.allow(rid, perm) for rid in role_ids]
         g.post_event('project_updated')
         redirect('.')
 
     def _index_permissions(self):
         permissions = dict(
-            (p,[]) for p in c.project.permissions)
+            (p, []) for p in c.project.permissions)
         for ace in c.project.acl:
             if ace.access == M.ACE.ALLOW:
                 permissions[ace.permission].append(ace.role_id)
         return permissions
 
+
 class GroupsController(BaseController):
 
     def _check_security(self):
@@ -900,7 +942,7 @@ class GroupsController(BaseController):
 
     def _index_permissions(self):
         permissions = dict(
-            (p,[]) for p in c.project.permissions)
+            (p, []) for p in c.project.permissions)
         for ace in c.project.acl:
             if ace.access == M.ACE.ALLOW:
                 permissions[ace.permission].append(ace.role_id)
@@ -908,14 +950,15 @@ class GroupsController(BaseController):
 
     def _map_group_permissions(self):
         roles = c.project.named_roles
-        permissions=self._index_permissions()
+        permissions = self._index_permissions()
         permissions_by_role = dict()
         auth_role = M.ProjectRole.authenticated()
         anon_role = M.ProjectRole.anonymous()
-        for role in roles+[auth_role, anon_role]:
+        for role in roles + [auth_role, anon_role]:
             permissions_by_role[str(role._id)] = []
             for perm in permissions:
-                perm_info = dict(has="no", text="Does not have permission %s" % perm, name=perm)
+                perm_info = dict(has="no", text="Does not have permission %s" %
+                                 perm, name=perm)
                 role_ids = permissions[perm]
                 if role._id in role_ids:
                     perm_info['text'] = "Has permission %s" % perm
@@ -923,15 +966,18 @@ class GroupsController(BaseController):
                 else:
                     for r in role.child_roles():
                         if r._id in role_ids:
-                            perm_info['text'] = "Inherited permission %s from %s" % (perm, r.name)
+                            perm_info['text'] = "Inherited permission %s from %s" % (
+                                perm, r.name)
                             perm_info['has'] = "inherit"
                             break
                 if perm_info['has'] == "no":
                     if anon_role._id in role_ids:
-                        perm_info['text'] = "Inherited permission %s from Anonymous" % perm
+                        perm_info[
+                            'text'] = "Inherited permission %s from Anonymous" % perm
                         perm_info['has'] = "inherit"
                     elif auth_role._id in role_ids and role != anon_role:
-                        perm_info['text'] = "Inherited permission %s from Authenticated" % perm
+                        perm_info[
+                            'text'] = "Inherited permission %s from Authenticated" % perm
                         perm_info['has'] = "inherit"
                 permissions_by_role[str(role._id)].append(perm_info)
         return permissions_by_role
@@ -968,14 +1014,16 @@ class GroupsController(BaseController):
     @require_post()
     @h.vardec
     def change_perm(self, role_id, permission, allow="true", **kw):
-        if allow=="true":
-            M.AuditLog.log('granted permission %s to group %s', permission, M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
+        if allow == "true":
+            M.AuditLog.log('granted permission %s to group %s', permission,
+                           M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
             c.project.acl.append(M.ACE.allow(ObjectId(role_id), permission))
         else:
             admin_group_id = str(M.ProjectRole.by_name('Admin')._id)
             if admin_group_id == role_id and permission == 'admin':
                 return dict(error='You cannot remove the admin permission from the admin group.')
-            M.AuditLog.log('revoked permission %s from group %s', permission, M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
+            M.AuditLog.log('revoked permission %s from group %s', permission,
+                           M.ProjectRole.query.get(_id=ObjectId(role_id)).name)
             c.project.acl.remove(M.ACE.allow(ObjectId(role_id), permission))
         g.post_event('project_updated')
         return self._map_group_permissions()
@@ -985,7 +1033,7 @@ class GroupsController(BaseController):
     @require_post()
     @h.vardec
     def add_user(self, role_id, username, **kw):
-        if not username or username=='*anonymous':
+        if not username or username == '*anonymous':
             return dict(error='You must choose a user to add.')
         group = M.ProjectRole.query.get(_id=ObjectId(role_id))
         user = M.User.by_username(username.strip())
@@ -1036,9 +1084,9 @@ class GroupsController(BaseController):
             user_ids = pr.get('value', [])
             new_users = pr.get('new', [])
             if isinstance(user_ids, basestring):
-                user_ids = [ user_ids ]
+                user_ids = [user_ids]
             if isinstance(new_users, basestring):
-                new_users = [ new_users ]
+                new_users = [new_users]
             # Handle new users in groups
             user_added = False
             for username in new_users:
@@ -1047,9 +1095,10 @@ class GroupsController(BaseController):
                     flash('User %s not found' % username, 'error')
                     redirect('.')
                 if not user._id:
-                    continue # never add anon users to groups
+                    continue  # never add anon users to groups
                 M.AuditLog.log('add user %s to %s', username, group.name)
-                M.ProjectRole.by_user(user, upsert=True).roles.append(group._id)
+                M.ProjectRole.by_user(
+                    user, upsert=True).roles.append(group._id)
                 user_added = True
             # Make sure we aren't removing all users from the Admin group
             if group.name == u'Admin' and not (user_ids or user_added):
@@ -1060,10 +1109,12 @@ class GroupsController(BaseController):
             user_ids = set(
                 uid and ObjectId(uid)
                 for uid in user_ids)
-            for role in M.ProjectRole.query.find(dict(user_id={'$ne':None}, roles=group._id)):
+            for role in M.ProjectRole.query.find(dict(user_id={'$ne': None}, roles=group._id)):
                 if role.user_id and role.user_id not in user_ids:
-                    role.roles = [ rid for rid in role.roles if rid != group._id ]
-                    M.AuditLog.log('remove user %s from %s', role.user.username, group.name)
+                    role.roles = [
+                        rid for rid in role.roles if rid != group._id]
+                    M.AuditLog.log('remove user %s from %s',
+                                   role.user.username, group.name)
         g.post_event('project_updated')
         redirect('.')
 
@@ -1093,6 +1144,7 @@ class GroupsController(BaseController):
     def _lookup(self, name, *remainder):
         return GroupController(name), remainder
 
+
 class GroupController(BaseController):
 
     def __init__(self, name):
@@ -1132,6 +1184,7 @@ class GroupController(BaseController):
         flash('%s updated' % name)
         redirect('..')
 
+
 class AuditController(BaseController):
 
     @with_trailing_slash
@@ -1146,7 +1199,7 @@ class AuditController(BaseController):
         if count > limit:
             q = q.limit(limit)
         else:
-            limit=count
+            limit = count
         c.widget = W.audit
         return dict(
             entries=q.all(),
@@ -1154,6 +1207,8 @@ class AuditController(BaseController):
             page=page,
             count=count)
 
+
 class AdminAppAdminController(DefaultAdminController):
+
     '''Administer the admin app'''
     pass

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/ext/admin/widgets.py
----------------------------------------------------------------------
diff --git a/Allura/allura/ext/admin/widgets.py b/Allura/allura/ext/admin/widgets.py
index 8eed0be..7457d28 100644
--- a/Allura/allura/ext/admin/widgets.py
+++ b/Allura/allura/ext/admin/widgets.py
@@ -30,6 +30,7 @@ from allura.lib.widgets import form_fields as ffw
 
 from bson import ObjectId
 
+
 class CardField(ew._Jinja2Widget):
     template = 'jinja:allura.ext.admin:templates/admin_widgets/card_field.html'
     sort_key = None
@@ -74,8 +75,10 @@ width: 148px;
     });
 });''')
 
+
 class GroupCard(CardField):
-    new_item=ew.InputField(field_type='text', attrs=dict(placeholder='type a username'))
+    new_item = ew.InputField(
+        field_type='text', attrs=dict(placeholder='type a username'))
     sort_key = 'user.username'
 
     def item_display(self, item):
@@ -87,6 +90,7 @@ class GroupCard(CardField):
     def role_name(self, role_id):
         return M.ProjectRole.query.get(_id=ObjectId(role_id)).name
 
+
 class _GroupSelect(ew.SingleSelectField):
 
     def options(self):
@@ -94,11 +98,12 @@ class _GroupSelect(ew.SingleSelectField):
         anon_role = M.ProjectRole.anonymous()
         options = [
             ew.Option(py_value=role._id, label=role.name)
-            for role in c.project.named_roles ]
+            for role in c.project.named_roles]
         options.append(ew.Option(py_value=auth_role._id, label=auth_role.name))
         options.append(ew.Option(py_value=anon_role._id, label=anon_role.name))
         return options
 
+
 class PermissionCard(CardField):
     new_item = _GroupSelect()
     sort_key = 'name'
@@ -111,7 +116,7 @@ class PermissionCard(CardField):
 
 
 class GroupSettings(ff.CsrfForm):
-    submit_text=None
+    submit_text = None
 
     @property
     def hidden_fields(self):
@@ -126,27 +131,32 @@ class GroupSettings(ff.CsrfForm):
         save = ew.SubmitButton(label='Save')
         delete = ew.SubmitButton(label='Delete Group')
 
+
 class NewGroupSettings(ff.AdminForm):
-    submit_text='Save'
+    submit_text = 'Save'
+
     class fields(ew_core.NameList):
         name = ew.InputField(label='Name')
 
+
 class ScreenshotAdmin(ff.AdminForm):
-    defaults=dict(
+    defaults = dict(
         ff.AdminForm.defaults,
         enctype='multipart/form-data')
 
     @property
     def fields(self):
         fields = [
-            ew.InputField(name='screenshot', field_type='file', label='New Screenshot'),
+            ew.InputField(name='screenshot', field_type='file',
+                          label='New Screenshot'),
             ew.InputField(name='caption', field_type="text", label='Caption')
         ]
         return fields
 
+
 class MetadataAdmin(ff.AdminForm):
     template = 'jinja:allura.ext.admin:templates/admin_widgets/metadata_admin.html'
-    defaults=dict(
+    defaults = dict(
         ff.AdminForm.defaults,
         show_export_control=False,
         enctype='multipart/form-data')
@@ -155,14 +165,14 @@ class MetadataAdmin(ff.AdminForm):
         name = ew.InputField(field_type='text',
                              label='Name',
                              validator=formencode.All(
-                                fev.UnicodeString(not_empty=True, max=40),
-                                V.MaxBytesValidator(max=40)),
+                                 fev.UnicodeString(not_empty=True, max=40),
+                                 V.MaxBytesValidator(max=40)),
                              attrs=dict(maxlength=40,
                                         title="This is the publicly viewable name of the project, and will appear on project listings. It should be what you want to see as the project title in search listing."))
         summary = ew.InputField(field_type="text", label='Short Summary',
                                 validator=formencode.All(
-                                   fev.UnicodeString(max=70),
-                                   V.MaxBytesValidator(max=70)),
+                                    fev.UnicodeString(max=70),
+                                    V.MaxBytesValidator(max=70)),
                                 attrs=dict(maxlength=70))
         short_description = ew.TextArea(label='Full Description',
                                         validator=formencode.All(
@@ -173,24 +183,31 @@ class MetadataAdmin(ff.AdminForm):
         external_homepage = ew.InputField(field_type="text", label='Homepage',
                                           validator=fev.URL(add_http=True))
         support_page = ew.InputField(field_type="text", label='Support Page')
-        support_page_url = ew.InputField(field_type="text", label='Support Page URL',
-                                         validator=fev.URL(add_http=True, if_empty=''))
+        support_page_url = ew.InputField(
+            field_type="text", label='Support Page URL',
+            validator=fev.URL(add_http=True, if_empty=''))
         removal = ew.InputField(field_type="text", label='Removal')
-        moved_to_url = ew.InputField(field_type="text", label='Moved Project to URL',
-                                     validator=fev.URL(add_http=True, if_empty=''))
-        export_controlled = ew.InputField(field_type="text", label='Export Control')
-        export_control_type = ew.InputField(field_type="text", label='Export Control Type')
+        moved_to_url = ew.InputField(
+            field_type="text", label='Moved Project to URL',
+            validator=fev.URL(add_http=True, if_empty=''))
+        export_controlled = ew.InputField(
+            field_type="text", label='Export Control')
+        export_control_type = ew.InputField(
+            field_type="text", label='Export Control Type')
         delete = ew.InputField(field_type="hidden", label='Delete')
         delete_icon = ew.InputField(field_type="hidden", label='Delete Icon')
         undelete = ew.InputField(field_type="hidden", label='Undelete')
-        tracking_id = ew.InputField(field_type="text", label="Analytics Tracking ID")
-        twitter_handle = ew.InputField(field_type="text", label='Twitter Handle')
+        tracking_id = ew.InputField(
+            field_type="text", label="Analytics Tracking ID")
+        twitter_handle = ew.InputField(
+            field_type="text", label='Twitter Handle')
         facebook_page = ew.InputField(field_type="text", label='Facebook page',
                                       validator=fev.URL(add_http=True))
 
+
 class AuditLog(ew_core.Widget):
-    template='jinja:allura.ext.admin:templates/widgets/audit.html'
-    defaults=dict(
+    template = 'jinja:allura.ext.admin:templates/widgets/audit.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         entries=None,
         limit=None,
@@ -198,8 +215,8 @@ class AuditLog(ew_core.Widget):
         count=0)
 
     class fields(ew_core.NameList):
-        page_list=ffw.PageList()
-        page_size=ffw.PageSize()
+        page_list = ffw.PageList()
+        page_size = ffw.PageSize()
 
     def resources(self):
         for f in self.fields:
@@ -209,14 +226,15 @@ class AuditLog(ew_core.Widget):
 
 class BlockUser(ffw.Lightbox):
     defaults = dict(
-            ffw.Lightbox.defaults,
-            name='block-user-modal',
-            trigger='a.block-user',
-            content_template='allura.ext.admin:templates/widgets/block_user.html')
+        ffw.Lightbox.defaults,
+        name='block-user-modal',
+        trigger='a.block-user',
+        content_template='allura.ext.admin:templates/widgets/block_user.html')
+
 
 class BlockList(ffw.Lightbox):
     defaults = dict(
-            ffw.Lightbox.defaults,
-            name='block-list-modal',
-            trigger='a.block-list',
-            content_template='allura.ext.admin:templates/widgets/block_list.html')
+        ffw.Lightbox.defaults,
+        name='block-list-modal',
+        trigger='a.block-list',
+        content_template='allura.ext.admin:templates/widgets/block_list.html')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/ext/project_home/project_main.py
----------------------------------------------------------------------
diff --git a/Allura/allura/ext/project_home/project_main.py b/Allura/allura/ext/project_home/project_main.py
index 76ec1d2..378ca0a 100644
--- a/Allura/allura/ext/project_home/project_main.py
+++ b/Allura/allura/ext/project_home/project_main.py
@@ -35,12 +35,12 @@ log = logging.getLogger(__name__)
 class ProjectHomeApp(Application):
     __version__ = version.__version__
     tool_label = 'home'
-    default_mount_label='Project Home'
+    default_mount_label = 'Project Home'
     max_instances = 0
-    icons={
-        24:'images/home_24.png',
-        32:'images/home_32.png',
-        48:'images/home_48.png'
+    icons = {
+        24: 'images/home_24.png',
+        32: 'images/home_32.png',
+        48: 'images/home_48.png'
     }
 
     def __init__(self, project, config):
@@ -57,20 +57,20 @@ class ProjectHomeApp(Application):
         '''Apps should provide their entries to be added to the main nav
         :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
         '''
-        return [ SitemapEntry(
-                self.config.options.mount_label,
-                '..')]
+        return [SitemapEntry(
+            self.config.options.mount_label,
+            '..')]
 
     @property
     @h.exceptionless([], log)
     def sitemap(self):
         menu_id = 'Home'
         return [
-            SitemapEntry('Home', '..') ]
+            SitemapEntry('Home', '..')]
 
     @h.exceptionless([], log)
     def sidebar_menu(self):
-        return [ ]
+        return []
 
     def admin_menu(self):
         return []
@@ -81,7 +81,7 @@ class ProjectHomeApp(Application):
         if pr:
             self.config.acl = [
                 model.ACE.allow(pr._id, perm)
-                for perm in self.permissions ]
+                for perm in self.permissions]
 
 
 class ProjectHomeController(BaseController):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/ext/search/search_main.py
----------------------------------------------------------------------
diff --git a/Allura/allura/ext/search/search_main.py b/Allura/allura/ext/search/search_main.py
index 920c29f..99c5dfc 100644
--- a/Allura/allura/ext/search/search_main.py
+++ b/Allura/allura/ext/search/search_main.py
@@ -31,34 +31,38 @@ from allura.controllers import BaseController
 
 log = logging.getLogger(__name__)
 
+
 class SearchApp(Application):
+
     '''This is the HelloWorld application for Allura, showing
     all the rich, creamy goodness that is installable apps.
     '''
     __version__ = version.__version__
     max_instances = 0
     hidden = True
-    sitemap=[]
+    sitemap = []
 
     def __init__(self, project, config):
         Application.__init__(self, project, config)
         self.root = SearchController()
-        self.templates = pkg_resources.resource_filename('allura.ext.search', 'templates')
+        self.templates = pkg_resources.resource_filename(
+            'allura.ext.search', 'templates')
 
-    def main_menu(self): # pragma no cover
+    def main_menu(self):  # pragma no cover
         return []
 
-    def sidebar_menu(self): # pragma no cover
-        return [ ]
+    def sidebar_menu(self):  # pragma no cover
+        return []
 
-    def admin_menu(self): # pragma no cover
+    def admin_menu(self):  # pragma no cover
         return []
 
     def install(self, project):
-        pass # pragma no cover
+        pass  # pragma no cover
 
     def uninstall(self, project):
-        pass # pragma no cover
+        pass  # pragma no cover
+
 
 class SearchController(BaseController):
 
@@ -70,10 +74,10 @@ class SearchController(BaseController):
         c.search_results = SearchResults()
         c.help_modal = SearchHelp(comments=False)
         pids = [c.project._id] + [
-            p._id for p in c.project.subprojects ]
+            p._id for p in c.project.subprojects]
         project_match = ' OR '.join(
             'project_id_s:%s' % pid
-            for pid in pids )
+            for pid in pids)
         search_params = kw
         search_params.update({
             'q': q,

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/ext/user_profile/user_main.py
----------------------------------------------------------------------
diff --git a/Allura/allura/ext/user_profile/user_main.py b/Allura/allura/ext/user_profile/user_main.py
index a668430..a9ab929 100644
--- a/Allura/allura/ext/user_profile/user_main.py
+++ b/Allura/allura/ext/user_profile/user_main.py
@@ -49,10 +49,10 @@ class UserProfileApp(Application):
     __version__ = version.__version__
     tool_label = 'Profile'
     max_instances = 0
-    icons={
-        24:'images/home_24.png',
-        32:'images/home_32.png',
-        48:'images/home_48.png'
+    icons = {
+        24: 'images/home_24.png',
+        32: 'images/home_32.png',
+        48: 'images/home_48.png'
     }
 
     def __init__(self, user, config):
@@ -81,9 +81,9 @@ class UserProfileApp(Application):
         if pr:
             self.config.acl = [
                 ACE.allow(pr._id, perm)
-                for perm in self.permissions ]
+                for perm in self.permissions]
 
-    def uninstall(self, project): # pragma no cover
+    def uninstall(self, project):  # pragma no cover
         pass
 
 
@@ -99,12 +99,12 @@ class UserProfileController(BaseController, FeedController):
 
         if not (from_user and from_user.get_pref('email_address')):
             flash('In order to send messages, you must have an email address '
-                    'associated with your account.', 'info')
+                  'associated with your account.', 'info')
             redirect(request.referer)
 
         if not (to_user and to_user.get_pref('email_address')):
             flash('This user can not receive messages because they do not have '
-                    'an email address associated with their account.', 'info')
+                  'an email address associated with their account.', 'info')
             redirect(request.referer)
 
         if to_user.get_pref('disable_user_messages'):
@@ -157,11 +157,11 @@ class UserProfileController(BaseController, FeedController):
         if cc:
             cc = c.user.get_pref('email_address')
         if c.user.can_send_user_message():
-            c.user.send_user_message(c.project.user_project_of, subject, message, cc)
+            c.user.send_user_message(
+                c.project.user_project_of, subject, message, cc)
             flash("Message sent.")
         else:
             flash("You can't send more than %i messages per %i seconds" % (
                 c.user.user_message_max_messages,
                 c.user.user_message_time_interval), 'error')
         return redirect(c.project.user_project_of.url())
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/AsciiDammit.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/AsciiDammit.py b/Allura/allura/lib/AsciiDammit.py
index 47236f1..e4ea156 100644
--- a/Allura/allura/lib/AsciiDammit.py
+++ b/Allura/allura/lib/AsciiDammit.py
@@ -26,135 +26,136 @@ import re
 import string
 import types
 
-CHARS = { '\x80' : ('EUR', 'euro'),
-          '\x81' : ' ',
-          '\x82' : (',', 'sbquo'),
-          '\x83' : ('f', 'fnof'),
-          '\x84' : (',,', 'bdquo'),
-          '\x85' : ('...', 'hellip'),
-          '\x86' : ('+', 'dagger'),
-          '\x87' : ('++', 'Dagger'),
-          '\x88' : ('^', 'caret'),
-          '\x89' : '%',
-          '\x8A' : ('S', 'Scaron'),
-          '\x8B' : ('<', 'lt;'),
-          '\x8C' : ('OE', 'OElig'),
-          '\x8D' : '?',
-          '\x8E' : 'Z',
-          '\x8F' : '?',
-          '\x90' : '?',
-          '\x91' : ("'", 'lsquo'),
-          '\x92' : ("'", 'rsquo'),
-          '\x93' : ('"', 'ldquo'),
-          '\x94' : ('"', 'rdquo'),
-          '\x95' : ('*', 'bull'),
-          '\x96' : ('-', 'ndash'),
-          '\x97' : ('--', 'mdash'),
-          '\x98' : ('~', 'tilde'),
-          '\x99' : ('(TM)', 'trade'),
-          '\x9a' : ('s', 'scaron'),
-          '\x9b' : ('>', 'gt'),
-          '\x9c' : ('oe', 'oelig'),
-          '\x9d' : '?',
-          '\x9e' : 'z',
-          '\x9f' : ('Y', 'Yuml'),
-          '\xa0' : (' ', 'nbsp'),
-          '\xa1' : ('!', 'iexcl'),
-          '\xa2' : ('c', 'cent'),
-          '\xa3' : ('GBP', 'pound'),
-          '\xa4' : ('$', 'curren'), #This approximation is especially lame.
-          '\xa5' : ('YEN', 'yen'),
-          '\xa6' : ('|', 'brvbar'),
-          '\xa7' : ('S', 'sect'),
-          '\xa8' : ('..', 'uml'),
-          '\xa9' : ('', 'copy'),
-          '\xaa' : ('(th)', 'ordf'),
-          '\xab' : ('<<', 'laquo'),
-          '\xac' : ('!', 'not'),
-          '\xad' : (' ', 'shy'),
-          '\xae' : ('(R)', 'reg'),
-          '\xaf' : ('-', 'macr'),
-          '\xb0' : ('o', 'deg'),
-          '\xb1' : ('+-', 'plusmm'),
-          '\xb2' : ('2', 'sup2'),
-          '\xb3' : ('3', 'sup3'),
-          '\xb4' : ("'", 'acute'),
-          '\xb5' : ('u', 'micro'),
-          '\xb6' : ('P', 'para'),
-          '\xb7' : ('*', 'middot'),
-          '\xb8' : (',', 'cedil'),
-          '\xb9' : ('1', 'sup1'),
-          '\xba' : ('(th)', 'ordm'),
-          '\xbb' : ('>>', 'raquo'),
-          '\xbc' : ('1/4', 'frac14'),
-          '\xbd' : ('1/2', 'frac12'),
-          '\xbe' : ('3/4', 'frac34'),
-          '\xbf' : ('?', 'iquest'),
-          '\xc0' : ('A', "Agrave"),
-          '\xc1' : ('A', "Aacute"),
-          '\xc2' : ('A', "Acirc"),
-          '\xc3' : ('A', "Atilde"),
-          '\xc4' : ('A', "Auml"),
-          '\xc5' : ('A', "Aring"),
-          '\xc6' : ('AE', "Aelig"),
-          '\xc7' : ('C', "Ccedil"),
-          '\xc8' : ('E', "Egrave"),
-          '\xc9' : ('E', "Eacute"),
-          '\xca' : ('E', "Ecirc"),
-          '\xcb' : ('E', "Euml"),
-          '\xcc' : ('I', "Igrave"),
-          '\xcd' : ('I', "Iacute"),
-          '\xce' : ('I', "Icirc"),
-          '\xcf' : ('I', "Iuml"),
-          '\xd0' : ('D', "Eth"),
-          '\xd1' : ('N', "Ntilde"),
-          '\xd2' : ('O', "Ograve"),
-          '\xd3' : ('O', "Oacute"),
-          '\xd4' : ('O', "Ocirc"),
-          '\xd5' : ('O', "Otilde"),
-          '\xd6' : ('O', "Ouml"),
-          '\xd7' : ('*', "times"),
-          '\xd8' : ('O', "Oslash"),
-          '\xd9' : ('U', "Ugrave"),
-          '\xda' : ('U', "Uacute"),
-          '\xdb' : ('U', "Ucirc"),
-          '\xdc' : ('U', "Uuml"),
-          '\xdd' : ('Y', "Yacute"),
-          '\xde' : ('b', "Thorn"),
-          '\xdf' : ('B', "szlig"),
-          '\xe0' : ('a', "agrave"),
-          '\xe1' : ('a', "aacute"),
-          '\xe2' : ('a', "acirc"),
-          '\xe3' : ('a', "atilde"),
-          '\xe4' : ('a', "auml"),
-          '\xe5' : ('a', "aring"),
-          '\xe6' : ('ae', "aelig"),
-          '\xe7' : ('c', "ccedil"),
-          '\xe8' : ('e', "egrave"),
-          '\xe9' : ('e', "eacute"),
-          '\xea' : ('e', "ecirc"),
-          '\xeb' : ('e', "euml"),
-          '\xec' : ('i', "igrave"),
-          '\xed' : ('i', "iacute"),
-          '\xee' : ('i', "icirc"),
-          '\xef' : ('i', "iuml"),
-          '\xf0' : ('o', "eth"),
-          '\xf1' : ('n', "ntilde"),
-          '\xf2' : ('o', "ograve"),
-          '\xf3' : ('o', "oacute"),
-          '\xf4' : ('o', "ocirc"),
-          '\xf5' : ('o', "otilde"),
-          '\xf6' : ('o', "ouml"),
-          '\xf7' : ('/', "divide"),
-          '\xf8' : ('o', "oslash"),
-          '\xf9' : ('u', "ugrave"),
-          '\xfa' : ('u', "uacute"),
-          '\xfb' : ('u', "ucirc"),
-          '\xfc' : ('u', "uuml"),
-          '\xfd' : ('y', "yacute"),
-          '\xfe' : ('b', "thorn"),
-          '\xff' : ('y', "yuml"),
-          }
+CHARS = {'\x80': ('EUR', 'euro'),
+         '\x81': ' ',
+         '\x82': (',', 'sbquo'),
+         '\x83': ('f', 'fnof'),
+         '\x84': (',,', 'bdquo'),
+         '\x85': ('...', 'hellip'),
+         '\x86': ('+', 'dagger'),
+         '\x87': ('++', 'Dagger'),
+         '\x88': ('^', 'caret'),
+         '\x89': '%',
+         '\x8A': ('S', 'Scaron'),
+         '\x8B': ('<', 'lt;'),
+         '\x8C': ('OE', 'OElig'),
+         '\x8D': '?',
+         '\x8E': 'Z',
+         '\x8F': '?',
+         '\x90': '?',
+         '\x91': ("'", 'lsquo'),
+         '\x92': ("'", 'rsquo'),
+         '\x93': ('"', 'ldquo'),
+         '\x94': ('"', 'rdquo'),
+         '\x95': ('*', 'bull'),
+         '\x96': ('-', 'ndash'),
+         '\x97': ('--', 'mdash'),
+         '\x98': ('~', 'tilde'),
+         '\x99': ('(TM)', 'trade'),
+         '\x9a': ('s', 'scaron'),
+         '\x9b': ('>', 'gt'),
+         '\x9c': ('oe', 'oelig'),
+         '\x9d': '?',
+         '\x9e': 'z',
+         '\x9f': ('Y', 'Yuml'),
+         '\xa0': (' ', 'nbsp'),
+         '\xa1': ('!', 'iexcl'),
+         '\xa2': ('c', 'cent'),
+         '\xa3': ('GBP', 'pound'),
+         '\xa4': ('$', 'curren'),  # This approximation is especially lame.
+         '\xa5': ('YEN', 'yen'),
+         '\xa6': ('|', 'brvbar'),
+         '\xa7': ('S', 'sect'),
+         '\xa8': ('..', 'uml'),
+         '\xa9': ('', 'copy'),
+         '\xaa': ('(th)', 'ordf'),
+         '\xab': ('<<', 'laquo'),
+         '\xac': ('!', 'not'),
+         '\xad': (' ', 'shy'),
+         '\xae': ('(R)', 'reg'),
+         '\xaf': ('-', 'macr'),
+         '\xb0': ('o', 'deg'),
+         '\xb1': ('+-', 'plusmm'),
+         '\xb2': ('2', 'sup2'),
+         '\xb3': ('3', 'sup3'),
+         '\xb4': ("'", 'acute'),
+         '\xb5': ('u', 'micro'),
+         '\xb6': ('P', 'para'),
+         '\xb7': ('*', 'middot'),
+         '\xb8': (',', 'cedil'),
+         '\xb9': ('1', 'sup1'),
+         '\xba': ('(th)', 'ordm'),
+         '\xbb': ('>>', 'raquo'),
+         '\xbc': ('1/4', 'frac14'),
+         '\xbd': ('1/2', 'frac12'),
+         '\xbe': ('3/4', 'frac34'),
+         '\xbf': ('?', 'iquest'),
+         '\xc0': ('A', "Agrave"),
+         '\xc1': ('A', "Aacute"),
+         '\xc2': ('A', "Acirc"),
+         '\xc3': ('A', "Atilde"),
+         '\xc4': ('A', "Auml"),
+         '\xc5': ('A', "Aring"),
+         '\xc6': ('AE', "Aelig"),
+         '\xc7': ('C', "Ccedil"),
+         '\xc8': ('E', "Egrave"),
+         '\xc9': ('E', "Eacute"),
+         '\xca': ('E', "Ecirc"),
+         '\xcb': ('E', "Euml"),
+         '\xcc': ('I', "Igrave"),
+         '\xcd': ('I', "Iacute"),
+         '\xce': ('I', "Icirc"),
+         '\xcf': ('I', "Iuml"),
+         '\xd0': ('D', "Eth"),
+         '\xd1': ('N', "Ntilde"),
+         '\xd2': ('O', "Ograve"),
+         '\xd3': ('O', "Oacute"),
+         '\xd4': ('O', "Ocirc"),
+         '\xd5': ('O', "Otilde"),
+         '\xd6': ('O', "Ouml"),
+         '\xd7': ('*', "times"),
+         '\xd8': ('O', "Oslash"),
+         '\xd9': ('U', "Ugrave"),
+         '\xda': ('U', "Uacute"),
+         '\xdb': ('U', "Ucirc"),
+         '\xdc': ('U', "Uuml"),
+         '\xdd': ('Y', "Yacute"),
+         '\xde': ('b', "Thorn"),
+         '\xdf': ('B', "szlig"),
+         '\xe0': ('a', "agrave"),
+         '\xe1': ('a', "aacute"),
+         '\xe2': ('a', "acirc"),
+         '\xe3': ('a', "atilde"),
+         '\xe4': ('a', "auml"),
+         '\xe5': ('a', "aring"),
+         '\xe6': ('ae', "aelig"),
+         '\xe7': ('c', "ccedil"),
+         '\xe8': ('e', "egrave"),
+         '\xe9': ('e', "eacute"),
+         '\xea': ('e', "ecirc"),
+         '\xeb': ('e', "euml"),
+         '\xec': ('i', "igrave"),
+         '\xed': ('i', "iacute"),
+         '\xee': ('i', "icirc"),
+         '\xef': ('i', "iuml"),
+         '\xf0': ('o', "eth"),
+         '\xf1': ('n', "ntilde"),
+         '\xf2': ('o', "ograve"),
+         '\xf3': ('o', "oacute"),
+         '\xf4': ('o', "ocirc"),
+         '\xf5': ('o', "otilde"),
+         '\xf6': ('o', "ouml"),
+         '\xf7': ('/', "divide"),
+         '\xf8': ('o', "oslash"),
+         '\xf9': ('u', "ugrave"),
+         '\xfa': ('u', "uacute"),
+         '\xfb': ('u', "ucirc"),
+         '\xfc': ('u', "uuml"),
+         '\xfd': ('y', "yacute"),
+         '\xfe': ('b', "thorn"),
+         '\xff': ('y', "yuml"),
+         }
+
 
 def _makeRE(limit):
     """Returns a regular expression object that will match special characters
@@ -163,20 +164,23 @@ def _makeRE(limit):
 ALL = _makeRE('ff')
 ONLY_WINDOWS = _makeRE('9f')
 
+
 def _replHTML(match):
     "Replace the matched character with its HTML equivalent."
     return _repl(match, 1)
 
+
 def _repl(match, html=0):
     "Replace the matched character with its HTML or ASCII equivalent."
     g = match.group(0)
-    a = CHARS.get(g,g)
+    a = CHARS.get(g, g)
     if type(a) == types.TupleType:
         a = a[html]
         if html:
             a = '&' + a + ';'
     return a
 
+
 def _dammit(t, html=0, fixWindowsOnly=0):
     "Turns ISO-Latin-1 into an ASCII representation, dammit."
 
@@ -189,14 +193,17 @@ def _dammit(t, html=0, fixWindowsOnly=0):
 
     return re.sub(r, m, t)
 
+
 def asciiDammit(t, fixWindowsOnly=0):
     "Turns ISO-Latin-1 into a plain ASCII approximation, dammit."
     return _dammit(t, 0, fixWindowsOnly)
 
+
 def htmlDammit(t, fixWindowsOnly=0):
     "Turns ISO-Latin-1 into plain ASCII with HTML codes, dammit."
     return _dammit(t, 1, fixWindowsOnly=fixWindowsOnly)
 
+
 def demoronise(t):
     """Helper method named in honor of the original smart quotes
     remover, The Demoroniser:
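
For orientation on the module being reindented above: the CHARS table maps Windows-1252/Latin-1 bytes to (ASCII, HTML-entity) approximations, and asciiDammit()/htmlDammit() apply them through a single regex substitution. A minimal usage sketch, assuming the module is importable as allura.lib.AsciiDammit and using Python 2 byte strings as the module itself does; the expected outputs are read off the table above and are approximate:

    from allura.lib.AsciiDammit import asciiDammit, htmlDammit

    smart = 'caf\xe9 \x93quoted\x94 \x97 dash'
    print asciiDammit(smart)   # roughly: cafe "quoted" -- dash
    print htmlDammit(smart)    # roughly: caf&eacute; &ldquo;quoted&rdquo; &mdash; dash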

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/app_globals.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/app_globals.py b/Allura/allura/lib/app_globals.py
index 6c94870..8bc929e 100644
--- a/Allura/allura/lib/app_globals.py
+++ b/Allura/allura/lib/app_globals.py
@@ -52,9 +52,9 @@ from ming.utils import LazyProperty
 import allura.tasks.event_tasks
 from allura import model as M
 from allura.lib.markdown_extensions import (
-        ForgeExtension,
-        CommitMessageExtension,
-        )
+    ForgeExtension,
+    CommitMessageExtension,
+)
 from allura.eventslistener import PostEvent
 
 from allura.lib import gravatar, plugin, utils
@@ -69,9 +69,11 @@ log = logging.getLogger(__name__)
 
 
 class ForgeMarkdown(markdown.Markdown):
+
     def convert(self, source):
         if len(source) > asint(config.get('markdown_render_max_length', 40000)):
-            # if text is too big, markdown can take a long time to process it, so we return it as a plain text
+            # if text is too big, markdown can take a long time to process it,
+            # so we return it as a plain text
             log.info('Text is too big. Skipping markdown processing')
             escaped = cgi.escape(h.really_unicode(source))
             return h.html.literal(u'<pre>%s</pre>' % escaped)
@@ -97,8 +99,9 @@ class ForgeMarkdown(markdown.Markdown):
         cache_field_name = field_name + '_cache'
         cache = getattr(artifact, cache_field_name, None)
         if not cache:
-            log.warn('Skipping Markdown caching - Missing cache field "%s" on class %s',
-                    field_name, artifact.__class__.__name__)
+            log.warn(
+                'Skipping Markdown caching - Missing cache field "%s" on class %s',
+                field_name, artifact.__class__.__name__)
             return self.convert(source_text)
 
         md5 = None
@@ -117,7 +120,7 @@ class ForgeMarkdown(markdown.Markdown):
         except ValueError:
             threshold = None
             log.warn('Skipping Markdown caching - The value for config param '
-                    '"markdown_cache_threshold" must be a float.')
+                     '"markdown_cache_threshold" must be a float.')
 
         if threshold != None and render_time > threshold:
             if md5 is None:
@@ -127,6 +130,7 @@ class ForgeMarkdown(markdown.Markdown):
 
 
 class Globals(object):
+
     """Container for objects available throughout the life of the application.
 
     One instance of Globals is created during application initialization and
@@ -137,8 +141,10 @@ class Globals(object):
 
     def __init__(self):
         self.__dict__ = self.__shared_state
-        if self.__shared_state: return
-        self.allura_templates = pkg_resources.resource_filename('allura', 'templates')
+        if self.__shared_state:
+            return
+        self.allura_templates = pkg_resources.resource_filename(
+            'allura', 'templates')
         # Setup SOLR
         self.solr_server = aslist(config.get('solr.server'), ',')
         # skip empty strings in case of extra commas
@@ -147,11 +153,12 @@ class Globals(object):
         if asbool(config.get('solr.mock')):
             self.solr = self.solr_short_timeout = MockSOLR()
         elif self.solr_server:
-            self.solr = make_solr_from_config(self.solr_server, self.solr_query_server)
+            self.solr = make_solr_from_config(
+                self.solr_server, self.solr_query_server)
             self.solr_short_timeout = make_solr_from_config(
                 self.solr_server, self.solr_query_server,
                 timeout=int(config.get('solr.short_timeout', 10)))
-        else: # pragma no cover
+        else:  # pragma no cover
             self.solr = None
             self.solr_short_timeout = None
         self.use_queue = asbool(config.get('use_queue', False))
@@ -159,7 +166,8 @@ class Globals(object):
         # Load login/logout urls; only used for SFX logins
         self.login_url = config.get('auth.login_url', '/auth/')
         self.logout_url = config.get('auth.logout_url', '/auth/logout')
-        self.login_fragment_url = config.get('auth.login_fragment_url', '/auth/login_fragment')
+        self.login_fragment_url = config.get(
+            'auth.login_fragment_url', '/auth/login_fragment')
 
         # Setup Gravatar
         self.gravatar = gravatar.url
@@ -231,6 +239,7 @@ class Globals(object):
             return d
 
         class entry_point_loading_dict(dict):
+
             def __missing__(self, key):
                 self[key] = _cache_eps(key)
                 return self[key]
@@ -245,7 +254,7 @@ class Globals(object):
             stats=_cache_eps('allura.stats'),
             site_stats=_cache_eps('allura.site_stats'),
             admin=_cache_eps('allura.admin'),
-            )
+        )
 
         # Zarkov logger
         self._zarkov = None
@@ -272,18 +281,25 @@ class Globals(object):
             return activitystream.director()
         else:
             class NullActivityStreamDirector(object):
+
                 def connect(self, *a, **kw):
                     pass
+
                 def disconnect(self, *a, **kw):
                     pass
+
                 def is_connected(self, *a, **kw):
                     return False
+
                 def create_activity(self, *a, **kw):
                     pass
+
                 def create_timeline(self, *a, **kw):
                     pass
+
                 def create_timelines(self, *a, **kw):
                     pass
+
                 def get_timeline(self, *a, **kw):
                     return []
             return NullActivityStreamDirector()
@@ -307,9 +323,9 @@ class Globals(object):
         allura.tasks.event_tasks.event.post(topic, *args, **kwargs)
 
     def zarkov_event(
-        self, event_type,
-        user=None, neighborhood=None, project=None, app=None,
-        extra=None):
+            self, event_type,
+            user=None, neighborhood=None, project=None, app=None,
+            extra=None):
         context = dict(
             user=None,
             neighborhood=None, project=None, tool=None,
@@ -322,7 +338,8 @@ class Globals(object):
         user = user or getattr(c, 'user', None)
         project = project or getattr(c, 'project', None)
         app = app or getattr(c, 'app', None)
-        if user: context['user'] = user.username
+        if user:
+            context['user'] = user.username
         if project:
             context.update(
                 project=project.shortname,
@@ -346,7 +363,7 @@ class Globals(object):
         except Exception, ex:
             self._zarkov = None
             log.error('Error sending zarkov event(%r): %r', ex, dict(
-                    type=event_type, context=context, extra=extra))
+                type=event_type, context=context, extra=extra))
 
     @LazyProperty
     def theme(self):
@@ -380,7 +397,8 @@ class Globals(object):
             if c.user in (None, M.User.anonymous()):
                 try:
                     limit = session['results_per_page']
-                except (KeyError, TypeError):  # TypeError if no session registered for thread
+                # TypeError if no session registered for thread
+                except (KeyError, TypeError):
                     limit = default
             else:
                 limit = c.user.get_pref('results_per_page') or default
@@ -403,26 +421,33 @@ class Globals(object):
             return h.html.literal('<em>Empty file</em>')
         # Don't use line numbers for diff highlight's, as per [#1484]
         if lexer == 'diff':
-            formatter = pygments.formatters.HtmlFormatter(cssclass='codehilite', linenos=False)
+            formatter = pygments.formatters.HtmlFormatter(
+                cssclass='codehilite', linenos=False)
         else:
             formatter = self.pygments_formatter
         if lexer is None:
             try:
-                lexer = pygments.lexers.get_lexer_for_filename(filename, encoding='chardet')
+                lexer = pygments.lexers.get_lexer_for_filename(
+                    filename, encoding='chardet')
             except pygments.util.ClassNotFound:
-                # no highlighting, but we should escape, encode, and wrap it in a <pre>
+                # no highlighting, but we should escape, encode, and wrap it in
+                # a <pre>
                 text = h.really_unicode(text)
                 text = cgi.escape(text)
                 return h.html.literal(u'<pre>' + text + u'</pre>')
         else:
-            lexer = pygments.lexers.get_lexer_by_name(lexer, encoding='chardet')
+            lexer = pygments.lexers.get_lexer_by_name(
+                lexer, encoding='chardet')
         return h.html.literal(pygments.highlight(text, lexer, formatter))
 
     def forge_markdown(self, **kwargs):
         '''return a markdown.Markdown object on which you can call convert'''
         return ForgeMarkdown(
-                extensions=['codehilite', ForgeExtension(**kwargs), 'tables', 'toc', 'nl2br'], # 'fenced_code'
-                output_format='html4')
+            # 'fenced_code'
+            extensions=['codehilite',
+                        ForgeExtension(
+                            **kwargs), 'tables', 'toc', 'nl2br'],
+            output_format='html4')
 
     @property
     def markdown(self):
@@ -444,7 +469,7 @@ class Globals(object):
         """
         app = getattr(c, 'app', None)
         return ForgeMarkdown(extensions=[CommitMessageExtension(app), 'nl2br'],
-                output_format='html4')
+                             output_format='html4')
 
     @property
     def production_mode(self):
@@ -485,7 +510,7 @@ class Globals(object):
             for size in (24, 32, 48):
                 url = self.theme.app_icon_url(tool_name.lower(), size)
                 css += '.ui-icon-tool-%s-%i {background: url(%s) no-repeat;}\n' % (
-                        tool_name, size, url)
+                    tool_name, size, url)
         return css, hashlib.md5(css).hexdigest()
 
     @property
@@ -580,10 +605,13 @@ class Globals(object):
     def year(self):
         return datetime.datetime.utcnow().year
 
+
 class Icon(object):
+
     def __init__(self, char, css):
         self.char = char
         self.css = css
 
+
 def connect_amqp(config):
     return
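
The ForgeMarkdown changes above keep two behaviours intact: text longer than markdown_render_max_length skips markdown entirely and comes back as an escaped <pre> block, and renders slower than markdown_cache_threshold are cached on the artifact, keyed by an md5 of the source. A rough, self-contained sketch of that caching pattern (the config/field names follow the hunk above; the _Cache class and the function signature here are illustrative, not the real method):

    import hashlib
    import time

    class _Cache(object):
        """Stand-in for the artifact's <field>_cache document (md5/html)."""
        md5 = html = None

    def cached_convert(md, source, cache, threshold=0.5):
        # Cache markdown renders that take longer than `threshold` seconds,
        # keyed by an md5 of the (unicode) source text.
        md5 = hashlib.md5(source.encode('utf-8')).hexdigest()
        if cache.md5 == md5 and cache.html is not None:
            return cache.html                   # source unchanged: reuse render
        start = time.time()
        html = md.convert(source)
        if time.time() - start > threshold:     # only cache the slow renders
            cache.md5, cache.html = md5, html
        return html

    import markdown
    print cached_convert(markdown.Markdown(), u'# hi', _Cache(), threshold=0.0)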

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/async.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/async.py b/Allura/allura/lib/async.py
index 6e03d6c..d299160 100644
--- a/Allura/allura/lib/async.py
+++ b/Allura/allura/lib/async.py
@@ -20,6 +20,7 @@ from Queue import Queue
 
 log = logging.getLogger(__name__)
 
+
 class Connection(object):
 
     def __init__(self, hostname, port, userid, password, vhost):
@@ -37,6 +38,7 @@ class Connection(object):
         self._conn = self._connection_pool.acquire()
         self.queue = self._conn.SimpleQueue('task')
 
+
 class MockAMQ(object):
 
     def __init__(self, globals):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/base.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/base.py b/Allura/allura/lib/base.py
index 409b77c..bac6a2b 100644
--- a/Allura/allura/lib/base.py
+++ b/Allura/allura/lib/base.py
@@ -24,7 +24,9 @@ from tg import TGController, config
 
 __all__ = ['WsgiDispatchController']
 
+
 class WsgiDispatchController(TGController):
+
     """
     Base class for the controllers in the application.
 
@@ -43,13 +45,15 @@ class WsgiDispatchController(TGController):
     def __call__(self, environ, start_response):
         try:
             self._setup_request()
-            response = super(WsgiDispatchController, self).__call__(environ, start_response)
+            response = super(WsgiDispatchController, self).__call__(
+                environ, start_response)
             return self.cleanup_iterator(response)
         except exc.HTTPException, err:
             return err(environ, start_response)
 
     def cleanup_iterator(self, response):
-        for chunk in response: yield chunk
+        for chunk in response:
+            yield chunk
         self._cleanup_request()
 
     def _get_dispatchable(self, url_path):
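
One detail worth noting in the reformatted cleanup_iterator: wrapping the response in a generator is what defers _cleanup_request() until the WSGI body has actually been drained by the server. A toy illustration of the same idea (the names here are made up, not Allura's):

    def wrap_with_cleanup(body_iter, cleanup):
        # Yield every chunk of a WSGI body, then run the teardown callback;
        # cleanup only fires once the caller has consumed the whole iterator.
        for chunk in body_iter:
            yield chunk
        cleanup()

    chunks = wrap_with_cleanup(iter(['<html>', '</html>']), lambda: None)
    assert list(chunks) == ['<html>', '</html>']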

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/custom_middleware.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/custom_middleware.py b/Allura/allura/lib/custom_middleware.py
index 940c534..9cb20da 100644
--- a/Allura/allura/lib/custom_middleware.py
+++ b/Allura/allura/lib/custom_middleware.py
@@ -38,13 +38,15 @@ log = logging.getLogger(__name__)
 
 tool_entry_points = list(h.iter_entry_points('allura'))
 
+
 class StaticFilesMiddleware(object):
+
     '''Custom static file middleware
 
     Map everything in allura/public/nf/* to <script_name>/*
     For each plugin, map everything <module>/nf/<ep_name>/* to <script_name>/<ep_name>/*
     '''
-    CACHE_MAX_AGE=60*60*24*365
+    CACHE_MAX_AGE = 60 * 60 * 24 * 365
 
     def __init__(self, app, script_name=''):
         self.app = app
@@ -74,16 +76,18 @@ class StaticFilesMiddleware(object):
                         ep.name.lower(),
                         filename))
                 return fileapp.FileApp(file_path, [
-                        ('Access-Control-Allow-Origin', '*')])
+                    ('Access-Control-Allow-Origin', '*')])
         filename = environ['PATH_INFO'][len(self.script_name):]
         file_path = pkg_resources.resource_filename(
             'allura', os.path.join(
                 'public', 'nf',
                 filename))
         return fileapp.FileApp(file_path, [
-                ('Access-Control-Allow-Origin', '*')])
+            ('Access-Control-Allow-Origin', '*')])
+
 
 class LoginRedirectMiddleware(object):
+
     '''Actually converts a 401 into a 302 so we can do a redirect to a different
     app for login.  (StatusCodeRedirect does a WSGI-only redirect which cannot
     go to a URL not managed by the WSGI stack).'''
@@ -110,13 +114,16 @@ class LoginRedirectMiddleware(object):
         start_response(status, headers, exc_info)
         return app_iter
 
+
 class CSRFMiddleware(object):
+
     '''On POSTs, looks for a special field name that matches the value of a given
     cookie.  If this field is missing, the cookies are cleared to anonymize the
     request.'''
 
     def __init__(self, app, cookie_name, param_name=None):
-        if param_name is None: param_name = cookie_name
+        if param_name is None:
+            param_name = cookie_name
         self._app = app
         self._param_name = param_name
         self._cookie_name = cookie_name
@@ -131,7 +138,8 @@ class CSRFMiddleware(object):
             if cookie != param:
                 log.warning('CSRF attempt detected, %r != %r', cookie, param)
                 environ.pop('HTTP_COOKIE', None)
-        def session_start_response(status, headers, exc_info = None):
+
+        def session_start_response(status, headers, exc_info=None):
             if dict(headers).get('Content-Type', '').startswith('text/html'):
                 headers.append(
                     ('Set-cookie',
@@ -139,7 +147,9 @@ class CSRFMiddleware(object):
             return start_response(status, headers, exc_info)
         return self._app(environ, session_start_response)
 
+
 class SSLMiddleware(object):
+
     'Verify the https/http schema is correct'
 
     def __init__(self, app, no_redirect_pattern=None, force_ssl_pattern=None):
@@ -168,8 +178,10 @@ class SSLMiddleware(object):
         # This SFUSER check is SourceForge-specific (to require all logged-in users to use https)
         # BUT has the additional affect of not forcing SSL for regular Allura instances
         # This is important for local development, at least.  When we remove SFUSER (perhaps by requiring SSL everywhere),
-        # we can use `no_redirect.pattern = .` for local development to work without SSL
-        force_ssl = req.cookies.get('SFUSER') or self._force_ssl_re.match(environ['PATH_INFO'])
+        # we can use `no_redirect.pattern = .` for local development to work
+        # without SSL
+        force_ssl = req.cookies.get(
+            'SFUSER') or self._force_ssl_re.match(environ['PATH_INFO'])
         if not secure and force_ssl:
             resp = exc.HTTPFound(location='https://' + srv_path)
         elif secure and not force_ssl:
@@ -179,7 +191,9 @@ class SSLMiddleware(object):
             resp = self.app
         return resp(environ, start_response)
 
+
 class AlluraTimerMiddleware(TimerMiddleware):
+
     def timers(self):
         import genshi
         import jinja2
@@ -191,42 +205,50 @@ class AlluraTimerMiddleware(TimerMiddleware):
         import activitystream
 
         return self.entry_point_timers() + [
-            Timer('activitystream.director.{method_name}', allura.model.timeline.Director,
+            Timer(
+                'activitystream.director.{method_name}', allura.model.timeline.Director,
                 'create_activity', 'create_timeline', 'get_timeline'),
-            Timer('activitystream.aggregator.{method_name}', allura.model.timeline.Aggregator, '*'),
-            Timer('activitystream.node_manager.{method_name}', activitystream.managers.NodeManager, '*'),
-            Timer('activitystream.activity_manager.{method_name}', activitystream.managers.ActivityManager, '*'),
+            Timer('activitystream.aggregator.{method_name}',
+                  allura.model.timeline.Aggregator, '*'),
+            Timer('activitystream.node_manager.{method_name}',
+                  activitystream.managers.NodeManager, '*'),
+            Timer('activitystream.activity_manager.{method_name}',
+                  activitystream.managers.ActivityManager, '*'),
             Timer('jinja', jinja2.Template, 'render', 'stream', 'generate'),
             Timer('markdown', markdown.Markdown, 'convert'),
             Timer('ming', ming.odm.odmsession.ODMCursor, 'next',  # FIXME: this may captures timings ok, but is misleading for counts
                   debug_each_call=False),
             Timer('ming', ming.odm.odmsession.ODMSession, 'flush', 'find'),
             Timer('ming', ming.schema.Document, 'validate',
-                debug_each_call=False),
+                  debug_each_call=False),
             Timer('ming', ming.schema.FancySchemaItem, '_validate_required',
-                '_validate_fast_missing', '_validate_optional',
-                debug_each_call=False),
+                  '_validate_fast_missing', '_validate_optional',
+                  debug_each_call=False),
             Timer('mongo', pymongo.collection.Collection, 'count', 'find',
-                'find_one'),
+                  'find_one'),
             Timer('mongo', pymongo.cursor.Cursor, 'count', 'distinct',
-                '_refresh'),
+                  '_refresh'),
             # urlopen and socket io may or may not overlap partially
             Timer('render', genshi.Stream, 'render'),
             Timer('repo.Blob.{method_name}', allura.model.repo.Blob, '*'),
             Timer('repo.Commit.{method_name}', allura.model.repo.Commit, '*'),
-            Timer('repo.LastCommit.{method_name}', allura.model.repo.LastCommit, '*'),
+            Timer('repo.LastCommit.{method_name}',
+                  allura.model.repo.LastCommit, '*'),
             Timer('repo.Tree.{method_name}', allura.model.repo.Tree, '*'),
             Timer('socket_read', socket._fileobject, 'read', 'readline',
-                'readlines', debug_each_call=False),
+                  'readlines', debug_each_call=False),
             Timer('socket_write', socket._fileobject, 'write', 'writelines',
-                'flush', debug_each_call=False),
+                  'flush', debug_each_call=False),
             Timer('solr', pysolr.Solr, 'add', 'delete', 'search', 'commit'),
             Timer('template', genshi.template.Template, '_prepare', '_parse',
-                'generate'),
+                  'generate'),
             Timer('urlopen', urllib2, 'urlopen'),
-            Timer('base_repo_tool.{method_name}', allura.model.repository.RepositoryImplementation, 'last_commit_ids'),
+            Timer('base_repo_tool.{method_name}',
+                  allura.model.repository.RepositoryImplementation, 'last_commit_ids'),
             Timer('_diffs_copied', allura.model.repo.Commit, '_diffs_copied'),
-            Timer('sequencematcher.{method_name}', allura.model.repo.SequenceMatcher, 'ratio', 'quick_ratio', 'real_quick_ratio'),
+            Timer(
+                'sequencematcher.{method_name}', allura.model.repo.SequenceMatcher,
+                'ratio', 'quick_ratio', 'real_quick_ratio'),
             Timer('unified_diff', allura.model.repo, 'unified_diff'),
         ] + [Timer('sidebar', ep.load(), 'sidebar_menu') for ep in tool_entry_points]
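
As the CSRFMiddleware docstring above says, the mechanism is simply comparing a request parameter against a cookie of the same name, clearing the cookies when they disagree, and re-setting the cookie on HTML responses. A stripped-down sketch of just the comparison step (a hypothetical helper, query-string only; the real middleware also reads the POST body):

    from urlparse import parse_qs

    def csrf_guard(environ, cookie_name='_session_id'):
        # Return True if the CSRF token in the query string matches the cookie;
        # otherwise drop the cookies so the request is treated as anonymous.
        raw = environ.get('HTTP_COOKIE', '')
        cookies = dict(p.strip().split('=', 1)
                       for p in raw.split(';') if '=' in p)
        param = parse_qs(environ.get('QUERY_STRING', '')).get(cookie_name, [''])[0]
        if environ.get('REQUEST_METHOD') == 'POST' and cookies.get(cookie_name, '') != param:
            environ.pop('HTTP_COOKIE', None)    # anonymize the request
            return False
        return True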
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/decorators.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/decorators.py b/Allura/allura/lib/decorators.py
index 8604a92..28ad5bf 100644
--- a/Allura/allura/lib/decorators.py
+++ b/Allura/allura/lib/decorators.py
@@ -56,7 +56,7 @@ def task(*args, **kw):
             delay = kwargs.pop('delay', 0)
             project = getattr(c, 'project', None)
             cm = (h.notifications_disabled if project and
-                    kw.get('notifications_disabled') else h.null_contextmanager)
+                  kw.get('notifications_disabled') else h.null_contextmanager)
             with cm(project):
                 from allura import model as M
                 return M.MonQTask.post(func, args, kwargs, delay=delay)
@@ -68,7 +68,9 @@ def task(*args, **kw):
         return task_(args[0])
     return task_
 
+
 class event_handler(object):
+
     '''Decorator to register event handlers'''
     listeners = defaultdict(set)
 
@@ -80,6 +82,7 @@ class event_handler(object):
             self.listeners[t].add(func)
         return func
 
+
 class require_post(object):
 
     def __init__(self, redir=None):
@@ -90,18 +93,20 @@ class require_post(object):
             if request.method != 'POST':
                 if self.redir is not None:
                     redirect(self.redir)
-                raise exc.HTTPMethodNotAllowed(headers={'Allow':'POST'})
+                raise exc.HTTPMethodNotAllowed(headers={'Allow': 'POST'})
         before_validate(check_method)(func)
         return func
 
-class log_action(object): # pragma no cover
+
+class log_action(object):  # pragma no cover
 
     def __init__(self,
                  logger=None,
                  level=logging.INFO,
                  msg=None,
                  *args, **kwargs):
-        if logger is None: logger = logging
+        if logger is None:
+            logger = logging
         self._logger = logger
         self._level = level
         self._msg = msg
@@ -119,7 +124,7 @@ class log_action(object): # pragma no cover
         self._extra_proto.update(action=func.__name__)
         if self._msg is None:
             self._msg = func.__name__
-        result = lambda *args,**kwargs: self._wrapper(*args,**kwargs)
+        result = lambda *args, **kwargs: self._wrapper(*args, **kwargs)
         # assert not hasattr(func, 'decoration')
         if hasattr(func, 'decoration'):
             result.decoration = func.decoration
@@ -166,9 +171,9 @@ class log_action(object): # pragma no cover
                          user_id=user.id)
         # Save the project info
         if (result
-            and isinstance(result, dict)
-            and 'p' in result
-            and result['p'] is not None):
+                and isinstance(result, dict)
+                and 'p' in result
+                and result['p'] is not None):
             extra.update(
                 source=result['p']['source'],
                 project_name=result['p']['shortname'],
@@ -184,16 +189,18 @@ class log_action(object): # pragma no cover
         extra['referer_link'] = referer_link
         return extra
 
+
 def Property(function):
     '''Decorator to easily assign descriptors based on sub-function names
     See <http://code.activestate.com/recipes/410698-property-decorator-for-python-24/>
     '''
     keys = 'fget', 'fset', 'fdel'
-    func_locals = {'doc':function.__doc__}
+    func_locals = {'doc': function.__doc__}
+
     def probeFunc(frame, event, arg):
         if event == 'return':
             locals = frame.f_locals
-            func_locals.update(dict((k,locals.get(k)) for k in keys))
+            func_locals.update(dict((k, locals.get(k)) for k in keys))
             sys.settrace(None)
         return probeFunc
     sys.settrace(probeFunc)
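
Among the decorators reindented above, event_handler is the simplest to picture: it keeps a class-level defaultdict(set) mapping topics to handler functions, so decorating a function with one or more topics registers it for later dispatch. A standalone version of that registration pattern, the same shape as the class in the hunk (the topic string and the dispatch loop are illustrative):

    from collections import defaultdict

    class event_handler(object):
        '''Register a function as a handler for one or more event topics.'''
        listeners = defaultdict(set)

        def __init__(self, *topics):
            self.topics = topics

        def __call__(self, func):
            for t in self.topics:
                self.listeners[t].add(func)
            return func

    @event_handler('project.created')
    def announce(topic, **kw):
        print 'handling', topic

    for handler in event_handler.listeners['project.created']:
        handler('project.created')              # -> handling project.created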

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/exceptions.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/exceptions.py b/Allura/allura/lib/exceptions.py
index 7611dd7..8a33658 100644
--- a/Allura/allura/lib/exceptions.py
+++ b/Allura/allura/lib/exceptions.py
@@ -17,38 +17,77 @@
 
 from formencode import Invalid
 
-class ForgeError(Exception): pass
+
+class ForgeError(Exception):
+    pass
+
 
 class ProjectConflict(ForgeError, Invalid):
 
-    # support the single string constructor in addition to full set of params that Invalid.__init__ requires
+    # support the single string constructor in addition to full set of params
+    # that Invalid.__init__ requires
     def __init__(self, msg, value=None, state=None, error_list=None, error_dict=None):
-        super(ProjectConflict, self).__init__(msg, value, state, error_list, error_dict)
+        super(ProjectConflict, self).__init__(
+            msg, value, state, error_list, error_dict)
+
+
+class ProjectShortnameInvalid(ForgeError, Invalid):
+    pass
+
+
+class ProjectOverlimitError(ForgeError):
+    pass
+
+
+class ProjectRatelimitError(ForgeError):
+    pass
+
+
+class ToolError(ForgeError):
+    pass
+
+
+class NoSuchProjectError(ForgeError):
+    pass
+
 
+class NoSuchNeighborhoodError(ForgeError):
+    pass
+
+
+class NoSuchGlobalsError(ForgeError):
+    pass
+
+
+class MailError(ForgeError):
+    pass
+
+
+class AddressException(MailError):
+    pass
+
+
+class NoSuchNBFeatureError(ForgeError):
+    pass
+
+
+class InvalidNBFeatureValueError(ForgeError):
+    pass
 
-class ProjectShortnameInvalid(ForgeError, Invalid): pass
-class ProjectOverlimitError(ForgeError): pass
-class ProjectRatelimitError(ForgeError): pass
-class ToolError(ForgeError): pass
-class NoSuchProjectError(ForgeError): pass
-class NoSuchNeighborhoodError(ForgeError): pass
-class NoSuchGlobalsError(ForgeError): pass
-class MailError(ForgeError): pass
-class AddressException(MailError): pass
-class NoSuchNBFeatureError(ForgeError): pass
-class InvalidNBFeatureValueError(ForgeError): pass
 
 class CompoundError(ForgeError):
+
     def __repr__(self):
-        return '<%s>\n%s\n</%s>'  % (
+        return '<%s>\n%s\n</%s>' % (
             self.__class__.__name__,
             '\n'.join(map(repr, self.args)),
             self.__class__.__name__)
+
     def format_error(self):
         import traceback
-        parts = [ '<%s>\n' % self.__class__.__name__ ]
-        for tp,val,tb in self.args:
-            for line in traceback.format_exception(tp,val,tb):
+        parts = ['<%s>\n' % self.__class__.__name__]
+        for tp, val, tb in self.args:
+            for line in traceback.format_exception(tp, val, tb):
                 parts.append('    ' + line)
-        parts.append('</%s>\n' % self.__class__.__name__ )
+        parts.append('</%s>\n' % self.__class__.__name__)
         return ''.join(parts)
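
The exceptions module ends with CompoundError, whose args are expected to be (type, value, traceback) triples; format_error() indents each formatted traceback inside a <ClassName>...</ClassName> wrapper. A small usage sketch, assuming the triples come from sys.exc_info() as typical callers would supply them:

    import sys
    from allura.lib.exceptions import CompoundError

    errors = []
    for bad in ('x', 0):
        try:
            1 / bad                        # TypeError, then ZeroDivisionError
        except Exception:
            errors.append(sys.exc_info())  # (type, value, traceback) triple

    print CompoundError(*errors).format_error()
    # <CompoundError>
    #     Traceback (most recent call last): ...
    # </CompoundError>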


[32/36] git commit: PEP8 cleanup

Posted by jo...@apache.org.
PEP8 cleanup

Signed-off-by: Tim Van Steenburgh <tv...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-allura/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-allura/commit/c93733ac
Tree: http://git-wip-us.apache.org/repos/asf/incubator-allura/tree/c93733ac
Diff: http://git-wip-us.apache.org/repos/asf/incubator-allura/diff/c93733ac

Branch: refs/heads/cj/6484
Commit: c93733acb492b4c9238ac8ec32136bb95f1a87d9
Parents: 37ecc5e
Author: Tim Van Steenburgh <tv...@gmail.com>
Authored: Fri Jan 10 18:18:51 2014 +0000
Committer: Tim Van Steenburgh <tv...@gmail.com>
Committed: Fri Jan 10 18:18:51 2014 +0000

----------------------------------------------------------------------
 Allura/allura/app.py                            |   97 +-
 Allura/allura/command/base.py                   |   21 +-
 Allura/allura/command/create_neighborhood.py    |   23 +-
 .../allura/command/create_trove_categories.py   | 2770 ++++++++++++------
 Allura/allura/command/reclone_repo.py           |   14 +-
 Allura/allura/command/script.py                 |   17 +-
 .../allura/command/set_neighborhood_features.py |   31 +-
 Allura/allura/command/show_models.py            |  101 +-
 Allura/allura/command/smtp_server.py            |    6 +-
 Allura/allura/command/taskd.py                  |   40 +-
 Allura/allura/command/taskd_cleanup.py          |   52 +-
 Allura/allura/config/app_cfg.py                 |    3 +-
 Allura/allura/config/environment.py             |    3 +-
 Allura/allura/config/middleware.py              |   37 +-
 Allura/allura/config/resources.py               |    1 +
 Allura/allura/controllers/attachments.py        |   12 +-
 Allura/allura/controllers/auth.py               |  146 +-
 Allura/allura/controllers/base.py               |    2 +
 .../allura/controllers/basetest_project_root.py |   30 +-
 Allura/allura/controllers/discuss.py            |  147 +-
 Allura/allura/controllers/feed.py               |   11 +-
 Allura/allura/controllers/project.py            |  203 +-
 Allura/allura/controllers/repository.py         |  136 +-
 Allura/allura/controllers/rest.py               |   44 +-
 Allura/allura/controllers/root.py               |   23 +-
 Allura/allura/controllers/search.py             |   34 +-
 Allura/allura/controllers/site_admin.py         |   81 +-
 Allura/allura/controllers/static.py             |    2 +-
 Allura/allura/controllers/task.py               |    4 +-
 Allura/allura/controllers/template.py           |    3 +-
 Allura/allura/controllers/trovecategories.py    |   66 +-
 Allura/allura/eventslistener.py                 |   10 +-
 Allura/allura/ext/admin/admin_main.py           |  279 +-
 Allura/allura/ext/admin/widgets.py              |   80 +-
 Allura/allura/ext/project_home/project_main.py  |   22 +-
 Allura/allura/ext/search/search_main.py         |   24 +-
 Allura/allura/ext/user_profile/user_main.py     |   20 +-
 Allura/allura/lib/AsciiDammit.py                |  267 +-
 Allura/allura/lib/app_globals.py                |   82 +-
 Allura/allura/lib/async.py                      |    2 +
 Allura/allura/lib/base.py                       |    8 +-
 Allura/allura/lib/custom_middleware.py          |   66 +-
 Allura/allura/lib/decorators.py                 |   27 +-
 Allura/allura/lib/exceptions.py                 |   77 +-
 Allura/allura/lib/gravatar.py                   |   12 +-
 Allura/allura/lib/helpers.py                    |  166 +-
 Allura/allura/lib/macro.py                      |  137 +-
 Allura/allura/lib/mail_util.py                  |   45 +-
 Allura/allura/lib/markdown_extensions.py        |  123 +-
 Allura/allura/lib/oid_helper.py                 |   13 +-
 Allura/allura/lib/package_path_loader.py        |   24 +-
 Allura/allura/lib/patches.py                    |   20 +-
 Allura/allura/lib/plugin.py                     |  182 +-
 Allura/allura/lib/repository.py                 |   99 +-
 Allura/allura/lib/rest_api.py                   |   56 +-
 Allura/allura/lib/search.py                     |   42 +-
 Allura/allura/lib/security.py                   |   78 +-
 Allura/allura/lib/solr.py                       |    5 +-
 Allura/allura/lib/spam/__init__.py              |    2 +
 Allura/allura/lib/spam/akismetfilter.py         |   18 +-
 Allura/allura/lib/spam/mollomfilter.py          |    7 +-
 Allura/allura/lib/stats.py                      |    9 +-
 Allura/allura/lib/utils.py                      |   94 +-
 Allura/allura/lib/validators.py                 |   91 +-
 Allura/allura/lib/widgets/analytics.py          |    5 +-
 Allura/allura/lib/widgets/auth_widgets.py       |   19 +-
 Allura/allura/lib/widgets/discuss.py            |  213 +-
 Allura/allura/lib/widgets/form_fields.py        |  165 +-
 Allura/allura/lib/widgets/forms.py              |  274 +-
 Allura/allura/lib/widgets/macros.py             |   33 +-
 Allura/allura/lib/widgets/oauth_widgets.py      |   20 +-
 Allura/allura/lib/widgets/project_list.py       |   19 +-
 Allura/allura/lib/widgets/repo.py               |   39 +-
 Allura/allura/lib/widgets/search.py             |   14 +-
 Allura/allura/lib/widgets/subscriptions.py      |   50 +-
 Allura/allura/lib/widgets/user_profile.py       |    3 +-
 Allura/allura/lib/zarkov_helpers.py             |   12 +-
 Allura/allura/model/artifact.py                 |  186 +-
 Allura/allura/model/attachments.py              |   19 +-
 Allura/allura/model/auth.py                     |  332 ++-
 Allura/allura/model/discuss.py                  |   87 +-
 Allura/allura/model/filesystem.py               |   53 +-
 Allura/allura/model/index.py                    |   71 +-
 Allura/allura/model/monq_model.py               |   46 +-
 Allura/allura/model/neighborhood.py             |   77 +-
 Allura/allura/model/notification.py             |  265 +-
 Allura/allura/model/oauth.py                    |   47 +-
 Allura/allura/model/openid_model.py             |   28 +-
 Allura/allura/model/project.py                  |  322 +-
 Allura/allura/model/repo.py                     |  168 +-
 Allura/allura/model/repo_refresh.py             |  208 +-
 Allura/allura/model/repository.py               |  217 +-
 Allura/allura/model/session.py                  |   27 +-
 Allura/allura/model/stats.py                    |  184 +-
 Allura/allura/model/timeline.py                 |   14 +-
 Allura/allura/model/types.py                    |   10 +-
 Allura/allura/scripts/refresh_last_commits.py   |   78 +-
 Allura/allura/scripts/refreshrepo.py            |  121 +-
 Allura/allura/scripts/scripttask.py             |    4 +-
 Allura/allura/scripts/trac_export.py            |   60 +-
 Allura/allura/scripts/update_checkout_url.py    |    5 +-
 Allura/allura/tasks/admin_tasks.py              |    4 +-
 Allura/allura/tasks/event_tasks.py              |    2 +-
 Allura/allura/tasks/export_tasks.py             |   21 +-
 Allura/allura/tasks/index_tasks.py              |   11 +-
 Allura/allura/tasks/mail_tasks.py               |   46 +-
 Allura/allura/tasks/notification_tasks.py       |    1 +
 Allura/allura/tasks/repo_tasks.py               |   45 +-
 Allura/allura/tests/__init__.py                 |    2 +
 Allura/allura/tests/decorators.py               |   10 +-
 Allura/allura/tests/functional/__init__.py      |    2 +-
 Allura/allura/tests/functional/test_admin.py    |  764 ++---
 Allura/allura/tests/functional/test_auth.py     |  854 +++---
 Allura/allura/tests/functional/test_discuss.py  |  134 +-
 Allura/allura/tests/functional/test_feeds.py    |   29 +-
 Allura/allura/tests/functional/test_gravatar.py |    3 +-
 Allura/allura/tests/functional/test_home.py     |   23 +-
 .../tests/functional/test_neighborhood.py       |  297 +-
 Allura/allura/tests/functional/test_rest.py     |   76 +-
 .../tests/functional/test_rest_api_tickets.py   |    9 +-
 Allura/allura/tests/functional/test_root.py     |   82 +-
 Allura/allura/tests/functional/test_search.py   |    1 -
 .../allura/tests/functional/test_site_admin.py  |   32 +-
 Allura/allura/tests/functional/test_static.py   |    1 +
 .../tests/functional/test_user_profile.py       |   45 +-
 Allura/allura/tests/model/test_artifact.py      |   35 +-
 Allura/allura/tests/model/test_auth.py          |   57 +-
 Allura/allura/tests/model/test_discussion.py    |   67 +-
 Allura/allura/tests/model/test_filesystem.py    |   29 +-
 Allura/allura/tests/model/test_monq.py          |    4 +-
 Allura/allura/tests/model/test_neighborhood.py  |    5 +-
 Allura/allura/tests/model/test_notification.py  |  189 +-
 Allura/allura/tests/model/test_openid.py        |   17 +-
 Allura/allura/tests/model/test_project.py       |   17 +-
 Allura/allura/tests/model/test_repo.py          |  411 +--
 Allura/allura/tests/test_app.py                 |   15 +-
 Allura/allura/tests/test_commands.py            |  152 +-
 Allura/allura/tests/test_decorators.py          |    1 +
 Allura/allura/tests/test_diff.py                |    4 +-
 Allura/allura/tests/test_dispatch.py            |    6 +-
 Allura/allura/tests/test_globals.py             |  149 +-
 Allura/allura/tests/test_helpers.py             |  125 +-
 Allura/allura/tests/test_mail_util.py           |   69 +-
 Allura/allura/tests/test_markdown.py            |   37 +-
 Allura/allura/tests/test_plugin.py              |   24 +-
 Allura/allura/tests/test_scripttask.py          |    2 +
 Allura/allura/tests/test_security.py            |   59 +-
 Allura/allura/tests/test_tasks.py               |   78 +-
 Allura/allura/tests/test_utils.py               |   24 +-
 Allura/allura/tests/test_validators.py          |   14 +-
 Allura/allura/tests/test_zarkov_helpers.py      |   26 +-
 Allura/allura/tests/unit/__init__.py            |    1 +
 .../test_discussion_moderation_controller.py    |   10 +-
 Allura/allura/tests/unit/factories.py           |    3 +-
 Allura/allura/tests/unit/patches.py             |    1 -
 Allura/allura/tests/unit/spam/test_akismet.py   |   46 +-
 Allura/allura/tests/unit/spam/test_mollom.py    |   30 +-
 .../allura/tests/unit/spam/test_spam_filter.py  |    2 +
 Allura/allura/tests/unit/test_app.py            |    1 -
 .../allura/tests/unit/test_helpers/test_ago.py  |    8 +-
 .../tests/unit/test_helpers/test_set_context.py |   13 +-
 Allura/allura/tests/unit/test_mixins.py         |    4 +-
 .../tests/unit/test_package_path_loader.py      |  130 +-
 Allura/allura/tests/unit/test_project.py        |   12 +-
 Allura/allura/tests/unit/test_repo.py           |   45 +-
 Allura/allura/tests/unit/test_session.py        |    9 +-
 Allura/allura/tests/unit/test_sitemapentry.py   |    1 +
 Allura/allura/tests/unit/test_solr.py           |   33 +-
 Allura/allura/websetup/__init__.py              |    1 +
 Allura/allura/websetup/bootstrap.py             |  101 +-
 Allura/allura/websetup/schema.py                |    5 +-
 Allura/docs/conf.py                             |   18 +-
 Allura/ez_setup/__init__.py                     |   67 +-
 Allura/ldap-setup.py                            |   25 +-
 Allura/ldap-userconfig.py                       |    3 +
 Allura/setup.py                                 |   16 +-
 Allura/test-light.py                            |    7 +-
 AlluraTest/alluratest/controller.py             |   37 +-
 AlluraTest/alluratest/test_syntax.py            |   26 +-
 AlluraTest/alluratest/validation.py             |   34 +-
 AlluraTest/setup.py                             |    8 +-
 ForgeActivity/forgeactivity/config/resources.py |    1 +
 ForgeActivity/forgeactivity/main.py             |   79 +-
 .../forgeactivity/tests/functional/test_root.py |   59 +-
 ForgeActivity/forgeactivity/widgets/follow.py   |    4 +-
 ForgeActivity/setup.py                          |    3 +-
 ForgeBlog/forgeblog/command/base.py             |    1 +
 ForgeBlog/forgeblog/command/rssfeeds.py         |   37 +-
 ForgeBlog/forgeblog/main.py                     |   69 +-
 ForgeBlog/forgeblog/model/blog.py               |   74 +-
 .../forgeblog/tests/functional/test_rest.py     |   57 +-
 .../forgeblog/tests/functional/test_root.py     |   11 +-
 ForgeBlog/forgeblog/tests/test_app.py           |    9 +-
 ForgeBlog/forgeblog/tests/test_commands.py      |   53 +-
 ForgeBlog/forgeblog/tests/test_roles.py         |    3 +
 ForgeBlog/forgeblog/tests/unit/__init__.py      |   10 +-
 .../forgeblog/tests/unit/test_blog_post.py      |    7 +-
 ForgeBlog/forgeblog/widgets.py                  |   31 +-
 ForgeBlog/setup.py                              |    6 +-
 ForgeChat/forgechat/command.py                  |   38 +-
 ForgeChat/forgechat/main.py                     |   46 +-
 ForgeChat/forgechat/model/chat.py               |   15 +-
 ForgeChat/setup.py                              |    6 +-
 .../forgediscussion/controllers/forum.py        |   42 +-
 .../forgediscussion/controllers/root.py         |   64 +-
 ForgeDiscussion/forgediscussion/forum_main.py   |  106 +-
 .../forgediscussion/import_support.py           |   44 +-
 ForgeDiscussion/forgediscussion/model/forum.py  |   73 +-
 ForgeDiscussion/forgediscussion/tasks.py        |    2 +
 .../tests/functional/test_forum.py              |  329 ++-
 .../tests/functional/test_forum_admin.py        |  168 +-
 .../tests/functional/test_import.py             |   43 +-
 .../tests/functional/test_rest.py               |   21 +-
 .../forgediscussion/tests/test_app.py           |   14 +-
 .../forgediscussion/tests/test_forum_roles.py   |    3 +
 ForgeDiscussion/forgediscussion/utils.py        |   30 +-
 .../forgediscussion/widgets/admin.py            |   40 +-
 .../forgediscussion/widgets/forum_widgets.py    |  167 +-
 ForgeDiscussion/setup.py                        |    6 +-
 ForgeGit/forgegit/git_main.py                   |   26 +-
 ForgeGit/forgegit/model/git_repo.py             |  162 +-
 ForgeGit/forgegit/tests/__init__.py             |    2 +-
 ForgeGit/forgegit/tests/functional/test_auth.py |    7 +-
 .../tests/functional/test_controllers.py        |  135 +-
 .../forgegit/tests/model/test_repository.py     |  242 +-
 ForgeGit/forgegit/tests/test_git_app.py         |    1 +
 ForgeGit/forgegit/tests/test_tasks.py           |    4 +-
 ForgeGit/setup.py                               |    6 +-
 ForgeImporters/docs/conf.py                     |   18 +-
 ForgeImporters/forgeimporters/base.py           |  110 +-
 ForgeImporters/forgeimporters/forge/__init__.py |    1 -
 ForgeImporters/forgeimporters/forge/tracker.py  |  146 +-
 .../forgeimporters/github/__init__.py           |   33 +-
 ForgeImporters/forgeimporters/github/code.py    |   48 +-
 ForgeImporters/forgeimporters/github/project.py |   10 +-
 .../forgeimporters/github/tests/test_code.py    |   62 +-
 .../forgeimporters/github/tests/test_tracker.py |   18 +-
 .../forgeimporters/github/tests/test_wiki.py    |   82 +-
 ForgeImporters/forgeimporters/github/tracker.py |   98 +-
 ForgeImporters/forgeimporters/github/utils.py   |    8 +-
 ForgeImporters/forgeimporters/github/wiki.py    |   95 +-
 .../forgeimporters/google/__init__.py           |  140 +-
 ForgeImporters/forgeimporters/google/code.py    |   68 +-
 ForgeImporters/forgeimporters/google/project.py |    1 +
 .../forgeimporters/google/tests/__init__.py     |    1 -
 .../forgeimporters/google/tests/test_code.py    |   72 +-
 ForgeImporters/forgeimporters/google/tracker.py |  128 +-
 .../forgeimporters/tests/forge/__init__.py      |    1 -
 .../forgeimporters/tests/forge/test_tracker.py  |  367 +--
 .../forgeimporters/tests/github/__init__.py     |    1 -
 .../tests/github/functional/__init__.py         |    1 -
 .../tests/github/functional/test_github.py      |   13 +-
 .../tests/github/test_extractor.py              |   20 +-
 .../forgeimporters/tests/github/test_tracker.py |  136 +-
 .../forgeimporters/tests/google/__init__.py     |    1 -
 .../tests/google/functional/__init__.py         |    1 -
 .../tests/google/functional/test_tracker.py     |  358 +--
 .../tests/google/test_extractor.py              |  387 +--
 .../forgeimporters/tests/google/test_tracker.py |  322 +-
 .../forgeimporters/tests/test_base.py           |  141 +-
 ForgeImporters/forgeimporters/trac/__init__.py  |    1 -
 ForgeImporters/forgeimporters/trac/project.py   |    1 +
 .../forgeimporters/trac/tests/__init__.py       |    1 -
 .../forgeimporters/trac/tests/test_tickets.py   |  163 +-
 ForgeImporters/forgeimporters/trac/tickets.py   |   96 +-
 ForgeLink/forgelink/link_main.py                |   32 +-
 .../forgelink/tests/functional/test_rest.py     |   16 +-
 .../forgelink/tests/functional/test_root.py     |    2 +-
 ForgeLink/setup.py                              |    6 +-
 ForgeSVN/forgesvn/controllers.py                |    4 +-
 ForgeSVN/forgesvn/model/svn.py                  |  239 +-
 ForgeSVN/forgesvn/svn_main.py                   |   42 +-
 ForgeSVN/forgesvn/tests/__init__.py             |    2 +-
 ForgeSVN/forgesvn/tests/functional/test_auth.py |    8 +-
 .../tests/functional/test_controllers.py        |   38 +-
 .../forgesvn/tests/model/test_repository.py     |  276 +-
 .../tests/model/test_svnimplementation.py       |   28 +-
 ForgeSVN/forgesvn/tests/test_svn_app.py         |    1 +
 ForgeSVN/forgesvn/tests/test_tasks.py           |    6 +-
 ForgeSVN/setup.py                               |    6 +-
 ForgeShortUrl/forgeshorturl/main.py             |   11 +-
 ForgeShortUrl/forgeshorturl/model/shorturl.py   |   12 +-
 .../forgeshorturl/tests/functional/test.py      |   29 +-
 .../forgeshorturl/widgets/short_url.py          |    8 +-
 .../forgetracker/command/fix_discussion.py      |   20 +-
 ForgeTracker/forgetracker/config/resources.py   |    3 +-
 ForgeTracker/forgetracker/import_support.py     |   59 +-
 ForgeTracker/forgetracker/model/ticket.py       |  365 ++-
 ForgeTracker/forgetracker/plugins.py            |    1 -
 .../forgetracker/scripts/import_tracker.py      |   63 +-
 .../tests/command/test_fix_discussion.py        |    7 +-
 .../tests/functional/test_import.py             |   52 +-
 .../forgetracker/tests/functional/test_rest.py  |   82 +-
 .../forgetracker/tests/functional/test_root.py  | 1021 ++++---
 ForgeTracker/forgetracker/tests/test_app.py     |   10 +-
 .../forgetracker/tests/test_tracker_roles.py    |    4 +
 .../forgetracker/tests/unit/__init__.py         |   10 +-
 .../tests/unit/test_globals_model.py            |   23 +-
 .../tests/unit/test_milestone_controller.py     |    3 +-
 .../tests/unit/test_root_controller.py          |    7 +-
 .../unit/test_ticket_custom_fields_form.py      |    6 +-
 .../forgetracker/tests/unit/test_ticket_form.py |    2 +-
 .../tests/unit/test_ticket_model.py             |   85 +-
 ForgeTracker/forgetracker/tracker_main.py       |  488 +--
 ForgeTracker/forgetracker/widgets/admin.py      |   21 +-
 .../forgetracker/widgets/admin_custom_fields.py |   77 +-
 ForgeTracker/forgetracker/widgets/bin_form.py   |   14 +-
 .../forgetracker/widgets/ticket_form.py         |   65 +-
 .../forgetracker/widgets/ticket_search.py       |   30 +-
 ForgeTracker/setup.py                           |    6 +-
 ForgeUserStats/forgeuserstats/__init__.py       |    1 -
 .../forgeuserstats/controllers/__init__.py      |    1 -
 .../forgeuserstats/controllers/userstats.py     |   76 +-
 ForgeUserStats/forgeuserstats/main.py           |   36 +-
 ForgeUserStats/forgeuserstats/model/__init__.py |    1 -
 ForgeUserStats/forgeuserstats/model/stats.py    |   16 +-
 ForgeUserStats/forgeuserstats/tests/__init__.py |    1 -
 .../forgeuserstats/tests/test_model.py          |  172 +-
 .../forgeuserstats/tests/test_stats.py          |   93 +-
 .../forgeuserstats/widgets/__init__.py          |    1 -
 ForgeUserStats/forgeuserstats/widgets/forms.py  |   10 +-
 ForgeUserStats/setup.py                         |    6 +-
 ForgeWiki/forgewiki/converters.py               |    4 +-
 ForgeWiki/forgewiki/model/wiki.py               |   78 +-
 .../scripts/wiki2markdown/extractors.py         |   10 +-
 .../forgewiki/scripts/wiki2markdown/loaders.py  |   16 +-
 .../scripts/wiki2markdown/wiki2markdown.py      |   56 +-
 .../scripts/wiki_from_trac/__init__.py          |    2 +-
 .../scripts/wiki_from_trac/extractors.py        |    9 +-
 .../forgewiki/scripts/wiki_from_trac/loaders.py |    6 +-
 .../scripts/wiki_from_trac/wiki_from_trac.py    |   39 +-
 .../forgewiki/tests/functional/test_rest.py     |   15 +-
 .../forgewiki/tests/functional/test_root.py     |  339 ++-
 ForgeWiki/forgewiki/tests/test_app.py           |    9 +-
 ForgeWiki/forgewiki/tests/test_wiki2markdown.py |   18 +-
 ForgeWiki/forgewiki/tests/test_wiki_roles.py    |    4 +
 ForgeWiki/forgewiki/widgets/wiki.py             |    6 +-
 ForgeWiki/forgewiki/wiki_main.py                |  164 +-
 ForgeWiki/setup.py                              |    6 +-
 NoWarnings/nowarnings.py                        |    1 +
 fuse/accessfs.py                                |   53 +-
 run_tests                                       |   31 +-
 scripts/add_user_to_group.py                    |   14 +-
 scripts/allura_import.py                        |   52 +-
 scripts/changelog.py                            |    9 +-
 scripts/create-allura-sitemap.py                |   16 +-
 scripts/git-hooks/for-the-remote-repo/update    |   46 +-
 .../git-hooks/for-your-local-repo/commit-msg    |    5 +-
 scripts/git-mr                                  |   29 +-
 scripts/import_trove_categories.py              |   11 +-
 scripts/migrations/000-fix-tracker-fields.py    |   16 +-
 scripts/migrations/001-restore-labels.py        |   31 +-
 .../002-fix-tracker-thread-subjects.py          |   10 +-
 scripts/migrations/003-migrate_project_roles.py |    3 +-
 .../004-make-attachments-polymorphic.py         |    1 +
 ...005-remove_duplicate_ticket_notifications.py |   15 +-
 scripts/migrations/006-migrate-artifact-refs.py |    5 +-
 scripts/migrations/007-update-acls.py           |   64 +-
 .../migrations/008-remove-forumpost-subject.py  |    1 +
 scripts/migrations/010-fix-home-permissions.py  |   31 +-
 scripts/migrations/011-fix-subroles.py          |   33 +-
 scripts/migrations/012-uninstall-home.py        |   38 +-
 scripts/migrations/013-update-ordinals.py       |   10 +-
 .../015-add-neighborhood_id-to-blog-posts.py    |    3 +-
 scripts/migrations/018-add-svn-checkout-url.py  |    2 +-
 .../migrations/020-remove-wiki-title-slashes.py |    1 +
 .../migrations/022-change-anon-display-name.py  |    3 +-
 .../024-migrate-custom-profile-text.py          |   19 +-
 scripts/migrations/025-add-is-nbhd-project.py   |    7 +-
 scripts/migrations/026-install-activity-tool.py |    1 +
 .../027-change-ticket-write-permissions.py      |    5 +-
 scripts/migrations/028-remove-svn-trees.py      |    4 +-
 scripts/new_ticket.py                           |   15 +-
 scripts/open_relay.py                           |   11 +-
 scripts/perf/benchmark-scm.py                   |   53 +-
 scripts/perf/call_count.py                      |   23 +-
 scripts/perf/md_perf.py                         |   19 +-
 scripts/perf/sstress.py                         |   10 +-
 scripts/perf/test_git_lcd.py                    |    2 +-
 scripts/prep-scm-sandbox.py                     |   20 +-
 scripts/project-import.py                       |  131 +-
 scripts/publicize-neighborhood.py               |   22 +-
 scripts/recover-user-databases.py               |   11 +-
 scripts/rethumb.py                              |   42 +-
 scripts/scrub-allura-data.py                    |   20 +-
 scripts/setup-scm-server.py                     |   10 +-
 scripts/teamforge-import.py                     |  327 ++-
 scripts/test-branches-against-tickets.py        |   52 +-
 scripts/tracker-rip.py                          |   59 +-
 scripts/wiki-copy.py                            |   28 +-
 scripts/wiki-export.py                          |    2 +-
 scripts/wiki-post.py                            |   18 +-
 392 files changed, 15707 insertions(+), 10378 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/app.py
----------------------------------------------------------------------
diff --git a/Allura/allura/app.py b/Allura/allura/app.py
index 85673c5..29acf12 100644
--- a/Allura/allura/app.py
+++ b/Allura/allura/app.py
@@ -46,9 +46,11 @@ config = ConfigProxy(common_suffix='forgemail.domain')
 
 
 class ConfigOption(object):
+
     """Definition of a configuration option for an :class:`Application`.
 
     """
+
     def __init__(self, name, ming_type, default, label=None):
         """Create a new ConfigOption.
 
@@ -67,14 +69,16 @@ class ConfigOption(object):
 
 
 class SitemapEntry(object):
+
     """A labeled URL, which may optionally have
     :class:`children <SitemapEntry>`.
 
     Used for generating trees of links.
 
     """
+
     def __init__(self, label, url=None, children=None, className=None,
-            ui_icon=None, small=None, tool_name=None, matching_urls=None):
+                 ui_icon=None, small=None, tool_name=None, matching_urls=None):
         """Create a new SitemapEntry.
 
         """
@@ -112,7 +116,8 @@ class SitemapEntry(object):
         l = ['<SitemapEntry ']
         l.append('    label=%r' % self.label)
         l.append('    url=%r' % self.url)
-        l.append('    children=%s' % repr(self.children).replace('\n', '\n    '))
+        l.append('    children=%s' %
+                 repr(self.children).replace('\n', '\n    '))
         l.append('>')
         return '\n'.join(l)
 
@@ -130,12 +135,12 @@ class SitemapEntry(object):
         if url is not None:
             url = basejoin(app.url, url)
         return SitemapEntry(lbl, url, [
-                ch.bind_app(app) for ch in self.children],
-                className=self.className,
-                ui_icon=self.ui_icon,
-                small=self.small,
-                tool_name=self.tool_name,
-                matching_urls=self.matching_urls)
+            ch.bind_app(app) for ch in self.children],
+            className=self.className,
+            ui_icon=self.ui_icon,
+            small=self.small,
+            tool_name=self.tool_name,
+            matching_urls=self.matching_urls)
 
     def extend(self, sitemap_entries):
         """Extend our children with ``sitemap_entries``.
@@ -167,6 +172,7 @@ class SitemapEntry(object):
 
 
 class Application(object):
+
     """
     The base Allura pluggable application
 
@@ -239,9 +245,9 @@ class Application(object):
     ordinal = 0
     hidden = False
     icons = {
-        24:'images/admin_24.png',
-        32:'images/admin_32.png',
-        48:'images/admin_48.png'
+        24: 'images/admin_24.png',
+        32: 'images/admin_32.png',
+        48: 'images/admin_48.png'
     }
 
     def __init__(self, project, app_config_object):
@@ -331,7 +337,8 @@ class Application(object):
         :rtype: bool
 
         """
-        tools_list = [tool.tool_name.lower() for tool in self.project.app_configs]
+        tools_list = [tool.tool_name.lower()
+                      for tool in self.project.app_configs]
         return tools_list.count(self.config.tool_name.lower()) < self.max_instances
 
     @classmethod
@@ -349,7 +356,7 @@ class Application(object):
 
         """
         re = (h.re_relaxed_tool_mount_point if cls.relaxed_mount_points
-                else h.re_tool_mount_point)
+              else h.re_tool_mount_point)
         return re.match(mount_point)
 
     @classmethod
@@ -373,7 +380,7 @@ class Application(object):
         if resource:
             resource_path = os.path.join('nf', resource)
             url = (g.forge_static(resource) if cls.has_resource(resource_path)
-                    else g.theme_href(resource))
+                   else g.theme_href(resource))
         return url
 
     @classmethod
@@ -430,10 +437,10 @@ class Application(object):
         """
         if user and user != model.User.anonymous():
             model.Mailbox.subscribe(
-                    type='direct',
-                    user_id=user._id,
-                    project_id=self.project._id,
-                    app_config_id=self.config._id)
+                type='direct',
+                user_id=user._id,
+                project_id=self.project._id,
+                app_config_id=self.config._id)
 
     @classmethod
     def default_options(cls):
@@ -460,13 +467,14 @@ class Application(object):
 
     def uninstall(self, project=None, project_id=None):
         'Whatever logic is required to tear down a tool'
-        if project_id is None: project_id = project._id
+        if project_id is None:
+            project_id = project._id
         # De-index all the artifacts belonging to this tool in one fell swoop
         g.solr.delete(q='project_id_s:"%s" AND mount_point_s:"%s"' % (
-                project_id, self.config.options['mount_point']))
+            project_id, self.config.options['mount_point']))
         for d in model.Discussion.query.find({
-                'project_id':project_id,
-                'app_config_id':self.config._id}):
+                'project_id': project_id,
+                'app_config_id': self.config._id}):
             d.delete()
         self.config.delete()
         session(self.config).flush()
@@ -526,13 +534,17 @@ class Application(object):
         :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
 
         """
-        admin_url = c.project.url()+'admin/'+self.config.options.mount_point+'/'
+        admin_url = c.project.url() + 'admin/' + \
+            self.config.options.mount_point + '/'
         links = []
         if self.permissions and has_access(c.project, 'admin')():
-            links.append(SitemapEntry('Permissions', admin_url + 'permissions'))
+            links.append(
+                SitemapEntry('Permissions', admin_url + 'permissions'))
         if force_options or len(self.config_options) > 3:
-            links.append(SitemapEntry('Options', admin_url + 'options', className='admin_modal'))
-        links.append(SitemapEntry('Label', admin_url + 'edit_label', className='admin_modal'))
+            links.append(
+                SitemapEntry('Options', admin_url + 'options', className='admin_modal'))
+        links.append(
+            SitemapEntry('Label', admin_url + 'edit_label', className='admin_modal'))
         return links
 
     def handle_message(self, topic, message):
@@ -571,7 +583,8 @@ class Application(object):
             fp = StringIO(message['payload'])
             self.AttachmentClass.save_attachment(
                 message['filename'], fp,
-                content_type=message.get('content_type', 'application/octet-stream'),
+                content_type=message.get(
+                    'content_type', 'application/octet-stream'),
                 discussion_id=thd.discussion_id,
                 thread_id=thd._id,
                 post_id=message_id,
@@ -580,16 +593,19 @@ class Application(object):
         # Handle duplicates
         post = self.PostClass.query.get(_id=message_id)
         if post:
-            log.info('Existing message_id %s found - saving this as text attachment' % message_id)
+            log.info(
+                'Existing message_id %s found - saving this as text attachment' %
+                message_id)
             fp = StringIO(message['payload'])
             post.attach(
                 'alternate', fp,
-                content_type=message.get('content_type', 'application/octet-stream'),
+                content_type=message.get(
+                    'content_type', 'application/octet-stream'),
                 discussion_id=thd.discussion_id,
                 thread_id=thd._id,
                 post_id=message_id)
         else:
-            text=message['payload'] or '--no text body--'
+            text = message['payload'] or '--no text body--'
             post = thd.post(
                 message_id=message_id,
                 parent_id=parent_id,
@@ -607,6 +623,7 @@ class Application(object):
 
 
 class DefaultAdminController(BaseController):
+
     """Provides basic admin functionality for an :class:`Application`.
 
     To add more admin functionality for your Application, extend this
@@ -619,6 +636,7 @@ class DefaultAdminController(BaseController):
                 self.admin = MyAdminController(self)
 
     """
+
     def __init__(self, app):
         """Instantiate this controller for an :class:`app <Application>`.
 
@@ -642,7 +660,8 @@ class DefaultAdminController(BaseController):
         user = model.User.by_username(username)
         if not user:
             return dict(error='User "%s" not found' % username)
-        ace = model.ACE.deny(model.ProjectRole.by_user(user, upsert=True)._id, perm, reason)
+        ace = model.ACE.deny(
+            model.ProjectRole.by_user(user, upsert=True)._id, perm, reason)
         if not model.ACL.contains(ace, self.app.acl):
             self.app.acl.append(ace)
             return dict(user_id=str(user._id), username=user.username, reason=reason)
@@ -744,7 +763,8 @@ class DefaultAdminController(BaseController):
                 redirect('..')
             for opt in self.app.config_options:
                 if opt in Application.config_options:
-                    continue  # skip base options (mount_point, mount_label, ordinal)
+                    # skip base options (mount_point, mount_label, ordinal)
+                    continue
                 val = kw.get(opt.name, '')
                 if opt.ming_type == bool:
                     val = asbool(val or False)
@@ -778,15 +798,16 @@ class DefaultAdminController(BaseController):
             del_group_ids = []
             group_ids = args.get('value', [])
             if isinstance(new_group_ids, basestring):
-                new_group_ids = [ new_group_ids ]
+                new_group_ids = [new_group_ids]
             if isinstance(group_ids, basestring):
-                group_ids = [ group_ids ]
+                group_ids = [group_ids]
 
             for acl in old_acl:
                 if (acl['permission'] == perm) and (str(acl['role_id']) not in group_ids) and acl['access'] != model.ACE.DENY:
                     del_group_ids.append(str(acl['role_id']))
 
-            get_role = lambda _id: model.ProjectRole.query.get(_id=ObjectId(_id))
+            get_role = lambda _id: model.ProjectRole.query.get(
+                _id=ObjectId(_id))
             groups = map(get_role, group_ids)
             new_groups = map(get_role, new_group_ids)
             del_groups = map(get_role, del_group_ids)
@@ -794,8 +815,10 @@ class DefaultAdminController(BaseController):
             if new_groups or del_groups:
                 model.AuditLog.log('updated "%s" permission: "%s" => "%s" for %s' % (
                     perm,
-                    ', '.join(map(lambda role: role.name, groups+del_groups)),
-                    ', '.join(map(lambda role: role.name, groups+new_groups)),
+                    ', '.join(
+                        map(lambda role: role.name, groups + del_groups)),
+                    ', '.join(
+                        map(lambda role: role.name, groups + new_groups)),
                     self.app.config.options['mount_point']))
 
             role_ids = map(ObjectId, group_ids + new_group_ids)
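
The app.py hunks above are mostly continuation-line and whitespace fixes: wrapped arguments get re-aligned under the opening delimiter (E127/E128) and dict literals get a space after the colon (E231). A minimal sketch of that style follows; make_entry is a made-up helper for illustration, not code from the patch:

ICONS = {
    24: 'images/admin_24.png',   # space after ':' in dict literals
    32: 'images/admin_32.png',
}


def make_entry(label, url=None, children=None, className=None,
               ui_icon=None, small=None):
    # Wrapped parameters and arguments line up under the opening parenthesis.
    return dict(label=label, url=url, children=children or [],
                className=className, ui_icon=ui_icon, small=small)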

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/base.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/base.py b/Allura/allura/command/base.py
index e7849f3..427362c 100644
--- a/Allura/allura/command/base.py
+++ b/Allura/allura/command/base.py
@@ -35,6 +35,7 @@ from allura.lib.helpers import iter_entry_points
 
 log = None
 
+
 @task
 def run_command(command, args):
     """Run paster command asynchronously"""
@@ -49,7 +50,10 @@ def run_command(command, args):
         raise Exception("Error parsing args: '%s'" % args)
     return command.run(arg_list)
 
-class EmptyClass(object): pass
+
+class EmptyClass(object):
+    pass
+
 
 class Command(command.Command):
     min_args = 1
@@ -58,6 +62,7 @@ class Command(command.Command):
     group_name = 'Allura'
 
     class __metaclass__(type):
+
         @property
         def __doc__(cls):
             return cls.parser.format_help()
@@ -84,17 +89,20 @@ class Command(command.Command):
     def basic_setup(self):
         global log, M
         if self.args[0]:
-            # Probably being called from the command line - load the config file
-            self.config = conf = appconfig('config:%s' % self.args[0],relative_to=os.getcwd())
+            # Probably being called from the command line - load the config
+            # file
+            self.config = conf = appconfig('config:%s' %
+                                           self.args[0], relative_to=os.getcwd())
             # ... logging does not understand section#subsection syntax
             logging_config = self.args[0].split('#')[0]
-            logging.config.fileConfig(logging_config, disable_existing_loggers=False)
+            logging.config.fileConfig(
+                logging_config, disable_existing_loggers=False)
             log = logging.getLogger('allura.command')
             log.info('Initialize command with config %r', self.args[0])
             load_environment(conf.global_conf, conf.local_conf)
             self.setup_globals()
             from allura import model
-            M=model
+            M = model
             ming.configure(**conf)
             if asbool(conf.get('activitystream.recording.enabled', False)):
                 activitystream.configure(**conf)
@@ -114,7 +122,8 @@ class Command(command.Command):
         self.registry.prepare()
         self.registry.register(pylons.tmpl_context, EmptyClass())
         self.registry.register(pylons.app_globals, self.globals)
-        self.registry.register(allura.credentials, allura.lib.security.Credentials())
+        self.registry.register(
+            allura.credentials, allura.lib.security.Credentials())
         pylons.tmpl_context.queued_messages = None
 
     def teardown_globals(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/create_neighborhood.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/create_neighborhood.py b/Allura/allura/command/create_neighborhood.py
index 14e9e14..db79bb0 100644
--- a/Allura/allura/command/create_neighborhood.py
+++ b/Allura/allura/command/create_neighborhood.py
@@ -23,31 +23,32 @@ from bson import ObjectId
 from allura import model as M
 from allura.lib import plugin, exceptions
 
+
 class CreateNeighborhoodCommand(base.Command):
-    min_args=3
-    max_args=None
+    min_args = 3
+    max_args = None
     usage = '<ini file> <neighborhood_shortname> <admin1> [<admin2>...]'
     summary = 'Create a new neighborhood with the listed admins'
     parser = base.Command.standard_parser(verbose=True)
 
     def command(self):
         self.basic_setup()
-        admins = [ M.User.by_username(un) for un in self.args[2:] ]
+        admins = [M.User.by_username(un) for un in self.args[2:]]
         shortname = self.args[1]
         n = M.Neighborhood(
             name=shortname,
             url_prefix='/' + shortname + '/',
-            features=dict(private_projects = False,
-                          max_projects = 500,
-                          css = 'none',
-                          google_analytics = False))
+            features=dict(private_projects=False,
+                          max_projects=500,
+                          css='none',
+                          google_analytics=False))
         project_reg = plugin.ProjectRegistrationProvider.get()
         project_reg.register_neighborhood_project(n, admins)
 
 
 class UpdateNeighborhoodCommand(base.Command):
-    min_args=3
-    max_args=None
+    min_args = 3
+    max_args = None
     usage = '<ini file> <neighborhood> <home_tool_active>'
     summary = 'Activate Home application for neighborhood\r\n' \
         '\t<neighborhood> - the neighborhood name or _id\r\n' \
@@ -64,8 +65,8 @@ class UpdateNeighborhoodCommand(base.Command):
         if not nb:
             nb = M.Neighborhood.query.get(_id=ObjectId(shortname))
         if nb is None:
-            raise exceptions.NoSuchNeighborhoodError("The neighborhood %s " \
-                "could not be found in the database" % shortname)
+            raise exceptions.NoSuchNeighborhoodError("The neighborhood %s "
+                                                     "could not be found in the database" % shortname)
         tool_value = self.args[2].lower()
         if tool_value[:1] == "t":
             home_tool_active = True
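
create_neighborhood.py shows the two opposite '=' rules: class-level assignments gain spaces around '=' (E225), while keyword arguments lose them (E251). A small self-contained sketch of both conventions; ExampleCommand and make_features are illustrative only, not part of the command:

class ExampleCommand(object):
    min_args = 3     # assignment: spaces around '='
    max_args = None


def make_features():
    # keyword arguments: no spaces around '='
    return dict(private_projects=False,
                max_projects=500,
                css='none',
                google_analytics=False)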


[07/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/import_support.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/import_support.py b/ForgeTracker/forgetracker/import_support.py
index b5daa12..c1865e9 100644
--- a/ForgeTracker/forgetracker/import_support.py
+++ b/ForgeTracker/forgetracker/import_support.py
@@ -41,10 +41,13 @@ except ImportError:
 
 log = logging.getLogger(__name__)
 
+
 class ImportException(Exception):
     pass
 
+
 class ResettableStream(object):
+
     '''Class supporting seeks within a header of otherwise
     unseekable stream.'''
 
@@ -92,9 +95,10 @@ class ResettableStream(object):
         else:
             return self.stream_pos
 
+
 class ImportSupport(object):
 
-    ATTACHMENT_SIZE_LIMIT = 1024*1024
+    ATTACHMENT_SIZE_LIMIT = 1024 * 1024
 
     def __init__(self):
         # Map JSON interchange format fields to Ticket fields
@@ -102,7 +106,8 @@ class ImportSupport(object):
         #   None - drop
         #   True - map as is
         #   (new_field_name, value_convertor(val)) - use new field name and convert JSON's value
-        #   handler(ticket, field, val) - arbitrary transform, expected to modify ticket in-place
+        # handler(ticket, field, val) - arbitrary transform, expected to modify
+        # ticket in-place
         self.FIELD_MAP = {
             'assigned_to': ('assigned_to_id', self.get_user_id),
             'class': None,
@@ -110,7 +115,8 @@ class ImportSupport(object):
             'date_updated': ('mod_date', self.parse_date),
             'description': True,
             'id': None,
-            'keywords': ('labels', lambda s: s.split()), # default way of handling, see below for overrides
+            # default way of handling, see below for overrides
+            'keywords': ('labels', lambda s: s.split()),
             'status': True,
             'submitter': ('reported_by_id', self.get_user_id),
             'summary': True,
@@ -121,7 +127,6 @@ class ImportSupport(object):
         self.errors = []
         self.options = {}
 
-
     def init_options(self, options_json):
         self.options = json.loads(options_json)
         opt_keywords = self.option('keywords_as', 'split_labels')
@@ -133,7 +138,6 @@ class ImportSupport(object):
     def option(self, name, default=None):
         return self.options.get(name, False)
 
-
     #
     # Field/value convertors
     #
@@ -158,7 +162,8 @@ class ImportSupport(object):
     def check_custom_field(self, field, value, ticket_status):
         field = c.app.globals.get_custom_field(field)
         if (field['type'] == 'select') and value:
-            field_options = h.split_select_field_options(h.really_unicode(field['options']))
+            field_options = h.split_select_field_options(
+                h.really_unicode(field['options']))
             if value not in field_options:
                 field['options'] = ' '.join([field['options'], value])
         elif (field['type'] == 'milestone') and value:
@@ -180,8 +185,10 @@ class ImportSupport(object):
     def custom(self, ticket, field, value, ticket_status):
         field = '_' + field
         if not c.app.has_custom_field(field):
-            log.warning('Custom field %s is not defined, defining as string', field)
-            c.app.globals.custom_fields.append(dict(name=field, label=field[1:].capitalize(), type='string'))
+            log.warning(
+                'Custom field %s is not defined, defining as string', field)
+            c.app.globals.custom_fields.append(
+                dict(name=field, label=field[1:].capitalize(), type='string'))
             ThreadLocalORMSession.flush_all()
         if 'custom_fields' not in ticket:
             ticket['custom_fields'] = {}
@@ -204,23 +211,26 @@ class ImportSupport(object):
                 new_f, conv = transform
                 remapped[new_f] = conv(v)
 
-        description = h.really_unicode(self.description_processing(remapped['description']))
+        description = h.really_unicode(
+            self.description_processing(remapped['description']))
         creator = owner = ''
         if ticket_dict.get('submitter') and not remapped.get('reported_by_id'):
             creator = u'*Originally created by:* {0}\n'.format(
-                    h.really_unicode(ticket_dict['submitter']))
+                h.really_unicode(ticket_dict['submitter']))
         if ticket_dict.get('assigned_to') and not remapped.get('assigned_to_id'):
             owner = u'*Originally owned by:* {0}\n'.format(
                     h.really_unicode(ticket_dict['assigned_to']))
         remapped['description'] = u'{0}{1}{2}{3}'.format(creator, owner,
-                '\n' if creator or owner else '', description)
+                                                         '\n' if creator or owner else '', description)
 
         ticket_num = ticket_dict['id']
         existing_ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
-                                          ticket_num=ticket_num)
+                                              ticket_num=ticket_num)
         if existing_ticket:
             ticket_num = c.app.globals.next_ticket_num()
-            self.warnings.append('Ticket #%s: Ticket with this id already exists, using next available id: %s' % (ticket_dict['id'], ticket_num))
+            self.warnings.append(
+                'Ticket #%s: Ticket with this id already exists, using next available id: %s' %
+                (ticket_dict['id'], ticket_num))
         else:
             if c.app.globals.last_ticket_num < ticket_num:
                 c.app.globals.last_ticket_num = ticket_num
@@ -245,22 +255,24 @@ class ImportSupport(object):
     def make_comment(self, thread, comment_dict):
         ts = self.parse_date(comment_dict['date'])
         author_id = self.get_user_id(comment_dict['submitter'])
-        text = h.really_unicode(self.comment_processing(comment_dict['comment']))
+        text = h.really_unicode(
+            self.comment_processing(comment_dict['comment']))
         if not author_id and comment_dict['submitter']:
             text = u'*Originally posted by:* {0}\n\n{1}'.format(
-                    h.really_unicode(comment_dict['submitter']), text)
+                h.really_unicode(comment_dict['submitter']), text)
         comment = thread.post(text=text, timestamp=ts)
         comment.author_id = author_id
 
     def make_attachment(self, org_ticket_id, ticket_id, att_dict):
         if att_dict['size'] > self.ATTACHMENT_SIZE_LIMIT:
-            self.errors.append('Ticket #%s: Attachment %s (@ %s) is too large, skipping' %
-                               (org_ticket_id, att_dict['filename'], att_dict['url']))
+            self.errors.append(
+                'Ticket #%s: Attachment %s (@ %s) is too large, skipping' %
+                (org_ticket_id, att_dict['filename'], att_dict['url']))
             return
         f = urlopen(att_dict['url'])
-        TM.TicketAttachment.save_attachment(att_dict['filename'], ResettableStream(f),
-                                            artifact_id=ticket_id)
-
+        TM.TicketAttachment.save_attachment(
+            att_dict['filename'], ResettableStream(f),
+            artifact_id=ticket_id)
 
     #
     # User handling
@@ -298,8 +310,8 @@ class ImportSupport(object):
         for foreign_user, allura_user in self.options['user_map'].iteritems():
             u = M.User.by_username(allura_user)
             if not u:
-                raise ImportException('User mapping %s:%s - target user does not exist' % (foreign_user, allura_user))
-
+                raise ImportException(
+                    'User mapping %s:%s - target user does not exist' % (foreign_user, allura_user))
 
     #
     # Main methods
@@ -354,7 +366,8 @@ option user_map to avoid losing username information. Unknown users: %s''' % unk
                 try:
                     self.make_attachment(a['id'], t._id, a_entry)
                 except Exception, e:
-                    self.warnings.append('Could not import attachment, skipped: %s' % e)
+                    self.warnings.append(
+                        'Could not import attachment, skipped: %s' % e)
             log.info('Imported ticket: %d', t.ticket_num)
         c.app.globals.invalidate_bin_counts()
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/model/ticket.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/model/ticket.py b/ForgeTracker/forgetracker/model/ticket.py
index 8cde75a..5dfaec0 100644
--- a/ForgeTracker/forgetracker/model/ticket.py
+++ b/ForgeTracker/forgetracker/model/ticket.py
@@ -39,25 +39,25 @@ from ming.orm.ormsession import ThreadLocalORMSession
 from tg import config as tg_config
 
 from allura.model import (
-        ACE,
-        DENY_ALL,
-
-        AppConfig,
-        Artifact,
-        BaseAttachment,
-        Feed,
-        Mailbox,
-        MovedArtifact,
-        Notification,
-        ProjectRole,
-        Snapshot,
-        Thread,
-        User,
-        VersionedArtifact,
-        VotableArtifact,
-
-        artifact_orm_session,
-        project_orm_session,
+    ACE,
+    DENY_ALL,
+
+    AppConfig,
+    Artifact,
+    BaseAttachment,
+    Feed,
+    Mailbox,
+    MovedArtifact,
+    Notification,
+    ProjectRole,
+    Snapshot,
+    Thread,
+    User,
+    VersionedArtifact,
+    VotableArtifact,
+
+    artifact_orm_session,
+    project_orm_session,
 )
 from allura.model.timeline import ActivityObject
 from allura.model.notification import MailFooter
@@ -85,29 +85,32 @@ config = utils.ConfigProxy(
     common_suffix='forgemail.domain',
     new_solr='solr.use_new_types')
 
+
 class Globals(MappedClass):
 
     class __mongometa__:
         name = 'globals'
         session = project_orm_session
-        indexes = [ 'app_config_id' ]
+        indexes = ['app_config_id']
 
     type_s = 'Globals'
     _id = FieldProperty(schema.ObjectId)
-    app_config_id = ForeignIdProperty(AppConfig, if_missing=lambda:c.app.config._id)
+    app_config_id = ForeignIdProperty(
+        AppConfig, if_missing=lambda: c.app.config._id)
     app_config = RelationProperty(AppConfig, via='app_config_id')
     last_ticket_num = FieldProperty(int)
     status_names = FieldProperty(str)
     open_status_names = FieldProperty(str)
     closed_status_names = FieldProperty(str)
     milestone_names = FieldProperty(str, if_missing='')
-    custom_fields = FieldProperty([{str:None}])
-    _bin_counts = FieldProperty(schema.Deprecated) # {str:int})
+    custom_fields = FieldProperty([{str: None}])
+    _bin_counts = FieldProperty(schema.Deprecated)  # {str:int})
     _bin_counts_data = FieldProperty([dict(summary=str, hits=int)])
     _bin_counts_expire = FieldProperty(datetime)
     _bin_counts_invalidated = FieldProperty(datetime)
-    _milestone_counts = FieldProperty(schema.Deprecated) #[dict(name=str,hits=int,closed=int)])
-    _milestone_counts_expire = FieldProperty(schema.Deprecated) #datetime)
+    # [dict(name=str,hits=int,closed=int)])
+    _milestone_counts = FieldProperty(schema.Deprecated)
+    _milestone_counts_expire = FieldProperty(schema.Deprecated)  # datetime)
     show_in_search = FieldProperty({str: bool}, if_missing={'ticket_num': True,
                                                             'summary': True,
                                                             '_milestone': True,
@@ -122,7 +125,7 @@ class Globals(MappedClass):
     def next_ticket_num(self):
         gbl = Globals.query.find_and_modify(
             query=dict(app_config_id=self.app_config_id),
-            update={'$inc': { 'last_ticket_num': 1}},
+            update={'$inc': {'last_ticket_num': 1}},
             new=True)
         session(gbl).expunge(gbl)
         return gbl.last_ticket_num
@@ -145,7 +148,7 @@ class Globals(MappedClass):
 
     @property
     def not_closed_query(self):
-        return ' && '.join(['!status:'+name for name in self.set_of_closed_status_names])
+        return ' && '.join(['!status:' + name for name in self.set_of_closed_status_names])
 
     @property
     def not_closed_mongo_query(self):
@@ -154,11 +157,11 @@ class Globals(MappedClass):
 
     @property
     def closed_query(self):
-        return ' or '.join(['status:'+name for name in self.set_of_closed_status_names])
+        return ' or '.join(['status:' + name for name in self.set_of_closed_status_names])
 
     @property
     def milestone_fields(self):
-        return [ fld for fld in self.custom_fields if fld['type'] == 'milestone' ]
+        return [fld for fld in self.custom_fields if fld['type'] == 'milestone']
 
     def get_custom_field(self, name):
         for fld in self.custom_fields:
@@ -186,7 +189,9 @@ class Globals(MappedClass):
         for b in Bin.query.find(dict(
                 app_config_id=self.app_config_id)):
             if b.terms and '$USER' in b.terms:
-                continue  # skip queries with $USER variable, hits will be inconsistent for them
+                # skip queries with $USER variable, hits will be inconsistent
+                # for them
+                continue
             r = search_artifact(Ticket, b.terms, rows=0, short_timeout=False)
             hits = r is not None and r.hits or 0
             self._bin_counts_data.append(dict(summary=b.summary, hits=hits))
@@ -200,7 +205,8 @@ class Globals(MappedClass):
         if self._bin_counts_expire < datetime.utcnow():
             self.invalidate_bin_counts()
         for d in self._bin_counts_data:
-            if d['summary'] == name: return d
+            if d['summary'] == name:
+                return d
         return dict(summary=name, hits=0)
 
     def milestone_count(self, name):
@@ -265,13 +271,14 @@ class Globals(MappedClass):
             moved = ticket.move(tracker, notify=False)
             moved_tickets[moved._id] = moved
         mail = dict(
-            sender = c.project.app_instance(self.app_config).email_address,
-            fromaddr = str(c.user.email_address_header()),
-            reply_to = str(c.user.email_address_header()),
-            subject = '[%s:%s] Mass ticket moving by %s' % (c.project.shortname,
+            sender=c.project.app_instance(self.app_config).email_address,
+            fromaddr=str(c.user.email_address_header()),
+            reply_to=str(c.user.email_address_header()),
+            subject='[%s:%s] Mass ticket moving by %s' % (c.project.shortname,
                                                           self.app_config.options.mount_point,
                                                           c.user.display_name))
-        tmpl = g.jinja2_env.get_template('forgetracker:data/mass_move_report.html')
+        tmpl = g.jinja2_env.get_template(
+            'forgetracker:data/mass_move_report.html')
 
         tmpl_context = {
             'original_tracker': '%s:%s' % (c.project.shortname,
@@ -282,44 +289,49 @@ class Globals(MappedClass):
         }
         for user in users:
             tmpl_context['tickets'] = ({
-                    'original_num': original_ticket_nums[_id],
-                    'destination_num': moved_tickets[_id].ticket_num,
-                    'summary': moved_tickets[_id].summary
-                } for _id in filtered.get(user._id, []))
+                'original_num': original_ticket_nums[_id],
+                'destination_num': moved_tickets[_id].ticket_num,
+                'summary': moved_tickets[_id].summary
+            } for _id in filtered.get(user._id, []))
             mail.update(dict(
-                message_id = h.gen_message_id(),
-                text = tmpl.render(tmpl_context),
-                destinations = [str(user._id)]))
+                message_id=h.gen_message_id(),
+                text=tmpl.render(tmpl_context),
+                destinations=[str(user._id)]))
             mail_tasks.sendmail.post(**mail)
 
         if self.app_config.options.get('TicketMonitoringType') in (
                 'AllTicketChanges', 'AllPublicTicketChanges'):
-            monitoring_email = self.app_config.options.get('TicketMonitoringEmail')
+            monitoring_email = self.app_config.options.get(
+                'TicketMonitoringEmail')
             tmpl_context['tickets'] = [{
-                    'original_num': original_ticket_nums[_id],
-                    'destination_num': moved_tickets[_id].ticket_num,
-                    'summary': moved_tickets[_id].summary
-                } for _id, t in moved_tickets.iteritems()
-                  if (not t.private or
-                      self.app_config.options.get('TicketMonitoringType') ==
-                      'AllTicketChanges')]
+                'original_num': original_ticket_nums[_id],
+                'destination_num': moved_tickets[_id].ticket_num,
+                'summary': moved_tickets[_id].summary
+            } for _id, t in moved_tickets.iteritems()
+                if (not t.private or
+                    self.app_config.options.get('TicketMonitoringType') ==
+                    'AllTicketChanges')]
             if len(tmpl_context['tickets']) > 0:
                 mail.update(dict(
-                    message_id = h.gen_message_id(),
-                    text = tmpl.render(tmpl_context),
-                    destinations = [monitoring_email]))
+                    message_id=h.gen_message_id(),
+                    text=tmpl.render(tmpl_context),
+                    destinations=[monitoring_email]))
                 mail_tasks.sendmail.post(**mail)
 
-        moved_from = '%s/%s' % (c.project.shortname, self.app_config.options.mount_point)
-        moved_to = '%s/%s' % (tracker.project.shortname, tracker.options.mount_point)
+        moved_from = '%s/%s' % (c.project.shortname,
+                                self.app_config.options.mount_point)
+        moved_to = '%s/%s' % (tracker.project.shortname,
+                              tracker.options.mount_point)
         text = 'Tickets moved from %s to %s' % (moved_from, moved_to)
         Notification.post_user(c.user, None, 'flash', text=text)
 
     def update_tickets(self, **post_data):
         from forgetracker.tracker_main import get_change_text, get_label
         tickets = Ticket.query.find(dict(
-                _id={'$in':[ObjectId(id) for id in aslist(post_data['__ticket_ids'])]},
-                app_config_id=self.app_config_id)).all()
+            _id={'$in': [ObjectId(id)
+                         for id in aslist(
+                             post_data['__ticket_ids'])]},
+            app_config_id=self.app_config_id)).all()
 
         fields = set(['status', 'private'])
         values = {}
@@ -327,7 +339,8 @@ class Globals(MappedClass):
 
         for k in fields:
             v = post_data.get(k)
-            if v: values[k] = v
+            if v:
+                values[k] = v
         assigned_to = post_data.get('assigned_to')
         if assigned_to == '-':
             values['assigned_to_id'] = None
@@ -352,7 +365,8 @@ class Globals(MappedClass):
         for ticket in tickets:
             message = ''
             if labels:
-                values['labels'] = self.append_new_labels(ticket.labels, labels.split(','))
+                values['labels'] = self.append_new_labels(
+                    ticket.labels, labels.split(','))
             for k, v in sorted(values.iteritems()):
                 if k == 'assigned_to_id':
                     new_user = User.query.get(_id=v)
@@ -381,8 +395,8 @@ class Globals(MappedClass):
             for k, v in sorted(custom_values.iteritems()):
                 def cf_val(cf):
                     return ticket.get_custom_user(cf.name) \
-                           if cf.type == 'user' \
-                           else ticket.custom_fields.get(cf.name)
+                        if cf.type == 'user' \
+                        else ticket.custom_fields.get(cf.name)
                 cf = custom_fields[k]
                 old_value = cf_val(cf)
                 if cf.type == 'boolean':
@@ -400,19 +414,22 @@ class Globals(MappedClass):
                 ticket.commit()
 
         filtered_changes = self.filtered_by_subscription(changed_tickets)
-        users = User.query.find({'_id': {'$in': filtered_changes.keys()}}).all()
+        users = User.query.find(
+            {'_id': {'$in': filtered_changes.keys()}}).all()
+
         def changes_iter(user):
             for t_id in filtered_changes.get(user._id, []):
                 # mark changes text as safe, thus it wouldn't be escaped in plain-text emails
-                # html part of email is handled by markdown and it'll be properly escaped
+                # html part of email is handled by markdown and it'll be
+                # properly escaped
                 yield (changed_tickets[t_id], jinja2.Markup(changes[t_id]))
         mail = dict(
-            sender = c.project.app_instance(self.app_config).email_address,
-            fromaddr = str(c.user._id),
-            reply_to = tg_config['forgemail.return_path'],
-            subject = '[%s:%s] Mass edit changes by %s' % (c.project.shortname,
-                                                           self.app_config.options.mount_point,
-                                                           c.user.display_name),
+            sender=c.project.app_instance(self.app_config).email_address,
+            fromaddr=str(c.user._id),
+            reply_to=tg_config['forgemail.return_path'],
+            subject='[%s:%s] Mass edit changes by %s' % (c.project.shortname,
+                                                         self.app_config.options.mount_point,
+                                                         c.user.display_name),
         )
         tmpl = g.jinja2_env.get_template('forgetracker:data/mass_report.html')
         head = []
@@ -427,45 +444,50 @@ class Globals(MappedClass):
                 user = User.by_username(v)
                 v = user.display_name if user else v
             head.append('- **%s**: %s' % (cf.label, v))
-        tmpl_context = {'context': c, 'data': {'header': jinja2.Markup('\n'.join(['Mass edit changing:', ''] + head))}}
+        tmpl_context = {'context': c, 'data':
+                        {'header': jinja2.Markup('\n'.join(['Mass edit changing:', ''] + head))}}
         for user in users:
             tmpl_context['data'].update({'changes': changes_iter(user)})
             mail.update(dict(
-                message_id = h.gen_message_id(),
-                text = tmpl.render(tmpl_context),
-                destinations = [str(user._id)]))
+                message_id=h.gen_message_id(),
+                text=tmpl.render(tmpl_context),
+                destinations=[str(user._id)]))
             mail_tasks.sendmail.post(**mail)
 
         if self.app_config.options.get('TicketMonitoringType') in (
                 'AllTicketChanges', 'AllPublicTicketChanges'):
-            monitoring_email = self.app_config.options.get('TicketMonitoringEmail')
+            monitoring_email = self.app_config.options.get(
+                'TicketMonitoringEmail')
             visible_changes = []
             for t_id, t in changed_tickets.items():
                 if (not t.private or
                         self.app_config.options.get('TicketMonitoringType') ==
                         'AllTicketChanges'):
                     visible_changes.append(
-                            (changed_tickets[t_id], jinja2.Markup(changes[t_id])))
+                        (changed_tickets[t_id], jinja2.Markup(changes[t_id])))
             if visible_changes:
                 tmpl_context['data'].update({'changes': visible_changes})
                 mail.update(dict(
-                    message_id = h.gen_message_id(),
-                    text = tmpl.render(tmpl_context),
-                    destinations = [monitoring_email]))
+                    message_id=h.gen_message_id(),
+                    text=tmpl.render(tmpl_context),
+                    destinations=[monitoring_email]))
                 mail_tasks.sendmail.post(**mail)
 
         self.invalidate_bin_counts()
         ThreadLocalORMSession.flush_all()
-        app = '%s/%s' % (c.project.shortname, self.app_config.options.mount_point)
+        app = '%s/%s' % (c.project.shortname,
+                         self.app_config.options.mount_point)
         count = len(tickets)
-        text = 'Updated {} ticket{} in {}'.format(count, 's' if count != 1 else '', app)
+        text = 'Updated {} ticket{} in {}'.format(
+            count, 's' if count != 1 else '', app)
         Notification.post_user(c.user, None, 'flash', text=text)
 
     def filtered_by_subscription(self, tickets, project_id=None, app_config_id=None):
         p_id = project_id if project_id else c.project._id
         ac_id = app_config_id if app_config_id else self.app_config_id
         ticket_ids = tickets.keys()
-        tickets_index_id = {ticket.index_id(): t_id for t_id, ticket in tickets.iteritems()}
+        tickets_index_id = {
+            ticket.index_id(): t_id for t_id, ticket in tickets.iteritems()}
         subscriptions = Mailbox.query.find({
             'project_id': p_id,
             'app_config_id': ac_id,
@@ -527,7 +549,9 @@ class TicketHistory(Snapshot):
         result['text'] += pformat(result.values())
         return result
 
+
 class Bin(Artifact, ActivityObject):
+
     class __mongometa__:
         name = 'bin'
 
@@ -567,7 +591,9 @@ class Bin(Artifact, ActivityObject):
             sort=self.sort,
         )
 
+
 class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
+
     class __mongometa__:
         name = 'ticket'
         history_class = TicketHistory
@@ -575,10 +601,10 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             'ticket_num',
             ('app_config_id', 'custom_fields._milestone'),
             'import_id',
-            ]
+        ]
         unique_indexes = [
             ('app_config_id', 'ticket_num'),
-            ]
+        ]
 
     type_s = 'Ticket'
     _id = FieldProperty(schema.ObjectId)
@@ -588,11 +614,11 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
     summary = FieldProperty(str)
     description = FieldProperty(str, if_missing='')
     description_cache = FieldProperty(MarkdownCache)
-    reported_by_id = ForeignIdProperty(User, if_missing=lambda:c.user._id)
+    reported_by_id = ForeignIdProperty(User, if_missing=lambda: c.user._id)
     assigned_to_id = ForeignIdProperty(User, if_missing=None)
     milestone = FieldProperty(str, if_missing='')
     status = FieldProperty(str, if_missing='')
-    custom_fields = FieldProperty({str:None})
+    custom_fields = FieldProperty({str: None})
 
     reported_by = RelationProperty(User, via='reported_by_id')
 
@@ -621,7 +647,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
                 return ticket
             except OperationFailure, err:
                 if 'duplicate' in err.args[0]:
-                    log.warning('Try to create duplicate ticket %s', ticket.url())
+                    log.warning('Try to create duplicate ticket %s',
+                                ticket.url())
                     session(ticket).expunge(ticket)
                     continue
                 raise
@@ -644,7 +671,7 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             votes_down_i=self.votes_down,
             votes_total_i=(self.votes_up - self.votes_down),
             import_id_s=ImportIdConverter.get().simplify(self.import_id)
-            )
+        )
         for k, v in self.custom_fields.iteritems():
             # Pre solr-4.2.1 code expects all custom fields to be indexed
             # as strings.
@@ -655,7 +682,7 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             solr_type = self.app.globals.get_custom_field_solr_type(k)
             if solr_type:
                 result[k + solr_type] = (v or
-                        get_default_for_solr_type(solr_type))
+                                         get_default_for_solr_type(solr_type))
 
         if self.reported_by:
             result['reported_by_s'] = self.reported_by.username
@@ -682,7 +709,7 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             # can search on those instead of the old string-type solr fields.
             if config.get_bool('new_solr'):
                 solr_type = (c.app.globals.get_custom_field_solr_type(f)
-                        or solr_type)
+                             or solr_type)
             actual = solr_field.format(f, solr_type)
             q = q.replace(f + ':', actual + ':')
         return q
@@ -697,7 +724,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
 
     @property
     def assigned_to(self):
-        if self.assigned_to_id is None: return None
+        if self.assigned_to_id is None:
+            return None
         return User.query.get(_id=self.assigned_to_id)
 
     @property
@@ -714,7 +742,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
 
     @property
     def email_address(self):
-        domain = '.'.join(reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
+        domain = '.'.join(
+            reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
         return '%s@%s%s' % (self.ticket_num, domain, config.common_suffix)
 
     @property
@@ -737,8 +766,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
     def notify_post(self):
         monitoring_type = self.app_config.options.get('TicketMonitoringType')
         return monitoring_type == 'AllTicketChanges' or (
-                monitoring_type == 'AllPublicTicketChanges' and
-                not self.private)
+            monitoring_type == 'AllPublicTicketChanges' and
+            not self.private)
 
     def get_custom_user(self, custom_user_field_name):
         fld = None
@@ -750,7 +779,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             raise KeyError, 'Custom field "%s" does not exist.' % custom_user_field_name
         if fld.type != 'user':
             raise TypeError, 'Custom field "%s" is of type "%s"; expected ' \
-                             'type "user".' % (custom_user_field_name, fld.type)
+                             'type "user".' % (
+                                 custom_user_field_name, fld.type)
         username = self.custom_fields.get(custom_user_field_name)
         if not username:
             return None
@@ -766,12 +796,13 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
         if bool_flag:
             role_developer = ProjectRole.by_name('Developer')
             role_creator = ProjectRole.by_user(self.reported_by, upsert=True)
-            _allow_all = lambda role, perms: [ACE.allow(role._id, perm) for perm in perms]
+            _allow_all = lambda role, perms: [
+                ACE.allow(role._id, perm) for perm in perms]
             # maintain existing access for developers and the ticket creator,
             # but revoke all access for everyone else
             self.acl = _allow_all(role_developer, security.all_allowed(self, role_developer)) \
-                     + _allow_all(role_creator, security.all_allowed(self, role_creator)) \
-                     + [DENY_ALL]
+                + _allow_all(role_creator, security.all_allowed(self, role_creator)) \
+                + [DENY_ALL]
         else:
             self.acl = []
     private = property(_get_private, _set_private)
@@ -780,62 +811,67 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
         VersionedArtifact.commit(self)
         monitoring_email = self.app.config.options.get('TicketMonitoringEmail')
         if self.version > 1:
-            hist = TicketHistory.query.get(artifact_id=self._id, version=self.version-1)
+            hist = TicketHistory.query.get(
+                artifact_id=self._id, version=self.version - 1)
             old = hist.data
             changes = ['Ticket %s has been modified: %s' % (
-                    self.ticket_num, self.summary),
-                       'Edited By: %s (%s)' % (c.user.get_pref('display_name'), c.user.username)]
+                self.ticket_num, self.summary),
+                'Edited By: %s (%s)' % (c.user.get_pref('display_name'), c.user.username)]
             fields = [
                 ('Summary', old.summary, self.summary),
-                ('Status', old.status, self.status) ]
+                ('Status', old.status, self.status)]
             if old.status != self.status and self.status in c.app.globals.set_of_closed_status_names:
                 h.log_action(log, 'closed').info('')
-                g.statsUpdater.ticketEvent("closed", self, self.project, self.assigned_to)
+                g.statsUpdater.ticketEvent(
+                    "closed", self, self.project, self.assigned_to)
             for key in self.custom_fields:
-                fields.append((key, old.custom_fields.get(key, ''), self.custom_fields[key]))
+                fields.append(
+                    (key, old.custom_fields.get(key, ''), self.custom_fields[key]))
             for title, o, n in fields:
                 if o != n:
                     changes.append('%s updated: %r => %r' % (
-                            title, o, n))
+                        title, o, n))
             o = hist.assigned_to
             n = self.assigned_to
             if o != n:
                 changes.append('Owner updated: %r => %r' % (
-                        o and o.username, n and n.username))
+                    o and o.username, n and n.username))
                 self.subscribe(user=n)
                 g.statsUpdater.ticketEvent("assigned", self, self.project, n)
                 if o:
-                    g.statsUpdater.ticketEvent("revoked", self, self.project, o)
+                    g.statsUpdater.ticketEvent(
+                        "revoked", self, self.project, o)
             if old.description != self.description:
                 changes.append('Description updated:')
                 changes.append('\n'.join(
-                        difflib.unified_diff(
-                            a=old.description.split('\n'),
-                            b=self.description.split('\n'),
-                            fromfile='description-old',
-                            tofile='description-new')))
+                    difflib.unified_diff(
+                        a=old.description.split('\n'),
+                        b=self.description.split('\n'),
+                        fromfile='description-old',
+                        tofile='description-new')))
             description = '\n'.join(changes)
         else:
             self.subscribe()
             if self.assigned_to_id:
                 user = User.query.get(_id=self.assigned_to_id)
-                g.statsUpdater.ticketEvent("assigned", self, self.project, user)
+                g.statsUpdater.ticketEvent(
+                    "assigned", self, self.project, user)
                 self.subscribe(user=user)
             description = ''
             subject = self.email_subject
             Thread.new(discussion_id=self.app_config.discussion_id,
-                   ref_id=self.index_id())
+                       ref_id=self.index_id())
             # First ticket notification. Use persistend Message-ID (self.message_id()).
             # Thus we can group notification emails in one thread later.
             n = Notification.post(
-                    message_id=self.message_id(),
-                    artifact=self,
-                    topic='metadata',
-                    text=description,
-                    subject=subject)
+                message_id=self.message_id(),
+                artifact=self,
+                topic='metadata',
+                text=description,
+                subject=subject)
             if monitoring_email and n and (not self.private or
-                    self.app.config.options.get('TicketMonitoringType') in (
-                        'NewTicketsOnly', 'AllTicketChanges')):
+                                           self.app.config.options.get('TicketMonitoringType') in (
+                                               'NewTicketsOnly', 'AllTicketChanges')):
                 n.send_simple(monitoring_email)
         Feed.post(
             self,
@@ -852,7 +888,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
 
     def assigned_to_name(self):
         who = self.assigned_to
-        if who in (None, User.anonymous()): return 'nobody'
+        if who in (None, User.anonymous()):
+            return 'nobody'
         return who.get_pref('display_name')
 
     def update(self, ticket_form):
@@ -866,7 +903,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
              other_custom_fields).add(cf['name'])
             if cf['type'] == 'boolean' and 'custom_fields.' + cf['name'] not in ticket_form:
                 self.custom_fields[cf['name']] = 'False'
-        # this has to happen because the milestone custom field has special layout treatment
+        # this has to happen because the milestone custom field has special
+        # layout treatment
         if '_milestone' in ticket_form:
             other_custom_fields.add('_milestone')
             milestone = ticket_form.pop('_milestone', None)
@@ -885,7 +923,7 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             else:
                 setattr(self, k, v)
         if 'custom_fields' in ticket_form:
-            for k,v in ticket_form['custom_fields'].iteritems():
+            for k, v in ticket_form['custom_fields'].iteritems():
                 if k in custom_users:
                     # restrict custom user field values to project members
                     user = self.app_config.project.user_in_project(v)
@@ -903,7 +941,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             attach.app_config_id = app_config._id
             if attach.attachment_type == 'DiscussionAttachment':
                 attach.discussion_id = app_config.discussion_id
-            attach_thumb = BaseAttachment.query.get(filename=attach.filename, **attach_metadata)
+            attach_thumb = BaseAttachment.query.get(
+                filename=attach.filename, **attach_metadata)
             if attach_thumb:
                 if attach_thumb.attachment_type == 'DiscussionAttachment':
                     attach_thumb.discussion_id = app_config.discussion_id
@@ -933,18 +972,21 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
         messages = []
         for cf in skipped_fields:
             name = cf[0]
-            messages.append('- **%s**: %s' % (name, self.custom_fields.get(name, '')))
+            messages.append('- **%s**: %s' %
+                            (name, self.custom_fields.get(name, '')))
         for cf in user_fields:
             name = cf[0]
             username = self.custom_fields.get(name, None)
             user = app_config.project.user_in_project(username)
             if not user or user == User.anonymous():
-                messages.append('- **%s**: %s (user not in project)' % (name, username))
+                messages.append('- **%s**: %s (user not in project)' %
+                                (name, username))
                 self.custom_fields[name] = ''
         # special case: not custom user field (assigned_to_id)
         user = self.assigned_to
         if user and not app_config.project.user_in_project(user.username):
-            messages.append('- **assigned_to**: %s (user not in project)' % user.username)
+            messages.append('- **assigned_to**: %s (user not in project)' %
+                            user.username)
             self.assigned_to_id = None
 
         custom_fields = {}
@@ -965,18 +1007,22 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             new_url = app_config.url() + str(self.ticket_num) + '/'
             try:
                 session(self).flush(self)
-                h.log_action(log, 'moved').info('Ticket %s moved to %s' % (prior_url, new_url))
+                h.log_action(log, 'moved').info('Ticket %s moved to %s' %
+                                                (prior_url, new_url))
                 break
             except OperationFailure, err:
                 if 'duplicate' in err.args[0]:
-                    log.warning('Try to create duplicate ticket %s when moving from %s' % (new_url, prior_url))
+                    log.warning(
+                        'Try to create duplicate ticket %s when moving from %s' %
+                        (new_url, prior_url))
                     session(self).expunge(self)
                     continue
 
         attach_metadata['type'] = 'thumbnail'
         self._move_attach(attachments, attach_metadata, app_config)
 
-        # move ticket's discussion thread, thus all new commnets will go to a new ticket's feed
+        # move ticket's discussion thread, thus all new comments will go to a
+        # new ticket's feed
         self.discussion_thread.app_config_id = app_config._id
         self.discussion_thread.discussion_id = app_config.discussion_id
         for post in self.discussion_thread.posts:
@@ -1015,19 +1061,23 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
                 parents_json.update(parent.__json__(self))
 
         return dict(parents_json,
-            created_date=self.created_date,
-            ticket_num=self.ticket_num,
-            summary=self.summary,
-            description=self.description,
-            reported_by=self.reported_by_username,
-            assigned_to=self.assigned_to_id and self.assigned_to_username or None,
-            reported_by_id=self.reported_by_id and str(self.reported_by_id) or None,
-            assigned_to_id=self.assigned_to_id and str(self.assigned_to_id) or None,
-            status=self.status,
-            private=self.private,
-            attachments=[dict(bytes=attach.length,
-                              url=h.absurl(attach.url())) for attach in self.attachments],
-            custom_fields=dict(self.custom_fields))
+                    created_date=self.created_date,
+                    ticket_num=self.ticket_num,
+                    summary=self.summary,
+                    description=self.description,
+                    reported_by=self.reported_by_username,
+                    assigned_to=self.assigned_to_id and self.assigned_to_username or None,
+                    reported_by_id=self.reported_by_id and str(
+                        self.reported_by_id) or None,
+                    assigned_to_id=self.assigned_to_id and str(
+                        self.assigned_to_id) or None,
+                    status=self.status,
+                    private=self.private,
+                    attachments=[dict(bytes=attach.length,
+                                      url=h.absurl(
+                                          attach.url(
+                                          ))) for attach in self.attachments],
+                    custom_fields=dict(self.custom_fields))
 
     @classmethod
     def paged_query(cls, app_config, user, query, limit=None, page=0, sort=None, deleted=False, **kw):
@@ -1037,7 +1087,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
         See also paged_search which does a solr search
         """
         limit, page, start = g.handle_paging(limit, page, default=25)
-        q = cls.query.find(dict(query, app_config_id=app_config._id, deleted=deleted))
+        q = cls.query.find(
+            dict(query, app_config_id=app_config._id, deleted=deleted))
         q = q.sort('ticket_num', pymongo.DESCENDING)
         if sort:
             field, direction = sort.split()
@@ -1055,7 +1106,7 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             if security.has_access(t, 'read', user, app_config.project.root_project):
                 tickets.append(t)
             else:
-                count = count -1
+                count = count - 1
 
         return dict(
             tickets=tickets,
@@ -1088,7 +1139,7 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
         count = 0
         tickets = []
         refined_sort = sort if sort else 'ticket_num_i desc'
-        if  'ticket_num_i' not in refined_sort:
+        if 'ticket_num_i' not in refined_sort:
             refined_sort += ',ticket_num_i asc'
         try:
             if q:
@@ -1106,7 +1157,8 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             # ticket_numbers is in sorted order
             ticket_numbers = [match['ticket_num_i'] for match in matches.docs]
             # but query, unfortunately, returns results in arbitrary order
-            query = cls.query.find(dict(app_config_id=app_config._id, ticket_num={'$in':ticket_numbers}))
+            query = cls.query.find(
+                dict(app_config_id=app_config._id, ticket_num={'$in': ticket_numbers}))
             # so stick all the results in a dictionary...
             ticket_for_num = {}
             for t in query:
@@ -1115,12 +1167,13 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
             tickets = []
             for tn in ticket_numbers:
                 if tn in ticket_for_num:
-                    show_deleted = show_deleted and security.has_access(ticket_for_num[tn], 'delete', user, app_config.project.root_project)
+                    show_deleted = show_deleted and security.has_access(
+                        ticket_for_num[tn], 'delete', user, app_config.project.root_project)
                     if (security.has_access(ticket_for_num[tn], 'read', user, app_config.project.root_project) and
-                        (show_deleted or ticket_for_num[tn].deleted==False)):
+                            (show_deleted or ticket_for_num[tn].deleted == False)):
                         tickets.append(ticket_for_num[tn])
                     else:
-                        count = count -1
+                        count = count - 1
         return dict(tickets=tickets,
                     count=count, q=q, limit=limit, page=page, sort=sort,
                     solr_error=solr_error, **kw)
@@ -1135,19 +1188,21 @@ class Ticket(VersionedArtifact, ActivityObject, VotableArtifact):
                     self.app.config.options.mount_point)))
         return super(Ticket, self).get_mail_footer(notification, toaddr)
 
+
 class TicketAttachment(BaseAttachment):
     thumbnail_size = (100, 100)
-    ArtifactType=Ticket
+    ArtifactType = Ticket
+
     class __mongometa__:
-        polymorphic_identity='TicketAttachment'
-    attachment_type=FieldProperty(str, if_missing='TicketAttachment')
+        polymorphic_identity = 'TicketAttachment'
+    attachment_type = FieldProperty(str, if_missing='TicketAttachment')
 
 
 class MovedTicket(MovedArtifact):
 
     class __mongometa__:
         session = artifact_orm_session
-        name='moved_ticket'
+        name = 'moved_ticket'
         indexes = [
             ('app_config_id', 'ticket_num'),
         ]
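
Most of the hunks above are mechanical re-indentation: wrapped call arguments are moved so they line up under the opening delimiter, which is the visual-indentation style PEP 8 recommends and which the pep8 checker reports as E128 when violated. A minimal sketch of the convention, using invented names rather than anything from the Allura code:

    # continuation line under-indented for visual indent (pep8 E128)
    result = frobnicate(first_argument, second_argument,
        third_argument)

    # wrapped arguments aligned with the opening delimiter, as in the hunks above
    result = frobnicate(first_argument, second_argument,
                        third_argument)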

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/plugins.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/plugins.py b/ForgeTracker/forgetracker/plugins.py
index 3afbca2..c610de8 100644
--- a/ForgeTracker/forgetracker/plugins.py
+++ b/ForgeTracker/forgetracker/plugins.py
@@ -21,4 +21,3 @@ from tg import config
 from pylons import app_globals as g
 
 log = logging.getLogger(__name__)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/scripts/import_tracker.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/scripts/import_tracker.py b/ForgeTracker/forgetracker/scripts/import_tracker.py
index c60a775..ba8e93f 100644
--- a/ForgeTracker/forgetracker/scripts/import_tracker.py
+++ b/ForgeTracker/forgetracker/scripts/import_tracker.py
@@ -25,8 +25,9 @@ from allura.lib.import_api import AlluraImportApiClient
 
 log = logging.getLogger(__name__)
 
+
 def import_tracker(cli, project, tool, import_options, doc_txt,
-        validate=True, verbose=False, cont=False):
+                   validate=True, verbose=False, cont=False):
     url = '/rest/p/' + project + '/' + tool
     if validate:
         url += '/validate_import'
@@ -35,7 +36,8 @@ def import_tracker(cli, project, tool, import_options, doc_txt,
 
     existing_map = {}
     if cont:
-        existing_tickets = cli.call('/rest/p/' + project + '/' + tool + '/')['tickets']
+        existing_tickets = cli.call(
+            '/rest/p/' + project + '/' + tool + '/')['tickets']
         for t in existing_tickets:
             existing_map[t['ticket_num']] = t['summary']
 
@@ -55,11 +57,12 @@ def import_tracker(cli, project, tool, import_options, doc_txt,
             if verbose:
                 print 'Ticket id %d already exists, skipping' % ticket_in['id']
             continue
-        doc_import={}
+        doc_import = {}
         doc_import['trackers'] = {}
         doc_import['trackers']['default'] = {}
         doc_import['trackers']['default']['artifacts'] = [ticket_in]
-        res = cli.call(url, doc=json.dumps(doc_import), options=json.dumps(import_options))
+        res = cli.call(url, doc=json.dumps(doc_import),
+                       options=json.dumps(import_options))
         assert res['status'] and not res['errors'], res['errors']
         if validate:
             if res['warnings']:
@@ -67,7 +70,9 @@ def import_tracker(cli, project, tool, import_options, doc_txt,
         else:
             print "Imported ticket id %s" % (ticket_in['id'])
 
+
 class ImportTracker(ScriptTask):
+
     @classmethod
     def execute(cls, options):
         user_map = {}
@@ -92,29 +97,43 @@ class ImportTracker(ScriptTask):
             finally:
                 f.close()
         import_options['user_map'] = user_map
-        cli = AlluraImportApiClient(options.base_url, options.api_key, options.secret_key, options.verbose)
+        cli = AlluraImportApiClient(
+            options.base_url, options.api_key, options.secret_key, options.verbose)
         doc_txt = open(options.file_data).read()
-        import_tracker(cli, options.project, options.tracker, import_options, doc_txt,
-                       validate=options.validate,
-                       verbose=options.verbose,
-                       cont=options.cont)
+        import_tracker(
+            cli, options.project, options.tracker, import_options, doc_txt,
+            validate=options.validate,
+            verbose=options.verbose,
+            cont=options.cont)
 
     @classmethod
     def parser(cls):
-        parser = argparse.ArgumentParser(description='import tickets from json')
+        parser = argparse.ArgumentParser(
+            description='import tickets from json')
         parser.add_argument('--nbhd', action='store', default='', dest='nbhd',
-                help='Restrict update to a particular neighborhood, e.g. /p/.')
-        parser.add_argument('-a', '--api-ticket', action='store', dest='api_key', help='API ticket')
-        parser.add_argument('-s', '--secret-key', action='store', dest='secret_key', help='Secret key')
-        parser.add_argument('-p', '--project', action='store', dest='project', help='Project to import to')
-        parser.add_argument('-t', '--tracker', action='store', dest='tracker', help='Tracker to import to')
-        parser.add_argument('-u', '--base-url', dest='base_url', default='https://sourceforge.net', help='Base Allura URL (https://sourceforge.net)')
-        parser.add_argument('-o', dest='import_opts', default=[], action='store',  help='Specify import option(s)', metavar='opt=val')
-        parser.add_argument('--user-map', dest='user_map_file', help='Map original users to SF.net users', metavar='JSON_FILE')
-        parser.add_argument('--file_data', dest='file_data', help='json file', metavar='JSON_FILE')
-        parser.add_argument('--validate', dest='validate', action='store_true', help='Validate import data')
-        parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Verbose operation')
-        parser.add_argument('-c', '--continue', dest='cont', action='store_true', help='Continue import into existing tracker')
+                            help='Restrict update to a particular neighborhood, e.g. /p/.')
+        parser.add_argument('-a', '--api-ticket',
+                            action='store', dest='api_key', help='API ticket')
+        parser.add_argument('-s', '--secret-key', action='store',
+                            dest='secret_key', help='Secret key')
+        parser.add_argument('-p', '--project', action='store',
+                            dest='project', help='Project to import to')
+        parser.add_argument('-t', '--tracker', action='store',
+                            dest='tracker', help='Tracker to import to')
+        parser.add_argument('-u', '--base-url', dest='base_url',
+                            default='https://sourceforge.net', help='Base Allura URL (https://sourceforge.net)')
+        parser.add_argument('-o', dest='import_opts',
+                            default=[], action='store',  help='Specify import option(s)', metavar='opt=val')
+        parser.add_argument('--user-map', dest='user_map_file',
+                            help='Map original users to SF.net users', metavar='JSON_FILE')
+        parser.add_argument('--file_data', dest='file_data',
+                            help='json file', metavar='JSON_FILE')
+        parser.add_argument('--validate', dest='validate',
+                            action='store_true', help='Validate import data')
+        parser.add_argument('-v', '--verbose', dest='verbose',
+                            action='store_true', help='Verbose operation')
+        parser.add_argument('-c', '--continue', dest='cont',
+                            action='store_true', help='Continue import into existing tracker')
         return parser
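
The other recurring change in this file, and in several hunks above, is the blank-line rule: PEP 8 asks for two blank lines around top-level functions and classes and a single blank line between methods (pep8 codes E301-E303). A short sketch with invented names:

    import logging

    log = logging.getLogger(__name__)


    def do_import(path):
        # top-level definitions are separated by two blank lines
        log.info('importing %s', path)


    class Importer(object):

        def run(self):
            # methods inside a class are separated by a single blank line
            return do_import('data.json')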
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/command/test_fix_discussion.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/command/test_fix_discussion.py b/ForgeTracker/forgetracker/tests/command/test_fix_discussion.py
index d0cc4d3..8560556 100644
--- a/ForgeTracker/forgetracker/tests/command/test_fix_discussion.py
+++ b/ForgeTracker/forgetracker/tests/command/test_fix_discussion.py
@@ -26,7 +26,8 @@ from allura import model as M
 from forgetracker import model as TM
 
 
-test_config = pkg_resources.resource_filename('allura', '../test.ini') + '#main'
+test_config = pkg_resources.resource_filename(
+    'allura', '../test.ini') + '#main'
 
 
 def setUp(self):
@@ -62,13 +63,15 @@ def break_discussion():
     t.discussion_thread.add_post(text='comment 2')
     session(t).flush(t)
 
+
 def test_fix_discussion():
     break_discussion()
 
     tracker = M.AppConfig.query.find({'options.mount_point': 'bugs'}).first()
     t1 = TM.Ticket.query.get(ticket_num=1)
     t2 = TM.Ticket.query.get(ticket_num=2)
-    assert_not_equal(t1.discussion_thread.discussion.app_config_id, tracker._id)
+    assert_not_equal(
+        t1.discussion_thread.discussion.app_config_id, tracker._id)
     assert_not_equal(t2.discussion_thread.discussion_id, tracker.discussion_id)
 
     cmd = fix_discussion.FixDiscussion('fix-discussion')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/functional/test_import.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/functional/test_import.py b/ForgeTracker/forgetracker/tests/functional/test_import.py
index da26ecf..9d47543 100644
--- a/ForgeTracker/forgetracker/tests/functional/test_import.py
+++ b/ForgeTracker/forgetracker/tests/functional/test_import.py
@@ -45,7 +45,7 @@ class TestImportController(TestRestApiBase):
             assert 0, "form error?"
         return resp.follow()
 
-    def set_api_ticket(self, caps={'import': ['Projects','test']}):
+    def set_api_ticket(self, caps={'import': ['Projects', 'test']}):
         api_ticket = M.ApiTicket(user_id=c.user._id, capabilities=caps,
                                  expires=datetime.utcnow() + timedelta(days=1))
         ming.orm.session(api_ticket).flush()
@@ -55,19 +55,19 @@ class TestImportController(TestRestApiBase):
     def test_no_capability(self):
         here_dir = os.path.dirname(__file__)
 
-        self.set_api_ticket({'import2': ['Projects','test']})
+        self.set_api_ticket({'import2': ['Projects', 'test']})
         resp = self.api_post('/rest/p/test/bugs/perform_import',
-            doc=open(here_dir + '/data/sf.json').read(), options='{}')
+                             doc=open(here_dir + '/data/sf.json').read(), options='{}')
         assert resp.status_int == 403
 
         self.set_api_ticket({'import': ['Projects', 'test2']})
         resp = self.api_post('/rest/p/test/bugs/perform_import',
-            doc=open(here_dir + '/data/sf.json').read(), options='{}')
+                             doc=open(here_dir + '/data/sf.json').read(), options='{}')
         assert resp.status_int == 403
 
         self.set_api_ticket({'import': ['Projects', 'test']})
         resp = self.api_post('/rest/p/test/bugs/perform_import',
-            doc=open(here_dir + '/data/sf.json').read(), options='{}')
+                             doc=open(here_dir + '/data/sf.json').read(), options='{}')
         assert resp.status_int == 200
 
     @staticmethod
@@ -79,9 +79,12 @@ class TestImportController(TestRestApiBase):
         assert_equal(from_api['description'], org['description'])
         assert_equal(from_api['summary'], org['summary'])
         assert_equal(from_api['ticket_num'], org['id'])
-        assert_equal(from_api['created_date'], self.time_normalize(org['date']))
-        assert_equal(from_api['mod_date'], self.time_normalize(org['date_updated']))
-        assert_equal(from_api['custom_fields']['_resolution'], org['resolution'])
+        assert_equal(from_api['created_date'],
+                     self.time_normalize(org['date']))
+        assert_equal(from_api['mod_date'],
+                     self.time_normalize(org['date_updated']))
+        assert_equal(from_api['custom_fields']
+                     ['_resolution'], org['resolution'])
         assert_false('_cc' in from_api['custom_fields'])
         assert_equal(from_api['custom_fields']['_private'], org['private'])
 
@@ -90,7 +93,7 @@ class TestImportController(TestRestApiBase):
         here_dir = os.path.dirname(__file__)
         doc_text = open(here_dir + '/data/sf.json').read()
         r = self.api_post('/rest/p/test/bugs/validate_import',
-            doc=doc_text, options='{}')
+                          doc=doc_text, options='{}')
         assert not r.json['errors']
 
     @td.with_tracker
@@ -99,16 +102,17 @@ class TestImportController(TestRestApiBase):
             custom_fields=[
                 dict(name='_resolution', label='Resolution', type='select',
                      options='oné "one and á half" two'),
-               ],
+            ],
             open_status_names='aa bb',
             closed_status_names='cc',
-            )
+        )
         self.app.post(
             '/admin/bugs/set_custom_fields',
             params=variable_encode(params))
         here_dir = os.path.dirname(__file__)
-        api_ticket = M.ApiTicket(user_id=c.user._id, capabilities={'import': ['Projects','test']},
-                                 expires=datetime.utcnow() + timedelta(days=1))
+        api_ticket = M.ApiTicket(
+            user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
+            expires=datetime.utcnow() + timedelta(days=1))
         ming.orm.session(api_ticket).flush()
         self.set_api_token(api_ticket)
 
@@ -116,7 +120,7 @@ class TestImportController(TestRestApiBase):
         doc_json = json.loads(doc_text)
         ticket_json = doc_json['trackers']['default']['artifacts'][0]
         r = self.api_post('/rest/p/test/bugs/perform_import',
-            doc=doc_text, options='{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
+                          doc=doc_text, options='{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
         assert r.json['status'], r.json
 
         ming.orm.ThreadLocalORMSession.flush_all()
@@ -130,8 +134,9 @@ class TestImportController(TestRestApiBase):
     @td.with_tracker
     def test_import(self):
         here_dir = os.path.dirname(__file__)
-        api_ticket = M.ApiTicket(user_id=c.user._id, capabilities={'import': ['Projects','test']},
-                                 expires=datetime.utcnow() + timedelta(days=1))
+        api_ticket = M.ApiTicket(
+            user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
+            expires=datetime.utcnow() + timedelta(days=1))
         ming.orm.session(api_ticket).flush()
         self.set_api_token(api_ticket)
 
@@ -139,7 +144,7 @@ class TestImportController(TestRestApiBase):
         doc_json = json.loads(doc_text)
         ticket_json = doc_json['trackers']['default']['artifacts'][0]
         r = self.api_post('/rest/p/test/bugs/perform_import',
-            doc=doc_text, options='{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
+                          doc=doc_text, options='{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
         assert r.json['status']
         assert r.json['errors'] == []
 
@@ -147,7 +152,8 @@ class TestImportController(TestRestApiBase):
         M.MonQTask.run_ready()
         ming.orm.ThreadLocalORMSession.flush_all()
 
-        indexed_tickets = filter(lambda a: a['type_s'] == 'Ticket', g.solr.db.values())
+        indexed_tickets = filter(
+            lambda a: a['type_s'] == 'Ticket', g.solr.db.values())
         assert_equal(len(indexed_tickets), 1)
         assert_equal(indexed_tickets[0]['summary_t'], ticket_json['summary'])
         assert_equal(indexed_tickets[0]['ticket_num_i'], ticket_json['id'])
@@ -176,14 +182,15 @@ class TestImportController(TestRestApiBase):
 
         """
         here_dir = os.path.dirname(__file__)
-        api_ticket = M.ApiTicket(user_id=c.user._id, capabilities={'import': ['Projects','test']},
-                                 expires=datetime.utcnow() + timedelta(days=1))
+        api_ticket = M.ApiTicket(
+            user_id=c.user._id, capabilities={'import': ['Projects', 'test']},
+            expires=datetime.utcnow() + timedelta(days=1))
         ming.orm.session(api_ticket).flush()
         self.set_api_token(api_ticket)
 
         doc_text = open(here_dir + '/data/milestone-tickets.json').read()
         r = self.api_post('/rest/p/test/bugs/perform_import', doc=doc_text,
-            options='{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
+                          options='{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
         assert r.json['status'], r.json
 
         ming.orm.ThreadLocalORMSession.flush_all()
@@ -192,7 +199,8 @@ class TestImportController(TestRestApiBase):
 
         with h.push_context('test', mount_point='bugs', neighborhood='Projects'):
             for milestone_fld in c.app.globals.milestone_fields:
-                milestone_names = [ms['name'] for ms in milestone_fld['milestones']]
+                milestone_names = [ms['name']
+                                   for ms in milestone_fld['milestones']]
                 assert 'open_milestone' in milestone_names, milestone_names
                 assert 'closed_milestone' in milestone_names, milestone_names
                 for milestone in milestone_fld['milestones']:

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/functional/test_rest.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/functional/test_rest.py b/ForgeTracker/forgetracker/tests/functional/test_rest.py
index aad1401..b7f4101 100644
--- a/ForgeTracker/forgetracker/tests/functional/test_rest.py
+++ b/ForgeTracker/forgetracker/tests/functional/test_rest.py
@@ -27,6 +27,7 @@ from alluratest.controller import TestRestApiBase
 
 from forgetracker import model as TM
 
+
 class TestTrackerApiBase(TestRestApiBase):
 
     def setUp(self):
@@ -34,8 +35,8 @@ class TestTrackerApiBase(TestRestApiBase):
         self.setup_with_tools()
 
     @td.with_tool('test', 'Tickets', 'bugs',
-            TicketMonitoringEmail='test@localhost',
-            TicketMonitoringType='AllTicketChanges')
+                  TicketMonitoringEmail='test@localhost',
+                  TicketMonitoringType='AllTicketChanges')
     def setup_with_tools(self):
         h.set_context('test', 'bugs', neighborhood='Projects')
         self.tracker_globals = c.app.globals
@@ -50,8 +51,8 @@ class TestTrackerApiBase(TestRestApiBase):
                 labels='',
                 description='',
                 assigned_to='',
-                **{'custom_fields._milestone':''})
-            )
+                **{'custom_fields._milestone': ''})
+        )
 
 
 class TestRestNewTicket(TestTrackerApiBase):
@@ -67,7 +68,7 @@ class TestRestNewTicket(TestTrackerApiBase):
                 labels='foo,bar',
                 description='descr',
                 assigned_to='',
-                **{'custom_fields._milestone':''}
+                **{'custom_fields._milestone': ''}
             ))
         json = ticket_view.json['ticket']
         assert json['status'] == 'open', json
@@ -80,6 +81,7 @@ class TestRestNewTicket(TestTrackerApiBase):
     def test_invalid_ticket(self):
         self.app.get('/rest/p/test/bugs/2', status=404)
 
+
 class TestRestUpdateTicket(TestTrackerApiBase):
 
     def setUp(self):
@@ -91,10 +93,11 @@ class TestRestUpdateTicket(TestTrackerApiBase):
         args = dict(self.ticket_args, summary='test update ticket', labels='',
                     assigned_to=self.ticket_args['assigned_to_id'] or '')
         for bad_key in ('ticket_num', 'assigned_to_id', 'created_date',
-                'reported_by', 'reported_by_id', '_id', 'votes_up', 'votes_down'):
+                        'reported_by', 'reported_by_id', '_id', 'votes_up', 'votes_down'):
             del args[bad_key]
         args['private'] = str(args['private'])
-        ticket_view = self.api_post('/rest/p/test/bugs/1/save', wrap_args='ticket_form', params=h.encode_keys(args))
+        ticket_view = self.api_post(
+            '/rest/p/test/bugs/1/save', wrap_args='ticket_form', params=h.encode_keys(args))
         assert ticket_view.status_int == 200, ticket_view.showbrowser()
         json = ticket_view.json['ticket']
         assert int(json['ticket_num']) == 1
@@ -112,13 +115,18 @@ class TestRestIndex(TestTrackerApiBase):
         assert len(tickets.json['tickets']) == 1, tickets.json
         assert (tickets.json['tickets'][0]
                 == dict(ticket_num=1, summary='test new ticket')), tickets.json['tickets'][0]
-        assert tickets.json['tracker_config']['options']['mount_point'] == 'bugs'
-        assert tickets.json['tracker_config']['options']['TicketMonitoringType'] == 'AllTicketChanges'
+        assert tickets.json['tracker_config'][
+            'options']['mount_point'] == 'bugs'
+        assert tickets.json['tracker_config']['options'][
+            'TicketMonitoringType'] == 'AllTicketChanges'
         assert not tickets.json['tracker_config']['options']['EnableVoting']
-        assert tickets.json['tracker_config']['options']['TicketMonitoringEmail'] == 'test@localhost'
-        assert tickets.json['tracker_config']['options']['mount_label'] == 'Tickets'
+        assert tickets.json['tracker_config']['options'][
+            'TicketMonitoringEmail'] == 'test@localhost'
+        assert tickets.json['tracker_config'][
+            'options']['mount_label'] == 'Tickets'
         assert tickets.json['saved_bins'][0]['sort'] == 'mod_date_dt desc'
-        assert tickets.json['saved_bins'][0]['terms'] == '!status:wont-fix && !status:closed'
+        assert tickets.json['saved_bins'][0][
+            'terms'] == '!status:wont-fix && !status:closed'
         assert tickets.json['saved_bins'][0]['summary'] == 'Changes'
         assert len(tickets.json['saved_bins'][0]) == 4
         assert tickets.json['milestones'][0]['name'] == '1.0'
@@ -126,10 +134,13 @@ class TestRestIndex(TestTrackerApiBase):
 
     def test_ticket_index_noauth(self):
         tickets = self.api_get('/rest/p/test/bugs', user='*anonymous')
-        assert 'TicketMonitoringEmail' not in tickets.json['tracker_config']['options']
+        assert 'TicketMonitoringEmail' not in tickets.json[
+            'tracker_config']['options']
         # make sure it didn't get removed from the db too
-        ticket_config = M.AppConfig.query.get(project_id=c.project._id, tool_name='tickets')
-        assert_equal(ticket_config.options.get('TicketMonitoringEmail'), 'test@localhost')
+        ticket_config = M.AppConfig.query.get(
+            project_id=c.project._id, tool_name='tickets')
+        assert_equal(ticket_config.options.get('TicketMonitoringEmail'),
+                     'test@localhost')
 
     @td.with_tool('test', 'Tickets', 'dummy')
     def test_move_ticket_redirect(self):
@@ -154,23 +165,30 @@ class TestRestDiscussion(TestTrackerApiBase):
         r = self.api_get('/rest/p/test/bugs/_discuss/')
         assert len(r.json['discussion']['threads']) == 1, r.json
         for t in r.json['discussion']['threads']:
-            r = self.api_get('/rest/p/test/bugs/_discuss/thread/%s/' % t['_id'])
+            r = self.api_get('/rest/p/test/bugs/_discuss/thread/%s/' %
+                             t['_id'])
             assert len(r.json['thread']['posts']) == 0, r.json
 
     def test_post(self):
-        discussion = self.api_get('/rest/p/test/bugs/_discuss/').json['discussion']
-        post = self.api_post('/rest/p/test/bugs/_discuss/thread/%s/new' % discussion['threads'][0]['_id'],
-                             text='This is a comment', wrap_args=None)
-        thread = self.api_get('/rest/p/test/bugs/_discuss/thread/%s/' % discussion['threads'][0]['_id'])
+        discussion = self.api_get(
+            '/rest/p/test/bugs/_discuss/').json['discussion']
+        post = self.api_post(
+            '/rest/p/test/bugs/_discuss/thread/%s/new' % discussion['threads'][0]['_id'],
+            text='This is a comment', wrap_args=None)
+        thread = self.api_get('/rest/p/test/bugs/_discuss/thread/%s/' %
+                              discussion['threads'][0]['_id'])
         assert len(thread.json['thread']['posts']) == 1, thread.json
         assert post.json['post']['text'] == 'This is a comment', post.json
         reply = self.api_post(
-            '/rest/p/test/bugs/_discuss/thread/%s/%s/reply' % (thread.json['thread']['_id'], post.json['post']['slug']),
+            '/rest/p/test/bugs/_discuss/thread/%s/%s/reply' % (thread.json['thread']
+                                                               ['_id'], post.json['post']['slug']),
             text='This is a reply', wrap_args=None)
         assert reply.json['post']['text'] == 'This is a reply', reply.json
-        thread = self.api_get('/rest/p/test/bugs/_discuss/thread/%s/' % discussion['threads'][0]['_id'])
+        thread = self.api_get('/rest/p/test/bugs/_discuss/thread/%s/' %
+                              discussion['threads'][0]['_id'])
         assert len(thread.json['thread']['posts']) == 2, thread.json
 
+
 class TestRestSearch(TestTrackerApiBase):
 
     @patch('forgetracker.model.Ticket.paged_search')
@@ -180,7 +198,7 @@ class TestRestSearch(TestTrackerApiBase):
         ])
         r = self.api_get('/rest/p/test/bugs/search')
         assert_equal(r.status_int, 200)
-        assert_equal(r.json, {'tickets':[
+        assert_equal(r.json, {'tickets': [
             {'summary': 'our test ticket', 'ticket_num': 5},
         ]})
 
@@ -188,18 +206,20 @@ class TestRestSearch(TestTrackerApiBase):
     def test_some_criteria(self, paged_search):
         q = 'labels:testing && status:open'
         paged_search.return_value = dict(tickets=[
-                TM.Ticket(ticket_num=5, summary='our test ticket'),
-            ],
+            TM.Ticket(ticket_num=5, summary='our test ticket'),
+        ],
             sort='status',
             limit=2,
             count=1,
             page=0,
             q=q,
         )
-        r = self.api_get('/rest/p/test/bugs/search', q=q, sort='status', limit='2')
+        r = self.api_get('/rest/p/test/bugs/search',
+                         q=q, sort='status', limit='2')
         assert_equal(r.status_int, 200)
-        assert_equal(r.json, {'limit': 2, 'q': q, 'sort':'status', 'count': 1,
-                               'page': 0, 'tickets':[
-                {'summary': 'our test ticket', 'ticket_num': 5},
-            ]
-        })
+        assert_equal(r.json, {'limit': 2, 'q': q, 'sort': 'status', 'count': 1,
+                              'page': 0, 'tickets': [
+                                  {'summary': 'our test ticket',
+                                   'ticket_num': 5},
+                              ]
+                              })
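
A third pattern visible throughout the test hunks is whitespace around operators and after commas and colons (pep8 E225 and E231): dict literals gain a space after the colon, argument lists gain a space after each comma, and bare arithmetic gains spaces around the operator. A tiny sketch, again with made-up names:

    # before cleanup: pep8 flags E231 (missing whitespace after ':' and ',')
    # and E225 (missing whitespace around operator)
    totals = {'open':3,'closed':1}
    remaining = totals['open']-1

    # after cleanup
    totals = {'open': 3, 'closed': 1}
    remaining = totals['open'] - 1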


[29/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/project.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/project.py b/Allura/allura/controllers/project.py
index b0e5b8b..cdbba4c 100644
--- a/Allura/allura/controllers/project.py
+++ b/Allura/allura/controllers/project.py
@@ -54,16 +54,20 @@ from .static import NewForgeController
 
 log = logging.getLogger(__name__)
 
+
 class W:
     resize_editor = ffw.AutoResizeTextarea()
     project_summary = plw.ProjectSummary()
-    add_project = plugin.ProjectRegistrationProvider.get().add_project_widget(antispam=True)
+    add_project = plugin.ProjectRegistrationProvider.get().add_project_widget(
+        antispam=True)
     page_list = ffw.PageList()
     page_size = ffw.PageSize()
     project_select = ffw.NeighborhoodProjectSelect
     neighborhood_overview_form = ff.NeighborhoodOverviewForm()
 
+
 class NeighborhoodController(object):
+
     '''Manages a neighborhood of projects.
     '''
 
@@ -71,7 +75,8 @@ class NeighborhoodController(object):
         self.neighborhood = neighborhood
         self.neighborhood_name = self.neighborhood.name
         self.prefix = self.neighborhood.shortname_prefix
-        self.browse = NeighborhoodProjectBrowseController(neighborhood=self.neighborhood)
+        self.browse = NeighborhoodProjectBrowseController(
+            neighborhood=self.neighborhood)
         self._admin = NeighborhoodAdminController(self.neighborhood)
         self._moderate = NeighborhoodModerateController(self.neighborhood)
         self.import_project = ProjectImporterController(self.neighborhood)
@@ -84,17 +89,20 @@ class NeighborhoodController(object):
         pname = unquote(pname)
         provider = plugin.ProjectRegistrationProvider.get()
         try:
-            provider.shortname_validator.to_python(pname, check_allowed=False, neighborhood=self.neighborhood)
+            provider.shortname_validator.to_python(
+                pname, check_allowed=False, neighborhood=self.neighborhood)
         except Invalid:
             project = None
         else:
-            project = M.Project.query.get(shortname=self.prefix + pname, neighborhood_id=self.neighborhood._id)
+            project = M.Project.query.get(
+                shortname=self.prefix + pname, neighborhood_id=self.neighborhood._id)
         if project is None and self.prefix == 'u/':
             # create user-project if it is missing
             user = M.User.query.get(username=pname, disabled=False)
             if user:
                 project = self.neighborhood.register_project(
-                    plugin.AuthenticationProvider.get(request).user_project_shortname(user),
+                    plugin.AuthenticationProvider.get(
+                        request).user_project_shortname(user),
                     user=user, user_project=True)
         if project is None:
             # look for neighborhood tools matching the URL
@@ -141,10 +149,10 @@ class NeighborhoodController(object):
         c.page_list = W.page_list
         limit, page, start = g.handle_paging(limit, page)
         pq = M.Project.query.find(dict(
-                neighborhood_id=self.neighborhood._id,
-                deleted=False,
-                is_nbhd_project=False,
-                ))
+            neighborhood_id=self.neighborhood._id,
+            deleted=False,
+            is_nbhd_project=False,
+        ))
         if sort == 'alpha':
             pq.sort('name')
         else:
@@ -152,11 +160,13 @@ class NeighborhoodController(object):
         count = pq.count()
         nb_max_projects = self.neighborhood.get_max_projects()
         projects = pq.skip(start).limit(int(limit)).all()
-        categories = M.ProjectCategory.query.find({'parent_id':None}).sort('name').all()
+        categories = M.ProjectCategory.query.find(
+            {'parent_id': None}).sort('name').all()
         c.custom_sidebar_menu = []
         if h.has_access(self.neighborhood, 'register')() and (nb_max_projects is None or count < nb_max_projects):
             c.custom_sidebar_menu += [
-                SitemapEntry('Add a Project', self.neighborhood.url() + 'add_project', ui_icon=g.icons['plus']),
+                SitemapEntry('Add a Project', self.neighborhood.url()
+                             + 'add_project', ui_icon=g.icons['plus']),
                 SitemapEntry('')
             ]
         c.custom_sidebar_menu = c.custom_sidebar_menu + [
@@ -164,7 +174,8 @@ class NeighborhoodController(object):
         ]
         return dict(neighborhood=self.neighborhood,
                     title="Welcome to " + self.neighborhood.name,
-                    text=g.markdown.cached_convert(self.neighborhood, 'homepage'),
+                    text=g.markdown.cached_convert(
+                        self.neighborhood, 'homepage'),
                     projects=projects,
                     sort=sort,
                     limit=limit, page=page, count=count)
@@ -175,7 +186,8 @@ class NeighborhoodController(object):
         c.project = self.neighborhood.neighborhood_project
         require_access(self.neighborhood, 'register')
         c.add_project = W.add_project
-        form_data.setdefault('tools', [u'Wiki',u'Git',u'Tickets',u'Discussion'])
+        form_data.setdefault(
+            'tools', [u'Wiki', u'Git', u'Tickets', u'Discussion'])
         form_data['neighborhood'] = self.neighborhood.name
         return dict(neighborhood=self.neighborhood, form_data=form_data)
 
@@ -183,7 +195,7 @@ class NeighborhoodController(object):
     def suggest_name(self, project_name=''):
         provider = plugin.ProjectRegistrationProvider.get()
         return dict(suggested_name=provider.suggest_name(project_name,
-            self.neighborhood))
+                                                         self.neighborhood))
 
     @expose('json:')
     @validate(W.add_project)
@@ -195,27 +207,33 @@ class NeighborhoodController(object):
     @validate(W.add_project, error_handler=add_project)
     @utils.AntiSpam.validate('Spambot protection engaged')
     @require_post()
-    def register(self, project_unixname=None, project_description=None, project_name=None, neighborhood=None,
-                 private_project=None, tools=None, **kw):
+    def register(
+            self, project_unixname=None, project_description=None, project_name=None, neighborhood=None,
+            private_project=None, tools=None, **kw):
         require_access(self.neighborhood, 'register')
         if private_project:
             require_access(self.neighborhood, 'admin')
         neighborhood = M.Neighborhood.query.get(name=neighborhood)
 
-        project_description = h.really_unicode(project_description or '').encode('utf-8')
+        project_description = h.really_unicode(
+            project_description or '').encode('utf-8')
         project_name = h.really_unicode(project_name or '').encode('utf-8')
-        project_unixname = h.really_unicode(project_unixname or '').encode('utf-8').lower()
+        project_unixname = h.really_unicode(
+            project_unixname or '').encode('utf-8').lower()
         try:
             c.project = neighborhood.register_project(project_unixname,
-                    project_name=project_name, private_project=private_project)
+                                                      project_name=project_name, private_project=private_project)
         except exceptions.ProjectOverlimitError:
-            flash("You have exceeded the maximum number of projects you are allowed to create", 'error')
+            flash(
+                "You have exceeded the maximum number of projects you are allowed to create", 'error')
             redirect('add_project')
         except exceptions.ProjectRatelimitError:
-            flash("Project creation rate limit exceeded.  Please try again later.", 'error')
+            flash(
+                "Project creation rate limit exceeded.  Please try again later.", 'error')
             redirect('add_project')
         except Exception as e:
-            log.error('error registering project: %s', project_unixname, exc_info=True)
+            log.error('error registering project: %s',
+                      project_unixname, exc_info=True)
             flash('Internal Error. Please try again later.', 'error')
             redirect('add_project')
 
@@ -250,11 +268,13 @@ class NeighborhoodController(object):
 
 
 class NeighborhoodProjectBrowseController(ProjectBrowseController):
+
     def __init__(self, neighborhood=None, category_name=None, parent_category=None):
         self.neighborhood = neighborhood
-        super(NeighborhoodProjectBrowseController, self).__init__(category_name=category_name, parent_category=parent_category)
+        super(NeighborhoodProjectBrowseController, self).__init__(
+            category_name=category_name, parent_category=parent_category)
         self.nav_stub = '%sbrowse/' % self.neighborhood.url()
-        self.additional_filters = {'neighborhood_id':self.neighborhood._id}
+        self.additional_filters = {'neighborhood_id': self.neighborhood._id}
 
     @expose()
     def _lookup(self, category_name, *remainder):
@@ -268,7 +288,8 @@ class NeighborhoodProjectBrowseController(ProjectBrowseController):
         c.project_summary = W.project_summary
         c.page_list = W.page_list
         limit, page, start = g.handle_paging(limit, page)
-        projects, count = self._find_projects(sort=sort, limit=limit, start=start)
+        projects, count = self._find_projects(
+            sort=sort, limit=limit, start=start)
         title = self._build_title()
         c.custom_sidebar_menu = self._build_nav()
         return dict(projects=projects,
@@ -278,7 +299,9 @@ class NeighborhoodProjectBrowseController(ProjectBrowseController):
                     sort=sort,
                     limit=limit, page=page, count=count)
 
+
 class HostNeighborhoodController(WsgiDispatchController, NeighborhoodController):
+
     '''Neighborhood controller with support for use as a root controller, for
     instance, when using adobe.sourceforge.net (if this is allowed).
     '''
@@ -288,16 +311,19 @@ class HostNeighborhoodController(WsgiDispatchController, NeighborhoodController)
     nf = NewForgeController()
     search = SearchController()
 
+
 class ToolListController(object):
+
     """Renders a list of all tools of a given type in the current project."""
 
     @expose('jinja:allura:templates/tool_list.html')
     def _default(self, tool_name, *args, **kw):
         tool_name = tool_name.lower()
         entries = [e for e in c.project.sitemap()
-                if e.tool_name and e.tool_name.lower() == tool_name]
+                   if e.tool_name and e.tool_name.lower() == tool_name]
         return dict(entries=entries, type=g.entry_points['tool'][tool_name].tool_label if entries else None)
 
+
 class ProjectController(FeedController):
 
     def __init__(self):
@@ -309,18 +335,20 @@ class ProjectController(FeedController):
     def _nav(self):
         menu = []
         for s in c.project.grouped_navbar_entries():
-            entry = dict(name=s.label, url=s.url, icon=s.ui_icon, tool_name=s.tool_name)
+            entry = dict(name=s.label, url=s.url,
+                         icon=s.ui_icon, tool_name=s.tool_name)
             if s.children:
                 entry['children'] = [dict(name=child.label, url=child.url, icon=child.ui_icon, tool_name=child.tool_name)
-                                    for child in s.children]
+                                     for child in s.children]
             menu.append(entry)
         return dict(menu=menu)
 
     @expose()
     def _lookup(self, name, *remainder):
         name = unquote(name)
-        subproject = M.Project.query.get(shortname=c.project.shortname + '/' + name,
-                                         neighborhood_id=c.project.neighborhood_id)
+        subproject = M.Project.query.get(
+            shortname=c.project.shortname + '/' + name,
+            neighborhood_id=c.project.neighborhood_id)
         if subproject:
             c.project = subproject
             c.app = None
@@ -341,13 +369,14 @@ class ProjectController(FeedController):
         admins = []
         developers = []
         for user in c.project.users():
-            roles = M.ProjectRole.query.find({'_id': {'$in': M.ProjectRole.by_user(user).roles}})
+            roles = M.ProjectRole.query.find(
+                {'_id': {'$in': M.ProjectRole.by_user(user).roles}})
             roles = set([r.name for r in roles])
             u = dict(
-                    display_name=user.display_name,
-                    username=user.username,
-                    url=user.url(),
-                    roles=', '.join(sorted(roles)))
+                display_name=user.display_name,
+                username=user.username,
+                url=user.url(),
+                roles=', '.join(sorted(roles)))
             if 'Admin' in roles:
                 admins.append(u)
             elif 'Developer' in roles:
@@ -368,7 +397,8 @@ class ProjectController(FeedController):
     def index(self, **kw):
         mount = c.project.first_mount_visible(c.user)
         activity_enabled = config.get('activitystream.enabled', False)
-        activity_enabled = request.cookies.get('activitystream.enabled', activity_enabled)
+        activity_enabled = request.cookies.get(
+            'activitystream.enabled', activity_enabled)
         activity_enabled = asbool(activity_enabled)
         if mount is not None:
             if 'ac' in mount:
@@ -412,10 +442,10 @@ class ProjectController(FeedController):
             g.credentials,
             g.credentials.project_roles(project_id=c.project.root_project._id).named)
         users = M.User.query.find({
-                '_id': {'$in': named_roles.userids_that_reach},
-                'display_name': re.compile(r'(?i)%s' % re.escape(term)),
-                'disabled': False,
-            }).sort('username').limit(10).all()
+            '_id': {'$in': named_roles.userids_that_reach},
+            'display_name': re.compile(r'(?i)%s' % re.escape(term)),
+            'disabled': False,
+        }).sort('username').limit(10).all()
         return dict(
             users=[
                 dict(
@@ -433,6 +463,7 @@ class ProjectController(FeedController):
             } for u in c.project.users()]
         }
 
+
 class ScreenshotsController(object):
 
     @expose()
@@ -443,6 +474,7 @@ class ScreenshotsController(object):
             filename = unquote(request.path.rsplit('/', 1)[-1])
         return ScreenshotController(filename), args
 
+
 class ScreenshotController(object):
 
     def __init__(self, filename):
@@ -462,7 +494,8 @@ class ScreenshotController(object):
             project_id=c.project._id,
             category='screenshot',
             filename=self.filename)
-        if not f: raise exc.HTTPNotFound
+        if not f:
+            raise exc.HTTPNotFound
         return f
 
     @LazyProperty
@@ -471,9 +504,11 @@ class ScreenshotController(object):
             project_id=c.project._id,
             category='screenshot_thumb',
             filename=self.filename)
-        if not f: raise exc.HTTPNotFound
+        if not f:
+            raise exc.HTTPNotFound
         return f
 
+
 def set_nav(neighborhood):
     project = neighborhood.neighborhood_project
     if project:
@@ -485,7 +520,9 @@ def set_nav(neighborhood):
             SitemapEntry('Overview', admin_url + 'overview'),
             SitemapEntry('Awards', admin_url + 'accolades')]
 
+
 class NeighborhoodAdminController(object):
+
     def __init__(self, neighborhood):
         self.neighborhood = neighborhood
         self.awards = NeighborhoodAwardsController(self.neighborhood)
@@ -532,11 +569,14 @@ class NeighborhoodAdminController(object):
     @expose('jinja:allura:templates/neighborhood_admin_accolades.html')
     def accolades(self):
         set_nav(self.neighborhood)
-        awards = M.Award.query.find(dict(created_by_neighborhood_id=self.neighborhood._id)).all()
+        awards = M.Award.query.find(
+            dict(created_by_neighborhood_id=self.neighborhood._id)).all()
         awards_count = len(awards)
-        grants = M.AwardGrant.query.find(dict(granted_by_neighborhood_id=self.neighborhood._id))
+        grants = M.AwardGrant.query.find(
+            dict(granted_by_neighborhood_id=self.neighborhood._id))
         grants_count = grants.count()
-        c.project_select = W.project_select(self.neighborhood.url() + '_admin/project_search')
+        c.project_select = W.project_select(
+            self.neighborhood.url() + '_admin/project_search')
         return dict(
             awards=awards,
             awards_count=awards_count,
@@ -551,31 +591,31 @@ class NeighborhoodAdminController(object):
         nbhd = self.neighborhood
         c.project = nbhd.neighborhood_project
         h.log_if_changed(nbhd, 'name', name,
-                        'change neighborhood name to %s' % name)
+                         'change neighborhood name to %s' % name)
         nbhd_redirect = kw.pop('redirect', '')
         h.log_if_changed(nbhd, 'redirect', nbhd_redirect,
-                        'change neighborhood redirect to %s' % nbhd_redirect)
+                         'change neighborhood redirect to %s' % nbhd_redirect)
         h.log_if_changed(nbhd, 'homepage', homepage,
-                        'change neighborhood homepage to %s' % homepage)
+                         'change neighborhood homepage to %s' % homepage)
         h.log_if_changed(nbhd, 'css', css,
-                        'change neighborhood css to %s' % css)
+                         'change neighborhood css to %s' % css)
         h.log_if_changed(nbhd, 'project_template', project_template,
-                        'change neighborhood project template to %s'
-                        % project_template)
+                         'change neighborhood project template to %s'
+                         % project_template)
         allow_browse = kw.get('allow_browse', False)
         h.log_if_changed(nbhd, 'allow_browse', allow_browse,
-                        'change neighborhood allow browse to %s'
-                        % allow_browse)
+                         'change neighborhood allow browse to %s'
+                         % allow_browse)
         show_title = kw.get('show_title', False)
         h.log_if_changed(nbhd, 'show_title', show_title,
-                        'change neighborhood show title to %s' % show_title)
+                         'change neighborhood show title to %s' % show_title)
         project_list_url = kw.get('project_list_url', '')
         h.log_if_changed(nbhd, 'project_list_url', project_list_url,
-                        'change neighborhood project list url to %s'
-                        % project_list_url)
+                         'change neighborhood project list url to %s'
+                         % project_list_url)
         tracking_id = kw.get('tracking_id', '')
         h.log_if_changed(nbhd, 'tracking_id', tracking_id,
-                        'update neighborhood tracking_id')
+                         'update neighborhood tracking_id')
         anchored_tools = kw.get('anchored_tools', '')
         validate_tools = dict()
         result = True
@@ -585,13 +625,14 @@ class NeighborhoodAdminController(object):
                     (tool.split(':')[0].lower(), tool.split(':')[1])
                     for tool in anchored_tools.replace(' ', '').split(','))
             except Exception:
-                flash('Anchored tools "%s" is invalid' % anchored_tools,'error')
+                flash('Anchored tools "%s" is invalid' %
+                      anchored_tools, 'error')
                 result = False
 
-
         for tool in validate_tools.keys():
             if tool not in g.entry_points['tool']:
-                flash('Anchored tools "%s" is invalid' % anchored_tools,'error')
+                flash('Anchored tools "%s" is invalid' %
+                      anchored_tools, 'error')
                 result = False
         if result:
             h.log_if_changed(nbhd, 'anchored_tools', anchored_tools,
@@ -616,6 +657,7 @@ class NeighborhoodAdminController(object):
             neighborhood=self.neighborhood,
         )
 
+
 class NeighborhoodStatsController(object):
 
     def __init__(self, neighborhood):
@@ -624,14 +666,16 @@ class NeighborhoodStatsController(object):
     @with_trailing_slash
     @expose('jinja:allura:templates/neighborhood_stats.html')
     def index(self, **kw):
-        delete_count = M.Project.query.find(dict(neighborhood_id=self.neighborhood._id, deleted=True)).count()
+        delete_count = M.Project.query.find(
+            dict(neighborhood_id=self.neighborhood._id, deleted=True)).count()
         public_count = 0
         private_count = 0
         last_updated_30 = 0
         last_updated_60 = 0
         last_updated_90 = 0
         today_date = datetime.today()
-        if M.Project.query.find(dict(neighborhood_id=self.neighborhood._id, deleted=False)).count() < 20000: # arbitrary limit for efficiency
+        # arbitrary limit for efficiency
+        if M.Project.query.find(dict(neighborhood_id=self.neighborhood._id, deleted=False)).count() < 20000:
             for p in M.Project.query.find(dict(neighborhood_id=self.neighborhood._id, deleted=False)):
                 if p.private:
                     private_count = private_count + 1
@@ -660,7 +704,8 @@ class NeighborhoodStatsController(object):
     def adminlist(self, sort='alpha', limit=25, page=0, **kw):
         limit, page, start = g.handle_paging(limit, page)
 
-        pq = M.Project.query.find(dict(neighborhood_id=self.neighborhood._id, deleted=False))
+        pq = M.Project.query.find(
+            dict(neighborhood_id=self.neighborhood._id, deleted=False))
         if sort == 'alpha':
             pq.sort('name')
         else:
@@ -670,10 +715,12 @@ class NeighborhoodStatsController(object):
 
         entries = []
         for proj in projects:
-            admin_role = M.ProjectRole.query.get(project_id=proj.root_project._id, name='Admin')
+            admin_role = M.ProjectRole.query.get(
+                project_id=proj.root_project._id, name='Admin')
             if admin_role is None:
                 continue
-            user_role_list = M.ProjectRole.query.find(dict(project_id=proj.root_project._id, name=None)).all()
+            user_role_list = M.ProjectRole.query.find(
+                dict(project_id=proj.root_project._id, name=None)).all()
             for ur in user_role_list:
                 if ur.user is not None and admin_role._id in ur.roles:
                     entries.append({'project': proj, 'user': ur.user})
@@ -686,6 +733,7 @@ class NeighborhoodStatsController(object):
                     neighborhood=self.neighborhood,
                     )
 
+
 class NeighborhoodModerateController(object):
 
     def __init__(self, neighborhood):
@@ -697,7 +745,8 @@ class NeighborhoodModerateController(object):
     @expose('jinja:allura:templates/neighborhood_moderate.html')
     def index(self, **kw):
         c.project = self.neighborhood.neighborhood_project
-        other_nbhds = list(M.Neighborhood.query.find(dict(_id={'$ne':self.neighborhood._id})).sort('name'))
+        other_nbhds = list(M.Neighborhood.query.find(
+            dict(_id={'$ne': self.neighborhood._id})).sort('name'))
         return dict(neighborhood=self.neighborhood,
                     neighborhoods=other_nbhds)
 
@@ -729,7 +778,8 @@ class NeighborhoodModerateController(object):
     @expose()
     @require_post()
     def evict(self, pid):
-        p = M.Project.query.get(shortname=pid, neighborhood_id=self.neighborhood._id, deleted=False)
+        p = M.Project.query.get(
+            shortname=pid, neighborhood_id=self.neighborhood._id, deleted=False)
         if p is None:
             flash("Cannot evict  %s; it's not in the neighborhood"
                   % pid, 'error')
@@ -744,6 +794,7 @@ class NeighborhoodModerateController(object):
         flash('%s evicted to Projects' % pid)
         redirect('.')
 
+
 class NeighborhoodAwardsController(object):
 
     def __init__(self, neighborhood=None):
@@ -753,7 +804,8 @@ class NeighborhoodAwardsController(object):
     @expose('jinja:allura:templates/awards.html')
     def index(self, **kw):
         require_access(self.neighborhood, 'admin')
-        awards = M.Award.query.find(dict(created_by_neighborhood_id=self.neighborhood._id)).all()
+        awards = M.Award.query.find(
+            dict(created_by_neighborhood_id=self.neighborhood._id)).all()
         return dict(awards=awards or [], count=len(awards))
 
     @expose('jinja:allura:templates/award_not_found.html')
@@ -763,7 +815,8 @@ class NeighborhoodAwardsController(object):
     @expose('jinja:allura:templates/grants.html')
     def grants(self, **kw):
         require_access(self.neighborhood, 'admin')
-        grants = M.AwardGrant.query.find(dict(granted_by_neighborhood_id=self.neighborhood._id))
+        grants = M.AwardGrant.query.find(
+            dict(granted_by_neighborhood_id=self.neighborhood._id))
         count = grants.count()
         return dict(grants=grants or [], count=count)
 
@@ -793,7 +846,7 @@ class NeighborhoodAwardsController(object):
     def grant(self, grant=None, recipient=None):
         require_access(self.neighborhood, 'admin')
         grant_q = M.Award.query.find(dict(short=grant,
-            created_by_neighborhood_id=self.neighborhood._id)).first()
+                                          created_by_neighborhood_id=self.neighborhood._id)).first()
         recipient_q = M.Project.query.find(dict(
             neighborhood_id=self.neighborhood._id, shortname=recipient,
             deleted=False)).first()
@@ -807,13 +860,14 @@ class NeighborhoodAwardsController(object):
                 g.post_event('project_updated')
         redirect(request.referer)
 
+
 class AwardController(object):
 
     def __init__(self, neighborhood=None, award_id=None):
         self.neighborhood = neighborhood
         if award_id:
             self.award = M.Award.query.find(dict(_id=ObjectId(award_id),
-                created_by_neighborhood_id=self.neighborhood._id)).first()
+                                                 created_by_neighborhood_id=self.neighborhood._id)).first()
 
     @with_trailing_slash
     @expose('jinja:allura:templates/award.html')
@@ -874,6 +928,7 @@ class AwardController(object):
             self.award.delete()
         redirect(request.referer)
 
+
 class GrantController(object):
 
     def __init__(self, neighborhood=None, award=None, recipient=None):
@@ -882,9 +937,9 @@ class GrantController(object):
             self.recipient = recipient.replace('_', '/')
             self.award = M.Award.query.get(_id=award._id)
             self.project = M.Project.query.find(dict(shortname=self.recipient,
-                neighborhood_id=self.neighborhood._id)).first()
+                                                     neighborhood_id=self.neighborhood._id)).first()
             self.grant = M.AwardGrant.query.get(award_id=self.award._id,
-                granted_to_project_id=self.project._id)
+                                                granted_to_project_id=self.project._id)
 
     @with_trailing_slash
     @expose('jinja:allura:templates/grant.html')
@@ -915,7 +970,9 @@ class GrantController(object):
             g.post_event('project_updated')
         redirect(request.referer)
 
+
 class ProjectImporterController(object):
+
     def __init__(self, neighborhood, *a, **kw):
         super(ProjectImporterController, self).__init__(*a, **kw)
         self.neighborhood = neighborhood

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/repository.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/repository.py b/Allura/allura/controllers/repository.py
index 2ca33ca..05e7202 100644
--- a/Allura/allura/controllers/repository.py
+++ b/Allura/allura/controllers/repository.py
@@ -58,20 +58,23 @@ from .base import BaseController
 
 log = logging.getLogger(__name__)
 
+
 def on_import():
     BranchBrowser.CommitBrowserClass = CommitBrowser
     CommitBrowser.TreeBrowserClass = TreeBrowser
     TreeBrowser.FileBrowserClass = FileBrowser
 
+
 class RepoRootController(BaseController, FeedController):
     _discuss = AppDiscussionController()
-    commit_browser_widget=SCMCommitBrowserWidget()
+    commit_browser_widget = SCMCommitBrowserWidget()
 
     def get_feed(self, project, app, user):
         query = dict(project_id=project._id, app_config_id=app.config._id)
-        pname, repo =  (project.shortname, app.config.options.mount_label)
+        pname, repo = (project.shortname, app.config.options.mount_label)
         title = '%s %s changes' % (pname, repo)
-        description = 'Recent changes to %s repository in %s project' % (repo, pname)
+        description = 'Recent changes to %s repository in %s project' % (
+            repo, pname)
         return FeedArgs(query, title, app.url, description=description)
 
     def _check_security(self):
@@ -81,7 +84,7 @@ class RepoRootController(BaseController, FeedController):
     @expose()
     def index(self, offset=0, branch=None, **kw):
         if branch is None:
-            branch=c.app.default_branch_name
+            branch = c.app.default_branch_name
         redirect(c.app.repo.url_for_commit(branch, url_type='ref'))
 
     @with_trailing_slash
@@ -94,7 +97,8 @@ class RepoRootController(BaseController, FeedController):
                 repo_path_parts = f.url().strip('/').split('/')
                 links.append(dict(
                     repo_url=f.url(),
-                    repo = '%s / %s' % (repo_path_parts[1], repo_path_parts[-1]),
+                    repo='%s / %s' % (repo_path_parts[1],
+                                      repo_path_parts[-1]),
                 ))
         return dict(links=links)
 
@@ -112,13 +116,15 @@ class RepoRootController(BaseController, FeedController):
     def fork(self, project_id=None, mount_point=None, mount_label=None, **kw):
         # this shows the form and handles the submission
         security.require_authenticated()
-        if not c.app.forkable: raise exc.HTTPNotFound
+        if not c.app.forkable:
+            raise exc.HTTPNotFound
         from_repo = c.app.repo
         ThreadLocalORMSession.flush_all()
         ThreadLocalORMSession.close_all()
         from_project = c.project
         to_project = M.Project.query.get(_id=ObjectId(project_id))
-        mount_label = mount_label or '%s - %s' % (c.project.name, c.app.config.options.mount_label)
+        mount_label = mount_label or '%s - %s' % (c.project.name,
+                                                  c.app.config.options.mount_label)
         mount_point = (mount_point or from_project.shortname)
         if request.method != 'POST' or not mount_point:
             return dict(from_repo=from_repo,
@@ -137,7 +143,7 @@ class RepoRootController(BaseController, FeedController):
                         mount_label=mount_label,
                         cloned_from_project_id=from_project._id,
                         cloned_from_repo_id=from_repo._id)
-                    redirect(to_project.url()+mount_point+'/')
+                    redirect(to_project.url() + mount_point + '/')
                 except exc.HTTPRedirection:
                     raise
                 except Exception, ex:
@@ -169,15 +175,15 @@ class RepoRootController(BaseController, FeedController):
         with c.app.repo.push_upstream_context():
             target_branch = c.app.default_branch_name
         return {
-                'source_branch': source_branch,
-                'target_branch': target_branch,
-            }
+            'source_branch': source_branch,
+            'target_branch': target_branch,
+        }
 
     @expose()
     @require_post()
     def do_request_merge(self, **kw):
         kw = self.mr_widget.to_python(kw)
-        downstream=dict(
+        downstream = dict(
             project_id=c.project._id,
             mount_point=c.app.config.options.mount_point,
             commit_id=c.app.repo.commit(kw['source_branch'])._id)
@@ -214,8 +220,9 @@ class RepoRootController(BaseController, FeedController):
     @without_trailing_slash
     @expose('json:')
     def commit_browser_data(self, **kw):
-        head_ids = [ head.object_id for head in c.app.repo.get_heads() ]
-        commit_ids = [c.app.repo.rev_to_commit_id(r) for r in c.app.repo.log(head_ids, id_only=True)]
+        head_ids = [head.object_id for head in c.app.repo.get_heads()]
+        commit_ids = [c.app.repo.rev_to_commit_id(r)
+                      for r in c.app.repo.log(head_ids, id_only=True)]
         log.info('Grab %d commit objects by ID', len(commit_ids))
         commits_by_id = {
             c_obj._id: c_obj
@@ -232,25 +239,27 @@ class RepoRootController(BaseController, FeedController):
         result = []
         for row, oid in enumerate(topo_sort(children, parents, dates, head_ids)):
             ci = commits_by_id[oid]
-            url=c.app.repo.url_for_commit(Object(_id=oid))
+            url = c.app.repo.url_for_commit(Object(_id=oid))
             msg_split = ci.message.splitlines()
             if msg_split:
                 msg = msg_split[0]
             else:
                 msg = "No commit message."
             result.append(dict(
-                    oid=oid,
-                    short_id=c.app.repo.shorthand_for_commit(oid),
-                    row=row,
-                    parents=ci.parent_ids,
-                    message=msg,
-                    url=url))
+                oid=oid,
+                short_id=c.app.repo.shorthand_for_commit(oid),
+                row=row,
+                parents=ci.parent_ids,
+                message=msg,
+                url=url))
         log.info('...done')
         col_idx = {}
         columns = []
+
         def find_column(columns):
-            for i,c in enumerate(columns):
-                if c is None: return i
+            for i, c in enumerate(columns):
+                if c is None:
+                    return i
             columns.append(None)
             return len(columns) - 1
         for row, ci_json in enumerate(result):
@@ -263,14 +272,15 @@ class RepoRootController(BaseController, FeedController):
             ci_json['column'] = colno
             for p in parents[oid]:
                 p_col = col_idx.get(p, None)
-                if p_col is not None: continue
+                if p_col is not None:
+                    continue
                 p_col = find_column(columns)
                 col_idx[p] = p_col
                 columns[p_col] = p
         built_tree = dict(
-                (ci_json['oid'], ci_json) for ci_json in result)
+            (ci_json['oid'], ci_json) for ci_json in result)
         return dict(
-            commits=[ ci_json['oid'] for ci_json in result ],
+            commits=[ci_json['oid'] for ci_json in result],
             built_tree=built_tree,
             next_column=len(columns),
             max_row=row)
@@ -279,7 +289,9 @@ class RepoRootController(BaseController, FeedController):
     def status(self, **kw):
         return dict(status=c.app.repo.status)
 
+
 class RepoRestController(RepoRootController):
+
     @expose('json:')
     def index(self, **kw):
         all_commits = c.app.repo._impl.new_commits(all_commits=True)
@@ -292,7 +304,7 @@ class RepoRestController(RepoRootController):
         return {
             'commits': [
                 {
-                    'parents': [{'id':p} for p in commit['parents']],
+                    'parents': [{'id': p} for p in commit['parents']],
                     'url': c.app.repo.url_for_commit(commit['id']),
                     'id': commit['id'],
                     'message': commit['message'],
@@ -302,17 +314,18 @@ class RepoRestController(RepoRootController):
                     'author': {
                         'name': commit['authored']['name'],
                         'email': commit['authored']['email'],
-                        },
+                    },
                     'committer': {
                         'name': commit['committed']['name'],
                         'email': commit['committed']['email'],
                     },
                 }
-            for commit in revisions
-        ]}
+                for commit in revisions
+            ]}
+
 
 class MergeRequestsController(object):
-    mr_filter=SCMMergeRequestFilterWidget()
+    mr_filter = SCMMergeRequestFilterWidget()
 
     @expose('jinja:allura:templates/repo/merge_requests.html')
     @validate(mr_filter)
@@ -328,18 +341,20 @@ class MergeRequestsController(object):
     def _lookup(self, num, *remainder):
         return MergeRequestController(num), remainder
 
+
 class MergeRequestController(object):
-    log_widget=SCMLogWidget(show_paging=False)
-    thread_widget=w.Thread(
+    log_widget = SCMLogWidget(show_paging=False)
+    thread_widget = w.Thread(
         page=None, limit=None, page_size=None, count=None,
         style='linear')
-    mr_dispose_form=SCMMergeRequestDisposeWidget()
+    mr_dispose_form = SCMMergeRequestDisposeWidget()
 
     def __init__(self, num):
         self.req = M.MergeRequest.query.get(
             app_config_id=c.app.config._id,
             request_number=int(num))
-        if self.req is None: raise exc.HTTPNotFound
+        if self.req is None:
+            raise exc.HTTPNotFound
 
     @expose('jinja:allura:templates/repo/merge_request.html')
     def index(self, page=0, limit=250, **kw):
@@ -349,7 +364,7 @@ class MergeRequestController(object):
         with self.req.push_downstream_context():
             downstream_app = c.app
         return dict(
-            downstream_app = downstream_app,
+            downstream_app=downstream_app,
             req=self.req,
             page=page,
             limit=limit,
@@ -376,9 +391,10 @@ class RefsController(object):
         if EOR in remainder:
             i = remainder.index(quote(c.app.END_OF_REF_ESCAPE))
             ref = '/'.join((ref,) + remainder[:i])
-            remainder = remainder[i+1:]
+            remainder = remainder[i + 1:]
         return self.BranchBrowserClass(ref), remainder
 
+
 class CommitsController(object):
 
     @expose()
@@ -387,11 +403,12 @@ class CommitsController(object):
         if EOR in remainder:
             i = remainder.index(quote(c.app.END_OF_REF_ESCAPE))
             ci = '/'.join((ci,) + remainder[:i])
-            remainder = remainder[i+1:]
+            remainder = remainder[i + 1:]
         return CommitBrowser(ci), remainder
 
+
 class BranchBrowser(BaseController):
-    CommitBrowserClass=None
+    CommitBrowserClass = None
 
     def __init__(self, branch):
         self._branch = branch
@@ -415,11 +432,12 @@ class BranchBrowser(BaseController):
         ci = c.app.repo.commit(self._branch)
         redirect(ci.url() + 'log/')
 
+
 class CommitBrowser(BaseController):
-    TreeBrowserClass=None
+    TreeBrowserClass = None
     revision_widget = SCMRevisionWidget()
-    log_widget=SCMLogWidget()
-    page_list=ffw.PageList()
+    log_widget = SCMLogWidget()
+    page_list = ffw.PageList()
     DEFAULT_PAGE_LIMIT = 25
 
     def __init__(self, revision):
@@ -442,11 +460,11 @@ class CommitBrowser(BaseController):
         tree = self._commit.tree
         limit, page, start = g.handle_paging(limit, page,
                                              default=self.DEFAULT_PAGE_LIMIT)
-        diffs = self._commit.paged_diffs(start=start, end=start+limit)
+        diffs = self._commit.paged_diffs(start=start, end=start + limit)
         result['artifacts'] = [
-                (t,f) for t in ('added', 'removed', 'changed', 'copied')
-                    for f in diffs[t]
-                        if t == 'removed' or tree.get_blob_by_path(f)]
+            (t, f) for t in ('added', 'removed', 'changed', 'copied')
+            for f in diffs[t]
+            if t == 'removed' or tree.get_blob_by_path(f)]
         count = diffs['total']
         result.update(dict(page=page, limit=limit, count=count))
         return result
@@ -478,7 +496,6 @@ class CommitBrowser(BaseController):
         rev = self._commit.url().split('/')[-2]
         return dict(status=c.app.repo.get_tarball_status(rev, path))
 
-
     @expose('jinja:allura:templates/repo/log.html')
     @with_trailing_slash
     @validate(dict(page=validators.Int(if_empty=0, if_invalid=0),
@@ -488,10 +505,10 @@ class CommitBrowser(BaseController):
         if path:
             is_file = c.app.repo.is_file(path, self._commit._id)
         commits = list(islice(c.app.repo.log(
-                revs=self._commit._id,
-                path=path,
-                id_only=False,
-                page_size=limit+1), limit+1))
+            revs=self._commit._id,
+            path=path,
+            id_only=False,
+            page_size=limit + 1), limit + 1))
         next_commit = None
         if len(commits) > limit:
             next_commit = commits.pop()
@@ -509,7 +526,7 @@ class CommitBrowser(BaseController):
 
 class TreeBrowser(BaseController, DispatchIndex):
     tree_widget = SCMTreeWidget()
-    FileBrowserClass=None
+    FileBrowserClass = None
     subscribe_form = SubscribeForm()
 
     def __init__(self, commit, tree, path='', parent=None):
@@ -592,10 +609,12 @@ class FileBrowser(BaseController):
         if kw.pop('format', 'html') == 'raw':
             return self.raw()
         elif 'diff' in kw:
-            tg.decorators.override_template(self.index, 'jinja:allura:templates/repo/diff.html')
+            tg.decorators.override_template(
+                self.index, 'jinja:allura:templates/repo/diff.html')
             return self.diff(kw['diff'], kw.pop('diformat', None))
         elif 'barediff' in kw:
-            tg.decorators.override_template(self.index, 'jinja:allura:templates/repo/barediff.html')
+            tg.decorators.override_template(
+                self.index, 'jinja:allura:templates/repo/barediff.html')
             return self.diff(kw['barediff'], kw.pop('diformat', None))
         else:
             force_display = 'force' in kw
@@ -607,7 +626,7 @@ class FileBrowser(BaseController):
                 prev=context.get('prev', None),
                 next=context.get('next', None),
                 force_display=force_display
-                )
+            )
 
     @expose()
     def raw(self, **kw):
@@ -655,17 +674,20 @@ class FileBrowser(BaseController):
             diff = ''.join(difflib.unified_diff(la, lb, adesc, bdesc))
         return dict(a=a, b=b, diff=diff)
 
+
 def topo_sort(children, parents, dates, head_ids):
     to_visit = sorted(list(set(head_ids)), key=lambda x: dates[x])
     visited = set()
     while to_visit:
         next = to_visit.pop()
-        if next in visited: continue
+        if next in visited:
+            continue
         visited.add(next)
         yield next
         for p in parents[next]:
             for c in children[p]:
-                if c not in visited: break
+                if c not in visited:
+                    break
             else:
                 to_visit.append(p)
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/rest.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/rest.py b/Allura/allura/controllers/rest.py
index 84301df..cab7084 100644
--- a/Allura/allura/controllers/rest.py
+++ b/Allura/allura/controllers/rest.py
@@ -100,10 +100,12 @@ class RestController(object):
         c.api_token = self._authenticate_request()
         if c.api_token:
             c.user = c.api_token.user
-        neighborhood = M.Neighborhood.query.get(url_prefix = '/' + name + '/')
-        if not neighborhood: raise exc.HTTPNotFound, name
+        neighborhood = M.Neighborhood.query.get(url_prefix='/' + name + '/')
+        if not neighborhood:
+            raise exc.HTTPNotFound, name
         return NeighborhoodRestController(neighborhood), remainder
 
+
 class OAuthNegotiator(object):
 
     @LazyProperty
@@ -131,7 +133,7 @@ class OAuthNegotiator(object):
             headers=request.headers,
             parameters=dict(request.params),
             query_string=request.query_string
-            )
+        )
         consumer_token = M.OAuthConsumerToken.query.get(
             api_key=req['oauth_consumer_key'])
         access_token = M.OAuthAccessToken.query.get(
@@ -159,7 +161,7 @@ class OAuthNegotiator(object):
             headers=request.headers,
             parameters=dict(request.params),
             query_string=request.query_string
-            )
+        )
         consumer_token = M.OAuthConsumerToken.query.get(
             api_key=req['oauth_consumer_key'])
         if consumer_token is None:
@@ -174,7 +176,7 @@ class OAuthNegotiator(object):
         req_token = M.OAuthRequestToken(
             consumer_token_id=consumer_token._id,
             callback=req.get('oauth_callback', 'oob')
-            )
+        )
         session(req_token).flush()
         log.info('Saving new request token with key: %s', req_token.api_key)
         return req_token.to_string()
@@ -208,7 +210,7 @@ class OAuthNegotiator(object):
             url = rtok.callback + '&'
         else:
             url = rtok.callback + '?'
-        url+='oauth_token=%s&oauth_verifier=%s' % (
+        url += 'oauth_token=%s&oauth_verifier=%s' % (
             rtok.api_key, rtok.validation_pin)
         redirect(url)
 
@@ -220,7 +222,7 @@ class OAuthNegotiator(object):
             headers=request.headers,
             parameters=dict(request.params),
             query_string=request.query_string
-            )
+        )
         consumer_token = M.OAuthConsumerToken.query.get(
             api_key=req['oauth_consumer_key'])
         request_token = M.OAuthRequestToken.query.get(
@@ -244,12 +246,13 @@ class OAuthNegotiator(object):
             log.error('Invalid signature')
             raise exc.HTTPForbidden
         acc_token = M.OAuthAccessToken(
-                consumer_token_id=consumer_token._id,
-                request_token_id=request_token._id,
-                user_id=request_token.user_id,
-            )
+            consumer_token_id=consumer_token._id,
+            request_token_id=request_token._id,
+            user_id=request_token.user_id,
+        )
         return acc_token.to_string()
 
+
 class NeighborhoodRestController(object):
 
     def __init__(self, neighborhood):
@@ -259,24 +262,29 @@ class NeighborhoodRestController(object):
     def _lookup(self, name, *remainder):
         provider = plugin.ProjectRegistrationProvider.get()
         try:
-            provider.shortname_validator.to_python(name, check_allowed=False, neighborhood=self._neighborhood)
+            provider.shortname_validator.to_python(
+                name, check_allowed=False, neighborhood=self._neighborhood)
         except Invalid as e:
             raise exc.HTTPNotFound, name
         name = self._neighborhood.shortname_prefix + name
-        project = M.Project.query.get(shortname=name, neighborhood_id=self._neighborhood._id, deleted=False)
-        if not project: raise exc.HTTPNotFound, name
+        project = M.Project.query.get(
+            shortname=name, neighborhood_id=self._neighborhood._id, deleted=False)
+        if not project:
+            raise exc.HTTPNotFound, name
         c.project = project
         return ProjectRestController(), remainder
 
+
 class ProjectRestController(object):
 
     @expose()
     def _lookup(self, name, *remainder):
         if not name:
             return self, ()
-        subproject = M.Project.query.get(shortname=c.project.shortname + '/' + name,
-                                         neighborhood_id=c.project.neighborhood_id,
-                                         deleted=False)
+        subproject = M.Project.query.get(
+            shortname=c.project.shortname + '/' + name,
+            neighborhood_id=c.project.neighborhood_id,
+            deleted=False)
         if subproject:
             c.project = subproject
             c.app = None
@@ -288,7 +296,7 @@ class ProjectRestController(object):
         if app.api_root is None:
             raise exc.HTTPNotFound, name
         action_logger.info('', extra=dict(
-                api_key=request.params.get('api_key')))
+            api_key=request.params.get('api_key')))
         return app.api_root, remainder
 
     @expose('json:')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/root.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/root.py b/Allura/allura/controllers/root.py
index f00a596..0d7f96b 100644
--- a/Allura/allura/controllers/root.py
+++ b/Allura/allura/controllers/root.py
@@ -18,7 +18,9 @@
 #       under the License.
 
 """Main Controller"""
-import logging, string, os
+import logging
+import string
+import os
 from datetime import datetime
 from collections import defaultdict
 
@@ -54,10 +56,13 @@ log = logging.getLogger(__name__)
 
 TGFlash.static_template = '''$('#messages').notify('%(message)s', {status: '%(status)s'});'''
 
+
 class W:
     project_summary = plw.ProjectSummary()
 
+
 class RootController(WsgiDispatchController):
+
     """
     The root controller for the allura application.
 
@@ -78,8 +83,8 @@ class RootController(WsgiDispatchController):
     nf.admin = SiteAdminController()
     search = SearchController()
     rest = RestController()
-    if config.get('trovecategories.enableediting', 'false')=='true':
-        categories=TroveCategoryController()
+    if config.get('trovecategories.enableediting', 'false') == 'true':
+        categories = TroveCategoryController()
 
     def __init__(self):
         n_url_prefix = '/%s/' % request.path.split('/')[1]
@@ -93,9 +98,10 @@ class RootController(WsgiDispatchController):
     def _setup_request(self):
         c.project = c.app = None
         c.memoize_cache = {}
-        c.user = plugin.AuthenticationProvider.get(request).authenticate_request()
+        c.user = plugin.AuthenticationProvider.get(
+            request).authenticate_request()
         assert c.user is not None, ('c.user should always be at least User.anonymous(). '
-            'Did you run `paster setup-app` to create the database?')
+                                    'Did you run `paster setup-app` to create the database?')
 
     def _cleanup_request(self):
         pass
@@ -105,8 +111,9 @@ class RootController(WsgiDispatchController):
     def index(self, **kw):
         """Handle the front-page."""
         neighborhoods = M.Neighborhood.query.find().sort('name')
-        categories = M.ProjectCategory.query.find({'parent_id':None}).sort('name').all()
+        categories = M.ProjectCategory.query.find(
+            {'parent_id': None}).sort('name').all()
         c.custom_sidebar_menu = [
-            SitemapEntry(cat.label, '/browse/'+cat.name) for cat in categories
+            SitemapEntry(cat.label, '/browse/' + cat.name) for cat in categories
         ]
-        return dict(neighborhoods=neighborhoods,title="All Neighborhoods")
+        return dict(neighborhoods=neighborhoods, title="All Neighborhoods")

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/search.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/search.py b/Allura/allura/controllers/search.py
index a58ee9f..7afeb4d 100644
--- a/Allura/allura/controllers/search.py
+++ b/Allura/allura/controllers/search.py
@@ -29,10 +29,12 @@ from allura import model as M
 from allura.lib.widgets import project_list as plw
 from allura.controllers import BaseController
 
+
 class W:
     project_summary = plw.ProjectSummary()
     search_results = SearchResults()
 
+
 class SearchController(BaseController):
 
     @expose('jinja:allura:templates/search_index.html')
@@ -52,14 +54,17 @@ class SearchController(BaseController):
         d['hide_app_project_switcher'] = True
         return d
 
+
 class ProjectBrowseController(BaseController):
+
     def __init__(self, category_name=None, parent_category=None):
         self.parent_category = parent_category
         self.nav_stub = '/browse/'
         self.additional_filters = {}
         if category_name:
             parent_id = parent_category and parent_category._id or None
-            self.category = M.ProjectCategory.query.find(dict(name=category_name,parent_id=parent_id)).first()
+            self.category = M.ProjectCategory.query.find(
+                dict(name=category_name, parent_id=parent_id)).first()
             if not self.category:
                 raise exc.HTTPNotFound, request.path
         else:
@@ -74,33 +79,36 @@ class ProjectBrowseController(BaseController):
         return title
 
     def _build_nav(self):
-        categories = M.ProjectCategory.query.find({'parent_id':None}).sort('name').all()
+        categories = M.ProjectCategory.query.find(
+            {'parent_id': None}).sort('name').all()
         nav = []
         for cat in categories:
             nav.append(SitemapEntry(
                 cat.label,
-                self.nav_stub+cat.name,
-                ))
+                self.nav_stub + cat.name,
+            ))
             if (self.category and self.category._id == cat._id and cat.subcategories) or (
-                self.parent_category and self.parent_category._id == cat._id):
+                    self.parent_category and self.parent_category._id == cat._id):
                 for subcat in cat.subcategories:
                     nav.append(SitemapEntry(
                         subcat.label,
-                        self.nav_stub+cat.name+'/'+subcat.name,
-                        ))
+                        self.nav_stub + cat.name + '/' + subcat.name,
+                    ))
         return nav
 
-    def _find_projects(self,sort='alpha', limit=None, start=0):
+    def _find_projects(self, sort='alpha', limit=None, start=0):
         if self.category:
             ids = [self.category._id]
             # warning! this is written with the assumption that categories
             # are only two levels deep like the existing site
             if self.category.subcategories:
                 ids = ids + [cat._id for cat in self.category.subcategories]
-            pq = M.Project.query.find(dict(category_id={'$in':ids}, deleted=False, **self.additional_filters))
+            pq = M.Project.query.find(
+                dict(category_id={'$in': ids}, deleted=False, **self.additional_filters))
         else:
-            pq = M.Project.query.find(dict(deleted=False, **self.additional_filters))
-        if sort=='alpha':
+            pq = M.Project.query.find(
+                dict(deleted=False, **self.additional_filters))
+        if sort == 'alpha':
             pq.sort('name')
         else:
             pq.sort('last_updated', pymongo.DESCENDING)
@@ -120,6 +128,6 @@ class ProjectBrowseController(BaseController):
     def index(self, **kw):
         c.project_summary = W.project_summary
         projects, count = self._find_projects()
-        title=self._build_title()
+        title = self._build_title()
         c.custom_sidebar_menu = self._build_nav()
-        return dict(projects=projects,title=title,text=None)
+        return dict(projects=projects, title=title, text=None)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/site_admin.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/site_admin.py b/Allura/allura/controllers/site_admin.py
index d7e7c0b..b61da6c 100644
--- a/Allura/allura/controllers/site_admin.py
+++ b/Allura/allura/controllers/site_admin.py
@@ -46,10 +46,12 @@ from urlparse import urlparse
 
 log = logging.getLogger(__name__)
 
+
 class W:
     page_list = ffw.PageList()
     page_size = ffw.PageSize()
 
+
 class SiteAdminController(object):
 
     def __init__(self):
@@ -65,10 +67,12 @@ class SiteAdminController(object):
     def index(self):
         neighborhoods = []
         for n in M.Neighborhood.query.find():
-            project_count = M.Project.query.find(dict(neighborhood_id=n._id)).count()
-            configured_count = M.Project.query.find(dict(neighborhood_id=n._id, database_configured=True)).count()
+            project_count = M.Project.query.find(
+                dict(neighborhood_id=n._id)).count()
+            configured_count = M.Project.query.find(
+                dict(neighborhood_id=n._id, database_configured=True)).count()
             neighborhoods.append((n.name, project_count, configured_count))
-        neighborhoods.sort(key=lambda n:n[0])
+        neighborhoods.sort(key=lambda n: n[0])
         return dict(neighborhoods=neighborhoods)
 
     @expose('jinja:allura:templates/site_admin_api_tickets.html')
@@ -91,7 +95,8 @@ class SiteAdminController(object):
                 flash('JSON format error')
             if type(caps) is not type({}):
                 ok = False
-                flash('Capabilities must be a JSON dictionary, mapping capability name to optional discriminator(s) (or "")')
+                flash(
+                    'Capabilities must be a JSON dictionary, mapping capability name to optional discriminator(s) (or "")')
             try:
                 expires = dateutil.parser.parse(data['expires'])
             except ValueError:
@@ -100,7 +105,8 @@ class SiteAdminController(object):
             if ok:
                 tok = None
                 try:
-                    tok = M.ApiTicket(user_id=for_user._id, capabilities=caps, expires=expires)
+                    tok = M.ApiTicket(user_id=for_user._id,
+                                      capabilities=caps, expires=expires)
                     session(tok).flush()
                     log.info('New token: %s', tok)
                     flash('API Ticket created')
@@ -110,7 +116,8 @@ class SiteAdminController(object):
         elif request.method == 'GET':
             data = {'expires': datetime.utcnow() + timedelta(days=2)}
 
-        data['token_list'] = M.ApiTicket.query.find().sort('mod_date', pymongo.DESCENDING).all()
+        data['token_list'] = M.ApiTicket.query.find().sort(
+            'mod_date', pymongo.DESCENDING).all()
         log.info(data['token_list'])
         return data
 
@@ -119,7 +126,7 @@ class SiteAdminController(object):
         neighborhood = M.Neighborhood.query.find({
             "url_prefix": "/" + artifact_url[0] + "/"}).first()
 
-        if  artifact_url[0] == "u":
+        if artifact_url[0] == "u":
             project = M.Project.query.find({
                 "shortname": artifact_url[0] + "/" + artifact_url[1],
                 "neighborhood_id": neighborhood._id}).first()
@@ -196,10 +203,10 @@ class SiteAdminController(object):
         end = bson.ObjectId.from_datetime(end_dt)
         nb = M.Neighborhood.query.get(name='Users')
         projects = (M.Project.query.find({
-                'neighborhood_id': {'$ne': nb._id},
-                'deleted': False,
-                '_id': {'$lt': start, '$gt': end},
-            }).sort('_id', -1))
+            'neighborhood_id': {'$ne': nb._id},
+            'deleted': False,
+            '_id': {'$lt': start, '$gt': end},
+        }).sort('_id', -1))
         step = start_dt - end_dt
         params = request.params.copy()
         params['start-dt'] = (start_dt + step).strftime('%Y/%m/%d %H:%M:%S')
@@ -226,28 +233,36 @@ class SiteAdminController(object):
             if c.form_errors:
                 error_msg = 'Error: '
                 for msg in list(c.form_errors):
-                    names = {'prefix': 'Neighborhood prefix', 'shortname': 'Project shortname', 'mount_point': 'Repository mount point'}
+                    names = {'prefix': 'Neighborhood prefix', 'shortname':
+                             'Project shortname', 'mount_point': 'Repository mount point'}
                     error_msg += '%s: %s ' % (names[msg], c.form_errors[msg])
                     flash(error_msg, 'error')
                 return dict(prefix=prefix, shortname=shortname, mount_point=mount_point)
             nbhd = M.Neighborhood.query.get(url_prefix='/%s/' % prefix)
             if not nbhd:
-                flash('Neighborhood with prefix %s not found' % prefix, 'error')
+                flash('Neighborhood with prefix %s not found' %
+                      prefix, 'error')
                 return dict(prefix=prefix, shortname=shortname, mount_point=mount_point)
-            c.project = M.Project.query.get(shortname=shortname, neighborhood_id=nbhd._id)
+            c.project = M.Project.query.get(
+                shortname=shortname, neighborhood_id=nbhd._id)
             if not c.project:
-                flash('Project with shortname %s not found in neighborhood %s' % (shortname, nbhd.name), 'error')
+                flash(
+                    'Project with shortname %s not found in neighborhood %s' %
+                    (shortname, nbhd.name), 'error')
                 return dict(prefix=prefix, shortname=shortname, mount_point=mount_point)
             c.app = c.project.app_instance(mount_point)
             if not c.app:
-                flash('Mount point %s not found on project %s' % (mount_point, c.project.shortname), 'error')
+                flash('Mount point %s not found on project %s' %
+                      (mount_point, c.project.shortname), 'error')
                 return dict(prefix=prefix, shortname=shortname, mount_point=mount_point)
             source_url = c.app.config.options.get('init_from_url')
             source_path = c.app.config.options.get('init_from_path')
             if not (source_url or source_path):
-                flash('%s does not appear to be a cloned repo' % c.app, 'error')
+                flash('%s does not appear to be a cloned repo' %
+                      c.app, 'error')
                 return dict(prefix=prefix, shortname=shortname, mount_point=mount_point)
-            allura.tasks.repo_tasks.reclone_repo.post(prefix=prefix, shortname=shortname, mount_point=mount_point)
+            allura.tasks.repo_tasks.reclone_repo.post(
+                prefix=prefix, shortname=shortname, mount_point=mount_point)
             flash('Repository is being recloned')
         else:
             prefix = 'p'
@@ -255,6 +270,7 @@ class SiteAdminController(object):
             mount_point = ''
         return dict(prefix=prefix, shortname=shortname, mount_point=mount_point)
 
+
 class TaskManagerController(object):
 
     def _check_security(self):
@@ -274,8 +290,8 @@ class TaskManagerController(object):
             minutes = int(minutes)
         except ValueError as e:
             minutes = 1
-        start_dt = now - timedelta(minutes=(page_num-1)*minutes)
-        end_dt = now - timedelta(minutes=page_num*minutes)
+        start_dt = now - timedelta(minutes=(page_num - 1) * minutes)
+        end_dt = now - timedelta(minutes=page_num * minutes)
         start = bson.ObjectId.from_datetime(start_dt)
         end = bson.ObjectId.from_datetime(end_dt)
         query = {'_id': {'$gt': end}}
@@ -292,17 +308,19 @@ class TaskManagerController(object):
         for task in tasks:
             task.project = M.Project.query.get(_id=task.context.project_id)
             task.user = M.User.query.get(_id=task.context.user_id)
-        newer_url = tg.url(params=dict(request.params, page_num=page_num - 1)).lstrip('/')
-        older_url = tg.url(params=dict(request.params, page_num=page_num + 1)).lstrip('/')
+        newer_url = tg.url(
+            params=dict(request.params, page_num=page_num - 1)).lstrip('/')
+        older_url = tg.url(
+            params=dict(request.params, page_num=page_num + 1)).lstrip('/')
         return dict(
-                tasks=tasks,
-                page_num=page_num,
-                minutes=minutes,
-                newer_url=newer_url,
-                older_url=older_url,
-                window_start=start_dt,
-                window_end=end_dt,
-            )
+            tasks=tasks,
+            page_num=page_num,
+            minutes=minutes,
+            newer_url=newer_url,
+            older_url=older_url,
+            window_start=start_dt,
+            window_end=end_dt,
+        )
 
     @expose('jinja:allura:templates/site_admin_task_view.html')
     @without_trailing_slash
@@ -313,7 +331,8 @@ class TaskManagerController(object):
             task = None
         if task:
             task.project = M.Project.query.get(_id=task.context.project_id)
-            task.app_config = M.AppConfig.query.get(_id=task.context.app_config_id)
+            task.app_config = M.AppConfig.query.get(
+                _id=task.context.app_config_id)
             task.user = M.User.query.get(_id=task.context.user_id)
         return dict(task=task)
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/static.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/static.py b/Allura/allura/controllers/static.py
index dd8c37f..100b3a5 100644
--- a/Allura/allura/controllers/static.py
+++ b/Allura/allura/controllers/static.py
@@ -49,4 +49,4 @@ class NewForgeController(object):
         """
         css, md5 = g.tool_icon_css
         return utils.serve_file(StringIO(css), 'tool_icon_css', 'text/css',
-                etag=md5)
+                                etag=md5)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/task.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/task.py b/Allura/allura/controllers/task.py
index 59234e7..3275d94 100644
--- a/Allura/allura/controllers/task.py
+++ b/Allura/allura/controllers/task.py
@@ -15,7 +15,9 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
+
 class TaskController(object):
+
     '''WSGI app providing web-like RPC
 
     The purpose of this app is to allow us to replicate the
@@ -27,4 +29,4 @@ class TaskController(object):
         task = environ['task']
         result = task(restore_context=False)
         start_response('200 OK', [])
-        return [ result ]
+        return [result]
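
The task.py hunk above touches Allura's task-running WSGI shim: judging from the environ/start_response signature visible in the hunk, it pulls a task callable out of the WSGI environ, runs it, and returns the result as the response body. A minimal sketch of exercising it, assuming the method shown is the class's WSGI __call__ and that a bare TaskController() can be constructed (both are assumptions; the fake task is purely illustrative):

    from allura.controllers.task import TaskController

    def fake_task(restore_context=False):
        # Stand-in for a real queued task object.
        return 'done'

    controller = TaskController()
    # environ carries the task; start_response is a no-op here.
    body = controller({'task': fake_task}, lambda status, headers: None)
    assert body == ['done']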

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/template.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/template.py b/Allura/allura/controllers/template.py
index ea268af..89d4b11 100644
--- a/Allura/allura/controllers/template.py
+++ b/Allura/allura/controllers/template.py
@@ -27,6 +27,7 @@ __all__ = ['TemplateController']
 
 
 class TemplateController(WsgiDispatchController):
+
     """
     The fallback controller for allura.
     
@@ -49,7 +50,7 @@ class TemplateController(WsgiDispatchController):
                 abort(404)
     
     """
-    
+
     def view(self, url):
         """Abort the request with a 404 HTTP status code."""
         abort(404)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/controllers/trovecategories.py
----------------------------------------------------------------------
diff --git a/Allura/allura/controllers/trovecategories.py b/Allura/allura/controllers/trovecategories.py
index 9af28c5..21601db 100644
--- a/Allura/allura/controllers/trovecategories.py
+++ b/Allura/allura/controllers/trovecategories.py
@@ -15,7 +15,9 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-import logging, string, os
+import logging
+import string
+import os
 from urllib import urlencode
 
 import bson
@@ -32,16 +34,19 @@ from allura.controllers import BaseController
 from allura.lib.widgets import forms
 from allura.model import TroveCategory
 
+
 class F(object):
     remove_category_form = forms.RemoveTroveCategoryForm()
     add_category_form = forms.AddTroveCategoryForm()
 
+
 class TroveCategoryController(BaseController):
+
     @expose()
     def _lookup(self, catshortname, *remainder):
         cat = M.TroveCategory.query.get(shortname=catshortname)
         return TroveCategoryController(category=cat), remainder
-        
+
     def __init__(self, category=None):
         self.category = category
         super(TroveCategoryController, self).__init__()
@@ -56,15 +61,15 @@ class TroveCategoryController(BaseController):
             hierarchy = []
             temp_cat = self.category.parent_category
             while temp_cat:
-               hierarchy = [temp_cat] + hierarchy
-               temp_cat = temp_cat.parent_category
+                hierarchy = [temp_cat] + hierarchy
+                temp_cat = temp_cat.parent_category
         else:
             l = M.TroveCategory.query.find(dict(trove_parent_id=0)).all()
             selected_cat = None
             hierarchy = []
         return dict(
-            categories=l, 
-            selected_cat=selected_cat, 
+            categories=l,
+            selected_cat=selected_cat,
             hierarchy=hierarchy)
 
     @expose()
@@ -88,22 +93,23 @@ class TroveCategoryController(BaseController):
             path = upper.fullpath + " :: " + name
             show_as_skill = upper.show_as_skill
 
-        newid=max([el.trove_cat_id for el in M.TroveCategory.query.find()]) + 1
-        shortname=name.replace(" ", "_").lower()
-        shortname=''.join([(c if (c in digits or c in lowercase) else "_")
-                           for c in shortname])
+        newid = max(
+            [el.trove_cat_id for el in M.TroveCategory.query.find()]) + 1
+        shortname = name.replace(" ", "_").lower()
+        shortname = ''.join([(c if (c in digits or c in lowercase) else "_")
+                             for c in shortname])
 
-        oldcat=M.TroveCategory.query.get(shortname=shortname)
+        oldcat = M.TroveCategory.query.get(shortname=shortname)
         if oldcat:
             flash('Category "%s" already exists.' % name, "error")
         else:
             category = M.TroveCategory(
-               trove_cat_id=newid,
-               trove_parent_id=upper_id,
-               fullname=name,
-               shortname=shortname,
-               fullpath=path,
-               show_as_skill=show_as_skill)
+                trove_cat_id=newid,
+                trove_parent_id=upper_id,
+                fullname=name,
+                shortname=shortname,
+                fullpath=path,
+                show_as_skill=show_as_skill)
             if category:
                 flash('Category "%s" successfully created.' % name)
             else:
@@ -121,7 +127,8 @@ class TroveCategoryController(BaseController):
 
         cat = M.TroveCategory.query.get(trove_cat_id=int(kw['categoryid']))
         if cat.trove_parent_id:
-            parent=M.TroveCategory.query.get(trove_cat_id=cat.trove_parent_id)
+            parent = M.TroveCategory.query.get(
+                trove_cat_id=cat.trove_parent_id)
             redirecturl = '/categories/%s' % parent.shortname
         else:
             redirecturl = '/categories'
@@ -131,79 +138,78 @@ class TroveCategoryController(BaseController):
             flash(m, "error")
             redirect(redirecturl)
             return
-        
+
         if M.User.withskill(cat).count() > 0:
             m = "This category is used as a skill by at least a user, "
             m = m + "therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_root_database=cat._id):
             m = "This category is used as a database by at least a project, "
             m = m + "therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_developmentstatus=cat._id):
             m = "This category is used as development status by at least a "
             m = m + "project, therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_audience=cat._id):
             m = "This category is used as intended audience by at least a "
             m = m + "project, therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_license=cat._id):
             m = "This category is used as a license by at least a "
             m = m + "project, therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_os=cat._id):
             m = "This category is used as operating system by at least a "
             m = m + "project, therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_language=cat._id):
             m = "This category is used as programming language by at least a "
             m = m + "project, therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_topic=cat._id):
             m = "This category is used as a topic by at least a "
             m = m + "project, therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_natlanguage=cat._id):
             m = "This category is used as a natural language by at least a "
             m = m + "project, therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         if M.Project.query.get(trove_environment=cat._id):
             m = "This category is used as an environment by at least a "
             m = m + "project, therefore it can't be removed."
             flash(m, "error")
-            redirect(redirecturl) 
+            redirect(redirecturl)
             return
 
         M.TroveCategory.delete(cat)
 
         flash('Category removed.')
         redirect(redirecturl)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/eventslistener.py
----------------------------------------------------------------------
diff --git a/Allura/allura/eventslistener.py b/Allura/allura/eventslistener.py
index 3924b3b..f9f7431 100644
--- a/Allura/allura/eventslistener.py
+++ b/Allura/allura/eventslistener.py
@@ -19,7 +19,10 @@
 a specific entity (e.g. user, project, ...). To do so, the new classes should
 overwrite the methods defined here, which will be called when the related
 event happens, so that the statistics for the given entity are updated.'''
+
+
 class EventsListener:
+
     def newArtifact(self, art_type, art_datetime, project, user):
         pass
 
@@ -46,7 +49,10 @@ class EventsListener:
 
 '''This class simply allows to iterate through all the registered listeners,
 so that all of them are called to update statistics.'''
+
+
 class PostEvent:
+
     def __init__(self, listeners):
         self.listeners = listeners
 
@@ -58,7 +64,8 @@ class PostEvent:
         self.__iterate('newArtifact', art_type, art_datetime, project, user)
 
     def modifiedArtifact(self, art_type, art_datetime, project, user):
-        self.__iterate('modifiedArtifact',art_type,art_datetime,project,user)
+        self.__iterate('modifiedArtifact', art_type,
+                       art_datetime, project, user)
 
     def newUser(self, user):
         self.__iterate('newUser', user)
@@ -77,4 +84,3 @@ class PostEvent:
 
     def addUserToOrganization(self, organization):
         self.__iterate('addUserToOrganization', organization)
-
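
The docstrings in this diff describe the intended pattern: statistics classes subclass EventsListener and override the hooks they care about, while PostEvent fans each event out to every registered listener. A minimal, hypothetical sketch of that wiring (the ArtifactCounter class and its bookkeeping are illustrative assumptions, not Allura code):

    from allura.eventslistener import EventsListener, PostEvent

    class ArtifactCounter(EventsListener):
        """Hypothetical listener that counts new artifacts per project."""
        def __init__(self):
            self.counts = {}

        def newArtifact(self, art_type, art_datetime, project, user):
            key = project.shortname
            self.counts[key] = self.counts.get(key, 0) + 1

    # Register the listener once; application code reports events through PostEvent.
    events = PostEvent([ArtifactCounter()])
    # e.g. after a ticket is created:
    # events.newArtifact('Ticket', datetime.utcnow(), project, user)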


[31/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/command/create_trove_categories.py
----------------------------------------------------------------------
diff --git a/Allura/allura/command/create_trove_categories.py b/Allura/allura/command/create_trove_categories.py
index c39dbbd..39c49fe 100644
--- a/Allura/allura/command/create_trove_categories.py
+++ b/Allura/allura/command/create_trove_categories.py
@@ -26,9 +26,10 @@ from allura import model as M
 
 log = logging.getLogger(__name__)
 
+
 class CreateTroveCategoriesCommand(base.Command):
-    min_args=1
-    max_args=None
+    min_args = 1
+    max_args = None
     usage = '<ini file>'
     summary = 'Remove any existing trove categories and load new ones'
     parser = base.Command.standard_parser(verbose=True)
@@ -57,655 +58,1393 @@ class CreateTroveCategoriesCommand(base.Command):
     def update_trove_cat(self, trove_cat_id, attr_dict):
         t = M.TroveCategory.query.get(trove_cat_id=trove_cat_id)
         if not t:
-            sys.exit("Couldn't find TroveCategory with trove_cat_id=%s" % trove_cat_id)
+            sys.exit("Couldn't find TroveCategory with trove_cat_id=%s" %
+                     trove_cat_id)
         for k, v in attr_dict.iteritems():
             setattr(t, k, v)
 
     def command(self):
         self.basic_setup()
         M.TroveCategory.query.remove()
-        self.create_trove_cat((617,274,"kirghiz","Kirghiz","Translations :: Kirghiz",True))
-        self.create_trove_cat((372,274,"croatian","Croatian","Translations :: Croatian",True))
-        self.create_trove_cat((351,274,"thai","Thai","Translations :: Thai",True))
-        self.create_trove_cat((349,274,"tamil","Tamil","Translations :: Tamil",True))
-        self.create_trove_cat((347,274,"romanian","Romanian","Translations :: Romanian",True))
-        self.create_trove_cat((339,274,"korean","Korean","Translations :: Korean",True))
-        self.create_trove_cat((632,160,"cobol","COBOL","Programming Language :: COBOL",True))
-        self.create_trove_cat((598,160,"aspectj","AspectJ","Programming Language :: AspectJ",True))
-        self.create_trove_cat((167,160,"euler","Euler","Programming Language :: Euler",True))
-        self.create_trove_cat((185,160,"shell","Unix Shell","Programming Language :: Unix Shell",True))
-        self.create_trove_cat((184,160,"asp","ASP","Programming Language :: ASP",True))
-        self.create_trove_cat((273,160,"Pike","Pike","Programming Language :: Pike",True))
-        self.create_trove_cat((271,160,"csharp","C#","Programming Language :: C#",True))
-        self.create_trove_cat((170,160,"lisp","Lisp","Programming Language :: Lisp",True))
-        self.create_trove_cat((169,160,"fortran","Fortran","Programming Language :: Fortran",True))
-        self.create_trove_cat((625,160,"simulink","Simulink","Programming Language :: Simulink",True))
-        self.create_trove_cat((626,160,"matlab","MATLAB","Programming Language :: MATLAB",True))
-        self.create_trove_cat((1,0,"audience","Intended Audience","Intended Audience",False))
-        self.create_trove_cat((618,535,"nonprofit","Non-Profit Organizations","Intended Audience :: by Industry or Sector :: Non-Profit Organizations",False))
-        self.create_trove_cat((599,535,"aerospace","Aerospace","Intended Audience :: by Industry or Sector :: Aerospace",False))
-        self.create_trove_cat((569,535,"government","Government","Intended Audience :: by Industry or Sector :: Government",False))
-        self.create_trove_cat((363,535,"informationtechnology","Information Technology","Intended Audience :: by Industry or Sector :: Information Technology",False))
-        self.create_trove_cat((361,535,"financialinsurance","Financial and Insurance Industry","Intended Audience :: by Industry or Sector :: Financial and Insurance Industry",False))
-        self.create_trove_cat((362,535,"healthcareindustry","Healthcare Industry","Intended Audience :: by Industry or Sector :: Healthcare Industry",False))
-        self.create_trove_cat((367,535,"scienceresearch","Science/Research","Intended Audience :: by Industry or Sector :: Science/Research",False))
-        self.create_trove_cat((359,535,"customerservice","Customer Service","Intended Audience :: by Industry or Sector :: Customer Service",False))
-        self.create_trove_cat((360,535,"education","Education","Intended Audience :: by Industry or Sector :: Education",False))
-        self.create_trove_cat((365,535,"manufacturing","Manufacturing","Intended Audience :: by Industry or Sector :: Manufacturing",False))
-        self.create_trove_cat((368,535,"telecommunications","Telecommunications Industry","Intended Audience :: by Industry or Sector :: Telecommunications Industry",False))
-        self.create_trove_cat((166,160,"eiffel","Eiffel","Programming Language :: Eiffel",True))
-        self.create_trove_cat((550,160,"oberon","Oberon","Programming Language :: Oberon",True))
-        self.create_trove_cat((553,160,"realbasic","REALbasic","Programming Language :: REALbasic",True))
-        self.create_trove_cat((178,160,"python","Python","Programming Language :: Python",True))
-        self.create_trove_cat((179,160,"rexx","Rexx","Programming Language :: Rexx",True))
-        self.create_trove_cat((177,160,"prolog","Prolog","Programming Language :: Prolog",True))
-        self.create_trove_cat((176,160,"perl","Perl","Programming Language :: Perl",True))
-        self.create_trove_cat((175,160,"pascal","Pascal","Programming Language :: Pascal",True))
-        self.create_trove_cat((536,534,"enduser_advanced","Advanced End Users","Intended Audience :: by End-User Class :: Advanced End Users",False))
-        self.create_trove_cat((4,534,"sysadmins","System Administrators","Intended Audience :: by End-User Class :: System Administrators",False))
-        self.create_trove_cat((471,456,"ui_swing","Java Swing","User Interface :: Graphical :: Java Swing",True))
-        self.create_trove_cat((469,456,"ui_dotnet",".NET/Mono","User Interface :: Graphical :: .NET/Mono",True))
-        self.create_trove_cat((231,456,"gnome","Gnome","User Interface :: Graphical :: Gnome",True))
-        self.create_trove_cat((229,456,"x11","X Window System (X11)","User Interface :: Graphical :: X Window System (X11)",True))
-        self.create_trove_cat((475,456,"ui_opengl","OpenGL","User Interface :: Graphical :: OpenGL",True))
-        self.create_trove_cat((474,456,"ui_framebuffer","Framebuffer","User Interface :: Graphical :: Framebuffer",True))
-        self.create_trove_cat((472,456,"ui_swt","Java SWT","User Interface :: Graphical :: Java SWT",True))
-        self.create_trove_cat((470,456,"ui_awt","Java AWT","User Interface :: Graphical :: Java AWT",True))
-        self.create_trove_cat((230,456,"win32","Win32 (MS Windows)","User Interface :: Graphical :: Win32 (MS Windows)",True))
-        self.create_trove_cat((232,456,"kde","KDE","User Interface :: Graphical :: KDE",True))
-        self.create_trove_cat((310,456,"cocoa","Cocoa (MacOS X)","User Interface :: Graphical :: Cocoa (MacOS X)",True))
-        self.create_trove_cat((476,456,"ui_tabletpc","TabletPC","User Interface :: Graphical :: TabletPC",True))
-        self.create_trove_cat((314,456,"handhelds","Handheld/Mobile/PDA","User Interface :: Graphical :: Handheld/Mobile/PDA",True))
-        self.create_trove_cat((462,225,"ui_groupingdesc","Grouping and Descriptive Categories (UI)","User Interface :: Grouping and Descriptive Categories (UI)",True))
-        self.create_trove_cat((466,462,"ui_meta_3d","Project is a 3D engine","User Interface :: Grouping and Descriptive Categories (UI) :: Project is a 3D engine",True))
-        self.create_trove_cat((464,462,"ui_meta_template","Project is a templating system","User Interface :: Grouping and Descriptive Categories (UI) :: Project is a templating system",True))
-        self.create_trove_cat((463,462,"ui_meta_system","Project is a user interface (UI) system","User Interface :: Grouping and Descriptive Categories (UI) :: Project is a user interface (UI) system",True))
-        self.create_trove_cat((465,462,"ui_meta_windowmanager","Project is a window manager","User Interface :: Grouping and Descriptive Categories (UI) :: Project is a window manager",True))
-        self.create_trove_cat((467,462,"ui_meta_toolkit","Project is a graphics toolkit","User Interface :: Grouping and Descriptive Categories (UI) :: Project is a graphics toolkit",True))
-        self.create_trove_cat((468,462,"ui_meta_remotecontrol","Project is a remote control application","User Interface :: Grouping and Descriptive Categories (UI) :: Project is a remote control application",True))
-        self.create_trove_cat((237,225,"web","Web-based","User Interface :: Web-based",True))
-        self.create_trove_cat((238,225,"daemon","Non-interactive (Daemon)","User Interface :: Non-interactive (Daemon)",True))
-        self.create_trove_cat((457,225,"textual_ui","Textual","User Interface :: Textual",True))
-        self.create_trove_cat((460,457,"ui_consoleterm","Console/Terminal","User Interface :: Textual :: Console/Terminal",True))
-        self.create_trove_cat((459,457,"ui_commandline","Command-line","User Interface :: Textual :: Command-line",True))
-        self.create_trove_cat((225,0,"environment","User Interface","User Interface",True))
-        self.create_trove_cat((461,225,"ui_plugins","Plugins","User Interface :: Plugins",True))
-        self.create_trove_cat((583,461,"eclipse_plugins","Eclipse","User Interface :: Plugins :: Eclipse",True))
-        self.create_trove_cat((458,225,"ui_toolkit","Toolkits/Libraries","User Interface :: Toolkits/Libraries",True))
-        self.create_trove_cat((495,458,"ui_othertoolkit","Other toolkit","User Interface :: Toolkits/Libraries :: Other toolkit",True))
-        self.create_trove_cat((493,458,"ui_motif","Motif/LessTif","User Interface :: Toolkits/Libraries :: Motif/LessTif",True))
-        self.create_trove_cat((491,458,"ui_crystalspace","Crystal Space","User Interface :: Toolkits/Libraries :: Crystal Space",True))
-        self.create_trove_cat((489,458,"ui_clanlib","ClanLib","User Interface :: Toolkits/Libraries :: ClanLib",True))
-        self.create_trove_cat((516,500,"db_group_objmap","Project is a relational object mapper","Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a relational object mapper",True))
-        self.create_trove_cat((487,458,"ui_ggi","GGI","User Interface :: Toolkits/Libraries :: GGI",True))
-        self.create_trove_cat((485,458,"ui_directx","DirectX","User Interface :: Toolkits/Libraries :: DirectX",True))
-        self.create_trove_cat((483,458,"ui_svgalib","SVGAlib","User Interface :: Toolkits/Libraries :: SVGAlib",True))
-        self.create_trove_cat((481,458,"ui_wxwidgets","wxWidgets","User Interface :: Toolkits/Libraries :: wxWidgets",True))
-        self.create_trove_cat((511,500,"db_group_mgmt","Project is a database management tool","Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a database management tool",True))
-        self.create_trove_cat((479,458,"ui_qt","Qt","User Interface :: Toolkits/Libraries :: Qt",True))
-        self.create_trove_cat((477,458,"ui_gtk","GTK+","User Interface :: Toolkits/Libraries :: GTK+",True))
-        self.create_trove_cat((513,500,"db_group_netdbms","Project is a network-based DBMS (database system)","Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a network-based DBMS (database system)",True))
-        self.create_trove_cat((228,458,"newt","Newt","User Interface :: Toolkits/Libraries :: Newt",True))
-        self.create_trove_cat((227,458,"curses","Curses/Ncurses","User Interface :: Toolkits/Libraries :: Curses/Ncurses",True))
-        self.create_trove_cat((515,500,"db_group_conv","Project is a database conversion tool","Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a database conversion tool",True))
-        self.create_trove_cat((478,458,"ui_tk","Tk","User Interface :: Toolkits/Libraries :: Tk",True))
-        self.create_trove_cat((480,458,"ui_sdl","SDL","User Interface :: Toolkits/Libraries :: SDL",True))
-        self.create_trove_cat((33,28,"postoffice","Post-Office","Topic :: Communications :: Email :: Post-Office",True))
-        self.create_trove_cat((514,500,"db_group_propfmt","Project is a tool for a proprietary database file format","Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a tool for a proprietary database file format",True))
-        self.create_trove_cat((482,458,"ui_aalib","AAlib","User Interface :: Toolkits/Libraries :: AAlib",True))
-        self.create_trove_cat((484,458,"ui_fltk","FLTK","User Interface :: Toolkits/Libraries :: FLTK",True))
-        self.create_trove_cat((512,500,"db_group_filedbms","Project is a file-based DBMS (database system)","Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a file-based DBMS (database system)",True))
-        self.create_trove_cat((486,458,"ui_plib","Plib","User Interface :: Toolkits/Libraries :: Plib",True))
-        self.create_trove_cat((488,458,"ui_glide","Glide","User Interface :: Toolkits/Libraries :: Glide",True))
-        self.create_trove_cat((510,500,"db_group_api","Project is a database abstraction layer (API)","Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a database abstraction layer (API)",True))
-        self.create_trove_cat((490,458,"ui_glut","GLUT","User Interface :: Toolkits/Libraries :: GLUT",True))
-        self.create_trove_cat((492,458,"ui_allegro","Allegro","User Interface :: Toolkits/Libraries :: Allegro",True))
-        self.create_trove_cat((500,496,"db_grouping","Grouping and Descriptive Categories (DB)","Database Environment :: Grouping and Descriptive Categories (DB)",True))
-        self.create_trove_cat((494,458,"ui_quartz","Quartz","User Interface :: Toolkits/Libraries :: Quartz",True))
-        self.create_trove_cat((456,225,"graphical_ui","Graphical","User Interface :: Graphical",True))
-        self.create_trove_cat((276,274,"french","French","Translations :: French",True))
-        self.create_trove_cat((473,456,"ui_carbon","Carbon (Mac OS X)","User Interface :: Graphical :: Carbon (Mac OS X)",True))
-        self.create_trove_cat((535,1,"by_industrysector","by Industry or Sector","Intended Audience :: by Industry or Sector",False))
-        self.create_trove_cat((364,535,"legalindustry","Legal Industry","Intended Audience :: by Industry or Sector :: Legal Industry",False))
-        self.create_trove_cat((353,274,"ukrainian","Ukrainian","Translations :: Ukrainian",True))
-        self.create_trove_cat((330,274,"dutch","Dutch","Translations :: Dutch",True))
-        self.create_trove_cat((343,274,"persian","Persian","Translations :: Persian",True))
-        self.create_trove_cat((344,274,"polish","Polish","Translations :: Polish",True))
-        self.create_trove_cat((455,274,"irish_gaelic","Irish Gaelic","Translations :: Irish Gaelic",True))
-        self.create_trove_cat((413,274,"lithuanian","Lithuanian","Translations :: Lithuanian",True))
-        self.create_trove_cat((414,274,"albanian","Albanian","Translations :: Albanian",True))
-        self.create_trove_cat((415,274,"malagasy","Malagasy","Translations :: Malagasy",True))
-        self.create_trove_cat((416,274,"mongolian","Mongolian","Translations :: Mongolian",True))
-        self.create_trove_cat((417,274,"maltese","Maltese","Translations :: Maltese",True))
-        self.create_trove_cat((380,274,"slovenian","Slovene","Translations :: Slovene",True))
-        self.create_trove_cat((374,274,"icelandic","Icelandic","Translations :: Icelandic",True))
-        self.create_trove_cat((376,274,"macedonian","Macedonian","Translations :: Macedonian",True))
-        self.create_trove_cat((377,274,"latin","Latin","Translations :: Latin",True))
-        self.create_trove_cat((375,274,"latvian","Latvian","Translations :: Latvian",True))
-        self.create_trove_cat((373,274,"czech","Czech","Translations :: Czech",True))
-        self.create_trove_cat((369,274,"afrikaans","Afrikaans","Translations :: Afrikaans",True))
-        self.create_trove_cat((357,274,"finnish","Finnish","Translations :: Finnish",True))
-        self.create_trove_cat((186,160,"visualbasic","Visual Basic","Programming Language :: Visual Basic",True))
-        self.create_trove_cat((505,499,"db_pear","PHP Pear::DB","Database Environment :: Database API :: PHP Pear::DB",True))
-        self.create_trove_cat((507,499,"db_api_xml","XML-based","Database Environment :: Database API :: XML-based",True))
-        self.create_trove_cat((509,499,"db_api_other","Other API","Database Environment :: Database API :: Other API",True))
-        self.create_trove_cat((532,497,"db_net_hsql","HSQL","Database Environment :: Network-based DBMS :: HSQL",True))
-        self.create_trove_cat((547,160,"applescript","AppleScript","Programming Language :: AppleScript",True))
-        self.create_trove_cat((173,160,"modula","Modula","Programming Language :: Modula",True))
-        self.create_trove_cat((337,274,"italian","Italian","Translations :: Italian",True))
-        self.create_trove_cat((333,274,"hebrew","Hebrew","Translations :: Hebrew",True))
-        self.create_trove_cat((331,274,"esperanto","Esperanto","Translations :: Esperanto",True))
-        self.create_trove_cat((329,274,"catalan","Catalan","Translations :: Catalan",True))
-        self.create_trove_cat((327,274,"bengali","Bengali","Translations :: Bengali",True))
-        self.create_trove_cat((332,274,"greek","Greek","Translations :: Greek",True))
-        self.create_trove_cat((341,274,"marathi","Marathi","Translations :: Marathi",True))
-        self.create_trove_cat((355,274,"vietnamese","Vietnamese","Translations :: Vietnamese",True))
-        self.create_trove_cat((275,274,"english","English","Translations :: English",True))
-        self.create_trove_cat((345,274,"portuguese","Portuguese","Translations :: Portuguese",True))
-        self.create_trove_cat((171,160,"logo","Logo","Programming Language :: Logo",True))
-        self.create_trove_cat((502,499,"db_api_jdbc","JDBC","Database Environment :: Database API :: JDBC",True))
-        self.create_trove_cat((504,499,"db_api_perldbi","Perl DBI/DBD","Database Environment :: Database API :: Perl DBI/DBD",True))
-        self.create_trove_cat((274,0,"natlanguage","Translations","Translations",True))
-        self.create_trove_cat((506,499,"db_python","Python Database API","Database Environment :: Database API :: Python Database API",True))
-        self.create_trove_cat((526,497,"db_net_oracle","Oracle","Database Environment :: Network-based DBMS :: Oracle",True))
-        self.create_trove_cat((524,497,"db_net_mysql","MySQL","Database Environment :: Network-based DBMS :: MySQL",True))
-        self.create_trove_cat((525,497,"db_net_pgsql","PostgreSQL (pgsql)","Database Environment :: Network-based DBMS :: PostgreSQL (pgsql)",True))
-        self.create_trove_cat((527,497,"db_net_ibmdb2","IBM DB2","Database Environment :: Network-based DBMS :: IBM DB2",True))
-        self.create_trove_cat((529,497,"db_net_sybase","Sybase","Database Environment :: Network-based DBMS :: Sybase",True))
-        self.create_trove_cat((531,497,"db_net_sqlite","SQLite","Database Environment :: Network-based DBMS :: SQLite",True))
-        self.create_trove_cat((533,497,"db_net_other","Other network-based DBMS","Database Environment :: Network-based DBMS :: Other network-based DBMS",True))
-        self.create_trove_cat((497,496,"db_networkbased","Network-based DBMS","Database Environment :: Network-based DBMS",True))
-        self.create_trove_cat((426,199,"os_emu_api","Emulation and API Compatibility","Operating System :: Emulation and API Compatibility",True))
-        self.create_trove_cat((311,236,"macos9","Apple Mac OS Classic","Operating System :: Other Operating Systems :: Apple Mac OS Classic",True))
-        self.create_trove_cat((224,236,"beos","BeOS","Operating System :: Other Operating Systems :: BeOS",True))
-        self.create_trove_cat((215,236,"msdos","MS-DOS","Operating System :: Other Operating Systems :: MS-DOS",True))
-        self.create_trove_cat((421,236,"mswin_95","Win95","Operating System :: Other Operating Systems :: Win95",True))
-        self.create_trove_cat((508,499,"db_api_sql","SQL-based","Database Environment :: Database API :: SQL-based",True))
-        self.create_trove_cat((499,496,"db_api","Database API","Database Environment :: Database API",True))
-        self.create_trove_cat((378,274,"serbian","Serbian","Translations :: Serbian",True))
-        self.create_trove_cat((379,274,"slovak","Slovak","Translations :: Slovak",True))
-        self.create_trove_cat((371,274,"chinesetraditional","Chinese (Traditional)","Translations :: Chinese (Traditional)",True))
-        self.create_trove_cat((410,274,"belarusian","Belarusian","Translations :: Belarusian",True))
-        self.create_trove_cat((411,274,"estonian","Estonian","Translations :: Estonian",True))
-        self.create_trove_cat((412,274,"galician","Galician","Translations :: Galician",True))
-        self.create_trove_cat((34,33,"pop3","POP3","Topic :: Communications :: Email :: Post-Office :: POP3",True))
-        self.create_trove_cat((35,33,"imap","IMAP","Topic :: Communications :: Email :: Post-Office :: IMAP",True))
-        self.create_trove_cat((29,28,"filters","Filters","Topic :: Communications :: Email :: Filters",True))
-        self.create_trove_cat((30,28,"listservers","Mailing List Servers","Topic :: Communications :: Email :: Mailing List Servers",True))
-        self.create_trove_cat((597,80,"card_games","Card Games","Topic :: Games/Entertainment :: Card Games",True))
-        self.create_trove_cat((63,18,"editors","Text Editors","Topic :: Text Editors",True))
-        self.create_trove_cat((366,535,"religion","Religion","Intended Audience :: by Industry or Sector :: Religion",False))
-        self.create_trove_cat((534,1,"by_enduser","by End-User Class","Intended Audience :: by End-User Class",False))
-        self.create_trove_cat((528,497,"db_net_firebird","Firebird/InterBase","Database Environment :: Network-based DBMS :: Firebird/InterBase",True))
-        self.create_trove_cat((3,534,"developers","Developers","Intended Audience :: by End-User Class :: Developers",False))
-        self.create_trove_cat((530,497,"db_net_mssql","Microsoft SQL Server","Database Environment :: Network-based DBMS :: Microsoft SQL Server",True))
-        self.create_trove_cat((2,534,"endusers","End Users/Desktop","Intended Audience :: by End-User Class :: End Users/Desktop",False))
-        self.create_trove_cat((498,496,"db_filebased","File-based DBMS","Database Environment :: File-based DBMS",True))
-        self.create_trove_cat((537,534,"enduser_qa","Quality Engineers","Intended Audience :: by End-User Class :: Quality Engineers",False))
-        self.create_trove_cat((5,1,"other","Other Audience","Intended Audience :: Other Audience",False))
-        self.create_trove_cat((517,498,"db_file_dbm","Berkeley/Sleepycat/Gdbm (DBM)","Database Environment :: File-based DBMS :: Berkeley/Sleepycat/Gdbm (DBM)",True))
-        self.create_trove_cat((358,6,"inactive","7 - Inactive","Development Status :: 7 - Inactive",False))
-        self.create_trove_cat((520,498,"db_file_palm","PalmOS PDB","Database Environment :: File-based DBMS :: PalmOS PDB",True))
-        self.create_trove_cat((523,498,"db_file_other","Other file-based DBMS","Database Environment :: File-based DBMS :: Other file-based DBMS",True))
-        self.create_trove_cat((165,160,"cpp","C++","Programming Language :: C++",True))
-        self.create_trove_cat((163,160,"ada","Ada","Programming Language :: Ada",True))
-        self.create_trove_cat((328,274,"bulgarian","Bulgarian","Translations :: Bulgarian",True))
-        self.create_trove_cat((546,274,"swahili","Swahili","Translations :: Swahili",True))
-        self.create_trove_cat((348,274,"swedish","Swedish","Translations :: Swedish",True))
-        self.create_trove_cat((350,274,"telugu","Telugu","Translations :: Telugu",True))
-        self.create_trove_cat((162,160,"assembly","Assembly","Programming Language :: Assembly",True))
-        self.create_trove_cat((164,160,"c","C","Programming Language :: C",True))
-        self.create_trove_cat((161,160,"apl","APL","Programming Language :: APL",True))
-        self.create_trove_cat((267,160,"zope","Zope","Programming Language :: Zope",True))
-        self.create_trove_cat((264,160,"erlang","Erlang","Programming Language :: Erlang",True))
-        self.create_trove_cat((263,160,"euphoria","Euphoria","Programming Language :: Euphoria",True))
-        self.create_trove_cat((183,160,"php","PHP","Programming Language :: PHP",True))
-        self.create_trove_cat((182,160,"tcl","Tcl","Programming Language :: Tcl",True))
-        self.create_trove_cat((181,160,"smalltalk","Smalltalk","Programming Language :: Smalltalk",True))
-        self.create_trove_cat((180,160,"simula","Simula","Programming Language :: Simula",True))
-        self.create_trove_cat((174,160,"objectivec","Objective C","Programming Language :: Objective C",True))
-        self.create_trove_cat((560,160,"xsl","XSL (XSLT/XPath/XSL-FO)","Programming Language :: XSL (XSLT/XPath/XSL-FO)",True))
-        self.create_trove_cat((293,160,"ruby","Ruby","Programming Language :: Ruby",True))
-        self.create_trove_cat((265,160,"Delphi","Delphi/Kylix","Programming Language :: Delphi/Kylix",True))
-        self.create_trove_cat((281,160,"REBOL","REBOL","Programming Language :: REBOL",True))
-        self.create_trove_cat((454,160,"ocaml","OCaml (Objective Caml)","Programming Language :: OCaml (Objective Caml)",True))
-        self.create_trove_cat((453,160,"vb_net","Visual Basic .NET","Programming Language :: Visual Basic .NET",True))
-        self.create_trove_cat((452,160,"visual_foxpro","Visual FoxPro","Programming Language :: Visual FoxPro",True))
-        self.create_trove_cat((451,160,"haskell","Haskell","Programming Language :: Haskell",True))
-        self.create_trove_cat((450,160,"lua","Lua","Programming Language :: Lua",True))
-        self.create_trove_cat((280,160,"JavaScript","JavaScript","Programming Language :: JavaScript",True))
-        self.create_trove_cat((262,160,"coldfusion","Cold Fusion","Programming Language :: Cold Fusion",True))
-        self.create_trove_cat((261,160,"xbasic","XBasic","Programming Language :: XBasic",True))
-        self.create_trove_cat((258,160,"objectpascal","Object Pascal","Programming Language :: Object Pascal",True))
-        self.create_trove_cat((539,160,"proglang_basic","BASIC","Programming Language :: BASIC",True))
-        self.create_trove_cat((543,160,"groovy","Groovy","Programming Language :: Groovy",True))
-        self.create_trove_cat((545,160,"proglang_labview","LabVIEW","Programming Language :: LabVIEW",True))
-        self.create_trove_cat((548,160,"vbscript","VBScript","Programming Language :: VBScript",True))
-        self.create_trove_cat((552,160,"d_proglang","D","Programming Language :: D",True))
-        self.create_trove_cat((551,160,"vhdl_verilog","VHDL/Verilog","Programming Language :: VHDL/Verilog",True))
-        self.create_trove_cat((549,160,"proglang_lpc","LPC","Programming Language :: LPC",True))
-        self.create_trove_cat((544,160,"yacc","Yacc","Programming Language :: Yacc",True))
-        self.create_trove_cat((352,274,"turkish","Turkish","Translations :: Turkish",True))
-        self.create_trove_cat((354,274,"urdu","Urdu","Translations :: Urdu",True))
-        self.create_trove_cat((160,0,"language","Programming Language","Programming Language",True))
-        self.create_trove_cat((542,160,"emacs_lisp","Emacs-Lisp","Programming Language :: Emacs-Lisp",True))
-        self.create_trove_cat((540,160,"clisp","Common Lisp","Programming Language :: Common Lisp",True))
-        self.create_trove_cat((12,6,"mature","6 - Mature","Development Status :: 6 - Mature",False))
-        self.create_trove_cat((538,160,"awk","AWK","Programming Language :: AWK",True))
-        self.create_trove_cat((572,160,"jsp","JSP","Programming Language :: JSP",True))
-        self.create_trove_cat((172,160,"ml","Standard ML","Programming Language :: Standard ML",True))
-        self.create_trove_cat((255,160,"progress","PROGRESS","Programming Language :: PROGRESS",True))
-        self.create_trove_cat((254,160,"plsql","PL/SQL","Programming Language :: PL/SQL",True))
-        self.create_trove_cat((242,160,"scheme","Scheme","Programming Language :: Scheme",True))
-        self.create_trove_cat((624,160,"idl","IDL","Programming Language :: IDL",True))
-        self.create_trove_cat((198,160,"java","Java","Programming Language :: Java",True))
-        self.create_trove_cat((589,160,"asp_dot_net","ASP.NET","Programming Language :: ASP.NET",True))
-        self.create_trove_cat((608,160,"mumps","MUMPS","Programming Language :: MUMPS",True))
-        self.create_trove_cat((541,160,"dylan","Dylan","Programming Language :: Dylan",True))
-        self.create_trove_cat((573,160,"s_slash_r","S/R","Programming Language :: S/R",True))
-        self.create_trove_cat((584,160,"actionscript","ActionScript","Programming Language :: ActionScript",True))
-        self.create_trove_cat((168,160,"forth","Forth","Programming Language :: Forth",True))
-        self.create_trove_cat((334,274,"hindi","Hindi","Translations :: Hindi",True))
-        self.create_trove_cat((336,274,"indonesian","Indonesian","Translations :: Indonesian",True))
-        self.create_trove_cat((521,498,"db_file_flat","Flat-file","Database Environment :: File-based DBMS :: Flat-file",True))
-        self.create_trove_cat((519,498,"db_file_xbase","xBase","Database Environment :: File-based DBMS :: xBase",True))
-        self.create_trove_cat((338,274,"javanese","Javanese","Translations :: Javanese",True))
-        self.create_trove_cat((518,498,"db_msaccess","Microsoft Access","Database Environment :: File-based DBMS :: Microsoft Access",True))
-        self.create_trove_cat((522,498,"db_file_proprietary","Proprietary file format","Database Environment :: File-based DBMS :: Proprietary file format",True))
-        self.create_trove_cat((496,0,"root_database","Database Environment","Database Environment",True))
-        self.create_trove_cat((501,499,"db_api_odbc","ODBC","Database Environment :: Database API :: ODBC",True))
-        self.create_trove_cat((503,499,"db_adodb","ADOdb","Database Environment :: Database API :: ADOdb",True))
-        self.create_trove_cat((340,274,"malay","Malay","Translations :: Malay",True))
-        self.create_trove_cat((6,0,"developmentstatus","Development Status","Development Status",False))
-        self.create_trove_cat((342,274,"norwegian","Norwegian","Translations :: Norwegian",True))
-        self.create_trove_cat((381,274,"portuguesebrazilian","Brazilian Portuguese","Translations :: Brazilian Portuguese",True))
-        self.create_trove_cat((382,274,"chinesesimplified","Chinese (Simplified)","Translations :: Chinese (Simplified)",True))
-        self.create_trove_cat((356,274,"danish","Danish","Translations :: Danish",True))
-        self.create_trove_cat((346,274,"panjabi","Panjabi","Translations :: Panjabi",True))
-        self.create_trove_cat((370,274,"bosnian","Bosnian","Translations :: Bosnian",True))
-        self.create_trove_cat((279,274,"german","German","Translations :: German",True))
-        self.create_trove_cat((278,274,"japanese","Japanese","Translations :: Japanese",True))
-        self.create_trove_cat((277,274,"spanish","Spanish","Translations :: Spanish",True))
-        self.create_trove_cat((11,6,"production","5 - Production/Stable","Development Status :: 5 - Production/Stable",False))
-        self.create_trove_cat((10,6,"beta","4 - Beta","Development Status :: 4 - Beta",False))
-        self.create_trove_cat((9,6,"alpha","3 - Alpha","Development Status :: 3 - Alpha",False))
-        self.create_trove_cat((8,6,"prealpha","2 - Pre-Alpha","Development Status :: 2 - Pre-Alpha",False))
-        self.create_trove_cat((7,6,"planning","1 - Planning","Development Status :: 1 - Planning",False))
-        self.create_trove_cat((295,274,"russian","Russian","Translations :: Russian",True))
-        self.create_trove_cat((326,274,"arabic","Arabic","Translations :: Arabic",True))
-        self.create_trove_cat((335,274,"hungarian","Hungarian","Translations :: Hungarian",True))
-        self.create_trove_cat((13,0,"license","License","License",False))
-        self.create_trove_cat((14,13,"osi","OSI-Approved Open Source","License :: OSI-Approved Open Source",False))
-        self.create_trove_cat((388,14,"osl","Open Software License","License :: OSI-Approved Open Source :: Open Software License",False))
-        self.create_trove_cat((321,14,"motosoto","Motosoto License","License :: OSI-Approved Open Source :: Motosoto License",False))
-        self.create_trove_cat((325,14,"attribut","Attribution Assurance License","License :: OSI-Approved Open Source :: Attribution Assurance License",False))
-        self.create_trove_cat((304,14,"mpl","Mozilla Public License 1.0 (MPL)","License :: OSI-Approved Open Source :: Mozilla Public License 1.0 (MPL)",False))
-        self.create_trove_cat((398,14,"plan9","Lucent Public License (Plan9)","License :: OSI-Approved Open Source :: Lucent Public License (Plan9)",False))
-        self.create_trove_cat((187,14,"bsd","BSD License","License :: OSI-Approved Open Source :: BSD License",False))
-        self.create_trove_cat((393,14,"historical","Historical Permission Notice and Disclaimer","License :: OSI-Approved Open Source :: Historical Permission Notice and Disclaimer",False))
-        self.create_trove_cat((395,14,"real","RealNetworks Public Source License V1.0","License :: OSI-Approved Open Source :: RealNetworks Public Source License V1.0",False))
-        self.create_trove_cat((396,14,"rpl","Reciprocal Public License","License :: OSI-Approved Open Source :: Reciprocal Public License",False))
-        self.create_trove_cat((392,14,"eiffel2","Eiffel Forum License V2.0","License :: OSI-Approved Open Source :: Eiffel Forum License V2.0",False))
-        self.create_trove_cat((320,14,"w3c","W3C License","License :: OSI-Approved Open Source :: W3C License",False))
-        self.create_trove_cat((400,14,"frameworx","Frameworx Open License","License :: OSI-Approved Open Source :: Frameworx Open License",False))
-        self.create_trove_cat((194,14,"python","Python License (CNRI Python License)","License :: OSI-Approved Open Source :: Python License (CNRI Python License)",False))
-        self.create_trove_cat((296,14,"apache","Apache Software License","License :: OSI-Approved Open Source :: Apache Software License",False))
-        self.create_trove_cat((298,14,"sissl","Sun Industry Standards Source License (SISSL)","License :: OSI-Approved Open Source :: Sun Industry Standards Source License (SISSL)",False))
-        self.create_trove_cat((196,13,"other","Other/Proprietary License","License :: Other/Proprietary License",False))
-        self.create_trove_cat((197,13,"publicdomain","Public Domain","License :: Public Domain",False))
-        self.create_trove_cat((301,14,"nokia","Nokia Open Source License","License :: OSI-Approved Open Source :: Nokia Open Source License",False))
-        self.create_trove_cat((319,14,"eiffel","Eiffel Forum License","License :: OSI-Approved Open Source :: Eiffel Forum License",False))
-        self.create_trove_cat((318,14,"sunpublic","Sun Public License","License :: OSI-Approved Open Source :: Sun Public License",False))
-        self.create_trove_cat((190,14,"qpl","Qt Public License (QPL)","License :: OSI-Approved Open Source :: Qt Public License (QPL)",False))
-        self.create_trove_cat((390,14,"oclc","OCLC Research Public License 2.0","License :: OSI-Approved Open Source :: OCLC Research Public License 2.0",False))
-        self.create_trove_cat((407,14,"nasalicense","NASA Open Source Agreement","License :: OSI-Approved Open Source :: NASA Open Source Agreement",False))
-        self.create_trove_cat((406,14,"eclipselicense","Eclipse Public License","License :: OSI-Approved Open Source :: Eclipse Public License",False))
-        self.create_trove_cat((316,14,"opengroup","Open Group Test Suite License","License :: OSI-Approved Open Source :: Open Group Test Suite License",False))
-        self.create_trove_cat((300,14,"jabber","Jabber Open Source License","License :: OSI-Approved Open Source :: Jabber Open Source License",False))
-        self.create_trove_cat((297,14,"vovida","Vovida Software License 1.0","License :: OSI-Approved Open Source :: Vovida Software License 1.0",False))
-        self.create_trove_cat((324,14,"afl","Academic Free License (AFL)","License :: OSI-Approved Open Source :: Academic Free License (AFL)",False))
-        self.create_trove_cat((189,14,"psfl","Python Software Foundation License","License :: OSI-Approved Open Source :: Python Software Foundation License",False))
-        self.create_trove_cat((193,14,"rscpl","Ricoh Source Code Public License","License :: OSI-Approved Open Source :: Ricoh Source Code Public License",False))
-        self.create_trove_cat((17,14,"artistic","Artistic License","License :: OSI-Approved Open Source :: Artistic License",False))
-        self.create_trove_cat((389,14,"sybase","Sybase Open Watcom Public License","License :: OSI-Approved Open Source :: Sybase Open Watcom Public License",False))
-        self.create_trove_cat((391,14,"wxwindows","wxWindows Library Licence","License :: OSI-Approved Open Source :: wxWindows Library Licence",False))
-        self.create_trove_cat((397,14,"entessa","Entessa Public License","License :: OSI-Approved Open Source :: Entessa Public License",False))
-        self.create_trove_cat((16,14,"lgpl","GNU Library or Lesser General Public License (LGPL)","License :: OSI-Approved Open Source :: GNU Library or Lesser General Public License (LGPL)",False))
-        self.create_trove_cat((629,14,"educom","Educational Community License","License :: OSI-Approved Open Source :: Educational Community License",False))
-        self.create_trove_cat((15,14,"gpl","GNU General Public License (GPL)","License :: OSI-Approved Open Source :: GNU General Public License (GPL)",False))
-        self.create_trove_cat((191,14,"ibm","IBM Public License","License :: OSI-Approved Open Source :: IBM Public License",False))
-        self.create_trove_cat((192,14,"cvw","MITRE Collaborative Virtual Workspace License (CVW)","License :: OSI-Approved Open Source :: MITRE Collaborative Virtual Workspace License (CVW)",False))
-        self.create_trove_cat((299,14,"iosl","Intel Open Source License","License :: OSI-Approved Open Source :: Intel Open Source License",False))
-        self.create_trove_cat((399,14,"php-license","PHP License","License :: OSI-Approved Open Source :: PHP License",False))
-        self.create_trove_cat((188,14,"mit","MIT License","License :: OSI-Approved Open Source :: MIT License",False))
-        self.create_trove_cat((405,14,"public102","Lucent Public License Version 1.02","License :: OSI-Approved Open Source :: Lucent Public License Version 1.02",False))
-        self.create_trove_cat((404,14,"fair","Fair License","License :: OSI-Approved Open Source :: Fair License",False))
-        self.create_trove_cat((403,14,"datagrid","EU DataGrid Software License","License :: OSI-Approved Open Source :: EU DataGrid Software License",False))
-        self.create_trove_cat((307,14,"ibmcpl","Common Public License","License :: OSI-Approved Open Source :: Common Public License",False))
-        self.create_trove_cat((402,14,"cua","CUA Office Public License Version 1.0","License :: OSI-Approved Open Source :: CUA Office Public License Version 1.0",False))
-        self.create_trove_cat((401,14,"apache2","Apache License V2.0","License :: OSI-Approved Open Source :: Apache License V2.0",False))
-        self.create_trove_cat((394,14,"nausite","Naumen Public License","License :: OSI-Approved Open Source :: Naumen Public License",False))
-        self.create_trove_cat((317,14,"xnet","X.Net License","License :: OSI-Approved Open Source :: X.Net License",False))
-        self.create_trove_cat((195,14,"zlib","zlib/libpng License","License :: OSI-Approved Open Source :: zlib/libpng License",False))
-        self.create_trove_cat((323,14,"ncsa","University of Illinois/NCSA Open Source License","License :: OSI-Approved Open Source :: University of Illinois/NCSA Open Source License",False))
-        self.create_trove_cat((322,14,"zope","Zope Public License","License :: OSI-Approved Open Source :: Zope Public License",False))
-        self.create_trove_cat((302,14,"sleepycat","Sleepycat License","License :: OSI-Approved Open Source :: Sleepycat License",False))
-        self.create_trove_cat((303,14,"nethack","Nethack General Public License","License :: OSI-Approved Open Source :: Nethack General Public License",False))
-        self.create_trove_cat((306,14,"apsl","Apple Public Source License","License :: OSI-Approved Open Source :: Apple Public Source License",False))
-        self.create_trove_cat((305,14,"mpl11","Mozilla Public License 1.1 (MPL 1.1)","License :: OSI-Approved Open Source :: Mozilla Public License 1.1 (MPL 1.1)",False))
-        self.create_trove_cat((628,14,"adaptive","Adaptive Public License","License :: OSI-Approved Open Source :: Adaptive Public License",False))
-        self.create_trove_cat((630,14,"cddl","Common Development and Distribution License","License :: OSI-Approved Open Source :: Common Development and Distribution License",False))
-        self.create_trove_cat((631,14,"catosl","Computer Associates Trusted Open Source License","License :: OSI-Approved Open Source :: Computer Associates Trusted Open Source License",False))
-        self.create_trove_cat((199,0,"os","Operating System","Operating System",True))
-        self.create_trove_cat((429,426,"fink","Fink (Mac OS X)","Operating System :: Emulation and API Compatibility :: Fink (Mac OS X)",True))
-        self.create_trove_cat((427,426,"cygwin","Cygwin (MS Windows)","Operating System :: Emulation and API Compatibility :: Cygwin (MS Windows)",True))
-        self.create_trove_cat((428,426,"dosemu","DOSEMU","Operating System :: Emulation and API Compatibility :: DOSEMU",True))
-        self.create_trove_cat((430,426,"wine","WINE","Operating System :: Emulation and API Compatibility :: WINE",True))
-        self.create_trove_cat((431,426,"emx","EMX (OS/2 and MS-DOS)","Operating System :: Emulation and API Compatibility :: EMX (OS/2 and MS-DOS)",True))
-        self.create_trove_cat((445,426,"mingw_msys","MinGW/MSYS (MS Windows)","Operating System :: Emulation and API Compatibility :: MinGW/MSYS (MS Windows)",True))
-        self.create_trove_cat((315,199,"pdasystems","Handheld/Embedded Operating Systems","Operating System :: Handheld/Embedded Operating Systems",True))
-        self.create_trove_cat((222,315,"wince","WinCE","Operating System :: Handheld/Embedded Operating Systems :: WinCE",True))
-        self.create_trove_cat((223,315,"palmos","PalmOS","Operating System :: Handheld/Embedded Operating Systems :: PalmOS",True))
-        self.create_trove_cat((441,315,"ecos","eCos","Operating System :: Handheld/Embedded Operating Systems :: eCos",True))
-        self.create_trove_cat((443,315,"vxworks","VxWorks","Operating System :: Handheld/Embedded Operating Systems :: VxWorks",True))
-        self.create_trove_cat((444,315,"symbianos","SymbianOS","Operating System :: Handheld/Embedded Operating Systems :: SymbianOS",True))
-        self.create_trove_cat((442,315,"qnx","QNX","Operating System :: Handheld/Embedded Operating Systems :: QNX",True))
-        self.create_trove_cat((440,315,"uclinux","uClinux","Operating System :: Handheld/Embedded Operating Systems :: uClinux",True))
-        self.create_trove_cat((418,199,"modern_oses","Modern (Vendor-Supported) Desktop Operating Systems","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems",True))
-        self.create_trove_cat((420,418,"mswin_2000","Win2K","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Win2K",True))
-        self.create_trove_cat((207,418,"sun","Solaris","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Solaris",True))
-        self.create_trove_cat((201,418,"linux","Linux","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Linux",True))
-        self.create_trove_cat((205,418,"openbsd","OpenBSD","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: OpenBSD",True))
-        self.create_trove_cat((203,418,"freebsd","FreeBSD","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: FreeBSD",True))
-        self.create_trove_cat((204,418,"netbsd","NetBSD","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: NetBSD",True))
-        self.create_trove_cat((309,418,"macosx","OS X","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: OS X",True))
-        self.create_trove_cat((419,418,"mswin_xp","WinXP","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: WinXP",True))
-        self.create_trove_cat((236,199,"other","Other Operating Systems","Operating System :: Other Operating Systems",True))
-        self.create_trove_cat((206,236,"bsdos","BSD/OS","Operating System :: Other Operating Systems :: BSD/OS",True))
-        self.create_trove_cat((634,236,"console-platforms","Console-based Platforms","Operating System :: Other Operating Systems :: Console-based Platforms",True))
-        self.create_trove_cat((637,634,"sega-dreamcast","Sega Dreamcast","Operating System :: Other Operating Systems :: Console-based Platforms :: Sega Dreamcast",True))
-        self.create_trove_cat((635,634,"xbox","Microsoft Xbox","Operating System :: Other Operating Systems :: Console-based Platforms :: Microsoft Xbox",True))
-        self.create_trove_cat((636,634,"sony-ps2","Sony Playstation 2","Operating System :: Other Operating Systems :: Console-based Platforms :: Sony Playstation 2",True))
-        self.create_trove_cat((422,236,"mswin_98","Win98","Operating System :: Other Operating Systems :: Win98",True))
-        self.create_trove_cat((425,422,"mswin_98_osr2","Win98 OSR2","Operating System :: Other Operating Systems :: Win98 :: Win98 OSR2",True))
-        self.create_trove_cat((424,236,"mswin_me","WinME","Operating System :: Other Operating Systems :: WinME",True))
-        self.create_trove_cat((423,236,"mswin_nt","WinNT","Operating System :: Other Operating Systems :: WinNT",True))
-        self.create_trove_cat((220,236,"os2","IBM OS/2","Operating System :: Other Operating Systems :: IBM OS/2",True))
-        self.create_trove_cat((211,236,"irix","SGI IRIX","Operating System :: Other Operating Systems :: SGI IRIX",True))
-        self.create_trove_cat((210,236,"aix","IBM AIX","Operating System :: Other Operating Systems :: IBM AIX",True))
-        self.create_trove_cat((212,236,"other","Other","Operating System :: Other Operating Systems :: Other",True))
-        self.create_trove_cat((446,236,"openvms","OpenVMS","Operating System :: Other Operating Systems :: OpenVMS",True))
-        self.create_trove_cat((434,236,"amigaos","AmigaOS","Operating System :: Other Operating Systems :: AmigaOS",True))
-        self.create_trove_cat((448,236,"mswin_server2003","Microsoft Windows Server 2003","Operating System :: Other Operating Systems :: Microsoft Windows Server 2003",True))
-        self.create_trove_cat((447,236,"morphos","MorphOS","Operating System :: Other Operating Systems :: MorphOS",True))
-        self.create_trove_cat((209,236,"hpux","HP-UX","Operating System :: Other Operating Systems :: HP-UX",True))
-        self.create_trove_cat((208,236,"sco","SCO","Operating System :: Other Operating Systems :: SCO",True))
-        self.create_trove_cat((240,236,"gnuhurd","GNU Hurd","Operating System :: Other Operating Systems :: GNU Hurd",True))
-        self.create_trove_cat((217,236,"win31","Microsoft Windows 3.x","Operating System :: Other Operating Systems :: Microsoft Windows 3.x",True))
-        self.create_trove_cat((432,199,"os_groups","Grouping and Descriptive Categories","Operating System :: Grouping and Descriptive Categories",True))
-        self.create_trove_cat((218,432,"win95","32-bit MS Windows (95/98)","Operating System :: Grouping and Descriptive Categories :: 32-bit MS Windows (95/98)",True))
-        self.create_trove_cat((439,432,"os_projectdistrospecific","Project is OS Distribution-Specific","Operating System :: Grouping and Descriptive Categories :: Project is OS Distribution-Specific",True))
-        self.create_trove_cat((449,432,"eightbit_oses","Classic 8-bit Operating Systems (Apple, Atari, Commodore, etc.)","Operating System :: Grouping and Descriptive Categories :: Classic 8-bit Operating Systems (Apple, Atari, Commodore, etc.)",True))
-        self.create_trove_cat((436,432,"os_portable","OS Portable (Source code to work with many OS platforms)","Operating System :: Grouping and Descriptive Categories :: OS Portable (Source code to work with many OS platforms)",True))
-        self.create_trove_cat((438,432,"os_projectdistro","Project is an Operating System Distribution","Operating System :: Grouping and Descriptive Categories :: Project is an Operating System Distribution",True))
-        self.create_trove_cat((235,432,"independent","OS Independent (Written in an interpreted language)","Operating System :: Grouping and Descriptive Categories :: OS Independent (Written in an interpreted language)",True))
-        self.create_trove_cat((200,432,"posix","All POSIX (Linux/BSD/UNIX-like OSes)","Operating System :: Grouping and Descriptive Categories :: All POSIX (Linux/BSD/UNIX-like OSes)",True))
-        self.create_trove_cat((219,432,"winnt","32-bit MS Windows (NT/2000/XP)","Operating System :: Grouping and Descriptive Categories :: 32-bit MS Windows (NT/2000/XP)",True))
-        self.create_trove_cat((202,432,"bsd","All BSD Platforms (FreeBSD/NetBSD/OpenBSD/Apple Mac OS X)","Operating System :: Grouping and Descriptive Categories :: All BSD Platforms (FreeBSD/NetBSD/OpenBSD/Apple Mac OS X)",True))
-        self.create_trove_cat((435,432,"mswin_all32bit","All 32-bit MS Windows (95/98/NT/2000/XP)","Operating System :: Grouping and Descriptive Categories :: All 32-bit MS Windows (95/98/NT/2000/XP)",True))
-        self.create_trove_cat((437,432,"os_projectkernel","Project is an Operating System Kernel","Operating System :: Grouping and Descriptive Categories :: Project is an Operating System Kernel",True))
-        self.create_trove_cat((64,63,"emacs","Emacs","Topic :: Text Editors :: Emacs",True))
-        self.create_trove_cat((65,63,"ide","Integrated Development Environments (IDE)","Topic :: Text Editors :: Integrated Development Environments (IDE)",True))
-        self.create_trove_cat((69,63,"documentation","Documentation","Topic :: Text Editors :: Documentation",True))
-        self.create_trove_cat((70,63,"wordprocessors","Word Processors","Topic :: Text Editors :: Word Processors",True))
-        self.create_trove_cat((285,63,"textprocessing","Text Processing","Topic :: Text Editors :: Text Processing",True))
-        self.create_trove_cat((611,18,"formats_and_protocols","Formats and Protocols","Topic :: Formats and Protocols",True))
-        self.create_trove_cat((554,611,"data_formats","Data Formats","Topic :: Formats and Protocols :: Data Formats",True))
-        self.create_trove_cat((559,554,"xml","XML","Topic :: Formats and Protocols :: Data Formats :: XML",True))
-        self.create_trove_cat((557,554,"sgml","SGML","Topic :: Formats and Protocols :: Data Formats :: SGML",True))
-        self.create_trove_cat((555,554,"docbook","DocBook","Topic :: Formats and Protocols :: Data Formats :: DocBook",True))
-        self.create_trove_cat((556,554,"html_xhtml","HTML/XHTML","Topic :: Formats and Protocols :: Data Formats :: HTML/XHTML",True))
-        self.create_trove_cat((558,554,"tex_latex","TeX/LaTeX","Topic :: Formats and Protocols :: Data Formats :: TeX/LaTeX",True))
-        self.create_trove_cat((612,611,"protocols","Protocols","Topic :: Formats and Protocols :: Protocols",True))
-        self.create_trove_cat((616,612,"xml_rpc","XML-RPC","Topic :: Formats and Protocols :: Protocols :: XML-RPC",True))
-        self.create_trove_cat((614,612,"nntp","NNTP","Topic :: Formats and Protocols :: Protocols :: NNTP",True))
-        self.create_trove_cat((613,612,"soap","SOAP","Topic :: Formats and Protocols :: Protocols :: SOAP",True))
-        self.create_trove_cat((615,612,"rss","RSS","Topic :: Formats and Protocols :: Protocols :: RSS",True))
-        self.create_trove_cat((156,18,"terminals","Terminals","Topic :: Terminals",True))
-        self.create_trove_cat((157,156,"serial","Serial","Topic :: Terminals :: Serial",True))
-        self.create_trove_cat((158,156,"virtual","Terminal Emulators/X Terminals","Topic :: Terminals :: Terminal Emulators/X Terminals",True))
-        self.create_trove_cat((159,156,"telnet","Telnet","Topic :: Terminals :: Telnet",True))
-        self.create_trove_cat((20,18,"communications","Communications","Topic :: Communications",True))
-        self.create_trove_cat((37,20,"fido","FIDO","Topic :: Communications :: FIDO",True))
-        self.create_trove_cat((38,20,"hamradio","Ham Radio","Topic :: Communications :: Ham Radio",True))
-        self.create_trove_cat((39,20,"usenet","Usenet News","Topic :: Communications :: Usenet News",True))
-        self.create_trove_cat((40,20,"internetphone","Internet Phone","Topic :: Communications :: Internet Phone",True))
-        self.create_trove_cat((36,20,"fax","Fax","Topic :: Communications :: Fax",True))
-        self.create_trove_cat((22,20,"chat","Chat","Topic :: Communications :: Chat",True))
-        self.create_trove_cat((574,22,"msn_messenger","MSN Messenger","Topic :: Communications :: Chat :: MSN Messenger",True))
-        self.create_trove_cat((26,22,"aim","AOL Instant Messenger","Topic :: Communications :: Chat :: AOL Instant Messenger",True))
-        self.create_trove_cat((24,22,"irc","Internet Relay Chat","Topic :: Communications :: Chat :: Internet Relay Chat",True))
-        self.create_trove_cat((25,22,"talk","Unix Talk","Topic :: Communications :: Chat :: Unix Talk",True))
-        self.create_trove_cat((23,22,"icq","ICQ","Topic :: Communications :: Chat :: ICQ",True))
-        self.create_trove_cat((590,20,"streaming_comms","Streaming","Topic :: Communications :: Streaming",True))
-        self.create_trove_cat((27,20,"conferencing","Conferencing","Topic :: Communications :: Conferencing",True))
-        self.create_trove_cat((247,20,"telephony","Telephony","Topic :: Communications :: Telephony",True))
-        self.create_trove_cat((251,20,"filesharing","File Sharing","Topic :: Communications :: File Sharing",True))
-        self.create_trove_cat((622,251,"bittorrent","BitTorrent","Topic :: Communications :: File Sharing :: BitTorrent",True))
-        self.create_trove_cat((286,251,"gnutella","Gnutella","Topic :: Communications :: File Sharing :: Gnutella",True))
-        self.create_trove_cat((241,251,"napster","Napster","Topic :: Communications :: File Sharing :: Napster",True))
-        self.create_trove_cat((21,20,"bbs","BBS","Topic :: Communications :: BBS",True))
-        self.create_trove_cat((28,20,"email","Email","Topic :: Communications :: Email",True))
-        self.create_trove_cat((31,28,"mua","Email Clients (MUA)","Topic :: Communications :: Email :: Email Clients (MUA)",True))
-        self.create_trove_cat((32,28,"mta","Mail Transport Agents","Topic :: Communications :: Email :: Mail Transport Agents",True))
-        self.create_trove_cat((234,18,"other","Other/Nonlisted Topic","Topic :: Other/Nonlisted Topic",True))
-        self.create_trove_cat((129,18,"office","Office/Business","Topic :: Office/Business",True))
-        self.create_trove_cat((576,129,"enterprise","Enterprise","Topic :: Office/Business :: Enterprise",True))
-        self.create_trove_cat((579,576,"crm","CRM","Topic :: Office/Business :: Enterprise :: CRM",True))
-        self.create_trove_cat((577,576,"erp","ERP","Topic :: Office/Business :: Enterprise :: ERP",True))
-        self.create_trove_cat((578,576,"olap","OLAP","Topic :: Office/Business :: Enterprise :: OLAP",True))
-        self.create_trove_cat((580,576,"data_warehousing","Data Warehousing","Topic :: Office/Business :: Enterprise :: Data Warehousing",True))
-        self.create_trove_cat((587,129,"time_tracking","Time Tracking","Topic :: Office/Business :: Time Tracking",True))
-        self.create_trove_cat((75,129,"financial","Financial","Topic :: Office/Business :: Financial",True))
-        self.create_trove_cat((76,75,"accounting","Accounting","Topic :: Office/Business :: Financial :: Accounting",True))
-        self.create_trove_cat((77,75,"investment","Investment","Topic :: Office/Business :: Financial :: Investment",True))
-        self.create_trove_cat((78,75,"spreadsheet","Spreadsheet","Topic :: Office/Business :: Financial :: Spreadsheet",True))
-        self.create_trove_cat((79,75,"pointofsale","Point-Of-Sale","Topic :: Office/Business :: Financial :: Point-Of-Sale",True))
-        self.create_trove_cat((130,129,"scheduling","Scheduling","Topic :: Office/Business :: Scheduling",True))
-        self.create_trove_cat((585,130,"calendar","Calendar","Topic :: Office/Business :: Scheduling :: Calendar",True))
-        self.create_trove_cat((586,130,"resource_booking","Resource Booking","Topic :: Office/Business :: Scheduling :: Resource Booking",True))
-        self.create_trove_cat((131,129,"suites","Office Suites","Topic :: Office/Business :: Office Suites",True))
-        self.create_trove_cat((588,129,"todo_lists","To-Do Lists","Topic :: Office/Business :: To-Do Lists",True))
-        self.create_trove_cat((607,129,"project_management","Project Management","Topic :: Office/Business :: Project Management",True))
-        self.create_trove_cat((66,18,"database","Database","Topic :: Database",True))
-        self.create_trove_cat((68,66,"frontends","Front-Ends","Topic :: Database :: Front-Ends",True))
-        self.create_trove_cat((67,66,"engines","Database Engines/Servers","Topic :: Database :: Database Engines/Servers",True))
-        self.create_trove_cat((43,18,"security","Security","Topic :: Security",True))
-        self.create_trove_cat((44,43,"cryptography","Cryptography","Topic :: Security :: Cryptography",True))
-        self.create_trove_cat((55,18,"desktop","Desktop Environment","Topic :: Desktop Environment",True))
-        self.create_trove_cat((56,55,"windowmanagers","Window Managers","Topic :: Desktop Environment :: Window Managers",True))
-        self.create_trove_cat((59,56,"enlightenment","Enlightenment","Topic :: Desktop Environment :: Window Managers :: Enlightenment",True))
-        self.create_trove_cat((60,59,"themes","Themes","Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes",True))
-        self.create_trove_cat((57,55,"kde","K Desktop Environment (KDE)","Topic :: Desktop Environment :: K Desktop Environment (KDE)",True))
-        self.create_trove_cat((61,57,"themes","Themes","Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes",True))
-        self.create_trove_cat((58,55,"gnome","Gnome","Topic :: Desktop Environment :: Gnome",True))
-        self.create_trove_cat((62,55,"screensavers","Screen Savers","Topic :: Desktop Environment :: Screen Savers",True))
-        self.create_trove_cat((80,18,"games","Games/Entertainment","Topic :: Games/Entertainment",True))
-        self.create_trove_cat((633,80,"console-games","Console-based Games","Topic :: Games/Entertainment :: Console-based Games",True))
-        self.create_trove_cat((287,80,"boardgames","Board Games","Topic :: Games/Entertainment :: Board Games",True))
-        self.create_trove_cat((288,80,"sidescrolling","Side-Scrolling/Arcade Games","Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games",True))
-        self.create_trove_cat((81,80,"realtimestrategy","Real Time Strategy","Topic :: Games/Entertainment :: Real Time Strategy",True))
-        self.create_trove_cat((82,80,"firstpersonshooters","First Person Shooters","Topic :: Games/Entertainment :: First Person Shooters",True))
-        self.create_trove_cat((83,80,"turnbasedstrategy","Turn Based Strategy","Topic :: Games/Entertainment :: Turn Based Strategy",True))
-        self.create_trove_cat((84,80,"rpg","Role-Playing","Topic :: Games/Entertainment :: Role-Playing",True))
-        self.create_trove_cat((85,80,"simulation","Simulation","Topic :: Games/Entertainment :: Simulation",True))
-        self.create_trove_cat((86,80,"mud","Multi-User Dungeons (MUD)","Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)",True))
-        self.create_trove_cat((268,80,"Puzzles","Puzzle Games","Topic :: Games/Entertainment :: Puzzle Games",True))
-        self.create_trove_cat((88,87,"finger","Finger","Topic :: Internet :: Finger",True))
-        self.create_trove_cat((89,87,"ftp","File Transfer Protocol (FTP)","Topic :: Internet :: File Transfer Protocol (FTP)",True))
-        self.create_trove_cat((270,87,"WAP","WAP","Topic :: Internet :: WAP",True))
-        self.create_trove_cat((90,87,"www","WWW/HTTP","Topic :: Internet :: WWW/HTTP",True))
-        self.create_trove_cat((91,90,"browsers","Browsers","Topic :: Internet :: WWW/HTTP :: Browsers",True))
-        self.create_trove_cat((92,90,"dynamic","Dynamic Content","Topic :: Internet :: WWW/HTTP :: Dynamic Content",True))
-        self.create_trove_cat((95,92,"messageboards","Message Boards","Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards",True))
-        self.create_trove_cat((96,92,"cgi","CGI Tools/Libraries","Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",True))
-        self.create_trove_cat((94,92,"counters","Page Counters","Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters",True))
-        self.create_trove_cat((93,90,"indexing","Indexing/Search","Topic :: Internet :: WWW/HTTP :: Indexing/Search",True))
-        self.create_trove_cat((243,90,"sitemanagement","Site Management","Topic :: Internet :: WWW/HTTP :: Site Management",True))
-        self.create_trove_cat((244,243,"linkchecking","Link Checking","Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking",True))
-        self.create_trove_cat((250,90,"httpservers","HTTP Servers","Topic :: Internet :: WWW/HTTP :: HTTP Servers",True))
-        self.create_trove_cat((149,87,"dns","Name Service (DNS)","Topic :: Internet :: Name Service (DNS)",True))
-        self.create_trove_cat((245,87,"loganalysis","Log Analysis","Topic :: Internet :: Log Analysis",True))
-        self.create_trove_cat((45,18,"development","Software Development","Topic :: Software Development",True))
-        self.create_trove_cat((563,45,"modeling","Modeling","Topic :: Software Development :: Modeling",True))
-        self.create_trove_cat((46,45,"build","Build Tools","Topic :: Software Development :: Build Tools",True))
-        self.create_trove_cat((575,45,"testing","Testing","Topic :: Software Development :: Testing",True))
-        self.create_trove_cat((620,45,"algorithms","Algorithms","Topic :: Software Development :: Algorithms",True))
-        self.create_trove_cat((621,620,"genetic_algorithms","Genetic Algorithms","Topic :: Software Development :: Algorithms :: Genetic Algorithms",True))
-        self.create_trove_cat((606,45,"frameworks","Frameworks","Topic :: Software Development :: Frameworks",True))
-        self.create_trove_cat((564,45,"documentation","Documentation","Topic :: Software Development :: Documentation",True))
-        self.create_trove_cat((562,45,"swdev_oo","Object Oriented","Topic :: Software Development :: Object Oriented",True))
-        self.create_trove_cat((409,45,"l10n","L10N (Localization)","Topic :: Software Development :: L10N (Localization)",True))
-        self.create_trove_cat((408,45,"i18n","I18N (Internationalization)","Topic :: Software Development :: I18N (Internationalization)",True))
-        self.create_trove_cat((50,45,"objectbrokering","Object Brokering","Topic :: Software Development :: Object Brokering",True))
-        self.create_trove_cat((51,50,"corba","CORBA","Topic :: Software Development :: Object Brokering :: CORBA",True))
-        self.create_trove_cat((52,45,"versioncontrol","Version Control","Topic :: Software Development :: Version Control",True))
-        self.create_trove_cat((53,52,"cvs","CVS","Topic :: Software Development :: Version Control :: CVS",True))
-        self.create_trove_cat((54,52,"rcs","RCS","Topic :: Software Development :: Version Control :: RCS",True))
-        self.create_trove_cat((260,52,"SCCS","SCCS","Topic :: Software Development :: Version Control :: SCCS",True))
-        self.create_trove_cat((259,45,"codegen","Code Generators","Topic :: Software Development :: Code Generators",True))
-        self.create_trove_cat((47,45,"debuggers","Debuggers","Topic :: Software Development :: Debuggers",True))
-        self.create_trove_cat((48,45,"compilers","Compilers","Topic :: Software Development :: Compilers",True))
-        self.create_trove_cat((49,45,"interpreters","Interpreters","Topic :: Software Development :: Interpreters",True))
-        self.create_trove_cat((561,45,"softwaredev_ui","User Interfaces","Topic :: Software Development :: User Interfaces",True))
-        self.create_trove_cat((565,45,"quality_assurance","Quality Assurance","Topic :: Software Development :: Quality Assurance",True))
-        self.create_trove_cat((570,45,"case_tools","CASE","Topic :: Software Development :: CASE",True))
-        self.create_trove_cat((582,45,"design","Design","Topic :: Software Development :: Design",True))
-        self.create_trove_cat((593,45,"cross_compilers","Cross Compilers","Topic :: Software Development :: Cross Compilers",True))
-        self.create_trove_cat((603,45,"profilers","Profiling","Topic :: Software Development :: Profiling",True))
-        self.create_trove_cat((610,45,"virtual_machines","Virtual Machines","Topic :: Software Development :: Virtual Machines",True))
-        self.create_trove_cat((619,45,"usability","Usability","Topic :: Software Development :: Usability",True))
-        self.create_trove_cat((581,71,"library","Library","Topic :: Education :: Library",True))
-        self.create_trove_cat((604,581,"opac","OPAC","Topic :: Education :: Library :: OPAC",True))
-        self.create_trove_cat((605,581,"marc_and_metadata","MARC and Book/Library Metadata","Topic :: Education :: Library :: MARC and Book/Library Metadata",True))
-        self.create_trove_cat((132,18,"religion","Religion and Philosophy","Topic :: Religion and Philosophy",True))
-        self.create_trove_cat((571,132,"new_age","New Age","Topic :: Religion and Philosophy :: New Age",True))
-        self.create_trove_cat((136,18,"system","System","Topic :: System",True))
-        self.create_trove_cat((638,136,"storage","Storage","Topic :: System :: Storage",True))
-        self.create_trove_cat((601,638,"file_management","File Management","Topic :: System :: Storage :: File Management",True))
-        self.create_trove_cat((19,638,"archiving","Archiving","Topic :: System :: Storage :: Archiving",True))
-        self.create_trove_cat((42,19,"compression","Compression","Topic :: System :: Storage :: Archiving :: Compression",True))
-        self.create_trove_cat((137,19,"backup","Backup","Topic :: System :: Storage :: Archiving :: Backup",True))
-        self.create_trove_cat((41,19,"packaging","Packaging","Topic :: System :: Storage :: Archiving :: Packaging",True))
-        self.create_trove_cat((294,136,"shells","System Shells","Topic :: System :: System Shells",True))
-        self.create_trove_cat((74,136,"emulators","Emulators","Topic :: System :: Emulators",True))
-        self.create_trove_cat((627,136,"system_search","Search","Topic :: System :: Search",True))
-        self.create_trove_cat((257,136,"softwaredist","Software Distribution","Topic :: System :: Software Distribution",True))
-        self.create_trove_cat((122,113,"players","Players","Topic :: Multimedia :: Sound/Audio :: Players",True))
-        self.create_trove_cat((253,136,"sysadministration","Systems Administration","Topic :: System :: Systems Administration",True))
-        self.create_trove_cat((289,253,"authentication","Authentication/Directory","Topic :: System :: Systems Administration :: Authentication/Directory",True))
-        self.create_trove_cat((290,289,"nis","NIS","Topic :: System :: Systems Administration :: Authentication/Directory :: NIS",True))
-        self.create_trove_cat((291,289,"ldap","LDAP","Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",True))
-        self.create_trove_cat((153,136,"power","Power (UPS)","Topic :: System :: Power (UPS)",True))
-        self.create_trove_cat((150,136,"networking","Networking","Topic :: System :: Networking",True))
-        self.create_trove_cat((566,150,"wireless","Wireless","Topic :: System :: Networking :: Wireless",True))
-        self.create_trove_cat((151,150,"firewalls","Firewalls","Topic :: System :: Networking :: Firewalls",True))
-        self.create_trove_cat((152,150,"monitoring","Monitoring","Topic :: System :: Networking :: Monitoring",True))
-        self.create_trove_cat((155,152,"watchdog","Hardware Watchdog","Topic :: System :: Networking :: Monitoring :: Hardware Watchdog",True))
-        self.create_trove_cat((148,136,"logging","Logging","Topic :: System :: Logging",True))
-        self.create_trove_cat((592,148,"log_rotation","Log Rotation","Topic :: System :: Logging :: Log Rotation",True))
-        self.create_trove_cat((144,136,"kernels","Operating System Kernels","Topic :: System :: Operating System Kernels",True))
-        self.create_trove_cat((145,144,"bsd","BSD","Topic :: System :: Operating System Kernels :: BSD",True))
-        self.create_trove_cat((239,144,"gnuhurd","GNU Hurd","Topic :: System :: Operating System Kernels :: GNU Hurd",True))
-        self.create_trove_cat((143,144,"linux","Linux","Topic :: System :: Operating System Kernels :: Linux",True))
-        self.create_trove_cat((147,136,"setup","Installation/Setup","Topic :: System :: Installation/Setup",True))
-        self.create_trove_cat((146,136,"hardware","Hardware","Topic :: System :: Hardware",True))
-        self.create_trove_cat((313,146,"mainframe","Mainframes","Topic :: System :: Hardware :: Mainframes",True))
-        self.create_trove_cat((312,146,"smp","Symmetric Multi-processing","Topic :: System :: Hardware :: Symmetric Multi-processing",True))
-        self.create_trove_cat((292,146,"drivers","Hardware Drivers","Topic :: System :: Hardware :: Hardware Drivers",True))
-        self.create_trove_cat((138,136,"benchmark","Benchmark","Topic :: System :: Benchmark",True))
-        self.create_trove_cat((139,136,"boot","Boot","Topic :: System :: Boot",True))
-        self.create_trove_cat((140,139,"init","Init","Topic :: System :: Boot :: Init",True))
-        self.create_trove_cat((141,136,"clustering","Clustering","Topic :: System :: Clustering",True))
-        self.create_trove_cat((308,136,"distributed_computing","Distributed Computing","Topic :: System :: Distributed Computing",True))
-        self.create_trove_cat((142,136,"filesystems","Filesystems","Topic :: System :: Filesystems",True))
-        self.create_trove_cat((154,18,"printing","Printing","Topic :: Printing",True))
-        self.create_trove_cat((87,18,"internet","Internet","Topic :: Internet",True))
-        self.create_trove_cat((118,116,"cdripping","CD Ripping","Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping",True))
-        self.create_trove_cat((119,113,"conversion","Conversion","Topic :: Multimedia :: Sound/Audio :: Conversion",True))
-        self.create_trove_cat((120,113,"editors","Editors","Topic :: Multimedia :: Sound/Audio :: Editors",True))
-        self.create_trove_cat((121,113,"mixers","Mixers","Topic :: Multimedia :: Sound/Audio :: Mixers",True))
-        self.create_trove_cat((100,99,"graphics","Graphics","Topic :: Multimedia :: Graphics",True))
-        self.create_trove_cat((109,100,"3dmodeling","3D Modeling","Topic :: Multimedia :: Graphics :: 3D Modeling",True))
-        self.create_trove_cat((110,100,"3drendering","3D Rendering","Topic :: Multimedia :: Graphics :: 3D Rendering",True))
-        self.create_trove_cat((111,100,"presentation","Presentation","Topic :: Multimedia :: Graphics :: Presentation",True))
-        self.create_trove_cat((112,100,"viewers","Viewers","Topic :: Multimedia :: Graphics :: Viewers",True))
-        self.create_trove_cat((101,100,"capture","Capture","Topic :: Multimedia :: Graphics :: Capture",True))
-        self.create_trove_cat((104,101,"screencapture","Screen Capture","Topic :: Multimedia :: Graphics :: Capture :: Screen Capture",True))
-        self.create_trove_cat((103,101,"cameras","Digital Camera","Topic :: Multimedia :: Graphics :: Capture :: Digital Camera",True))
-        self.create_trove_cat((102,101,"scanners","Scanners","Topic :: Multimedia :: Graphics :: Capture :: Scanners",True))
-        self.create_trove_cat((105,100,"conversion","Graphics Conversion","Topic :: Multimedia :: Graphics :: Graphics Conversion",True))
-        self.create_trove_cat((106,100,"editors","Editors","Topic :: Multimedia :: Graphics :: Editors",True))
-        self.create_trove_cat((108,106,"raster","Raster-Based","Topic :: Multimedia :: Graphics :: Editors :: Raster-Based",True))
-        self.create_trove_cat((107,106,"vector","Vector-Based","Topic :: Multimedia :: Graphics :: Editors :: Vector-Based",True))
-        self.create_trove_cat((97,18,"scientific","Scientific/Engineering","Topic :: Scientific/Engineering",True))
-        self.create_trove_cat((609,97,"molecular_science","Molecular Science","Topic :: Scientific/Engineering :: Molecular Science",True))
-        self.create_trove_cat((602,97,"robotics","Robotics","Topic :: Scientific/Engineering :: Robotics",True))
-        self.create_trove_cat((600,97,"simulations","Simulations","Topic :: Scientific/Engineering :: Simulations",True))
-        self.create_trove_cat((568,97,"ecosystem_sciences","Ecosystem Sciences","Topic :: Scientific/Engineering :: Ecosystem Sciences",True))
-        self.create_trove_cat((386,97,"interfaceengine","Interface Engine/Protocol Translator","Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator",True))
-        self.create_trove_cat((384,97,"chemistry","Chemistry","Topic :: Scientific/Engineering :: Chemistry",True))
-        self.create_trove_cat((252,97,"bioinformatics","Bio-Informatics","Topic :: Scientific/Engineering :: Bio-Informatics",True))
-        self.create_trove_cat((246,97,"eda","Electronic Design Automation (EDA)","Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)",True))
-        self.create_trove_cat((135,97,"visualization","Visualization","Topic :: Scientific/Engineering :: Visualization",True))
-        self.create_trove_cat((134,97,"astronomy","Astronomy","Topic :: Scientific/Engineering :: Astronomy",True))
-        self.create_trove_cat((133,97,"ai","Artificial Intelligence","Topic :: Scientific/Engineering :: Artificial Intelligence",True))
-        self.create_trove_cat((591,133,"intelligent_agents","Intelligent Agents","Topic :: Scientific/Engineering :: Artificial Intelligence :: Intelligent Agents",True))
-        self.create_trove_cat((98,97,"mathematics","Mathematics","Topic :: Scientific/Engineering :: Mathematics",True))
-        self.create_trove_cat((272,97,"HMI","Human Machine Interfaces","Topic :: Scientific/Engineering :: Human Machine Interfaces",True))
-        self.create_trove_cat((266,97,"medical","Medical Science Apps.","Topic :: Scientific/Engineering :: Medical Science Apps.",True))
-        self.create_trove_cat((383,97,"gis","GIS","Topic :: Scientific/Engineering :: GIS",True))
-        self.create_trove_cat((385,97,"informationanalysis","Information Analysis","Topic :: Scientific/Engineering :: Information Analysis",True))
-        self.create_trove_cat((387,97,"physics","Physics","Topic :: Scientific/Engineering :: Physics",True))
-        self.create_trove_cat((567,97,"earth_science","Earth Sciences","Topic :: Scientific/Engineering :: Earth Sciences",True))
-        self.create_trove_cat((282,18,"Sociology","Sociology","Topic :: Sociology",True))
-        self.create_trove_cat((284,282,"Genealogy","Genealogy","Topic :: Sociology :: Genealogy",True))
-        self.create_trove_cat((283,282,"History","History","Topic :: Sociology :: History",True))
-        self.create_trove_cat((71,18,"education","Education","Topic :: Education",True))
-        self.create_trove_cat((73,71,"testing","Testing","Topic :: Education :: Testing",True))
-        self.create_trove_cat((72,71,"cai","Computer Aided Instruction (CAI)","Topic :: Education :: Computer Aided Instruction (CAI)",True))
-        self.create_trove_cat((18,0,"topic","Topic","Topic",True))
-        self.create_trove_cat((125,99,"video","Video","Topic :: Multimedia :: Video",True))
-        self.create_trove_cat((594,125,"still_capture","Still Capture","Topic :: Multimedia :: Video :: Still Capture",True))
-        self.create_trove_cat((596,125,"codec","Codec","Topic :: Multimedia :: Video :: Codec",True))
-        self.create_trove_cat((127,125,"conversion","Conversion","Topic :: Multimedia :: Video :: Conversion",True))
-        self.create_trove_cat((128,125,"display","Display","Topic :: Multimedia :: Video :: Display",True))
-        self.create_trove_cat((256,125,"nonlineareditor","Non-Linear Editor","Topic :: Multimedia :: Video :: Non-Linear Editor",True))
-        self.create_trove_cat((595,125,"special_effects","Special Effects","Topic :: Multimedia :: Video :: Special Effects",True))
-        self.create_trove_cat((623,125,"video_realtime","Realtime Processing","Topic :: Multimedia :: Video :: Realtime Processing",True))
-        self.create_trove_cat((126,125,"vidcapture","Video Capture","Topic :: Multimedia :: Video :: Video Capture",True))
-        self.create_trove_cat((113,99,"sound","Sound/Audio","Topic :: Multimedia :: Sound/Audio",True))
-        self.create_trove_cat((123,122,"mp3","MP3","Topic :: Multimedia :: Sound/Audio :: Players :: MP3",True))
-        self.create_trove_cat((124,113,"speech","Speech","Topic :: Multimedia :: Sound/Audio :: Speech",True))
-        self.create_trove_cat((114,113,"analysis","Analysis","Topic :: Multimedia :: Sound/Audio :: Analysis",True))
-        self.create_trove_cat((115,113,"capture","Capture/Recording","Topic :: Multimedia :: Sound/Audio :: Capture/Recording",True))
-        self.create_trove_cat((248,113,"midi","MIDI","Topic :: Multimedia :: Sound/Audio :: MIDI",True))
-        self.create_trove_cat((249,113,"synthesis","Sound Synthesis","Topic :: Multimedia :: Sound/Audio :: Sound Synthesis",True))
-        self.create_trove_cat((116,113,"cdaudio","CD Audio","Topic :: Multimedia :: Sound/Audio :: CD Audio",True))
-        self.create_trove_cat((117,116,"cdplay","CD Playing","Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing",True))
-        self.create_trove_cat((99,18,"multimedia","Multimedia","Topic :: Multimedia",True))
-        self.create_trove_cat((670,14,"agpl","Affero GNU Public License","License :: OSI-Approved Open Source :: Affero GNU Public License",True))
-        self.create_trove_cat((862,14,"lppl","LaTeX Project Public License","License :: OSI-Approved Open Source :: LaTeX Project Public License",True))
-        self.create_trove_cat((655,432,"win64","64-bit MS Windows","Operating System :: Grouping and Descriptive Categories :: 64-bit MS Windows",True))
-        self.create_trove_cat((657,418,"vista","Vista","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Vista",True))
-        self.create_trove_cat((851,418,"win7","Windows 7","Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Windows 7",True))
-        self.create_trove_cat((728,315,"android","Android","Operating System :: Handheld/Embedded Operating Systems :: Android",True))
-        self.create_trove_cat((780,315,"ios","Apple iPhone","Operating System :: Handheld/Embedded Operating Systems :: Apple iPhone",True))
-        self.create_trove_cat((863,534,"architects","Architects","Intended Audience :: by End-User Class :: Architects",False))
-        self.create_trove_cat((864,534,"auditors","Auditors","Intended Audience :: by End-User Class :: Auditors",False))
-        self.create_trove_cat((865,534,"testers","Testers","Intended Audience :: by End-User Class :: Testers",False))
-        self.create_trove_cat((866,534,"secpros","Security Professionals","Intended Audience :: by End-User Class :: Security Professionals",False))
-        self.create_trove_cat((867,535,"secindustry","Security","Intended Audience :: by Industry or Sector :: Security",False))
+        self.create_trove_cat(
+            (617, 274, "kirghiz", "Kirghiz", "Translations :: Kirghiz", True))
+        self.create_trove_cat(
+            (372, 274, "croatian", "Croatian", "Translations :: Croatian", True))
+        self.create_trove_cat(
+            (351, 274, "thai", "Thai", "Translations :: Thai", True))
+        self.create_trove_cat(
+            (349, 274, "tamil", "Tamil", "Translations :: Tamil", True))
+        self.create_trove_cat(
+            (347, 274, "romanian", "Romanian", "Translations :: Romanian", True))
+        self.create_trove_cat(
+            (339, 274, "korean", "Korean", "Translations :: Korean", True))
+        self.create_trove_cat(
+            (632, 160, "cobol", "COBOL", "Programming Language :: COBOL", True))
+        self.create_trove_cat(
+            (598, 160, "aspectj", "AspectJ", "Programming Language :: AspectJ", True))
+        self.create_trove_cat(
+            (167, 160, "euler", "Euler", "Programming Language :: Euler", True))
+        self.create_trove_cat(
+            (185, 160, "shell", "Unix Shell", "Programming Language :: Unix Shell", True))
+        self.create_trove_cat(
+            (184, 160, "asp", "ASP", "Programming Language :: ASP", True))
+        self.create_trove_cat(
+            (273, 160, "Pike", "Pike", "Programming Language :: Pike", True))
+        self.create_trove_cat(
+            (271, 160, "csharp", "C#", "Programming Language :: C#", True))
+        self.create_trove_cat(
+            (170, 160, "lisp", "Lisp", "Programming Language :: Lisp", True))
+        self.create_trove_cat(
+            (169, 160, "fortran", "Fortran", "Programming Language :: Fortran", True))
+        self.create_trove_cat(
+            (625, 160, "simulink", "Simulink", "Programming Language :: Simulink", True))
+        self.create_trove_cat(
+            (626, 160, "matlab", "MATLAB", "Programming Language :: MATLAB", True))
+        self.create_trove_cat(
+            (1, 0, "audience", "Intended Audience", "Intended Audience", False))
+        self.create_trove_cat(
+            (618, 535, "nonprofit", "Non-Profit Organizations",
+             "Intended Audience :: by Industry or Sector :: Non-Profit Organizations", False))
+        self.create_trove_cat((599, 535, "aerospace", "Aerospace",
+                              "Intended Audience :: by Industry or Sector :: Aerospace", False))
+        self.create_trove_cat((569, 535, "government", "Government",
+                              "Intended Audience :: by Industry or Sector :: Government", False))
+        self.create_trove_cat(
+            (363, 535, "informationtechnology", "Information Technology",
+             "Intended Audience :: by Industry or Sector :: Information Technology", False))
+        self.create_trove_cat(
+            (361, 535, "financialinsurance", "Financial and Insurance Industry",
+             "Intended Audience :: by Industry or Sector :: Financial and Insurance Industry", False))
+        self.create_trove_cat(
+            (362, 535, "healthcareindustry", "Healthcare Industry",
+             "Intended Audience :: by Industry or Sector :: Healthcare Industry", False))
+        self.create_trove_cat((367, 535, "scienceresearch", "Science/Research",
+                              "Intended Audience :: by Industry or Sector :: Science/Research", False))
+        self.create_trove_cat((359, 535, "customerservice", "Customer Service",
+                              "Intended Audience :: by Industry or Sector :: Customer Service", False))
+        self.create_trove_cat((360, 535, "education", "Education",
+                              "Intended Audience :: by Industry or Sector :: Education", False))
+        self.create_trove_cat((365, 535, "manufacturing", "Manufacturing",
+                              "Intended Audience :: by Industry or Sector :: Manufacturing", False))
+        self.create_trove_cat(
+            (368, 535, "telecommunications", "Telecommunications Industry",
+             "Intended Audience :: by Industry or Sector :: Telecommunications Industry", False))
+        self.create_trove_cat(
+            (166, 160, "eiffel", "Eiffel", "Programming Language :: Eiffel", True))
+        self.create_trove_cat(
+            (550, 160, "oberon", "Oberon", "Programming Language :: Oberon", True))
+        self.create_trove_cat(
+            (553, 160, "realbasic", "REALbasic", "Programming Language :: REALbasic", True))
+        self.create_trove_cat(
+            (178, 160, "python", "Python", "Programming Language :: Python", True))
+        self.create_trove_cat(
+            (179, 160, "rexx", "Rexx", "Programming Language :: Rexx", True))
+        self.create_trove_cat(
+            (177, 160, "prolog", "Prolog", "Programming Language :: Prolog", True))
+        self.create_trove_cat(
+            (176, 160, "perl", "Perl", "Programming Language :: Perl", True))
+        self.create_trove_cat(
+            (175, 160, "pascal", "Pascal", "Programming Language :: Pascal", True))
+        self.create_trove_cat(
+            (536, 534, "enduser_advanced", "Advanced End Users",
+             "Intended Audience :: by End-User Class :: Advanced End Users", False))
+        self.create_trove_cat((4, 534, "sysadmins", "System Administrators",
+                              "Intended Audience :: by End-User Class :: System Administrators", False))
+        self.create_trove_cat(
+            (471, 456, "ui_swing", "Java Swing", "User Interface :: Graphical :: Java Swing", True))
+        self.create_trove_cat(
+            (469, 456, "ui_dotnet", ".NET/Mono", "User Interface :: Graphical :: .NET/Mono", True))
+        self.create_trove_cat(
+            (231, 456, "gnome", "Gnome", "User Interface :: Graphical :: Gnome", True))
+        self.create_trove_cat((229, 456, "x11", "X Window System (X11)",
+                              "User Interface :: Graphical :: X Window System (X11)", True))
+        self.create_trove_cat(
+            (475, 456, "ui_opengl", "OpenGL", "User Interface :: Graphical :: OpenGL", True))
+        self.create_trove_cat(
+            (474, 456, "ui_framebuffer", "Framebuffer", "User Interface :: Graphical :: Framebuffer", True))
+        self.create_trove_cat(
+            (472, 456, "ui_swt", "Java SWT", "User Interface :: Graphical :: Java SWT", True))
+        self.create_trove_cat(
+            (470, 456, "ui_awt", "Java AWT", "User Interface :: Graphical :: Java AWT", True))
+        self.create_trove_cat((230, 456, "win32", "Win32 (MS Windows)",
+                              "User Interface :: Graphical :: Win32 (MS Windows)", True))
+        self.create_trove_cat(
+            (232, 456, "kde", "KDE", "User Interface :: Graphical :: KDE", True))
+        self.create_trove_cat((310, 456, "cocoa", "Cocoa (MacOS X)",
+                              "User Interface :: Graphical :: Cocoa (MacOS X)", True))
+        self.create_trove_cat(
+            (476, 456, "ui_tabletpc", "TabletPC", "User Interface :: Graphical :: TabletPC", True))
+        self.create_trove_cat((314, 456, "handhelds", "Handheld/Mobile/PDA",
+                              "User Interface :: Graphical :: Handheld/Mobile/PDA", True))
+        self.create_trove_cat(
+            (462, 225, "ui_groupingdesc", "Grouping and Descriptive Categories (UI)",
+             "User Interface :: Grouping and Descriptive Categories (UI)", True))
+        self.create_trove_cat(
+            (466, 462, "ui_meta_3d", "Project is a 3D engine",
+             "User Interface :: Grouping and Descriptive Categories (UI) :: Project is a 3D engine", True))
+        self.create_trove_cat(
+            (464, 462, "ui_meta_template", "Project is a templating system",
+             "User Interface :: Grouping and Descriptive Categories (UI) :: Project is a templating system", True))
+        self.create_trove_cat(
+            (463, 462, "ui_meta_system", "Project is a user interface (UI) system",
+             "User Interface :: Grouping and Descript

<TRUNCATED>
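
For reference, the reformatting in the hunk above follows pycodestyle's whitespace and continuation-line rules: an over-long call is wrapped after the opening parenthesis, the wrapped arguments get a hanging indent (or are aligned with the opening bracket), and a space is added after each comma. A minimal before/after sketch, quoting one of the trove-category calls from this hunk:

    # Before: one over-long line with no spaces after commas (E501, E231).
    self.create_trove_cat((159,156,"telnet","Telnet","Topic :: Terminals :: Telnet",True))

    # After: wrapped with a hanging indent and spaces after the commas.
    self.create_trove_cat(
        (159, 156, "telnet", "Telnet", "Topic :: Terminals :: Telnet", True))

The same conventions account for the changes in the test module below; whether the patch was produced by hand or with a tool such as autopep8 is not stated in the commit message.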

[06/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/functional/test_root.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/functional/test_root.py b/ForgeTracker/forgetracker/tests/functional/test_root.py
index 26791bb..8860b3b 100644
--- a/ForgeTracker/forgetracker/tests/functional/test_root.py
+++ b/ForgeTracker/forgetracker/tests/functional/test_root.py
@@ -41,10 +41,12 @@ from allura.lib.security import has_access
 from allura.lib import helpers as h
 from allura.lib.search import SearchError
 from allura.tests import decorators as td
-from allura.tasks import  mail_tasks
+from allura.tasks import mail_tasks
 from ming.orm.ormsession import ThreadLocalORMSession
 
+
 class TrackerTestController(TestController):
+
     def setUp(self):
         super(TrackerTestController, self).setUp()
         self.setup_with_tools()
@@ -67,7 +69,8 @@ class TrackerTestController(TestController):
             is_usercombo = (field and field.tag == 'select' and
                             field.attrs.get('class') == 'project-user-combobox')
             if is_usercombo:
-                field.options = [('', False)] + [(u.username, False) for u in p.users()]
+                field.options = [('', False)] + [(u.username, False)
+                                                 for u in p.users()]
 
         for k, v in kw.iteritems():
             form['ticket_form.%s' % k] = v
@@ -75,7 +78,9 @@ class TrackerTestController(TestController):
         assert resp.status_int != 200, resp
         return resp
 
+
 class TestMilestones(TrackerTestController):
+
     def test_milestone_list(self):
         r = self.app.get('/bugs/milestones')
         assert '1.0' in r, r.showbrowser()
@@ -92,12 +97,12 @@ class TestMilestones(TrackerTestController):
         app.globals.custom_fields = []
         ThreadLocalORMSession.flush_all()
         d = {
-            'field_name':'_milestone',
-            'milestones-0.old_name':'',
-            'milestones-0.new_name':'1.0',
-            'milestones-0.description':'Version 1',
-            'milestones-0.complete':'Open',
-            'milestones-0.due_date':''
+            'field_name': '_milestone',
+            'milestones-0.old_name': '',
+            'milestones-0.new_name': '1.0',
+            'milestones-0.description': 'Version 1',
+            'milestones-0.complete': 'Open',
+            'milestones-0.due_date': ''
         }
         r = self.app.post('/bugs/update_milestones', d)
         r = self.app.get('/bugs/milestones')
@@ -106,35 +111,36 @@ class TestMilestones(TrackerTestController):
         r = self.app.post('/bugs/update_milestones', d)
         p = M.Project.query.get(shortname='test')
         app = p.app_instance('bugs')
-        assert len(app.globals.custom_fields) == 1, len(app.globals.custom_fields)
+        assert len(app.globals.custom_fields) == 1, len(
+            app.globals.custom_fields)
 
     def test_closed_milestone(self):
         self.new_ticket(summary='bar', _milestone='1.0', status='closed')
         d = {
-            'field_name':'_milestone',
-            'milestones-0.old_name':'1.0',
-            'milestones-0.new_name':'1.0',
-            'milestones-0.description':'',
-            'milestones-0.complete':'Closed',
-            'milestones-0.due_date':''
+            'field_name': '_milestone',
+            'milestones-0.old_name': '1.0',
+            'milestones-0.new_name': '1.0',
+            'milestones-0.description': '',
+            'milestones-0.complete': 'Closed',
+            'milestones-0.due_date': ''
         }
         self.app.post('/bugs/update_milestones', d)
         d = {
-            'field_name':'_milestone',
-            'milestones-9.old_name':'',
-            'milestones-9.new_name':'3.0',
-            'milestones-9.description':'',
-            'milestones-9.complete':'Closed',
-            'milestones-9.due_date':''
+            'field_name': '_milestone',
+            'milestones-9.old_name': '',
+            'milestones-9.new_name': '3.0',
+            'milestones-9.description': '',
+            'milestones-9.complete': 'Closed',
+            'milestones-9.due_date': ''
         }
         self.app.post('/bugs/update_milestones', d)
         d = {
-            'field_name':'_milestone',
-            'milestones-9.old_name':'',
-            'milestones-9.new_name':'4.0',
-            'milestones-9.description':'',
-            'milestones-9.complete':'Closed',
-            'milestones-9.due_date':''
+            'field_name': '_milestone',
+            'milestones-9.old_name': '',
+            'milestones-9.new_name': '4.0',
+            'milestones-9.description': '',
+            'milestones-9.complete': 'Closed',
+            'milestones-9.due_date': ''
         }
         self.app.post('/bugs/update_milestones', d)
         r = self.app.get('/bugs/1/')
@@ -161,12 +167,12 @@ class TestMilestones(TrackerTestController):
     def test_duplicate_milestone(self):
         self.new_ticket(summary='bar', _milestone='1.0', status='closed')
         d = {
-            'field_name':'_milestone',
-            'milestones-0.old_name':'',
-            'milestones-0.new_name':'1.0',
-            'milestones-0.description':'',
-            'milestones-0.complete':'Closed',
-            'milestones-0.due_date':''
+            'field_name': '_milestone',
+            'milestones-0.old_name': '',
+            'milestones-0.new_name': '1.0',
+            'milestones-0.description': '',
+            'milestones-0.complete': 'Closed',
+            'milestones-0.due_date': ''
         }
         r = self.app.post('/bugs/update_milestones', d)
         assert 'error' in self.webflash(r)
@@ -176,27 +182,28 @@ class TestMilestones(TrackerTestController):
         assert len(app.globals.milestone_fields[0]['milestones']) == 2
 
         d = {
-            'field_name':'_milestone',
-            'milestones-0.old_name':'2.0',
-            'milestones-0.new_name':'1.0',
-            'milestones-0.description':'',
-            'milestones-0.complete':'Closed',
-            'milestones-0.due_date':''
+            'field_name': '_milestone',
+            'milestones-0.old_name': '2.0',
+            'milestones-0.new_name': '1.0',
+            'milestones-0.description': '',
+            'milestones-0.complete': 'Closed',
+            'milestones-0.due_date': ''
         }
         r = self.app.post('/bugs/update_milestones', d)
         assert 'error' in self.webflash(r)
-        assert app.globals.milestone_fields[0]['milestones'][1]['name'] == '2.0'
+        assert app.globals.milestone_fields[0][
+            'milestones'][1]['name'] == '2.0'
 
     def test_default_milestone(self):
         self.new_ticket(summary='bar', _milestone='1.0', status='closed')
         d = {
-            'field_name':'_milestone',
-            'milestones-0.old_name':'2.0',
-            'milestones-0.new_name':'2.0',
-            'milestones-0.description':'',
-            'milestones-0.complete':'Open',
-            'milestones-0.default':'on',
-            'milestones-0.due_date':''
+            'field_name': '_milestone',
+            'milestones-0.old_name': '2.0',
+            'milestones-0.new_name': '2.0',
+            'milestones-0.description': '',
+            'milestones-0.complete': 'Open',
+            'milestones-0.default': 'on',
+            'milestones-0.due_date': ''
         }
         self.app.post('/bugs/update_milestones', d)
         r = self.app.get('/bugs/new/')
@@ -213,6 +220,7 @@ def post_install_create_ticket_permission(app):
     if update_permission in acl:
         acl.remove(update_permission)
 
+
 def post_install_update_ticket_permission(app):
     """Set to anonymous permission to create and update tickets"""
     role = M.ProjectRole.by_name('*anonymous')._id
@@ -221,12 +229,13 @@ def post_install_update_ticket_permission(app):
 
 
 class TestSubprojectTrackerController(TrackerTestController):
+
     @td.with_tool('test/sub1', 'Tickets', 'tickets')
     def test_index_page_ticket_visibility(self):
         """Test that non-admin users can see tickets created by admins."""
         self.new_ticket(summary="my ticket", mount_point="/sub1/tickets/")
         response = self.app.get('/p/test/sub1/tickets/',
-                extra_environ=dict(username='*anonymous'))
+                                extra_environ=dict(username='*anonymous'))
         assert 'my ticket' in response
 
     @td.with_tool('test/sub1', 'Tickets', 'tickets')
@@ -237,7 +246,7 @@ class TestSubprojectTrackerController(TrackerTestController):
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
         response = self.app.get('/p/test/sub1/tickets/search/?q=my',
-                extra_environ=dict(username='*anonymous'))
+                                extra_environ=dict(username='*anonymous'))
         assert 'my ticket' in response, response.showbrowser()
 
     @td.with_tool('test/sub1', 'Tickets', 'tickets')
@@ -249,12 +258,13 @@ class TestSubprojectTrackerController(TrackerTestController):
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
         r = self.app.get('/p/test/sub1/tickets/search/',
-                params=dict(q='test', deleted='True'))
+                         params=dict(q='test', deleted='True'))
         assert '<td><a href="/p/test/sub1/tickets/1/">test' in r
         assert '<tr class=" deleted">' in r
 
 
 class TestFunctionalController(TrackerTestController):
+
     def test_bad_ticket_number(self):
         self.app.get('/bugs/input.project_user_select', status=404)
 
@@ -267,9 +277,9 @@ class TestFunctionalController(TrackerTestController):
         assert create_activity.call_count == 1
         assert create_activity.call_args[0][1] == 'created'
         create_activity.reset_mock()
-        self.app.post('/bugs/1/update_ticket',{
-            'summary':'my ticket',
-            'description':'new description',
+        self.app.post('/bugs/1/update_ticket', {
+            'summary': 'my ticket',
+            'description': 'new description',
         })
         # create_activity is called twice here:
         #   - once for the ticket modification
@@ -284,25 +294,28 @@ class TestFunctionalController(TrackerTestController):
         assert 'class="artifact_subscribe' in ticket_view
 
     def test_new_with_milestone(self):
-        ticket_view = self.new_ticket(summary='test new with milestone', **{'_milestone':'1.0'}).follow()
+        ticket_view = self.new_ticket(
+            summary='test new with milestone', **{'_milestone': '1.0'}).follow()
         assert 'Milestone' in ticket_view
         assert '1.0' in ticket_view
 
     def test_milestone_count(self):
-        self.new_ticket(summary='test new with milestone', **{'_milestone':'1.0'})
-        self.new_ticket(summary='test new with milestone', **{'_milestone':'1.0',
-                                                              'private': '1'})
+        self.new_ticket(summary='test new with milestone',
+                        **{'_milestone': '1.0'})
+        self.new_ticket(
+            summary='test new with milestone', **{'_milestone': '1.0',
+                                                  'private': '1'})
         r = self.app.get('/bugs/milestone_counts')
         counts = {
             'milestone_counts': [
                 {'name': '1.0', 'count': 2},
                 {'name': '2.0', 'count': 0}
-        ]}
+            ]}
         assert_equal(r.body, json.dumps(counts))
         # Private tickets shouldn't be included in counts if user doesn't
         # have read access to private tickets.
         r = self.app.get('/bugs/milestone_counts',
-                extra_environ=dict(username='*anonymous'))
+                         extra_environ=dict(username='*anonymous'))
         counts['milestone_counts'][0]['count'] = 1
         assert_equal(r.body, json.dumps(counts))
 
@@ -311,8 +324,8 @@ class TestFunctionalController(TrackerTestController):
         assert_equal(r.body, json.dumps(counts))
 
     def test_milestone_progress(self):
-        self.new_ticket(summary='Ticket 1', **{'_milestone':'1.0'})
-        self.new_ticket(summary='Ticket 2', **{'_milestone':'1.0',
+        self.new_ticket(summary='Ticket 1', **{'_milestone': '1.0'})
+        self.new_ticket(summary='Ticket 2', **{'_milestone': '1.0',
                                                'status': 'closed',
                                                'private': '1'}).follow()
         r = self.app.get('/bugs/milestone/1.0/')
@@ -377,7 +390,6 @@ class TestFunctionalController(TrackerTestController):
         r = self.app.get('/p/test/bugs/2/')
         assert '<li><strong>Status</strong>: open --&gt; accepted</li>' in r
 
-
     def test_label_for_mass_edit(self):
         self.new_ticket(summary='Ticket1')
         self.new_ticket(summary='Ticket2', labels='tag1')
@@ -409,9 +421,9 @@ class TestFunctionalController(TrackerTestController):
         params = dict(
             custom_fields=[
                 dict(name='_major', label='Major', type='boolean'), ],
-                open_status_names='aa bb',
-                closed_status_names='cc',
-                )
+            open_status_names='aa bb',
+            closed_status_names='cc',
+        )
         self.app.post(
             '/admin/bugs/set_custom_fields',
             params=variable_encode(params))
@@ -560,7 +572,8 @@ class TestFunctionalController(TrackerTestController):
         assert_true('<label class="simple">Private:</label> No' in ticket_view)
         ticket_view = self.new_ticket(summary='Private Ticket',
                                       private=True).follow()
-        assert_true('<label class="simple">Private:</label> Yes' in ticket_view)
+        assert_true(
+            '<label class="simple">Private:</label> Yes' in ticket_view)
         M.MonQTask.run_ready()
         # Creator sees private ticket on list page...
         index_response = self.app.get('/p/test/bugs/')
@@ -581,7 +594,8 @@ class TestFunctionalController(TrackerTestController):
         assert '1 results' in r
         assert 'Private Ticket' not in r
         # ... or in search feed...
-        r = self.app.get('/p/test/bugs/search_feed?q=ticket', extra_environ=env)
+        r = self.app.get('/p/test/bugs/search_feed?q=ticket',
+                         extra_environ=env)
         assert 'Private Ticket' not in r
         # ...and can't get to the private ticket directly.
         r = self.app.get(ticket_view.request.url, extra_environ=env)
@@ -595,23 +609,25 @@ class TestFunctionalController(TrackerTestController):
         assert 'Private Ticket' not in r
 
         # update private ticket
-        self.app.post('/bugs/1/update_ticket_from_widget',{
-            'ticket_form.summary':'Public Ticket',
-            'ticket_form.description':'',
-            'ticket_form.status':'open',
-            'ticket_form._milestone':'1.0',
-            'ticket_form.assigned_to':'',
-            'ticket_form.labels':'',
+        self.app.post('/bugs/1/update_ticket_from_widget', {
+            'ticket_form.summary': 'Public Ticket',
+            'ticket_form.description': '',
+            'ticket_form.status': 'open',
+            'ticket_form._milestone': '1.0',
+            'ticket_form.assigned_to': '',
+            'ticket_form.labels': '',
             'ticket_form.comment': 'gotta be secret about this now',
             'ticket_form.private': 'on',
         })
         response = self.app.get('/bugs/1/')
-        assert_true('<li><strong>private</strong>: No --&gt; Yes</li>' in response)
+        assert_true(
+            '<li><strong>private</strong>: No --&gt; Yes</li>' in response)
 
     @td.with_tool('test', 'Tickets', 'doc-bugs')
     def test_two_trackers(self):
         summary = 'test two trackers'
-        ticket_view = self.new_ticket('/doc-bugs/', summary=summary, _milestone='1.0').follow()
+        ticket_view = self.new_ticket(
+            '/doc-bugs/', summary=summary, _milestone='1.0').follow()
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
@@ -648,9 +664,9 @@ class TestFunctionalController(TrackerTestController):
 
     def test_ticket_diffs(self):
         self.new_ticket(summary='difftest', description='1\n2\n3\n')
-        self.app.post('/bugs/1/update_ticket',{
-            'summary':'difftest',
-            'description':'1\n3\n4\n',
+        self.app.post('/bugs/1/update_ticket', {
+            'summary': 'difftest',
+            'description': '1\n3\n4\n',
         })
         r = self.app.get('/bugs/1/')
         assert '<span class="gd">-2</span>' in r, r.showbrowser()
@@ -659,50 +675,53 @@ class TestFunctionalController(TrackerTestController):
     def test_ticket_label_unlabel(self):
         summary = 'test labeling and unlabeling a ticket'
         self.new_ticket(summary=summary)
-        self.app.post('/bugs/1/update_ticket',{
-            'summary':'aaa',
-            'description':'bbb',
-            'status':'ccc',
-            '_milestone':'',
-            'assigned_to':'',
-            'labels':u'yellow,greén'.encode('utf-8'),
+        self.app.post('/bugs/1/update_ticket', {
+            'summary': 'aaa',
+            'description': 'bbb',
+            'status': 'ccc',
+            '_milestone': '',
+            'assigned_to': '',
+            'labels': u'yellow,greén'.encode('utf-8'),
             'comment': ''
         })
         response = self.app.get('/bugs/1/')
         assert_true('yellow' in response)
         assert_true(u'greén' in response)
-        assert_true('<li><strong>labels</strong>:  --&gt; yellow, greén</li>' in response)
-        self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz',
-            'description':'bbb',
-            'status':'ccc',
-            '_milestone':'',
-            'assigned_to':'',
-            'labels':'yellow',
+        assert_true(
+            '<li><strong>labels</strong>:  --&gt; yellow, greén</li>' in response)
+        self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz',
+            'description': 'bbb',
+            'status': 'ccc',
+            '_milestone': '',
+            'assigned_to': '',
+            'labels': 'yellow',
             'comment': ''
         })
         response = self.app.get('/bugs/1/')
         assert_true('yellow' in response)
-        assert_true('<li><strong>labels</strong>: yellow, greén --&gt; yellow</li>' in response)
-        self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz',
-            'description':'bbb',
-            'status':'ccc',
-            '_milestone':'',
-            'assigned_to':'',
-            'labels':'',
+        assert_true(
+            '<li><strong>labels</strong>: yellow, greén --&gt; yellow</li>' in response)
+        self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz',
+            'description': 'bbb',
+            'status': 'ccc',
+            '_milestone': '',
+            'assigned_to': '',
+            'labels': '',
             'comment': ''
         })
         response = self.app.get('/bugs/1/')
-        assert_true('<li><strong>labels</strong>: yellow --&gt; </li>' in response)
+        assert_true(
+            '<li><strong>labels</strong>: yellow --&gt; </li>' in response)
 
     def test_new_attachment(self):
         file_name = 'test_root.py'
         file_data = file(__file__).read()
         upload = ('attachment', file_name, file_data)
         self.new_ticket(summary='test new attachment')
-        ticket_editor = self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz'
+        ticket_editor = self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz'
         }, upload_files=[upload]).follow()
         assert_true(file_name in ticket_editor)
         assert '<span>py</span>' not in ticket_editor
@@ -712,15 +731,15 @@ class TestFunctionalController(TrackerTestController):
         file_data = file(__file__).read()
         upload = ('attachment', file_name, file_data)
         self.new_ticket(summary='test new attachment')
-        ticket_editor = self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz'
+        ticket_editor = self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz'
         }, upload_files=[upload]).follow()
         assert file_name in ticket_editor, ticket_editor.showbrowser()
         req = self.app.get('/bugs/1/')
         file_link = req.html.findAll('form')[1].findAll('a')[6]
         assert_equal(file_link.string, file_name)
-        self.app.post(str(file_link['href']),{
-            'delete':'True'
+        self.app.post(str(file_link['href']), {
+            'delete': 'True'
         })
         deleted_form = self.app.get('/bugs/1/')
         assert file_name not in deleted_form
@@ -734,14 +753,16 @@ class TestFunctionalController(TrackerTestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'test comment'
         self.app.post(f['action'].encode('utf-8'), params=params,
-                          headers={'Referer': '/bugs/1/'.encode("utf-8")})
+                      headers={'Referer': '/bugs/1/'.encode("utf-8")})
         r = self.app.get('/bugs/1/', dict(page=1))
-        post_link = str(r.html.find('div',{'class':'edit_post_form reply'}).find('form')['action'])
+        post_link = str(
+            r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
         self.app.post(post_link + 'attach',
-                          upload_files=[('file_info', 'test.txt', 'HiThere!')])
+                      upload_files=[('file_info', 'test.txt', 'HiThere!')])
         r = self.app.get('/bugs/1/', dict(page=1))
         assert '<input class="submit delete_attachment file" type="submit" value="X"/>' in r
         form = r.forms[5].submit()
@@ -753,10 +774,11 @@ class TestFunctionalController(TrackerTestController):
         file_data = file(__file__).read()
         upload = ('attachment', file_name, file_data)
         self.new_ticket(summary='test new attachment')
-        ticket_editor = self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz'
+        ticket_editor = self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz'
         }, upload_files=[upload]).follow()
-        download = self.app.get(str(ticket_editor.html.findAll('form')[1].findAll('a')[7]['href']))
+        download = self.app.get(
+            str(ticket_editor.html.findAll('form')[1].findAll('a')[7]['href']))
         assert_equal(download.body, file_data)
 
     def test_two_attachments(self):
@@ -764,8 +786,8 @@ class TestFunctionalController(TrackerTestController):
         file_name2 = 'test_root2.py'
         file_data = file(__file__).read()
         self.new_ticket(summary='test new attachment')
-        ticket_editor = self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz'
+        ticket_editor = self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz'
         }, upload_files=[('attachment', file_name1, file_data), ('attachment', file_name2, file_data)]).follow()
 
         assert 'test_root1.py' in ticket_editor
@@ -774,24 +796,25 @@ class TestFunctionalController(TrackerTestController):
     def test_new_image_attachment_content(self):
         h.set_context('test', 'bugs', neighborhood='Projects')
         file_name = 'neo-icon-set-454545-256x350.png'
-        file_path = os.path.join(allura.__path__[0],'nf','allura','images',file_name)
+        file_path = os.path.join(
+            allura.__path__[0], 'nf', 'allura', 'images', file_name)
         file_data = file(file_path).read()
         upload = ('attachment', file_name, file_data)
         self.new_ticket(summary='test new attachment')
-        ticket_editor = self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz'
+        ticket_editor = self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz'
         }, upload_files=[upload]).follow()
-        ticket = tm.Ticket.query.find({'ticket_num':1}).first()
+        ticket = tm.Ticket.query.find({'ticket_num': 1}).first()
         filename = ticket.attachments[0].filename
 
         uploaded = PIL.Image.open(file_path)
-        r = self.app.get('/bugs/1/attachment/'+filename)
+        r = self.app.get('/bugs/1/attachment/' + filename)
         downloaded = PIL.Image.open(StringIO.StringIO(r.body))
         assert uploaded.size == downloaded.size
-        r = self.app.get('/bugs/1/attachment/'+filename+'/thumb')
+        r = self.app.get('/bugs/1/attachment/' + filename + '/thumb')
 
         thumbnail = PIL.Image.open(StringIO.StringIO(r.body))
-        assert thumbnail.size == (100,100)
+        assert thumbnail.size == (100, 100)
 
     def test_sidebar_static_page(self):
         admin = M.User.query.get(username='test-admin')
@@ -808,10 +831,10 @@ class TestFunctionalController(TrackerTestController):
         response = self.app.get('/p/test/bugs/1/')
         assert 'Related Pages' not in response
         self.app.post('/wiki/aaa/update', params={
-                'title':'aaa',
-                'text':'',
-                'labels':'',
-                'viewable_by-0.id':'all'})
+            'title': 'aaa',
+            'text': '',
+            'labels': '',
+            'viewable_by-0.id': 'all'})
         self.new_ticket(summary='bbb')
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
@@ -849,8 +872,10 @@ class TestFunctionalController(TrackerTestController):
         self.new_ticket(summary=summary)
         response = self.app.get('/p/test/bugs/1/')
         assert response.html.find('input', {'name': 'ticket_form.summary'})
-        assert response.html.find('select', {'name': 'ticket_form.assigned_to'})
-        assert response.html.find('textarea', {'name': 'ticket_form.description'})
+        assert response.html.find(
+            'select', {'name': 'ticket_form.assigned_to'})
+        assert response.html.find(
+            'textarea', {'name': 'ticket_form.description'})
         assert response.html.find('select', {'name': 'ticket_form.status'})
         assert response.html.find('select', {'name': 'ticket_form._milestone'})
         assert response.html.find('input', {'name': 'ticket_form.labels'})
@@ -860,23 +885,26 @@ class TestFunctionalController(TrackerTestController):
         summary = 'test default assignment'
         self.new_ticket(summary=summary)
         response = self.app.get('/p/test/bugs/1/')
-        assert 'nobody' in str(response.html.find('div', {'class': 'grid-5 ticket-assigned-to'}))
+        assert 'nobody' in str(
+            response.html.find('div', {'class': 'grid-5 ticket-assigned-to'}))
 
     def test_assign_ticket(self):
         summary = 'test assign ticket'
         self.new_ticket(summary=summary)
         response = self.app.get('/p/test/bugs/1/')
-        assert 'nobody' in str(response.html.find('div', {'class': 'grid-5 ticket-assigned-to'}))
-        response = self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz',
-            'description':'bbb',
-            'status':'ccc',
-            '_milestone':'',
-            'assigned_to':'test-admin',
-            'labels':'',
+        assert 'nobody' in str(
+            response.html.find('div', {'class': 'grid-5 ticket-assigned-to'}))
+        response = self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz',
+            'description': 'bbb',
+            'status': 'ccc',
+            '_milestone': '',
+            'assigned_to': 'test-admin',
+            'labels': '',
             'comment': ''
         }).follow()
-        assert 'test-admin' in str(response.html.find('div', {'class': 'grid-5 ticket-assigned-to'}))
+        assert 'test-admin' in str(response.html.find('div',
+                                   {'class': 'grid-5 ticket-assigned-to'}))
         assert '<li><strong>summary</strong>: test assign ticket --&gt; zzz' in response
         assert '<li><strong>status</strong>: open --&gt; ccc' in response
 
@@ -890,14 +918,15 @@ class TestFunctionalController(TrackerTestController):
                 dict(name='_code_review', label='Code Review', type='user')],
             open_status_names='aa bb',
             closed_status_names='cc',
-            )
+        )
         self.app.post(
             '/admin/bugs/set_custom_fields',
             params=variable_encode(params))
-        kw = {'custom_fields._priority':'normal',
-              'custom_fields._category':'helloworld',
-              'custom_fields._code_review':'test-admin'}
-        ticket_view = self.new_ticket(summary='test custom fields', **kw).follow()
+        kw = {'custom_fields._priority': 'normal',
+              'custom_fields._category': 'helloworld',
+              'custom_fields._code_review': 'test-admin'}
+        ticket_view = self.new_ticket(
+            summary='test custom fields', **kw).follow()
         assert 'Priority:' in ticket_view
         assert 'normal' in ticket_view
         assert 'Test Admin' in ticket_view
@@ -907,17 +936,18 @@ class TestFunctionalController(TrackerTestController):
             custom_fields=[
                 dict(name='_testselect', label='Test', type='select',
                      options='"test select"'),
-               ],
+            ],
             open_status_names='aa bb',
             closed_status_names='cc',
-            )
+        )
         self.app.post(
             '/admin/bugs/set_custom_fields',
             params=variable_encode(params))
         r = self.app.get('/bugs/new/')
         assert '<option value="test select">test select</option>' in r
-        kw = {'custom_fields._testselect':'test select'}
-        ticket_view = self.new_ticket(summary='test select custom fields', **kw).follow()
+        kw = {'custom_fields._testselect': 'test select'}
+        ticket_view = self.new_ticket(
+            summary='test select custom fields', **kw).follow()
         assert '<option selected value="test select">test select</option>' in ticket_view
 
     def test_select_custom_field_unicode(self):
@@ -925,10 +955,10 @@ class TestFunctionalController(TrackerTestController):
             custom_fields=[
                 dict(name='_testselect', label='Test', type='select',
                      options='oné "one and á half" two'),
-               ],
+            ],
             open_status_names='aa bb',
             closed_status_names='cc',
-            )
+        )
         self.app.post(
             '/admin/bugs/set_custom_fields',
             params=variable_encode(params))
@@ -942,10 +972,10 @@ class TestFunctionalController(TrackerTestController):
             custom_fields=[
                 dict(name='_testselect', label='Test', type='select',
                      options='closéd "quote missing'),
-               ],
+            ],
             open_status_names='aa bb',
             closed_status_names='cc',
-            )
+        )
         self.app.post(
             '/admin/bugs/set_custom_fields',
             params=variable_encode(params))
@@ -960,31 +990,32 @@ class TestFunctionalController(TrackerTestController):
                 dict(label='Number', type='number', options='')],
             open_status_names='aa bb',
             closed_status_names='cc',
-            )
+        )
         r = self.app.post('/admin/bugs/set_custom_fields',
                           params=variable_encode(params))
-        kw = {'custom_fields._number':''}
-        ticket_view = self.new_ticket(summary='test custom fields', **kw).follow()
+        kw = {'custom_fields._number': ''}
+        ticket_view = self.new_ticket(
+            summary='test custom fields', **kw).follow()
         assert '<strong>Number</strong>:  --&gt;' not in ticket_view
-        ticket_view = self.app.post('/bugs/1/update_ticket',params={
-            'summary':'zzz',
-            'description':'bbb',
-            'status':'ccc',
-            '_milestone':'aaa',
-            'assigned_to':'',
-            'labels':'',
-            'custom_fields._number':'',
+        ticket_view = self.app.post('/bugs/1/update_ticket', params={
+            'summary': 'zzz',
+            'description': 'bbb',
+            'status': 'ccc',
+            '_milestone': 'aaa',
+            'assigned_to': '',
+            'labels': '',
+            'custom_fields._number': '',
             'comment': ''
         }).follow()
         assert '<strong>Number</strong>:  --&gt;' not in ticket_view
-        ticket_view = self.app.post('/bugs/1/update_ticket',params={
-            'summary':'zzz',
-            'description':'bbb',
-            'status':'ccc',
-            '_milestone':'aaa',
-            'assigned_to':'',
-            'labels':'',
-            'custom_fields._number':'4',
+        ticket_view = self.app.post('/bugs/1/update_ticket', params={
+            'summary': 'zzz',
+            'description': 'bbb',
+            'status': 'ccc',
+            '_milestone': 'aaa',
+            'assigned_to': '',
+            'labels': '',
+            'custom_fields._number': '4',
             'comment': ''
         }).follow()
         assert '<strong>Number</strong>:  --&gt;' in ticket_view
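
Every custom-field test above configures the tracker the same way: build a nested dict describing the fields and status names, flatten it, and POST it to the admin controller. A compact sketch with an illustrative field definition:

    params = dict(
        custom_fields=[
            dict(name='_priority', label='Priority', type='select',
                 options='normal urgent'),     # illustrative field definition
        ],
        open_status_names='aa bb',
        closed_status_names='cc',
    )
    # variable_encode (presumably FormEncode's variabledecode helper, already in
    # scope in this module) flattens the nesting into custom_fields-0.name style keys
    self.app.post('/admin/bugs/set_custom_fields',
                  params=variable_encode(params))
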
@@ -994,24 +1025,24 @@ class TestFunctionalController(TrackerTestController):
             'open_status_names': 'aa bb',
             'closed_status_names': 'cc',
             'custom_fields': [dict(
-                    label='Milestone',
-                    show_in_search='on',
-                    type='milestone',
-                    milestones=[
-                        dict(name='aaaé'),
-                        dict(name='bbb'),
-                        dict(name='ccc')])] }
+                label='Milestone',
+                show_in_search='on',
+                type='milestone',
+                milestones=[
+                    dict(name='aaaé'),
+                    dict(name='bbb'),
+                    dict(name='ccc')])]}
         self.app.post('/admin/bugs/set_custom_fields',
                       variable_encode(params),
                       status=302)
         self.new_ticket(summary='test milestone names')
-        self.app.post('/bugs/1/update_ticket',{
-            'summary':'zzz',
-            'description':'bbb',
-            'status':'ccc',
-            '_milestone':'aaaé',
-            'assigned_to':'',
-            'labels':'',
+        self.app.post('/bugs/1/update_ticket', {
+            'summary': 'zzz',
+            'description': 'bbb',
+            'status': 'ccc',
+            '_milestone': 'aaaé',
+            'assigned_to': '',
+            'labels': '',
             'comment': ''
         })
         ticket_view = self.app.get('/p/test/bugs/1/')
@@ -1020,13 +1051,13 @@ class TestFunctionalController(TrackerTestController):
 
     def test_milestone_rename(self):
         self.new_ticket(summary='test milestone rename')
-        self.app.post('/bugs/1/update_ticket',{
-            'summary':'test milestone rename',
-            'description':'',
-            'status':'',
-            '_milestone':'1.0',
-            'assigned_to':'',
-            'labels':'',
+        self.app.post('/bugs/1/update_ticket', {
+            'summary': 'test milestone rename',
+            'description': '',
+            'status': '',
+            '_milestone': '1.0',
+            'assigned_to': '',
+            'labels': '',
             'comment': ''
         })
         ThreadLocalORMSession.flush_all()
@@ -1036,13 +1067,13 @@ class TestFunctionalController(TrackerTestController):
         assert 'Milestone' in ticket_view
         assert '1.0' in ticket_view
         assert 'zzzé' not in ticket_view
-        r = self.app.post('/bugs/update_milestones',{
-            'field_name':'_milestone',
-            'milestones-0.old_name':'1.0',
-            'milestones-0.new_name':'zzzé',
-            'milestones-0.description':'',
-            'milestones-0.complete':'Open',
-            'milestones-0.due_date':''
+        r = self.app.post('/bugs/update_milestones', {
+            'field_name': '_milestone',
+            'milestones-0.old_name': '1.0',
+            'milestones-0.new_name': 'zzzé',
+            'milestones-0.description': '',
+            'milestones-0.complete': 'Open',
+            'milestones-0.due_date': ''
         })
         ticket_view = self.app.get('/p/test/bugs/1/')
         assert '1.0' not in ticket_view
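
Milestone renames and closures both go through /bugs/update_milestones; the milestones-0. prefix is the flattened row index (FormEncode-style) for the first milestone of the named field, so each row is simply posted with its new state. With an illustrative new name:

    self.app.post('/bugs/update_milestones', {
        'field_name': '_milestone',        # which milestone custom field to edit
        'milestones-0.old_name': '1.0',    # row 0: the milestone being changed
        'milestones-0.new_name': '1.1',    # illustrative new name
        'milestones-0.description': '',
        'milestones-0.complete': 'Open',   # 'Closed' here closes the milestone instead
        'milestones-0.due_date': ''
    })
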
@@ -1052,13 +1083,13 @@ class TestFunctionalController(TrackerTestController):
         self.new_ticket(summary='test milestone close')
         r = self.app.get('/bugs/milestones')
         assert 'view closed' not in r
-        r = self.app.post('/bugs/update_milestones',{
-            'field_name':'_milestone',
-            'milestones-0.old_name':'1.0',
-            'milestones-0.new_name':'1.0',
-            'milestones-0.description':'',
-            'milestones-0.complete':'Closed',
-            'milestones-0.due_date':''
+        r = self.app.post('/bugs/update_milestones', {
+            'field_name': '_milestone',
+            'milestones-0.old_name': '1.0',
+            'milestones-0.new_name': '1.0',
+            'milestones-0.description': '',
+            'milestones-0.complete': 'Closed',
+            'milestones-0.due_date': ''
         })
         r = self.app.get('/bugs/milestones')
         assert 'view closed' in r
@@ -1080,7 +1111,7 @@ class TestFunctionalController(TrackerTestController):
                      options='')],
             open_status_names='aa bb',
             closed_status_names='cc',
-            )
+        )
         self.app.post(
             '/admin/bugs/set_custom_fields', params=variable_encode(params))
         # Test new ticket form
@@ -1089,40 +1120,48 @@ class TestFunctionalController(TrackerTestController):
         form['ticket_form.custom_fields._priority'] = 'urgent'
         form['ticket_form.custom_fields._category'] = 'bugs'
         error_form = form.submit()
-        assert_equal(error_form.forms[1]['ticket_form.custom_fields._priority'].value, 'urgent')
-        assert_equal(error_form.forms[1]['ticket_form.custom_fields._category'].value, 'bugs')
+        assert_equal(
+            error_form.forms[1]['ticket_form.custom_fields._priority'].value, 'urgent')
+        assert_equal(
+            error_form.forms[1]['ticket_form.custom_fields._category'].value, 'bugs')
         # Test edit ticket form
         self.new_ticket(summary='Test ticket')
         response = self.app.get('/bugs/1/')
         form = response.forms[1]
-        assert_equal(form['ticket_form.custom_fields._priority'].value, 'normal')
+        assert_equal(
+            form['ticket_form.custom_fields._priority'].value, 'normal')
         assert_equal(form['ticket_form.custom_fields._category'].value, '')
         form['ticket_form.summary'] = ''
         form['ticket_form.custom_fields._priority'] = 'urgent'
         form['ticket_form.custom_fields._category'] = 'bugs'
         error_form = form.submit()
-        assert_equal(error_form.forms[1]['ticket_form.custom_fields._priority'].value, 'urgent')
-        assert_equal(error_form.forms[1]['ticket_form.custom_fields._category'].value, 'bugs')
+        assert_equal(
+            error_form.forms[1]['ticket_form.custom_fields._priority'].value, 'urgent')
+        assert_equal(
+            error_form.forms[1]['ticket_form.custom_fields._category'].value, 'bugs')
 
     def test_new_ticket_validation(self):
         summary = 'ticket summary'
         response = self.app.get('/bugs/new/')
-        assert not response.html.find('div', {'class':'error'})
+        assert not response.html.find('div', {'class': 'error'})
         form = response.forms[1]
         form['ticket_form.labels'] = 'foo'
         # try submitting with no summary set and check for error message
         error_form = form.submit()
         assert error_form.forms[1]['ticket_form.labels'].value == 'foo'
-        error_message = error_form.html.find('div', {'class':'error'})
+        error_message = error_form.html.find('div', {'class': 'error'})
         assert error_message
-        assert (error_message.string == 'You must provide a Title' or \
+        assert (error_message.string == 'You must provide a Title' or
                 error_message.string == 'Missing value')
-        assert error_message.findPreviousSibling('input').get('name') == 'ticket_form.summary'
+        assert error_message.findPreviousSibling(
+            'input').get('name') == 'ticket_form.summary'
         # set a summary, submit, and check for success
         error_form.forms[1]['ticket_form.summary'] = summary
         success = error_form.forms[1].submit().follow().html
-        assert success.findAll('form')[1].get('action') == '/p/test/bugs/1/update_ticket_from_widget'
-        assert success.find('input', {'name':'ticket_form.summary'})['value'] == summary
+        assert success.findAll('form')[1].get(
+            'action') == '/p/test/bugs/1/update_ticket_from_widget'
+        assert success.find('input', {'name': 'ticket_form.summary'})[
+            'value'] == summary
 
     def test_edit_ticket_validation(self):
         old_summary = 'edit ticket test'
@@ -1130,23 +1169,27 @@ class TestFunctionalController(TrackerTestController):
         self.new_ticket(summary=old_summary)
         response = self.app.get('/bugs/1/')
         # check that existing form is valid
-        assert response.html.find('input', {'name':'ticket_form.summary'})['value'] == old_summary
-        assert not response.html.find('div', {'class':'error'})
+        assert response.html.find(
+            'input', {'name': 'ticket_form.summary'})['value'] == old_summary
+        assert not response.html.find('div', {'class': 'error'})
         form = response.forms[1]
         # try submitting with no summary set and check for error message
         form['ticket_form.summary'] = ""
         error_form = form.submit()
-        error_message = error_form.html.find('div', {'class':'error'})
+        error_message = error_form.html.find('div', {'class': 'error'})
         assert error_message
         assert error_message.string == 'You must provide a Title'
-        assert error_message.findPreviousSibling('input').get('name') == 'ticket_form.summary'
+        assert error_message.findPreviousSibling(
+            'input').get('name') == 'ticket_form.summary'
         # set a summary, submit, and check for success
         error_form.forms[1]['ticket_form.summary'] = new_summary
         r = error_form.forms[1].submit()
         assert r.status_int == 302, r.showbrowser()
         success = r.follow().html
-        assert success.findAll('form')[1].get('action') == '/p/test/bugs/1/update_ticket_from_widget'
-        assert success.find('input', {'name':'ticket_form.summary'})['value'] == new_summary
+        assert success.findAll('form')[1].get(
+            'action') == '/p/test/bugs/1/update_ticket_from_widget'
+        assert success.find('input', {'name': 'ticket_form.summary'})[
+            'value'] == new_summary
 
     def test_home(self):
         self.new_ticket(summary='test first ticket')
@@ -1170,12 +1213,13 @@ class TestFunctionalController(TrackerTestController):
         assert 'test third ticket' in response, response.showbrowser()
 
     def test_search_with_strange_chars(self):
-        r = self.app.get('/p/test/bugs/search/?' + urllib.urlencode({'q': 'tést'}))
+        r = self.app.get('/p/test/bugs/search/?' +
+                         urllib.urlencode({'q': 'tést'}))
         assert 'Search bugs: tést' in r
 
     def test_saved_search_with_strange_chars(self):
         '''Sidebar must be visible even with a strange characters in saved search terms'''
-        r = self.app.post('/admin/bugs/bins/save_bin',{
+        r = self.app.post('/admin/bugs/bins/save_bin', {
             'summary': 'Strange chars in terms here',
             'terms': 'labels:tést',
             'old_summary': '',
@@ -1190,7 +1234,7 @@ class TestFunctionalController(TrackerTestController):
         ThreadLocalORMSession.flush_all()
         for ext in ['', '.rss', '.atom']:
             assert '<title>test first ticket</title>' in \
-                    self.app.get('/p/test/bugs/search_feed%s?q=test' % ext)
+                self.app.get('/p/test/bugs/search_feed%s?q=test' % ext)
 
     def test_search_current_user(self):
         self.new_ticket(summary='test first ticket')
@@ -1240,7 +1284,7 @@ class TestFunctionalController(TrackerTestController):
     def test_save_invalid_search(self, search_artifact):
         err = 'Error running search query: [Reason: undefined field label]'
         search_artifact.side_effect = SearchError(err)
-        r = self.app.post('/admin/bugs/bins/save_bin',{
+        r = self.app.post('/admin/bugs/bins/save_bin', {
             'summary': 'This is not too long.',
             'terms': 'label:foo',
             'old_summary': '',
@@ -1254,20 +1298,21 @@ class TestFunctionalController(TrackerTestController):
         assert err in r
 
     def test_saved_search_labels_truncated(self):
-        r = self.app.post('/admin/bugs/bins/save_bin',{
+        r = self.app.post('/admin/bugs/bins/save_bin', {
             'summary': 'This is not too long.',
             'terms': 'aaa',
             'old_summary': '',
             'sort': ''}).follow()
         r = self.app.get('/bugs/')
         assert sidebar_contains(r, 'This is not too long.')
-        r = self.app.post('/admin/bugs/bins/save_bin',{
+        r = self.app.post('/admin/bugs/bins/save_bin', {
             'summary': 'This will be truncated because it is too long to show in the sidebar without being ridiculous.',
             'terms': 'aaa',
             'old_summary': '',
             'sort': ''}).follow()
         r = self.app.get('/bugs/')
-        assert sidebar_contains(r, 'This will be truncated because it is too long to show in the sidebar ...')
+        assert sidebar_contains(
+            r, 'This will be truncated because it is too long to show in the sidebar ...')
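
Saved searches ("bins") are created identically in each of these tests: a POST to the admin bins controller, followed so the redirect lands back on the bins page. With illustrative values:

    r = self.app.post('/admin/bugs/bins/save_bin', {
        'summary': 'My saved search',   # label shown in the tracker sidebar
        'terms': 'status:open',         # query the bin will run
        'old_summary': '',              # blank when creating a new bin
        'sort': ''}).follow()
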
 
     def test_edit_saved_search(self):
         r = self.app.get('/admin/bugs/bins/')
@@ -1297,13 +1342,15 @@ class TestFunctionalController(TrackerTestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = post_content
         r = self.app.post(f['action'].encode('utf-8'), params=params,
                           headers={'Referer': '/bugs/1/'.encode("utf-8")})
         r = self.app.get('/bugs/1/', dict(page=1))
         assert_true(post_content in r)
-        assert_true(len(r.html.findAll(attrs={'class': 'discussion-post'})) == 1)
+        assert_true(
+            len(r.html.findAll(attrs={'class': 'discussion-post'})) == 1)
 
         new_summary = 'old ticket'
         for f in ticket_view.html.findAll('form'):
@@ -1313,13 +1360,15 @@ class TestFunctionalController(TrackerTestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params['ticket_form.summary'] = new_summary
         r = self.app.post(f['action'].encode('utf-8'), params=params,
                           headers={'Referer': '/bugs/1/'.encode("utf-8")})
         r = self.app.get('/bugs/1/', dict(page=1))
-        assert_true(summary+' --&gt; '+new_summary in r)
-        assert_true(len(r.html.findAll(attrs={'class': 'discussion-post'})) == 2)
+        assert_true(summary + ' --&gt; ' + new_summary in r)
+        assert_true(
+            len(r.html.findAll(attrs={'class': 'discussion-post'})) == 2)
 
     def test_discussion_paging(self):
         summary = 'test discussion paging'
@@ -1332,7 +1381,8 @@ class TestFunctionalController(TrackerTestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = post_content
         r = self.app.post(f['action'].encode('utf-8'), params=params,
                           headers={'Referer': '/bugs/1/'.encode("utf-8")})
@@ -1345,7 +1395,7 @@ class TestFunctionalController(TrackerTestController):
         # add some more posts and check for pager
         for i in range(2):
             r = self.app.post(f['action'].encode('utf-8'), params=params,
-                  headers={'Referer': '/bugs/1/'.encode("utf-8")})
+                              headers={'Referer': '/bugs/1/'.encode("utf-8")})
         r = self.app.get('/bugs/1/', dict(page=1, limit=2))
         assert_true('Page 2 of 2' in r)
 
@@ -1360,25 +1410,28 @@ class TestFunctionalController(TrackerTestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = post_content
         self.app.post(f['action'].encode('utf-8'), params=params,
-            headers={'Referer': '/bugs/1/'.encode("utf-8")})
+                      headers={'Referer': '/bugs/1/'.encode("utf-8")})
         r = self.app.get('/bugs/feed.rss')
         post = M.Post.query.find().first()
         assert '/p/test/bugs/1/?limit=50#' + post.slug in r
         r = self.app.get('/bugs/1/')
-        post_link = str(r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
+        post_link = str(
+            r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
         post_form = r.html.find('form', {'action': post_link + 'reply'})
         params = dict()
         inputs = post_form.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[post_form.find('textarea')['name']] = 'Tis a reply'
         r = self.app.post(post_link + 'reply',
-            params=params,
-            headers={'Referer':post_link.encode("utf-8")})
+                          params=params,
+                          headers={'Referer': post_link.encode("utf-8")})
         r = self.app.get('/bugs/feed.rss')
         assert 'Tis a reply' in r
         assert 'ticket discussion post content' in r
@@ -1394,34 +1447,40 @@ class TestFunctionalController(TrackerTestController):
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
         response = self.app.get('/p/test/bugs/?sort=summary+asc')
-        ticket_rows = response.html.find('table', {'class':'ticket-list'}).find('tbody')
+        ticket_rows = response.html.find(
+            'table', {'class': 'ticket-list'}).find('tbody')
         assert_in('test first ticket', str(ticket_rows))
         assert_in('test second ticket', str(ticket_rows))
-        edit_link = response.html.find('a',{'title':'Bulk Edit'})
+        edit_link = response.html.find('a', {'title': 'Bulk Edit'})
         expected_link = "/p/test/bugs/edit/?q=%21status%3Awont-fix+%26%26+%21status%3Aclosed&sort=snippet_s+asc&limit=25&page=0"
         assert_equal(expected_link, edit_link['href'])
         response = self.app.get(edit_link['href'])
-        ticket_rows = response.html.find('tbody', {'class':'ticket-list'})
+        ticket_rows = response.html.find('tbody', {'class': 'ticket-list'})
         assert_in('test first ticket', str(ticket_rows))
         assert_in('test second ticket', str(ticket_rows))
 
     def test_bulk_edit_milestone(self):
-        self.new_ticket(summary='test first ticket', status='open', _milestone='1.0')
-        self.new_ticket(summary='test second ticket', status='accepted', _milestone='1.0')
-        self.new_ticket(summary='test third ticket', status='closed', _milestone='1.0')
+        self.new_ticket(summary='test first ticket',
+                        status='open', _milestone='1.0')
+        self.new_ticket(summary='test second ticket',
+                        status='accepted', _milestone='1.0')
+        self.new_ticket(summary='test third ticket',
+                        status='closed', _milestone='1.0')
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
-        response = self.app.get('/p/test/bugs/milestone/1.0/?sort=ticket_num+asc')
-        ticket_rows = response.html.find('table', {'class':'ticket-list'}).find('tbody')
+        response = self.app.get(
+            '/p/test/bugs/milestone/1.0/?sort=ticket_num+asc')
+        ticket_rows = response.html.find(
+            'table', {'class': 'ticket-list'}).find('tbody')
         assert_in('test first ticket', str(ticket_rows))
         assert_in('test second ticket', str(ticket_rows))
         assert_in('test third ticket', str(ticket_rows))
-        edit_link = response.html.find('a',{'title':'Bulk Edit'})
+        edit_link = response.html.find('a', {'title': 'Bulk Edit'})
         expected_link = "/p/test/bugs/edit/?q=_milestone%3A1.0&sort=ticket_num_i+asc&limit=25&page=0"
         assert_equal(expected_link, edit_link['href'])
         response = self.app.get(edit_link['href'])
-        ticket_rows = response.html.find('tbody', {'class':'ticket-list'})
+        ticket_rows = response.html.find('tbody', {'class': 'ticket-list'})
         assert_in('test first ticket', str(ticket_rows))
         assert_in('test second ticket', str(ticket_rows))
         assert_in('test third ticket', str(ticket_rows))
@@ -1429,27 +1488,31 @@ class TestFunctionalController(TrackerTestController):
     def test_bulk_edit_search(self):
         self.new_ticket(summary='test first ticket', status='open')
         self.new_ticket(summary='test second ticket', status='open')
-        self.new_ticket(summary='test third ticket', status='closed', _milestone='1.0')
+        self.new_ticket(summary='test third ticket',
+                        status='closed', _milestone='1.0')
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
         response = self.app.get('/p/test/bugs/search/?q=status%3Aopen')
-        ticket_rows = response.html.find('table', {'class':'ticket-list'}).find('tbody')
+        ticket_rows = response.html.find(
+            'table', {'class': 'ticket-list'}).find('tbody')
         assert_in('test first ticket', str(ticket_rows))
         assert_in('test second ticket', str(ticket_rows))
         assert_false('test third ticket' in str(ticket_rows))
-        edit_link = response.html.find('a',{'title':'Bulk Edit'})
+        edit_link = response.html.find('a', {'title': 'Bulk Edit'})
         expected_link = "/p/test/bugs/edit/?q=status%3Aopen&limit=25&page=0"
         assert_equal(expected_link, edit_link['href'])
         response = self.app.get(edit_link['href'])
-        ticket_rows = response.html.find('tbody', {'class':'ticket-list'})
+        ticket_rows = response.html.find('tbody', {'class': 'ticket-list'})
         assert_in('test first ticket', str(ticket_rows))
         assert_in('test second ticket', str(ticket_rows))
         assert_false('test third ticket' in str(ticket_rows))
 
     def test_bulk_edit_notifications(self):
-        self.new_ticket(summary='test first ticket', status='open', _milestone='2.0')
-        self.new_ticket(summary='test second ticket', status='accepted', _milestone='1.0')
+        self.new_ticket(summary='test first ticket',
+                        status='open', _milestone='2.0')
+        self.new_ticket(summary='test second ticket',
+                        status='accepted', _milestone='1.0')
         self.new_ticket(summary='test third ticket', status='unread')
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
@@ -1474,10 +1537,12 @@ class TestFunctionalController(TrackerTestController):
                       'assigned_to': 'test-admin'})
         M.MonQTask.run_ready()
 
-        emails = M.MonQTask.query.find(dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
+        emails = M.MonQTask.query.find(
+            dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
         assert_equal(len(emails), 3)
         for email in emails:
-            assert_equal(email.kwargs.subject, '[test:bugs] Mass edit changes by Test Admin')
+            assert_equal(email.kwargs.subject,
+                         '[test:bugs] Mass edit changes by Test Admin')
         first_user_email = M.MonQTask.query.find({
             'task_name': 'allura.tasks.mail_tasks.sendmail',
             'kwargs.destinations': str(first_user._id)
@@ -1535,7 +1600,8 @@ class TestFunctionalController(TrackerTestController):
             'TicketMonitoringEmail': 'monitoring@email.com',
             'TicketMonitoringType': 'AllTicketChanges',
         })
-        self.new_ticket(summary='test first ticket', status='open', _milestone='2.0', private=True)
+        self.new_ticket(summary='test first ticket',
+                        status='open', _milestone='2.0', private=True)
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
@@ -1546,10 +1612,13 @@ class TestFunctionalController(TrackerTestController):
                       '__ticket_ids': [ticket._id],
                       'status': 'accepted'})
         M.MonQTask.run_ready()
-        emails = M.MonQTask.query.find(dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
-        assert_equal(len(emails), 2)  # one for admin and one for monitoring email
+        emails = M.MonQTask.query.find(
+            dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
+        # one for admin and one for monitoring email
+        assert_equal(len(emails), 2)
         for email in emails:
-            assert_equal(email.kwargs.subject, '[test:bugs] Mass edit changes by Test Admin')
+            assert_equal(email.kwargs.subject,
+                         '[test:bugs] Mass edit changes by Test Admin')
         admin = M.User.by_username('test-admin')
         admin_email = M.MonQTask.query.find({
             'task_name': 'allura.tasks.mail_tasks.sendmail',
@@ -1573,8 +1642,10 @@ class TestFunctionalController(TrackerTestController):
             'TicketMonitoringEmail': 'monitoring@email.com',
             'TicketMonitoringType': 'AllPublicTicketChanges',
         })
-        self.new_ticket(summary='test first ticket', status='open', _milestone='2.0')
-        self.new_ticket(summary='test second ticket', status='open', private=True)
+        self.new_ticket(summary='test first ticket',
+                        status='open', _milestone='2.0')
+        self.new_ticket(summary='test second ticket',
+                        status='open', private=True)
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
@@ -1585,10 +1656,13 @@ class TestFunctionalController(TrackerTestController):
                       '__ticket_ids': [t._id for t in tickets],
                       'status': 'accepted'})
         M.MonQTask.run_ready()
-        emails = M.MonQTask.query.find(dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
-        assert_equal(len(emails), 2)  # one for admin and one for monitoring email
+        emails = M.MonQTask.query.find(
+            dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
+        # one for admin and one for monitoring email
+        assert_equal(len(emails), 2)
         for email in emails:
-            assert_equal(email.kwargs.subject, '[test:bugs] Mass edit changes by Test Admin')
+            assert_equal(email.kwargs.subject,
+                         '[test:bugs] Mass edit changes by Test Admin')
         admin = M.User.by_username('test-admin')
         admin_email = M.MonQTask.query.find({
             'task_name': 'allura.tasks.mail_tasks.sendmail',
@@ -1613,8 +1687,10 @@ class TestFunctionalController(TrackerTestController):
             'TicketMonitoringEmail': 'monitoring@email.com',
             'TicketMonitoringType': 'AllPublicTicketChanges',
         })
-        self.new_ticket(summary='test first ticket', status='open', private=True)
-        self.new_ticket(summary='test second ticket', status='open', private=True)
+        self.new_ticket(summary='test first ticket',
+                        status='open', private=True)
+        self.new_ticket(summary='test second ticket',
+                        status='open', private=True)
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
@@ -1625,10 +1701,12 @@ class TestFunctionalController(TrackerTestController):
                       '__ticket_ids': [t._id for t in tickets],
                       'status': 'accepted'})
         M.MonQTask.run_ready()
-        emails = M.MonQTask.query.find(dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
+        emails = M.MonQTask.query.find(
+            dict(task_name='allura.tasks.mail_tasks.sendmail')).all()
         assert_equal(len(emails), 1)  # only admin email sent
         for email in emails:
-            assert_equal(email.kwargs.subject, '[test:bugs] Mass edit changes by Test Admin')
+            assert_equal(email.kwargs.subject,
+                         '[test:bugs] Mass edit changes by Test Admin')
         admin = M.User.by_username('test-admin')
         admin_email = M.MonQTask.query.find({
             'task_name': 'allura.tasks.mail_tasks.sendmail',
@@ -1675,7 +1753,8 @@ class TestFunctionalController(TrackerTestController):
         }
         filtered_changes = c.app.globals.filtered_by_subscription(changes)
         filtered_users = [uid for uid, data in filtered_changes.iteritems()]
-        assert_equal(sorted(filtered_users), sorted([u._id for u in users[:-1] + [admin]]))
+        assert_equal(sorted(filtered_users),
+                     sorted([u._id for u in users[:-1] + [admin]]))
         ticket_ids = [t._id for t in tickets]
         assert_equal(filtered_changes[users[0]._id], set(ticket_ids[0:1]))
         assert_equal(filtered_changes[users[1]._id], set(ticket_ids[:-1]))
@@ -1730,9 +1809,8 @@ class TestFunctionalController(TrackerTestController):
         r = self.app.get('/bugs/')
         assert "Votes" not in r
 
-
     @td.with_tool('test', 'Tickets', 'tracker',
-            post_install_hook=post_install_create_ticket_permission)
+                  post_install_hook=post_install_create_ticket_permission)
     def test_create_permission(self):
         """Test that user with `create` permission can create ticket,
         but can't edit it without `update` permission.
@@ -1747,11 +1825,11 @@ class TestFunctionalController(TrackerTestController):
         ticket_url = response.headers['Location']
         response = self.app.get(ticket_url,
                                 extra_environ=dict(username='test-user-0'))
-        assert not response.html.find('div',{'class': 'error'})
+        assert not response.html.find('div', {'class': 'error'})
         assert not response.html.find('a', {'class': 'edit_ticket'})
 
     @td.with_tool('test', 'Tickets', 'tracker',
-            post_install_hook=post_install_update_ticket_permission)
+                  post_install_hook=post_install_update_ticket_permission)
     def test_update_permission(self):
         r = self.app.get('/p/test/tracker/',
                          extra_environ=dict(username='*anonymous'))
@@ -1787,7 +1865,8 @@ class TestFunctionalController(TrackerTestController):
         assert 'Private ticket title' in r
         assert '<label class="simple">Private:</label> Yes' in r, 'Ticket is not private'
         # ... and can't see 'Edit' link
-        assert r.html.find('a', {'class': 'edit_ticket'}) is None, "Found 'Edit' link"
+        assert r.html.find('a', {'class': 'edit_ticket'}
+                           ) is None, "Found 'Edit' link"
         # ... and can't actually edit it
         self.app.post('/bugs/1/update_ticket', {'summary': 'should fail'},
                       extra_environ=env, status=403)
@@ -1818,11 +1897,13 @@ class TestFunctionalController(TrackerTestController):
 
     def test_ticket_delete_without_permission(self):
         self.new_ticket(summary='Test ticket')
-        self.app.post('/bugs/1/delete', extra_environ=dict(username='*anonymous'))
+        self.app.post('/bugs/1/delete',
+                      extra_environ=dict(username='*anonymous'))
         r = self.app.get('/bugs/')
         assert '<a href="/p/test/bugs/1/">Test ticket</a>' in r
         self.app.post('/bugs/1/delete')
-        self.app.post('/bugs/1/undelete', extra_environ=dict(username='*anonymous'))
+        self.app.post('/bugs/1/undelete',
+                      extra_environ=dict(username='*anonymous'))
         r = self.app.get('/bugs/')
         assert 'No open tickets found.' in r
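
The permission tests in this stretch all impersonate other accounts the same way: extra_environ switches the acting user for a single WebTest request, and an explicit status asserts the expected denial. Two illustrative calls drawn from the patterns above:

    # act as the anonymous user (no status given, so any 2xx/3xx response is accepted)
    self.app.post('/bugs/1/delete',
                  extra_environ=dict(username='*anonymous'))
    # act as an ordinary user and require the request to be rejected outright
    self.app.post('/p/test/bugs/1/move',
                  extra_environ={'username': 'test-user'}, status=403)
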
 
@@ -1831,12 +1912,15 @@ class TestFunctionalController(TrackerTestController):
         self.app.post('/bugs/1/delete')
         r = self.app.get('/p/test/bugs/1/')
         assert '#1 test' in r
-        self.app.get('/p/test/bugs/1/', extra_environ=dict(username='*anonymous'), status=404)
-        r = self.app.get('/p/test/bugs/',params=dict(q='test',deleted='True'))
+        self.app.get('/p/test/bugs/1/',
+                     extra_environ=dict(username='*anonymous'), status=404)
+        r = self.app.get('/p/test/bugs/',
+                         params=dict(q='test', deleted='True'))
         assert '<td><a href="/p/test/bugs/1/">test' in r
         assert '<tr class=" deleted">' in r
-        r = self.app.get('/p/test/bugs/',params=dict(q='test',deleted='True'),
-                         extra_environ=dict(username='*anonymous'))
+        r = self.app.get(
+            '/p/test/bugs/', params=dict(q='test', deleted='True'),
+            extra_environ=dict(username='*anonymous'))
         assert 'No open tickets found.' in r
 
     def test_show_hide_deleted_tickets(self):
@@ -1859,15 +1943,17 @@ class TestFunctionalController(TrackerTestController):
         r = self.app.get('/p/test/bugs/1/move')
         trackers = r.html.find('select', {'name': 'tracker'}).findAll('option')
         trackers = set([t.text for t in trackers])
-        expected = set(['test/bugs', 'test/bugs2', 'test2/bugs', 'test2/bugs2'])
+        expected = set(
+            ['test/bugs', 'test/bugs2', 'test2/bugs', 'test2/bugs2'])
         assert trackers == expected, trackers
 
         p = M.Project.query.get(shortname='test2')
         tracker = p.app_instance('bugs2')
         r = self.app.post('/p/test/bugs/1/move/',
-                params={'tracker': str(tracker.config._id)}).follow()
+                          params={'tracker': str(tracker.config._id)}).follow()
         assert_equal(r.request.path, '/p/test2/bugs2/1/')
-        summary = r.html.findAll('h2', {'class': 'dark title'})[0].contents[0].strip()
+        summary = r.html.findAll(
+            'h2', {'class': 'dark title'})[0].contents[0].strip()
         assert_equal(summary, '#1 test')
         ac_id = tracker.config._id
         ticket = tm.Ticket.query.find({
@@ -1885,7 +1971,7 @@ class TestFunctionalController(TrackerTestController):
         p = M.Project.query.get(shortname='test2')
         ac_id = p.app_instance('bugs2').config._id
         r = self.app.post('/p/test/bugs/1/move/',
-                params={'tracker': str(ac_id)}).follow()
+                          params={'tracker': str(ac_id)}).follow()
 
         ticket = tm.Ticket.query.find({
             'app_config_id': ac_id,
@@ -1913,7 +1999,8 @@ class TestFunctionalController(TrackerTestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = post_content
         r = self.app.post(f['action'].encode('utf-8'), params=params,
                           headers={'Referer': '/p/test2/bugs2/1/'.encode("utf-8")})
@@ -1937,22 +2024,22 @@ class TestFunctionalController(TrackerTestController):
         r = self.app.post('/p/test/bugs/1/move').follow()  # empty POST
         assert 'Select valid tracker' in r, r
         r = self.app.post('/p/test/bugs/1/move',
-                params={'tracker': 'invalid tracker id'}).follow()
-        assert 'Select valid tracker' in r,r
+                          params={'tracker': 'invalid tracker id'}).follow()
+        assert 'Select valid tracker' in r, r
         p = M.Project.query.get(shortname='test')
         tracker = p.app_instance('bugs')
         r = self.app.post('/p/test/bugs/1/move',
-                params={'tracker': str(tracker.config._id)}).follow()
+                          params={'tracker': str(tracker.config._id)}).follow()
         assert 'Ticket already in a selected tracker' in r, r
 
     def test_move_ticket_access(self):
         self.new_ticket(summary='test')
         self.app.get('/p/test/bugs/1/move',
-                extra_environ={'username': 'test-user'},
-                status=403)
+                     extra_environ={'username': 'test-user'},
+                     status=403)
         self.app.post('/p/test/bugs/1/move',
-                extra_environ={'username': 'test-user'},
-                status=403)
+                      extra_environ={'username': 'test-user'},
+                      status=403)
 
     @td.with_tool('test', 'Tickets', 'dummy')
     def test_move_ticket_redirect(self):
@@ -1963,7 +2050,7 @@ class TestFunctionalController(TrackerTestController):
         p = M.Project.query.get(shortname='test')
         dummy_tracker = p.app_instance('dummy')
         r = self.app.post('/p/test/bugs/1/move',
-                params={'tracker': str(dummy_tracker.config._id)}).follow()
+                          params={'tracker': str(dummy_tracker.config._id)}).follow()
         assert_equal(r.request.path, '/p/test/dummy/1/')
 
         # test that old url redirects to moved ticket
@@ -1983,7 +2070,7 @@ class TestFunctionalController(TrackerTestController):
         p = M.Project.query.get(shortname='test')
         dummy_tracker = p.app_instance('dummy')
         r = self.app.post('/p/test/bugs/1/move',
-                params={'tracker': str(dummy_tracker.config._id)}).follow()
+                          params={'tracker': str(dummy_tracker.config._id)}).follow()
         assert_equal(r.request.path, '/p/test/dummy/1/')
 
         # delete 'dummy' tracker
@@ -1991,7 +2078,8 @@ class TestFunctionalController(TrackerTestController):
 
         # remaining tickets in 'bugs' tracker should still be viewable
         self.app.get('/p/test/bugs/2/', status=200)  # shouldn't fail
-        r = self.app.get('/p/test/bugs/1/', status=301)  # ticket counts as moved
+        # ticket counts as moved
+        r = self.app.get('/p/test/bugs/1/', status=301)
         r.follow(status=404)  # and not found 'cause already deleted
 
     @td.with_tool('test', 'Tickets', 'dummy')
@@ -2007,7 +2095,7 @@ class TestFunctionalController(TrackerTestController):
         p = M.Project.query.get(shortname='test')
         dummy_tracker = p.app_instance('dummy')
         r = self.app.post('/p/test/bugs/1/move',
-                params={'tracker': str(dummy_tracker.config._id)}).follow()
+                          params={'tracker': str(dummy_tracker.config._id)}).follow()
         assert_equal(r.request.path, '/p/test/dummy/1/')
 
         # comment ticket 2
@@ -2021,7 +2109,8 @@ class TestFunctionalController(TrackerTestController):
         r.forms[2].fields[field_name][0].value = 'Hi there'
         r.forms[2].submit()
 
-        # notification for ticket 2 should reference [test:bugs], not [test:dummy]
+        # notification for ticket 2 should reference [test:bugs], not
+        # [test:dummy]
         n = M.Notification.query.find().all()[0]
         assert_in('[test:bugs]', n.subject)
         assert_in('[test:bugs]', n.reply_to_address)
@@ -2029,18 +2118,20 @@ class TestFunctionalController(TrackerTestController):
     @td.with_tool('test2', 'Tickets', 'bugs2')
     def test_move_attachment(self):
         file_name = 'neo-icon-set-454545-256x350.png'
-        file_path = os.path.join(allura.__path__[0],'nf','allura','images',file_name)
+        file_path = os.path.join(
+            allura.__path__[0], 'nf', 'allura', 'images', file_name)
         file_data = file(file_path).read()
         upload = ('attachment', file_name, file_data)
         self.new_ticket(summary='test move attachment')
 
         # attach an image to the ticket
         self.app.post('/bugs/1/update_ticket',
-                      {'summary':'test'},
+                      {'summary': 'test'},
                       upload_files=[upload])
         # attach a txt file to the comment
         r = self.app.get('/p/test/bugs/1/')
-        post_link = str(r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
+        post_link = str(
+            r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
         r = self.app.post(post_link + 'attach',
                           upload_files=[('file_info', 'test.txt', 'test')])
         # move ticket
@@ -2052,10 +2143,14 @@ class TestFunctionalController(TrackerTestController):
         attachs = r.html.findAll('div', attrs={'class': 'attachment_thumb'})
         ta = str(attachs[1])  # ticket's attachments
         ca = str(attachs[2])  # comment's attachments
-        assert_in('<a href="/p/test2/bugs2/1/attachment/neo-icon-set-454545-256x350.png"', ta)
-        assert_in('<img src="/p/test2/bugs2/1/attachment/neo-icon-set-454545-256x350.png/thumb"', ta)
+        assert_in(
+            '<a href="/p/test2/bugs2/1/attachment/neo-icon-set-454545-256x350.png"', ta)
+        assert_in(
+            '<img src="/p/test2/bugs2/1/attachment/neo-icon-set-454545-256x350.png/thumb"', ta)
         p = M.Post.query.find().sort('timestamp', 1).first()
-        assert_in('<a href="/p/test2/bugs2/_discuss/thread/%s/%s/attachment/test.txt"' % (p.thread_id, p.slug), ca)
+        assert_in(
+            '<a href="/p/test2/bugs2/_discuss/thread/%s/%s/attachment/test.txt"' %
+            (p.thread_id, p.slug), ca)
         for attach in M.BaseAttachment.query.find():
             assert_equal(attach.app_config_id, bugs2.config._id)
             if attach.attachment_type == 'DiscussionAttachment':
@@ -2079,7 +2174,7 @@ class TestFunctionalController(TrackerTestController):
         p = M.Project.query.get(shortname='test')
         dummy_tracker = p.app_instance('dummy')
         r = self.app.post('/p/test/bugs/1/move',
-                params={'tracker': str(dummy_tracker.config._id)}).follow()
+                          params={'tracker': str(dummy_tracker.config._id)}).follow()
         assert_equal(r.request.path, '/p/test/dummy/1/')
         assert_in('I am comment', r)
 
@@ -2089,7 +2184,8 @@ class TestFunctionalController(TrackerTestController):
         self.new_ticket(summary='a', labels='tag1,tag2')
         self.new_ticket(summary='b', labels='tag2')
         self.new_ticket(summary='c', labels='42cc,test')
-        # Testing only empty 'term', because mim doesn't support aggregation calls
+        # Testing only empty 'term', because mim doesn't support aggregation
+        # calls
         r = self.app.get('/p/test/bugs/tags')
         assert_equal(json.loads(r.body), [])
         r = self.app.get('/p/test/bugs/tags?term=')
@@ -2104,47 +2200,57 @@ class TestFunctionalController(TrackerTestController):
         inputs = f.findAll('input')
         for field in inputs:
             if field.has_key('name'):
-                params[field['name']] = field.has_key('value') and field['value'] or ''
+                params[field['name']] = field.has_key(
+                    'value') and field['value'] or ''
         params[f.find('textarea')['name']] = 'test comment'
         self.app.post(f['action'].encode('utf-8'), params=params,
-                          headers={'Referer': '/bugs/1/'.encode("utf-8")})
+                      headers={'Referer': '/bugs/1/'.encode("utf-8")})
         r = self.app.get('/bugs/1/', dict(page=1))
-        post_link = str(r.html.find('div', {'class':'edit_post_form reply'}).find('form')['action'])
+        post_link = str(
+            r.html.find('div', {'class': 'edit_post_form reply'}).find('form')['action'])
         self.app.post(post_link + 'attach',
-                          upload_files=[('file_info', 'test.txt', 'test attach')])
+                      upload_files=[('file_info', 'test.txt', 'test attach')])
         r = self.app.get('/p/test/bugs/1/')
         discussion_url = r.html.findAll('form')[-1]['action'][:-4]
         r = self.app.get('/rest/p/test/bugs/1/')
         r = json.loads(r.body)
-        assert_equal(r['ticket']['discussion_thread_url'],'http://localhost/rest%s' % discussion_url)
+        assert_equal(r['ticket']['discussion_thread_url'],
+                     'http://localhost/rest%s' % discussion_url)
         slug = r['ticket']['discussion_thread']['posts'][0]['slug']
-        assert_equal(r['ticket']['discussion_thread']['posts'][0]['attachments'][0]['url'],
-                     'http://localhost%s%s/attachment/test.txt' % (discussion_url, slug))
-        assert_equal(r['ticket']['discussion_thread']['posts'][0]['attachments'][0]['bytes'], 11)
+        assert_equal(
+            r['ticket']['discussion_thread'][
+                'posts'][0]['attachments'][0]['url'],
+            'http://localhost%s%s/attachment/test.txt' % (discussion_url, slug))
+        assert_equal(r['ticket']['discussion_thread']['posts']
+                     [0]['attachments'][0]['bytes'], 11)
 
         file_name = 'test_root.py'
         file_data = file(__file__).read()
         upload = ('attachment', file_name, file_data)
-        r = self.app.post('/bugs/1/update_ticket',{
-            'summary':'test rest attach'
+        r = self.app.post('/bugs/1/update_ticket', {
+            'summary': 'test rest attach'
         }, upload_files=[upload]).follow()
         r = self.app.get('/rest/p/test/bugs/1/')
         r = json.loads(r.body)
-        assert_equal(r['ticket']['attachments'][0]['url'], 'http://localhost/p/test/bugs/1/attachment/test_root.py')
+        assert_equal(r['ticket']['attachments'][0]['url'],
+                     'http://localhost/p/test/bugs/1/attachment/test_root.py')
 
     def test_html_escaping(self):
         with mock.patch.object(mail_tasks.smtp_client, '_client') as _client:
-            self.new_ticket(summary='test <h2> ticket', status='open', _milestone='2.0')
+            self.new_ticket(summary='test <h2> ticket',
+                            status='open', _milestone='2.0')
             ThreadLocalORMSession.flush_all()
             M.MonQTask.run_ready()
             ThreadLocalORMSession.flush_all()
-            email = M.MonQTask.query.find(dict(task_name='allura.tasks.mail_tasks.sendmail')).first()
-            assert_equal(email.kwargs.subject, '[test:bugs] #1 test <h2> ticket')
+            email = M.MonQTask.query.find(
+                dict(task_name='allura.tasks.mail_tasks.sendmail')).first()
+            assert_equal(email.kwargs.subject,
+                         '[test:bugs] #1 test <h2> ticket')
             text = email.kwargs.text
             assert '** [bugs:#1] test &lt;h2&gt; ticket**' in text
             mail_tasks.sendmail(
                 fromaddr=str(c.user._id),
-                destinations=[ str(c.user._id) ],
+                destinations=[str(c.user._id)],
                 text=text,
                 reply_to=u'noreply@sf.net',
                 subject=email.kwargs.subject,
@@ -2156,8 +2262,8 @@ class TestFunctionalController(TrackerTestController):
             assert '<p><strong> <a class="alink" href="http://localhost/p/test/bugs/1/">[bugs:#1]</a> test &lt;h2&gt; ticket</strong></p>' in body
 
 
-
 class TestMilestoneAdmin(TrackerTestController):
+
     def _post(self, params, **kw):
         params['open_status_names'] = 'aa bb'
         params['closed_status_names'] = 'cc'
@@ -2171,7 +2277,7 @@ class TestMilestoneAdmin(TrackerTestController):
                  show_in_search='on',
                  type='milestone',
                  milestones=[
-                    dict((k, v) for k, v in d.iteritems()) for d in mf['milestones']])
+                     dict((k, v) for k, v in d.iteritems()) for d in mf['milestones']])
             for mf in milestones]}
         return self._post(params)
 
@@ -2187,7 +2293,7 @@ class TestMilestoneAdmin(TrackerTestController):
             dict(label='releases', milestones=[dict(name='1.0/beta')])
         ])
         self.new_ticket(summary='test new milestone',
-                        **{'custom_fields._releases':'1.0-beta'})
+                        **{'custom_fields._releases': '1.0-beta'})
         assert tm.Ticket.query.find({
             'custom_fields._releases': '1.0-beta'}).count() == 1
         r = self._post_milestones([])
@@ -2201,7 +2307,7 @@ class TestMilestoneAdmin(TrackerTestController):
             dict(label='releases', milestones=[dict(name='1.0/beta')])
         ])
         self.new_ticket(summary='test new milestone',
-                        **{'custom_fields._releases':'1.0-beta'})
+                        **{'custom_fields._releases': '1.0-beta'})
         r = self._post_milestones([
             dict(label='versions', milestones=[dict(name='1.0/beta')])
         ])
@@ -2210,7 +2316,7 @@ class TestMilestoneAdmin(TrackerTestController):
         assert '1.0-beta' in r
         # TODO: This doesn't work - need to make milestone custom fields
         #       renameable.
-        #assert tm.Ticket.query.find({
+        # assert tm.Ticket.query.find({
         #    'custom_fields._versions': '1.0-beta'}).count() == 1
 
     def test_create_milestone(self):
@@ -2229,7 +2335,7 @@ class TestMilestoneAdmin(TrackerTestController):
             dict(label='releases', milestones=[dict(name='1.0/beta')])
         ])
         self.new_ticket(summary='test new milestone',
-                        **{'custom_fields._releases':'1.0-beta'})
+                        **{'custom_fields._releases': '1.0-beta'})
         assert tm.Ticket.query.find({
             'custom_fields._releases': '1.0-beta'}).count() == 1
         r = self._post_milestones([
@@ -2245,7 +2351,7 @@ class TestMilestoneAdmin(TrackerTestController):
             dict(label='releases', milestones=[dict(name='1.0')])
         ])
         self.new_ticket(summary='test new milestone',
-                        **{'custom_fields._releases':'1.0'})
+                        **{'custom_fields._releases': '1.0'})
         r = self._post_milestones([
             dict(label='releases', milestones=[
                 dict(name='1.1', old_name='1.0')])
@@ -2258,13 +2364,16 @@ class TestMilestoneAdmin(TrackerTestController):
         assert tm.Ticket.query.find({
             'custom_fields._releases': '1.1'}).count() == 1
 
+
 def post_install_hook(app):
     role_anon = M.ProjectRole.by_name('*anonymous')._id
     app.config.acl.append(M.ACE.allow(role_anon, 'post'))
     app.config.acl.append(M.ACE.allow(role_anon, 'create'))
     app.config.acl.append(M.ACE.allow(role_anon, 'update'))
 
+
 class TestEmailMonitoring(TrackerTestController):
+
     def __init__(self):
         super(TestEmailMonitoring, self).__init__()
         self.test_email = 'mailinglist@example.com'
@@ -2273,7 +2382,7 @@ class TestEmailMonitoring(TrackerTestController):
         r = self.app.post('/admin/bugs/set_options', params={
             'TicketMonitoringEmail': self.test_email,
             'TicketMonitoringType': monitoring_type,
-            })
+        })
         return r
 
     def test_set_options(self):
@@ -2290,11 +2399,12 @@ class TestEmailMonitoring(TrackerTestController):
     def test_notifications_moderators(self, is_spam, send_direct):
         is_spam.return_value = True
         self.new_ticket(summary='test moderation', mount_point='/doc-bugs/')
-        self.app.post('/doc-bugs/1/update_ticket',{
-            'summary':'test moderation',
-            'comment':'test unmoderated post'
+        self.app.post('/doc-bugs/1/update_ticket', {
+            'summary': 'test moderation',
+            'comment': 'test unmoderated post'
         }, extra_environ=dict(username='*anonymous'))
-        send_direct.assert_called_with(str(M.User.query.get(username='test-admin')._id))
+        send_direct.assert_called_with(
+            str(M.User.query.get(username='test-admin')._id))
 
     @patch('forgetracker.model.ticket.Notification.send_simple')
     def test_notifications_new(self, send_simple):
@@ -2347,14 +2457,14 @@ class TestEmailMonitoring(TrackerTestController):
         send_simple.assert_called_once_with(self.test_email)
         send_simple.reset_mock()
         self.app.post('/bugs/1/update_ticket', {
-                'summary': 'test',
-                'description': 'update 1'})
+            'summary': 'test',
+            'description': 'update 1'})
         send_simple.assert_called_once_with(self.test_email)
         send_simple.reset_mock()
         self.app.post('/bugs/1/update_ticket', {
-                'summary': 'test',
-                'description': 'update 2',
-                'private': '1'})
+            'summary': 'test',
+            'description': 'update 2',
+            'private': '1'})
         assert not send_simple.called
 
     @patch('forgetracker.tracker_main.M.Notification.send_simple')
@@ -2366,7 +2476,8 @@ class TestEmailMonitoring(TrackerTestController):
         p.notifications_disabled = True
         self._set_options()
         with patch.object(M.Project.query, 'get') as get:
-            get.side_effect = lambda *a,**k: None if 'bugs' in k.get('shortname', '') else p
+            get.side_effect = lambda * \
+                a, **k: None if 'bugs' in k.get('shortname', '') else p
             self.new_ticket(summary='test')
         assert send_simple.call_count == 0, send_simple.call_count
 
@@ -2376,11 +2487,16 @@ class TestEmailMonitoring(TrackerTestController):
         self.new_ticket(summary='test')
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
-        email_tasks = M.MonQTask.query.find(dict(task_name='allura.tasks.mail_tasks.sendsimplemail')).all()
-        assert 'Sent from sourceforge.net because mailinglist@example.com is subscribed to http://localhost/p/test/bugs/' in email_tasks[0].kwargs['text']
-        assert 'a project admin can change settings at http://localhost/p/test/admin/bugs/options' in email_tasks[0].kwargs['text']
+        email_tasks = M.MonQTask.query.find(
+            dict(task_name='allura.tasks.mail_tasks.sendsimplemail')).all()
+        assert 'Sent from sourceforge.net because mailinglist@example.com is subscribed to http://localhost/p/test/bugs/' in email_tasks[
+            0].kwargs['text']
+        assert 'a project admin can change settings at http://localhost/p/test/admin/bugs/options' in email_tasks[
+            0].kwargs['text']
+
 
 class TestCustomUserField(TrackerTestController):
+
     def setUp(self):
         super(TestCustomUserField, self).setUp()
         params = dict(
@@ -2389,20 +2505,21 @@ class TestCustomUserField(TrackerTestController):
                      show_in_search='on')],
             open_status_names='aa bb',
             closed_status_names='cc',
-            )
+        )
         self.app.post(
             '/admin/bugs/set_custom_fields',
             params=variable_encode(params))
 
     def test_blank_user(self):
         kw = {'custom_fields._code_review': ''}
-        ticket_view = self.new_ticket(summary='test custom fields', **kw).follow()
+        ticket_view = self.new_ticket(
+            summary='test custom fields', **kw).follow()
         # summary header shows 'nobody'
         assert ticket_view.html.findAll('label', '

<TRUNCATED>

[35/36] git commit: [#6484] ticket:492 Move trac_export script to tracwikiimporter

Posted by jo...@apache.org.
[#6484] ticket:492 Move trac_export script to tracwikiimporter


Project: http://git-wip-us.apache.org/repos/asf/incubator-allura/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-allura/commit/303512a1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-allura/tree/303512a1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-allura/diff/303512a1

Branch: refs/heads/cj/6484
Commit: 303512a1abd0eb0523042b1368afabefe11c40b4
Parents: 2050da0
Author: Igor Bondarenko <je...@gmail.com>
Authored: Thu Jan 2 11:03:56 2014 +0200
Committer: Cory Johns <cj...@slashdotmedia.com>
Committed: Fri Jan 10 18:58:37 2014 +0000

----------------------------------------------------------------------
 Allura/allura/scripts/trac_export.py            | 331 -------------------
 .../forgeimporters/trac/tests/test_tickets.py   |   5 +-
 ForgeImporters/forgeimporters/trac/tickets.py   |   2 +-
 scripts/trac_export.py                          |   2 +-
 4 files changed, 5 insertions(+), 335 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/303512a1/Allura/allura/scripts/trac_export.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/trac_export.py b/Allura/allura/scripts/trac_export.py
deleted file mode 100644
index f5cd7c3..0000000
--- a/Allura/allura/scripts/trac_export.py
+++ /dev/null
@@ -1,331 +0,0 @@
-#!/usr/bin/env python
-
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import logging
-import sys
-import csv
-import urlparse
-import urllib2
-import json
-import time
-import re
-from optparse import OptionParser
-from itertools import islice
-
-import feedparser
-from BeautifulSoup import BeautifulSoup, NavigableString
-import dateutil.parser
-import pytz
-
-try:
-    from forgeimporters.base import ProjectExtractor
-    urlopen = ProjectExtractor.urlopen
-except ImportError:
-    try:
-        from allura.lib.helpers import urlopen
-    except ImportError:
-        from urllib2 import urlopen
-
-log = logging.getLogger(__name__)
-
-
-def parse_options():
-    optparser = OptionParser(usage=''' %prog <Trac URL>
-
-Export ticket data from a Trac instance''')
-    optparser.add_option('-o', '--out-file', dest='out_filename',
-                         help='Write to file (default stdout)')
-    optparser.add_option('--no-attachments', dest='do_attachments',
-                         action='store_false', default=True, help='Export attachment info')
-    optparser.add_option('--only-tickets', dest='only_tickets',
-                         action='store_true', help='Export only ticket list')
-    optparser.add_option('--start', dest='start_id', type='int', default=1,
-                         help='Start with given ticket number (or next accessible)')
-    optparser.add_option('--limit', dest='limit', type='int',
-                         default=None, help='Limit number of tickets')
-    optparser.add_option('-v', '--verbose', dest='verbose',
-                         action='store_true', help='Verbose operation')
-    options, args = optparser.parse_args()
-    if len(args) != 1:
-        optparser.error("Wrong number of arguments.")
-    return options, args
-
-
-class TracExport(object):
-
-    PAGE_SIZE = 100
-    TICKET_URL = 'ticket/%d'
-    QUERY_MAX_ID_URL = 'query?col=id&order=id&desc=1&max=2'
-    QUERY_BY_PAGE_URL = 'query?col=id&col=time&col=changetime&order=id&max=' + \
-        str(PAGE_SIZE) + '&page=%d'
-    ATTACHMENT_LIST_URL = 'attachment/ticket/%d/'
-    ATTACHMENT_URL = 'raw-attachment/ticket/%d/%s'
-
-    FIELD_MAP = {
-        'reporter': 'submitter',
-        'owner': 'assigned_to',
-    }
-
-    def __init__(self, base_url, start_id=1, verbose=False, do_attachments=True):
-        """start_id - start with at least that ticket number (actual returned
-                      ticket may have higher id if we don't have access to exact
-                      one).
-        """
-        self.base_url = base_url.rstrip('/') + '/'
-        # Contains additional info for a ticket which cannot
-        # be get with single-ticket export (create/mod times is
-        # and example).
-        self.ticket_map = {}
-        self.start_id = start_id
-        self.page = (start_id - 1) / self.PAGE_SIZE + 1
-        self.verbose = verbose
-        self.do_attachments = do_attachments
-        self.exhausted = False
-        self.ticket_queue = self.next_ticket_ids()
-
-    def remap_fields(self, dict):
-        "Remap fields to adhere to standard taxonomy."
-        out = {}
-        for k, v in dict.iteritems():
-            out[self.FIELD_MAP.get(k, k)] = v
-
-        out['id'] = int(out['id'])
-        if 'private' in out:
-            out['private'] = bool(int(out['private']))
-        return out
-
-    def full_url(self, suburl, type=None):
-        url = urlparse.urljoin(self.base_url, suburl)
-        if type is None:
-            return url
-        glue = '&' if '?' in suburl else '?'
-        return url + glue + 'format=' + type
-
-    def log_url(self, url):
-        log.info(url)
-        if self.verbose:
-            print >>sys.stderr, url
-
-    @classmethod
-    def trac2z_date(cls, s):
-        d = dateutil.parser.parse(s)
-        d = d.astimezone(pytz.UTC)
-        return d.strftime("%Y-%m-%dT%H:%M:%SZ")
-
-    @staticmethod
-    def match_pattern(regexp, string):
-        m = re.match(regexp, string)
-        assert m
-        return m.group(1)
-
-    def csvopen(self, url):
-        self.log_url(url)
-        f = urlopen(url)
-        # Trac doesn't throw 403 error, just shows normal 200 HTML page
-        # telling that access denied. So, we'll emulate 403 ourselves.
-        # TODO: currently, any non-csv result treated as 403.
-        if not f.info()['Content-Type'].startswith('text/csv'):
-            raise urllib2.HTTPError(
-                url, 403, 'Forbidden - emulated', f.info(), f)
-        return f
-
-    def parse_ticket(self, id):
-        # Use CSV export to get ticket fields
-        url = self.full_url(self.TICKET_URL % id, 'csv')
-        f = self.csvopen(url)
-        reader = csv.DictReader(f)
-        ticket_fields = reader.next()
-        ticket_fields['class'] = 'ARTIFACT'
-        ticket = self.remap_fields(ticket_fields)
-
-        # Use HTML export to get ticket description and comments
-        import html2text
-        html2text.BODY_WIDTH = 0
-        url = self.full_url(self.TICKET_URL % id)
-        self.log_url(url)
-        d = BeautifulSoup(urlopen(url))
-        self.clean_missing_wiki_links(d)
-        desc = d.find('div', 'description').find('div', 'searchable')
-        ticket['description'] = html2text.html2text(
-            desc.renderContents('utf8').decode('utf8')) if desc else ''
-        comments = []
-        for comment in d.findAll('form', action='#comment'):
-            c = {}
-            c['submitter'] = re.sub(
-                r'.* by ', '', comment.find('h3', 'change').text).strip()
-            c['date'] = self.trac2z_date(
-                comment.find('a', 'timeline')['title'].replace(' in Timeline', ''))
-            changes = unicode(comment.find('ul', 'changes') or '')
-            body = comment.find('div', 'comment')
-            body = body.renderContents('utf8').decode('utf8') if body else ''
-            c['comment'] = html2text.html2text(changes + body)
-            c['class'] = 'COMMENT'
-            comments.append(c)
-        ticket['comments'] = comments
-        return ticket
-
-    def parse_ticket_attachments(self, id):
-        SIZE_PATTERN = r'(\d+) bytes'
-        TIMESTAMP_PATTERN = r'(.+) in Timeline'
-        # Scrape HTML to get ticket attachments
-        url = self.full_url(self.ATTACHMENT_LIST_URL % id)
-        self.log_url(url)
-        f = urlopen(url)
-        soup = BeautifulSoup(f)
-        attach = soup.find('div', id='attachments')
-        list = []
-        while attach:
-            attach = attach.findNext('dt')
-            if not attach:
-                break
-            d = {}
-            d['filename'] = attach.a['href'].rsplit('/', 1)[1]
-            d['url'] = self.full_url(self.ATTACHMENT_URL % (id, d['filename']))
-            size_s = attach.span['title']
-            d['size'] = int(self.match_pattern(SIZE_PATTERN, size_s))
-            timestamp_s = attach.find('a', {'class': 'timeline'})['title']
-            d['date'] = self.trac2z_date(
-                self.match_pattern(TIMESTAMP_PATTERN, timestamp_s))
-            d['by'] = attach.find(
-                text=re.compile('added by')).nextSibling.renderContents()
-            d['description'] = ''
-            # Skip whitespace
-            while attach.nextSibling and type(attach.nextSibling) is NavigableString:
-                attach = attach.nextSibling
-            # if there's a description, there will be a <dd> element,
-            # otherwise the next <dt> follows immediately
-            if attach.nextSibling and attach.nextSibling.name == 'dd':
-                desc_el = attach.nextSibling
-                if desc_el:
-                    # TODO: Convert to Allura link syntax as needed
-                    d['description'] = ''.join(
-                        desc_el.findAll(text=True)).strip()
-            list.append(d)
-        return list
-
-    def get_max_ticket_id(self):
-        url = self.full_url(self.QUERY_MAX_ID_URL, 'csv')
-        f = self.csvopen(url)
-        reader = csv.DictReader(f)
-        fields = reader.next()
-        print fields
-        return int(fields['id'])
-
-    def get_ticket(self, id, extra={}):
-        '''Get ticket with given id
-        extra: extra fields to add to ticket (parsed elsewhere)
-        '''
-        t = self.parse_ticket(id)
-        if self.do_attachments:
-            atts = self.parse_ticket_attachments(id)
-            if atts:
-                t['attachments'] = atts
-        t.update(extra)
-        return t
-
-    def next_ticket_ids(self):
-        'Go thru ticket list and collect available ticket ids.'
-        # We could just do CSV export, which by default dumps entire list
-        # Alas, for many busy servers with long ticket list, it will just
-        # time out. So, let's paginate it instead.
-        res = []
-
-        url = self.full_url(self.QUERY_BY_PAGE_URL % self.page, 'csv')
-        try:
-            f = self.csvopen(url)
-        except urllib2.HTTPError, e:
-            if 'emulated' in e.msg:
-                body = e.fp.read()
-                if 'beyond the number of pages in the query' in body or 'Log in with a SourceForge account' in body:
-                    raise StopIteration
-            raise
-        reader = csv.reader(f)
-        cols = reader.next()
-        for r in reader:
-            if r and r[0].isdigit():
-                id = int(r[0])
-                extra = {'date': self.trac2z_date(
-                    r[1]), 'date_updated': self.trac2z_date(r[2])}
-                res.append((id, extra))
-        self.page += 1
-
-        if len(res) < self.PAGE_SIZE:
-            self.exhausted = True
-
-        return res
-
-    def __iter__(self):
-        return self
-
-    def next(self):
-        while True:
-            # queue empty, try to fetch more
-            if len(self.ticket_queue) == 0 and not self.exhausted:
-                self.ticket_queue = self.next_ticket_ids()
-            # there aren't any more, we're really done
-            if len(self.ticket_queue) == 0:
-                raise StopIteration
-            id, extra = self.ticket_queue.pop(0)
-            if id >= self.start_id:
-                break
-        return self.get_ticket(id, extra)
-
-    def clean_missing_wiki_links(self, doc):
-        for link in doc.findAll('a', 'missing wiki'):
-            link.string = link.string.rstrip('?')
-
-
-class DateJSONEncoder(json.JSONEncoder):
-
-    def default(self, obj):
-        if isinstance(obj, time.struct_time):
-            return time.strftime('%Y-%m-%dT%H:%M:%SZ', obj)
-        return json.JSONEncoder.default(self, obj)
-
-
-def export(url, start_id=1, verbose=False, do_attachments=True,
-           only_tickets=False, limit=None):
-    ex = TracExport(url, start_id=start_id,
-                    verbose=verbose, do_attachments=do_attachments)
-
-    doc = [t for t in islice(ex, limit)]
-
-    if not only_tickets:
-        doc = {
-            'class': 'PROJECT',
-            'trackers': {'default': {'artifacts': doc}}
-        }
-    return doc
-
-
-def main():
-    options, args = parse_options()
-    doc = export(args[0], **vars(options))
-
-    out_file = sys.stdout
-    if options.out_filename:
-        out_file = open(options.out_filename, 'w')
-    out_file.write(
-        json.dumps(doc, cls=DateJSONEncoder, indent=2, sort_keys=True))
-    # It's bad habit not to terminate lines
-    out_file.write('\n')
-
-
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/303512a1/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/tests/test_tickets.py b/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
index 7ddb729..18a88ea 100644
--- a/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
+++ b/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
@@ -238,15 +238,16 @@ class TestTracImportSupportFunctional(TestRestApiBase, TestCase):
 
     @with_tracker
     @skipif(module_not_available('html2text'))
+    @skipif(module_not_available('tracwikiimporter'))
     def test_list(self):
-        from allura.scripts.trac_export import TracExport, DateJSONEncoder
+        from tracwikiimporter.scripts.trac_export import TracExport, DateJSONEncoder
         csv_fp = open(os.path.dirname(__file__) + '/data/test-list.csv')
         html_fp = open(os.path.dirname(__file__) + '/data/test-list.html')
         with patch.object(TracExport, 'next_ticket_ids', return_value=[(390, {})]):
             te = TracExport('url', do_attachments=False)
             te.exhausted = True
             te.csvopen = lambda s: csv_fp
-        with patch('allura.scripts.trac_export.urlopen', return_value=html_fp):
+        with patch('tracwikiimporter.scripts.trac_export.urlopen', return_value=html_fp):
             json_data = {
                 'class': 'PROJECT',
                 'trackers': {'default': {'artifacts': list(te)}},

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/303512a1/ForgeImporters/forgeimporters/trac/tickets.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/tickets.py b/ForgeImporters/forgeimporters/trac/tickets.py
index c2c28c1..231ce71 100644
--- a/ForgeImporters/forgeimporters/trac/tickets.py
+++ b/ForgeImporters/forgeimporters/trac/tickets.py
@@ -39,7 +39,7 @@ from allura.lib.decorators import require_post
 from allura.lib import validators as v
 from allura.lib import helpers as h
 from allura.model import AuditLog
-from allura.scripts.trac_export import (
+from tracwikiimporter.scripts.trac_export import (
     export,
     DateJSONEncoder,
 )

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/303512a1/scripts/trac_export.py
----------------------------------------------------------------------
diff --git a/scripts/trac_export.py b/scripts/trac_export.py
index ac90b17..576d99c 100755
--- a/scripts/trac_export.py
+++ b/scripts/trac_export.py
@@ -18,5 +18,5 @@
 #       under the License.
 
 if __name__ == '__main__':
-    from allura.scripts.trac_export import main
+    from tracwikiimporter.scripts.trac_export import main
     main()
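
For reference, a minimal sketch of calling the relocated exporter directly,
assuming the tracwikiimporter package is installed (the commit above makes it
the new home of trac_export). The Trac URL below is a placeholder; export()
keeps the signature it had under allura.scripts:

    import json

    from tracwikiimporter.scripts.trac_export import export, DateJSONEncoder

    # Export a handful of tickets from a hypothetical Trac instance,
    # skipping attachment scraping to keep the run short.
    doc = export('http://trac.example.org/myproject/',
                 start_id=1, do_attachments=False, limit=5)

    # DateJSONEncoder serializes time.struct_time values, as in the old main().
    print json.dumps(doc, cls=DateJSONEncoder, indent=2, sort_keys=True)

The thin scripts/trac_export.py wrapper shown in the last hunk keeps the old
command-line entry point working by delegating to the package's main().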


[17/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_discussion.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_discussion.py b/Allura/allura/tests/model/test_discussion.py
index 51a13ee..6a7878f 100644
--- a/Allura/allura/tests/model/test_discussion.py
+++ b/Allura/allura/tests/model/test_discussion.py
@@ -41,6 +41,7 @@ from allura.lib import helpers as h
 from allura.tests import TestController
 from alluratest.controller import setup_global_objects
 
+
 def setUp():
     controller = TestController()
     controller.setUp()
@@ -55,6 +56,7 @@ def setUp():
 def tearDown():
     ThreadLocalORMSession.close_all()
 
+
 @with_setup(setUp, tearDown)
 def test_discussion_methods():
     d = M.Discussion(shortname='test', name='test')
@@ -75,6 +77,7 @@ def test_discussion_methods():
     ThreadLocalORMSession.flush_all()
     ThreadLocalORMSession.close_all()
 
+
 @with_setup(setUp, tearDown)
 def test_thread_methods():
     d = M.Discussion(shortname='test', name='test')
@@ -120,6 +123,7 @@ def test_thread_methods():
     assert len(t.find_posts()) == 0
     t.delete()
 
+
 @with_setup(setUp, tearDown)
 def test_thread_new():
     with mock.patch('allura.model.discuss.h.nonce') as nonce:
@@ -137,6 +141,7 @@ def test_thread_new():
         assert_equals(t1_2.subject, 'Test Thread One')
         assert_equals(t2_2.subject, 'Test Thread Two')
 
+
 @with_setup(setUp, tearDown)
 def test_post_methods():
     d = M.Discussion(shortname='test', name='test')
@@ -170,20 +175,21 @@ def test_post_methods():
     p.delete()
     assert t.num_replies == 0
 
+
 @with_setup(setUp, tearDown)
 def test_attachment_methods():
     d = M.Discussion(shortname='test', name='test')
     t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
     p = t.post('This is a post')
     p_att = p.attach('foo.text', StringIO('Hello, world!'),
-                discussion_id=d._id,
-                thread_id=t._id,
-                post_id=p._id)
+                     discussion_id=d._id,
+                     thread_id=t._id,
+                     post_id=p._id)
     t_att = p.attach('foo2.text', StringIO('Hello, thread!'),
-                discussion_id=d._id,
-                thread_id=t._id)
+                     discussion_id=d._id,
+                     thread_id=t._id)
     d_att = p.attach('foo3.text', StringIO('Hello, discussion!'),
-                discussion_id=d._id)
+                     discussion_id=d._id)
 
     ThreadLocalORMSession.flush_all()
     assert p_att.post == p
@@ -196,27 +202,29 @@ def test_attachment_methods():
     # Test notification in mail
     t = M.Thread.new(discussion_id=d._id, subject='Test comment notification')
     fs = FieldStorage()
-    fs.name='file_info'
-    fs.filename='fake.txt'
+    fs.name = 'file_info'
+    fs.filename = 'fake.txt'
     fs.type = 'text/plain'
-    fs.file=StringIO('this is the content of the fake file\n')
-    p = t.post(text=u'test message', forum= None, subject= '', file_info=fs)
+    fs.file = StringIO('this is the content of the fake file\n')
+    p = t.post(text=u'test message', forum=None, subject='', file_info=fs)
     ThreadLocalORMSession.flush_all()
-    n = M.Notification.query.get(subject=u'[test:wiki] Test comment notification')
+    n = M.Notification.query.get(
+        subject=u'[test:wiki] Test comment notification')
     assert '\nAttachment: fake.txt (37 Bytes; text/plain)' in n.text
 
+
 @with_setup(setUp, tearDown())
 def test_multiple_attachments():
     test_file1 = FieldStorage()
     test_file1.name = 'file_info'
     test_file1.filename = 'test1.txt'
     test_file1.type = 'text/plain'
-    test_file1.file=StringIO('test file1\n')
+    test_file1.file = StringIO('test file1\n')
     test_file2 = FieldStorage()
     test_file2.name = 'file_info'
     test_file2.filename = 'test2.txt'
     test_file2.type = 'text/plain'
-    test_file2.file=StringIO('test file2\n')
+    test_file2.file = StringIO('test file2\n')
     d = M.Discussion(shortname='test', name='test')
     t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
     test_post = t.post('test post')
@@ -227,13 +235,14 @@ def test_multiple_attachments():
     assert 'test1.txt' in [attaches[0].filename, attaches[1].filename]
     assert 'test2.txt' in [attaches[0].filename, attaches[1].filename]
 
+
 @with_setup(setUp, tearDown)
 def test_add_attachment():
     test_file = FieldStorage()
     test_file.name = 'file_info'
     test_file.filename = 'test.txt'
     test_file.type = 'text/plain'
-    test_file.file=StringIO('test file\n')
+    test_file.file = StringIO('test file\n')
     d = M.Discussion(shortname='test', name='test')
     t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
     test_post = t.post('test post')
@@ -244,6 +253,7 @@ def test_add_attachment():
     assert attach.filename == 'test.txt', attach.filename
     assert attach.content_type == 'text/plain', attach.content_type
 
+
 def test_notification_two_attaches():
     d = M.Discussion(shortname='test', name='test')
     t = M.Thread.new(discussion_id=d._id, subject='Test comment notification')
@@ -259,18 +269,20 @@ def test_notification_two_attaches():
     fs2.file = StringIO('this is the content of the fake file\n')
     t.post(text=u'test message', forum=None, subject='', file_info=[fs1, fs2])
     ThreadLocalORMSession.flush_all()
-    n = M.Notification.query.get(subject=u'[test:wiki] Test comment notification')
+    n = M.Notification.query.get(
+        subject=u'[test:wiki] Test comment notification')
     assert '\nAttachment: fake.txt (37 Bytes; text/plain)  fake2.txt (37 Bytes; text/plain)' in n.text
 
+
 @with_setup(setUp, tearDown)
 def test_discussion_delete():
     d = M.Discussion(shortname='test', name='test')
     t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
     p = t.post('This is a post')
     p.attach('foo.text', StringIO(''),
-                discussion_id=d._id,
-                thread_id=t._id,
-                post_id=p._id)
+             discussion_id=d._id,
+             thread_id=t._id,
+             post_id=p._id)
     r = M.ArtifactReference.from_artifact(d)
     rid = d.index_id()
     ThreadLocalORMSession.flush_all()
@@ -278,30 +290,33 @@ def test_discussion_delete():
     ThreadLocalORMSession.flush_all()
     assert_equals(M.ArtifactReference.query.find(dict(_id=rid)).count(), 0)
 
+
 @with_setup(setUp, tearDown)
 def test_thread_delete():
     d = M.Discussion(shortname='test', name='test')
     t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
     p = t.post('This is a post')
     p.attach('foo.text', StringIO(''),
-                discussion_id=d._id,
-                thread_id=t._id,
-                post_id=p._id)
+             discussion_id=d._id,
+             thread_id=t._id,
+             post_id=p._id)
     ThreadLocalORMSession.flush_all()
     t.delete()
 
+
 @with_setup(setUp, tearDown)
 def test_post_delete():
     d = M.Discussion(shortname='test', name='test')
     t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
     p = t.post('This is a post')
     p.attach('foo.text', StringIO(''),
-                discussion_id=d._id,
-                thread_id=t._id,
-                post_id=p._id)
+             discussion_id=d._id,
+             thread_id=t._id,
+             post_id=p._id)
     ThreadLocalORMSession.flush_all()
     p.delete()
 
+
 @with_setup(setUp, tearDown)
 def test_post_permission_check():
     d = M.Discussion(shortname='test', name='test')
@@ -397,16 +412,18 @@ def test_post_notify():
         else:
             assert False, 'send_simple must not be called'
 
+
 @with_setup(setUp, tearDown)
 @patch('allura.model.discuss.c.project.users_with_role')
 def test_is_spam_for_admin(users):
-    users.return_value = [c.user,]
+    users.return_value = [c.user, ]
     d = M.Discussion(shortname='test', name='test')
     t = M.Thread(discussion_id=d._id, subject='Test Thread')
     t.post('This is a post')
     post = M.Post.query.get(text='This is a post')
     assert not t.is_spam(post), t.is_spam(post)
 
+
 @with_setup(setUp, tearDown)
 @patch('allura.model.discuss.c.project.users_with_role')
 def test_is_spam(role):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_filesystem.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_filesystem.py b/Allura/allura/tests/model/test_filesystem.py
index 3b625f7..312807f 100644
--- a/Allura/allura/tests/model/test_filesystem.py
+++ b/Allura/allura/tests/model/test_filesystem.py
@@ -33,10 +33,12 @@ from alluratest.controller import setup_unit_test
 
 
 class File(M.File):
+
     class __mongometa__:
         session = M.session.main_orm_session
 Mapper.compile_all()
 
+
 class TestFile(TestCase):
 
     def setUp(self):
@@ -128,8 +130,8 @@ class TestFile(TestCase):
                 patch('allura.lib.utils.etag_cache') as etag_cache:
             response_body = list(f.serve())
             etag_cache.assert_called_once_with(u'{}?{}'.format(f.filename,
-                f._id.generation_time).encode('utf-8'))
-            assert_equal([ 'test1' ], response_body)
+                                                               f._id.generation_time).encode('utf-8'))
+            assert_equal(['test1'], response_body)
             assert_equal(response.content_type, f.content_type)
             assert 'Content-Disposition' not in response.headers
 
@@ -141,11 +143,11 @@ class TestFile(TestCase):
                 patch('allura.lib.utils.etag_cache') as etag_cache:
             response_body = list(f.serve(embed=False))
             etag_cache.assert_called_once_with(u'{}?{}'.format(f.filename,
-                f._id.generation_time).encode('utf-8'))
-            assert_equal([ 'test1' ], response_body)
+                                                               f._id.generation_time).encode('utf-8'))
+            assert_equal(['test1'], response_body)
             assert_equal(response.content_type, f.content_type)
             assert_equal(response.headers['Content-Disposition'],
-                'attachment;filename="te s\xe0\xad\xae1.txt"')
+                         'attachment;filename="te s\xe0\xad\xae1.txt"')
 
     def test_image(self):
         path = os.path.join(
@@ -154,7 +156,7 @@ class TestFile(TestCase):
             f, t = File.save_image(
                 'user.png',
                 fp,
-                thumbnail_size=(16,16),
+                thumbnail_size=(16, 16),
                 square=True,
                 save_original=True)
         self.session.flush()
@@ -171,7 +173,7 @@ class TestFile(TestCase):
         f, t = File.save_image(
             'file.txt',
             StringIO('blah'),
-            thumbnail_size=(16,16),
+            thumbnail_size=(16, 16),
             square=True,
             save_original=True)
         assert f == None
@@ -181,14 +183,15 @@ class TestFile(TestCase):
         f, t = File.save_image(
             'bogus.png',
             StringIO('bogus data here!'),
-            thumbnail_size=(16,16),
+            thumbnail_size=(16, 16),
             square=True,
             save_original=True)
         assert f == None
         assert t == None
 
     def test_partial_image_as_attachment(self):
-        path = os.path.join(os.path.dirname(__file__), '..', 'data', 'user.png')
+        path = os.path.join(os.path.dirname(__file__),
+                            '..', 'data', 'user.png')
         fp = BytesIO(open(path, 'rb').read(500))
         c.app.config._id = None
         attachment = M.BaseAttachment.save_attachment('user.png', fp,
@@ -198,11 +201,13 @@ class TestFile(TestCase):
         assert_equal(attachment.filename, 'user.png')
 
     def test_attachment_name_encoding(self):
-        path = os.path.join(os.path.dirname(__file__), '..', 'data', 'user.png')
+        path = os.path.join(os.path.dirname(__file__),
+                            '..', 'data', 'user.png')
         fp = open(path, 'rb')
         c.app.config._id = None
-        attachment = M.BaseAttachment.save_attachment(b'Strukturpr\xfcfung.dvi', fp,
-                                                      save_original=True)
+        attachment = M.BaseAttachment.save_attachment(
+            b'Strukturpr\xfcfung.dvi', fp,
+            save_original=True)
         assert type(attachment) != tuple   # tuple is for (img, thumb) pairs
         assert_equal(attachment.filename, u'Strukturpr\xfcfung.dvi')
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_monq.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_monq.py b/Allura/allura/tests/model/test_monq.py
index 217fbd4..4ba2f0b 100644
--- a/Allura/allura/tests/model/test_monq.py
+++ b/Allura/allura/tests/model/test_monq.py
@@ -23,15 +23,17 @@ from ming.orm import ThreadLocalORMSession
 from alluratest.controller import setup_basic_test, setup_global_objects
 from allura import model as M
 
+
 def setUp():
     setup_basic_test()
     ThreadLocalORMSession.close_all()
     setup_global_objects()
     M.MonQTask.query.remove({})
 
+
 @with_setup(setUp)
 def test_basic_task():
-    task = M.MonQTask.post(pprint.pformat, ([5,6],))
+    task = M.MonQTask.post(pprint.pformat, ([5, 6],))
     ThreadLocalORMSession.flush_all()
     ThreadLocalORMSession.close_all()
     task = M.MonQTask.get()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_neighborhood.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_neighborhood.py b/Allura/allura/tests/model/test_neighborhood.py
index 0f77bcc..afc53c6 100644
--- a/Allura/allura/tests/model/test_neighborhood.py
+++ b/Allura/allura/tests/model/test_neighborhood.py
@@ -34,10 +34,12 @@ def setUp():
     setup_basic_test()
     setup_with_tools()
 
+
 @td.with_wiki
 def setup_with_tools():
     setup_global_objects()
 
+
 @with_setup(setUp)
 def test_neighborhood():
     neighborhood = M.Neighborhood.query.get(name='Projects')
@@ -75,7 +77,8 @@ def test_neighborhood():
     for style in styles_list:
         assert test_css_dict[style['name']] == style['value']
         if style['name'] == 'titlebarcolor':
-            assert '<option value="dark" selected="selected">' in style['additional']
+            assert '<option value="dark" selected="selected">' in style[
+                'additional']
 
     # Check neighborhood custom css showing
     neighborhood.features['css'] = 'none'

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_notification.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_notification.py b/Allura/allura/tests/model/test_notification.py
index 78993ae..d5f0a14 100644
--- a/Allura/allura/tests/model/test_notification.py
+++ b/Allura/allura/tests/model/test_notification.py
@@ -32,6 +32,7 @@ from allura.lib import helpers as h
 from allura.tests import decorators as td
 from forgewiki import model as WM
 
+
 class TestNotification(unittest.TestCase):
 
     def setUp(self):
@@ -45,7 +46,7 @@ class TestNotification(unittest.TestCase):
         _clear_notifications()
         ThreadLocalORMSession.flush_all()
         ThreadLocalORMSession.close_all()
-        M.notification.MAILBOX_QUIESCENT=None # disable message combining
+        M.notification.MAILBOX_QUIESCENT = None  # disable message combining
 
     def test_subscribe_unsubscribe(self):
         M.Mailbox.subscribe(type='direct')
@@ -56,7 +57,7 @@ class TestNotification(unittest.TestCase):
             app_config_id=c.app.config._id,
             user_id=c.user._id)).all()
         assert len(subscriptions) == 1
-        assert subscriptions[0].type=='direct'
+        assert subscriptions[0].type == 'direct'
         assert M.Mailbox.query.find().count() == 1
         M.Mailbox.unsubscribe()
         ThreadLocalORMSession.flush_all()
@@ -74,28 +75,28 @@ class TestNotification(unittest.TestCase):
         wiki = c.project.app_instance('wiki')
         page = WM.Page.query.get(app_config_id=wiki.config._id)
         notification = M.Notification(
-                _id='_id',
-                ref=page.ref,
-                from_address='from_address',
-                reply_to_address='reply_to_address',
-                in_reply_to='in_reply_to',
-                references=['a'],
-                subject='subject',
-                text='text',
-            )
+            _id='_id',
+            ref=page.ref,
+            from_address='from_address',
+            reply_to_address='reply_to_address',
+            in_reply_to='in_reply_to',
+            references=['a'],
+            subject='subject',
+            text='text',
+        )
         notification.footer = lambda: ' footer'
         notification.send_direct(c.user._id)
         sendmail.post.assert_called_once_with(
-                destinations=[str(c.user._id)],
-                fromaddr='from_address',
-                reply_to='reply_to_address',
-                subject='subject',
-                message_id='_id',
-                in_reply_to='in_reply_to',
-                references=['a'],
-                sender='wiki@test.p.in.sf.net',
-                text='text footer',
-            )
+            destinations=[str(c.user._id)],
+            fromaddr='from_address',
+            reply_to='reply_to_address',
+            subject='subject',
+            message_id='_id',
+            in_reply_to='in_reply_to',
+            references=['a'],
+            sender='wiki@test.p.in.sf.net',
+            text='text footer',
+        )
 
     @mock.patch('allura.tasks.mail_tasks.sendmail')
     def test_send_direct_no_access(self, sendmail):
@@ -106,14 +107,14 @@ class TestNotification(unittest.TestCase):
         ThreadLocalORMSession.flush_all()
         ThreadLocalORMSession.close_all()
         notification = M.Notification(
-                _id='_id',
-                ref=page.ref,
-                from_address='from_address',
-                reply_to_address='reply_to_address',
-                in_reply_to='in_reply_to',
-                subject='subject',
-                text='text',
-            )
+            _id='_id',
+            ref=page.ref,
+            from_address='from_address',
+            reply_to_address='reply_to_address',
+            in_reply_to='in_reply_to',
+            subject='subject',
+            text='text',
+        )
         notification.footer = lambda: ' footer'
         notification.send_direct(c.user._id)
         assert_equal(sendmail.post.call_count, 0)
@@ -136,29 +137,30 @@ class TestNotification(unittest.TestCase):
         wiki = project1.app_instance('wiki')
         page = WM.Page.query.get(app_config_id=wiki.config._id)
         notification = M.Notification(
-                _id='_id',
-                ref=page.ref,
-                from_address='from_address',
-                reply_to_address='reply_to_address',
-                in_reply_to='in_reply_to',
-                references=['a'],
-                subject='subject',
-                text='text',
-            )
+            _id='_id',
+            ref=page.ref,
+            from_address='from_address',
+            reply_to_address='reply_to_address',
+            in_reply_to='in_reply_to',
+            references=['a'],
+            subject='subject',
+            text='text',
+        )
         notification.footer = lambda: ' footer'
         c.project = project2
         notification.send_direct(c.user._id)
         sendmail.post.assert_called_once_with(
-                destinations=[str(c.user._id)],
-                fromaddr='from_address',
-                reply_to='reply_to_address',
-                subject='subject',
-                message_id='_id',
-                in_reply_to='in_reply_to',
-                references=['a'],
-                sender='wiki@test.p.in.sf.net',
-                text='text footer',
-            )
+            destinations=[str(c.user._id)],
+            fromaddr='from_address',
+            reply_to='reply_to_address',
+            subject='subject',
+            message_id='_id',
+            in_reply_to='in_reply_to',
+            references=['a'],
+            sender='wiki@test.p.in.sf.net',
+            text='text footer',
+        )
+
 
 class TestPostNotifications(unittest.TestCase):
 
@@ -175,7 +177,7 @@ class TestPostNotifications(unittest.TestCase):
         ThreadLocalORMSession.flush_all()
         ThreadLocalORMSession.close_all()
         self.pg = WM.Page.query.get(app_config_id=c.app.config._id)
-        M.notification.MAILBOX_QUIESCENT=None # disable message combining
+        M.notification.MAILBOX_QUIESCENT = None  # disable message combining
         while M.MonQTask.run_ready('setup'):
             ThreadLocalORMSession.flush_all()
 
@@ -206,7 +208,7 @@ class TestPostNotifications(unittest.TestCase):
         ThreadLocalORMSession.flush_all()
         M.MonQTask.run_ready()
         ThreadLocalORMSession.flush_all()
-        assert M.Mailbox.query.find().count()==1
+        assert M.Mailbox.query.find().count() == 1
         mbox = M.Mailbox.query.get()
         assert len(mbox.queue) == 1
         assert not mbox.queue_empty
@@ -218,10 +220,13 @@ class TestPostNotifications(unittest.TestCase):
         self._post_notification()
         ThreadLocalORMSession.flush_all()
 
-        assert_equal(M.Notification.query.get()['from_address'], '"Test Admin" <te...@users.localhost>')
+        assert_equal(M.Notification.query.get()
+                     ['from_address'], '"Test Admin" <te...@users.localhost>')
         assert_equal(M.Mailbox.query.find().count(), 2)
 
-        M.MonQTask.run_ready()  # sends the notification out into "mailboxes", and from mailboxes into email tasks
+        # sends the notification out into "mailboxes", and from mailboxes into
+        # email tasks
+        M.MonQTask.run_ready()
         mboxes = M.Mailbox.query.find().all()
         assert_equal(len(mboxes), 2)
         assert_equal(len(mboxes[0].queue), 1)
@@ -230,16 +235,20 @@ class TestPostNotifications(unittest.TestCase):
         assert not mboxes[1].queue_empty
 
         email_tasks = M.MonQTask.query.find({'state': 'ready'}).all()
-        assert_equal(len(email_tasks), 2)  # make sure both subscribers will get an email
+        # make sure both subscribers will get an email
+        assert_equal(len(email_tasks), 2)
 
         first_destinations = [e.kwargs['destinations'][0] for e in email_tasks]
         assert_in(str(c.user._id), first_destinations)
         assert_in(str(user2._id), first_destinations)
-        assert_equal(email_tasks[0].kwargs['fromaddr'], '"Test Admin" <te...@users.localhost>')
-        assert_equal(email_tasks[1].kwargs['fromaddr'], '"Test Admin" <te...@users.localhost>')
+        assert_equal(email_tasks[0].kwargs['fromaddr'],
+                     '"Test Admin" <te...@users.localhost>')
+        assert_equal(email_tasks[1].kwargs['fromaddr'],
+                     '"Test Admin" <te...@users.localhost>')
         assert_equal(email_tasks[0].kwargs['sender'], 'wiki@test.p.in.sf.net')
         assert_equal(email_tasks[1].kwargs['sender'], 'wiki@test.p.in.sf.net')
-        assert email_tasks[0].kwargs['text'].startswith('Home modified by Test Admin')
+        assert email_tasks[0].kwargs['text'].startswith(
+            'Home modified by Test Admin')
         assert 'you indicated interest in ' in email_tasks[0].kwargs['text']
 
     def test_permissions(self):
@@ -249,6 +258,7 @@ class TestPostNotifications(unittest.TestCase):
         u = M.User.query.get(username='test-admin')
         self._subscribe(user=u)
         # Simulate a permission check failure.
+
         def patched_has_access(*args, **kw):
             def predicate(*args, **kw):
                 return False
@@ -272,15 +282,14 @@ class TestPostNotifications(unittest.TestCase):
 
     def test_footer(self):
         footer = MailFooter.monitored(
-                'test@mail.com',
-                'http://test1.com',
-                'http://test2.com')
+            'test@mail.com',
+            'http://test1.com',
+            'http://test2.com')
         assert 'test@mail.com is subscribed to http://test1.com' in footer
         assert 'admin can change settings at http://test2.com' in footer
         footer = MailFooter.standard(M.Notification())
         assert 'Sent from sourceforge.net because you indicated interest in' in footer
 
-
     def _subscribe(self, **kw):
         self.pg.subscribe(type='direct', **kw)
         ThreadLocalORMSession.flush_all()
@@ -289,6 +298,7 @@ class TestPostNotifications(unittest.TestCase):
     def _post_notification(self):
         return M.Notification.post(self.pg, 'metadata')
 
+
 class TestSubscriptionTypes(unittest.TestCase):
 
     def setUp(self):
@@ -304,7 +314,7 @@ class TestSubscriptionTypes(unittest.TestCase):
         ThreadLocalORMSession.flush_all()
         ThreadLocalORMSession.close_all()
         self.pg = WM.Page.query.get(app_config_id=c.app.config._id)
-        M.notification.MAILBOX_QUIESCENT=None # disable message combining
+        M.notification.MAILBOX_QUIESCENT = None  # disable message combining
 
     def test_direct_sub(self):
         self._subscribe()
@@ -316,13 +326,13 @@ class TestSubscriptionTypes(unittest.TestCase):
 
     def test_digest_sub(self):
         self._subscribe(type='digest')
-        self._post_notification(text='x'*1024)
+        self._post_notification(text='x' * 1024)
         self._post_notification()
         M.Mailbox.fire_ready()
 
     def test_summary_sub(self):
         self._subscribe(type='summary')
-        self._post_notification(text='x'*1024)
+        self._post_notification(text='x' * 1024)
         self._post_notification()
         M.Mailbox.fire_ready()
 
@@ -333,8 +343,9 @@ class TestSubscriptionTypes(unittest.TestCase):
         self._test_message()
 
         self.setUp()
-        M.notification.MAILBOX_QUIESCENT=timedelta(minutes=1)
-        # will raise "assert msg is not None" since the new message is not 1 min old:
+        M.notification.MAILBOX_QUIESCENT = timedelta(minutes=1)
+        # will raise "assert msg is not None" since the new message is not 1
+        # min old:
         self.assertRaises(AssertionError, self._test_message)
 
     def _test_message(self):
@@ -347,8 +358,8 @@ class TestSubscriptionTypes(unittest.TestCase):
         ThreadLocalORMSession.flush_all()
         ThreadLocalORMSession.close_all()
         msg = M.MonQTask.query.get(
-                task_name='allura.tasks.mail_tasks.sendmail',
-                state='ready')
+            task_name='allura.tasks.mail_tasks.sendmail',
+            state='ready')
         assert msg is not None
         assert 'Home@wiki.test.p' in msg.kwargs['reply_to']
         u = M.User.by_username('test-admin')
@@ -371,6 +382,7 @@ class TestSubscriptionTypes(unittest.TestCase):
     @mock.patch('allura.model.notification.Notification')
     def test_direct_accumulation(self, mocked_notification, mocked_defaultdict):
         class OrderedDefaultDict(collections.OrderedDict):
+
             def __init__(self, factory=list, *a, **kw):
                 self._factory = factory
                 super(OrderedDefaultDict, self).__init__(*a, **kw)
@@ -383,24 +395,35 @@ class TestSubscriptionTypes(unittest.TestCase):
                 return value
 
         notifications = mocked_notification.query.find.return_value.all.return_value = [
-                mock.Mock(_id='n0', topic='metadata', subject='s1', from_address='f1', reply_to_address='rt1', author_id='a1'),
-                mock.Mock(_id='n1', topic='metadata', subject='s2', from_address='f2', reply_to_address='rt2', author_id='a2'),
-                mock.Mock(_id='n2', topic='metadata', subject='s2', from_address='f2', reply_to_address='rt2', author_id='a2'),
-                mock.Mock(_id='n3', topic='message', subject='s3', from_address='f3', reply_to_address='rt3', author_id='a3'),
-                mock.Mock(_id='n4', topic='message', subject='s3', from_address='f3', reply_to_address='rt3', author_id='a3'),
-            ]
+            mock.Mock(_id='n0', topic='metadata', subject='s1',
+                      from_address='f1', reply_to_address='rt1', author_id='a1'),
+            mock.Mock(_id='n1', topic='metadata', subject='s2',
+                      from_address='f2', reply_to_address='rt2', author_id='a2'),
+            mock.Mock(_id='n2', topic='metadata', subject='s2',
+                      from_address='f2', reply_to_address='rt2', author_id='a2'),
+            mock.Mock(_id='n3', topic='message', subject='s3',
+                      from_address='f3', reply_to_address='rt3', author_id='a3'),
+            mock.Mock(_id='n4', topic='message', subject='s3',
+                      from_address='f3', reply_to_address='rt3', author_id='a3'),
+        ]
         mocked_defaultdict.side_effect = OrderedDefaultDict
 
         u0 = bson.ObjectId()
-        mbox = M.Mailbox(type='direct', user_id=u0, queue=['n0', 'n1', 'n2', 'n3', 'n4'])
+        mbox = M.Mailbox(type='direct', user_id=u0,
+                         queue=['n0', 'n1', 'n2', 'n3', 'n4'])
         mbox.fire('now')
 
-        mocked_notification.query.find.assert_called_once_with({'_id': {'$in': ['n0', 'n1', 'n2', 'n3', 'n4']}})
-        # first notification should be sent direct, as its key values are unique
+        mocked_notification.query.find.assert_called_once_with(
+            {'_id': {'$in': ['n0', 'n1', 'n2', 'n3', 'n4']}})
+        # first notification should be sent direct, as its key values are
+        # unique
         notifications[0].send_direct.assert_called_once_with(u0)
-        # next two notifications should be sent as a digest as they have matching key values
-        mocked_notification.send_digest.assert_called_once_with(u0, 'f2', 's2', [notifications[1], notifications[2]], 'rt2')
-        # final two should be sent direct even though they matching keys, as they are messages
+        # next two notifications should be sent as a digest as they have
+        # matching key values
+        mocked_notification.send_digest.assert_called_once_with(
+            u0, 'f2', 's2', [notifications[1], notifications[2]], 'rt2')
+        # final two should be sent direct even though they matching keys, as
+        # they are messages
         notifications[3].send_direct.assert_called_once_with(u0)
         notifications[4].send_direct.assert_called_once_with(u0)
 
@@ -433,21 +456,25 @@ class TestSubscriptionTypes(unittest.TestCase):
         user = M.User.by_username('test-admin')
         user.disabled = True
         ThreadLocalORMSession.flush_all()
-        M.Notification.send_digest(user._id, 'test@mail.com', 'subject', [notification])
+        M.Notification.send_digest(
+            user._id, 'test@mail.com', 'subject', [notification])
         count = M.MonQTask.query.find(dict(
             task_name='allura.tasks.mail_tasks.sendmail',
             state='ready')).count()
         assert_equal(count, 0)
         user.disabled = False
         ThreadLocalORMSession.flush_all()
-        M.Notification.send_digest(user._id, 'test@mail.com', 'subject', [notification])
+        M.Notification.send_digest(
+            user._id, 'test@mail.com', 'subject', [notification])
         count = M.MonQTask.query.find(dict(
             task_name='allura.tasks.mail_tasks.sendmail',
             state='ready')).count()
         assert_equal(count, 1)
 
+
 def _clear_subscriptions():
         M.Mailbox.query.remove({})
 
+
 def _clear_notifications():
         M.Notification.query.remove({})

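The Mailbox.fire() test above pivots on how notifications are grouped: 'metadata' notifications sharing the same (from_address, subject, reply_to_address) key are bundled into one digest, 'message' notifications always go out directly, and a group of one collapses back to a direct send. A minimal sketch of that grouping (illustrative names only, not the actual Mailbox code):

    from collections import OrderedDict

    def group_for_digest(notifications):
        # 'message' notifications are always sent directly
        direct, digests = [], OrderedDict()
        for n in notifications:
            if n.topic == 'message':
                direct.append(n)
            else:
                key = (n.from_address, n.subject, n.reply_to_address)
                digests.setdefault(key, []).append(n)
        # groups of one collapse back to direct sends
        for key, group in list(digests.items()):
            if len(group) == 1:
                direct.append(group[0])
                del digests[key]
        return direct, digests

Fed the five mocks from the test, this leaves n0, n3 and n4 in direct and one digest group under ('f2', 's2', 'rt2') holding n1 and n2, which matches the send_direct/send_digest assertions above.
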
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_openid.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_openid.py b/Allura/allura/tests/model/test_openid.py
index 48abe62..056f030 100644
--- a/Allura/allura/tests/model/test_openid.py
+++ b/Allura/allura/tests/model/test_openid.py
@@ -34,6 +34,7 @@ from allura.lib.app_globals import Globals
 from allura import model as M
 from allura.lib import helpers as h
 
+
 def setUp():
     g._push_object(Globals())
     c._push_object(mock.Mock())
@@ -44,14 +45,15 @@ def setUp():
     M.OpenIdAssociation.query.remove({})
     #conn = M.main_doc_session.bind.conn
 
+
 def test_oid_model():
     oid = M.OpenIdAssociation(_id='http://example.com')
     assoc = mock.Mock()
     assoc.handle = 'foo'
-    assoc.serialize = lambda:'bar'
-    assoc.getExpiresIn = lambda:0
+    assoc.serialize = lambda: 'bar'
+    assoc.getExpiresIn = lambda: 0
     with h.push_config(Association,
-                       deserialize=staticmethod(lambda v:assoc)):
+                       deserialize=staticmethod(lambda v: assoc)):
         oid.set_assoc(assoc)
         assert assoc == oid.get_assoc('foo')
         oid.set_assoc(assoc)
@@ -62,14 +64,15 @@ def test_oid_model():
         oid.cleanup_assocs()
         assert oid.get_assoc('foo') is None
 
+
 def test_oid_store():
     assoc = mock.Mock()
     assoc.handle = 'foo'
-    assoc.serialize = lambda:'bar'
-    assoc.getExpiresIn = lambda:0
+    assoc.serialize = lambda: 'bar'
+    assoc.getExpiresIn = lambda: 0
     store = M.OpenIdStore()
     with h.push_config(Association,
-                       deserialize=staticmethod(lambda v:assoc)):
+                       deserialize=staticmethod(lambda v: assoc)):
         store.storeAssociation('http://example.com', assoc)
         assert assoc == store.getAssociation('http://example.com', 'foo')
         assert assoc == store.getAssociation('http://example.com')
@@ -78,6 +81,6 @@ def test_oid_store():
         assert store.useNonce('http://www.example.com', t0, 'abcd')
         ThreadLocalORMSession.flush_all()
         assert not store.useNonce('http://www.example.com', t0, 'abcd')
-        assert not store.useNonce('http://www.example.com', t0-1e9, 'abcd')
+        assert not store.useNonce('http://www.example.com', t0 - 1e9, 'abcd')
         assert store.getAssociation('http://example.com') is None
         store.cleanupNonces()

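Both OpenID tests lean on h.push_config to swap Association.deserialize for a stub inside the with-block and put the original back afterwards. A rough sketch of that style of attribute-override context manager, assuming only the restore-on-exit behaviour the tests rely on (an illustration, not Allura's implementation):

    from contextlib import contextmanager

    @contextmanager
    def push_config(obj, **overrides):
        # temporarily set attributes on obj, restoring the originals on exit
        missing = object()
        saved = dict((k, getattr(obj, k, missing)) for k in overrides)
        for k, v in overrides.items():
            setattr(obj, k, v)
        try:
            yield obj
        finally:
            for k, old in saved.items():
                if old is missing:
                    delattr(obj, k)
                else:
                    setattr(obj, k, old)
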
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_project.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_project.py b/Allura/allura/tests/model/test_project.py
index 1005ba8..855432a 100644
--- a/Allura/allura/tests/model/test_project.py
+++ b/Allura/allura/tests/model/test_project.py
@@ -36,10 +36,12 @@ def setUp():
     setup_basic_test()
     setup_with_tools()
 
+
 @td.with_wiki
 def setup_with_tools():
     setup_global_objects()
 
+
 def test_project():
     assert_equals(type(c.project.sidebar_menu()), list)
     assert_in(c.project.script_name, c.project.url())
@@ -51,8 +53,10 @@ def test_project():
     assert_in(old_proj, list(c.project.parent_iter()))
     h.set_context('test', 'wiki', neighborhood='Projects')
     adobe_nbhd = M.Neighborhood.query.get(name='Adobe')
-    p = M.Project.query.get(shortname='adobe-1', neighborhood_id=adobe_nbhd._id)
-    # assert 'http' in p.url() # We moved adobe into /adobe/, not http://adobe....
+    p = M.Project.query.get(
+        shortname='adobe-1', neighborhood_id=adobe_nbhd._id)
+    # assert 'http' in p.url() # We moved adobe into /adobe/, not
+    # http://adobe....
     assert_in(p.script_name, p.url())
     assert_equals(c.project.shortname, 'test')
     assert_in('<p>', c.project.description_html)
@@ -90,11 +94,13 @@ def test_project():
     c.project.breadcrumbs()
     c.app.config.breadcrumbs()
 
+
 def test_subproject():
     project = M.Project.query.get(shortname='test')
     with td.raises(ToolError):
         with patch('allura.lib.plugin.ProjectRegistrationProvider') as Provider:
-            Provider.get().shortname_validator.to_python.side_effect = Invalid('name', 'value', {})
+            Provider.get().shortname_validator.to_python.side_effect = Invalid(
+                'name', 'value', {})
             # name doesn't validate
             sp = project.new_subproject('test-proj-nose')
     sp = project.new_subproject('test-proj-nose')
@@ -103,6 +109,7 @@ def test_subproject():
     sp.delete()
     ThreadLocalORMSession.flush_all()
 
+
 @td.with_wiki
 def test_anchored_tools():
     c.project.neighborhood.anchored_tools = 'wiki:Wiki, tickets:Ticket'
@@ -119,6 +126,7 @@ def test_set_ordinal_to_admin_tool():
         sm = c.project.sitemap()
         assert_equals(sm[-1].tool_name, 'admin')
 
+
 def test_users_and_roles():
     p = M.Project.query.get(shortname='test')
     sub = p.direct_subprojects[0]
@@ -127,6 +135,7 @@ def test_users_and_roles():
     assert p.users_with_role('Admin') == sub.users_with_role('Admin')
     assert p.users_with_role('Admin') == p.admins()
 
+
 def test_project_disabled_users():
     p = M.Project.query.get(shortname='test')
     users = p.users()
@@ -136,5 +145,3 @@ def test_project_disabled_users():
     ThreadLocalORMSession.flush_all()
     users = p.users()
     assert users == []
-
-

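Most hunks in this commit only reflow continuation lines into the two layouts PEP 8 accepts: arguments aligned with the opening delimiter, or a break right after the opening parenthesis followed by a four-space hanging indent. A made-up example of both forms (hypothetical function, not from the codebase):

    def combine(first, second, third):
        return first + second + third

    # continuation aligned with the opening delimiter
    total = combine(1, 2,
                    3)

    # or break after the parenthesis and use a hanging indent
    total = combine(
        1, 2, 3)
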
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/model/test_repo.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/model/test_repo.py b/Allura/allura/tests/model/test_repo.py
index f0bf936..24ab25d 100644
--- a/Allura/allura/tests/model/test_repo.py
+++ b/Allura/allura/tests/model/test_repo.py
@@ -30,6 +30,7 @@ from alluratest.controller import setup_basic_test, setup_global_objects
 from allura import model as M
 from allura.lib import helpers as h
 
+
 class TestGitLikeTree(object):
 
     def test_set_blob(self):
@@ -38,7 +39,8 @@ class TestGitLikeTree(object):
 
         assert_equal(tree.blobs, {})
         assert_equal(tree.get_tree('dir').blobs, {})
-        assert_equal(tree.get_tree('dir').get_tree('dir2').blobs, {'file': 'file-oid'})
+        assert_equal(tree.get_tree('dir').get_tree('dir2')
+                     .blobs, {'file': 'file-oid'})
 
     def test_hex(self):
         tree = M.GitLikeTree()
@@ -46,8 +48,10 @@ class TestGitLikeTree(object):
         hex = tree.hex()
 
         # check the reprs. In case hex (below) fails, this'll be useful
-        assert_equal(repr(tree.get_tree('dir').get_tree('dir2')), 'b file-oid file')
-        assert_equal(repr(tree), 't 96af1772ecce1e6044e6925e595d9373ffcd2615 dir')
+        assert_equal(repr(tree.get_tree('dir').get_tree('dir2')),
+                     'b file-oid file')
+        assert_equal(repr(tree),
+                     't 96af1772ecce1e6044e6925e595d9373ffcd2615 dir')
         # the hex() value shouldn't change, it's an important key
         assert_equal(hex, '4abba29a43411b9b7cecc1a74f0b27920554350d')
 
@@ -96,6 +100,7 @@ class RepoImplTestBase(object):
 
 
 class RepoTestBase(unittest.TestCase):
+
     def setUp(self):
         setup_basic_test()
 
@@ -126,10 +131,12 @@ class RepoTestBase(unittest.TestCase):
 
 
 class TestLastCommit(unittest.TestCase):
+
     def setUp(self):
         setup_basic_test()
         setup_global_objects()
-        self.repo = mock.Mock('repo', _commits=OrderedDict(), _last_commit=None)
+        self.repo = mock.Mock(
+            'repo', _commits=OrderedDict(), _last_commit=None)
         self.repo.shorthand_for_commit = lambda _id: _id[:6]
         self.repo.rev_to_commit_id = lambda rev: rev
         self.repo.log = self._log
@@ -145,44 +152,47 @@ class TestLastCommit(unittest.TestCase):
         tree_nodes = []
         blob_nodes = []
         sub_paths = defaultdict(list)
+
         def n(p):
             m = mock.Mock()
             m.name = p
             return m
         for p in tree_paths:
             if '/' in p:
-                node, sub = p.split('/',1)
+                node, sub = p.split('/', 1)
                 if node not in sub_paths:
                     tree_nodes.append(n(node))
                 sub_paths[node].append(sub)
             else:
                 blob_nodes.append(n(p))
         tree = mock.Mock(
-                commit=commit,
-                path=mock.Mock(return_value=path),
-                tree_ids=tree_nodes,
-                blob_ids=blob_nodes,
-                other_ids=[],
-                repo=self.repo,
-            )
-        tree.get_obj_by_path = lambda p: self._build_tree(commit, p, sub_paths[p])
-        tree.__getitem__ = lambda s, p: self._build_tree(commit, p, sub_paths[p])
+            commit=commit,
+            path=mock.Mock(return_value=path),
+            tree_ids=tree_nodes,
+            blob_ids=blob_nodes,
+            other_ids=[],
+            repo=self.repo,
+        )
+        tree.get_obj_by_path = lambda p: self._build_tree(
+            commit, p, sub_paths[p])
+        tree.__getitem__ = lambda s, p: self._build_tree(
+            commit, p, sub_paths[p])
         return tree
 
     def _add_commit(self, msg, tree_paths, diff_paths=None, parents=[]):
         suser = dict(
-                name='test',
-                email='test@example.com',
-                date=datetime(2013, 1, 1 + len(self.repo._commits)),
-            )
+            name='test',
+            email='test@example.com',
+            date=datetime(2013, 1, 1 + len(self.repo._commits)),
+        )
         commit = M.repo.Commit(
-                _id=str(ObjectId()),
-                message=msg,
-                parent_ids=[parent._id for parent in parents],
-                commited=suser,
-                authored=suser,
-                repo=self.repo,
-            )
+            _id=str(ObjectId()),
+            message=msg,
+            parent_ids=[parent._id for parent in parents],
+            commited=suser,
+            authored=suser,
+            repo=self.repo,
+        )
         commit.tree = self._build_tree(commit, '/', tree_paths)
         commit.get_tree = lambda c: commit.tree
         self._changes[commit._id].extend(diff_paths or tree_paths)
@@ -197,11 +207,12 @@ class TestLastCommit(unittest.TestCase):
 
     def test_single_commit(self):
         commit1 = self._add_commit('Commit 1', [
-                'file1',
-                'dir1/file2',
-            ])
+            'file1',
+            'dir1/file2',
+        ])
         lcd = M.repo.LastCommit.get(commit1.tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit1.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit1.message)
         self.assertEqual(lcd.path, '')
         self.assertEqual(len(lcd.entries), 2)
         self.assertEqual(lcd.by_name['file1'], commit1._id)
@@ -209,10 +220,13 @@ class TestLastCommit(unittest.TestCase):
 
     def test_multiple_commits_no_overlap(self):
         commit1 = self._add_commit('Commit 1', ['file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'dir1/file1'], ['dir1/file1'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'dir1/file1', 'file2'], ['file2'], [commit2])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'dir1/file1'], ['dir1/file1'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'dir1/file1', 'file2'], ['file2'], [commit2])
         lcd = M.repo.LastCommit.get(commit3.tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit3.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit3.message)
         self.assertEqual(lcd.commit_id, commit3._id)
         self.assertEqual(lcd.path, '')
         self.assertEqual(len(lcd.entries), 3)
@@ -222,10 +236,13 @@ class TestLastCommit(unittest.TestCase):
 
     def test_multiple_commits_with_overlap(self):
         commit1 = self._add_commit('Commit 1', ['file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'dir1/file1'], ['dir1/file1'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'dir1/file1', 'file2'], ['file1', 'file2'], [commit2])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'dir1/file1'], ['dir1/file1'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'dir1/file1', 'file2'], ['file1', 'file2'], [commit2])
         lcd = M.repo.LastCommit.get(commit3.tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit3.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit3.message)
         self.assertEqual(lcd.path, '')
         self.assertEqual(len(lcd.entries), 3)
         self.assertEqual(lcd.by_name['file1'], commit3._id)
@@ -234,10 +251,13 @@ class TestLastCommit(unittest.TestCase):
 
     def test_multiple_commits_subdir_change(self):
         commit1 = self._add_commit('Commit 1', ['file1', 'dir1/file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file1'], [commit2])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file1'], [commit2])
         lcd = M.repo.LastCommit.get(commit3.tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit3.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit3.message)
         self.assertEqual(lcd.path, '')
         self.assertEqual(len(lcd.entries), 2)
         self.assertEqual(lcd.by_name['file1'], commit1._id)
@@ -245,11 +265,14 @@ class TestLastCommit(unittest.TestCase):
 
     def test_subdir_lcd(self):
         commit1 = self._add_commit('Commit 1', ['file1', 'dir1/file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file1'], [commit2])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file1'], [commit2])
         tree = self._build_tree(commit3, '/dir1', ['file1', 'file2'])
         lcd = M.repo.LastCommit.get(tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit3.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit3.message)
         self.assertEqual(lcd.path, 'dir1')
         self.assertEqual(len(lcd.entries), 2)
         self.assertEqual(lcd.by_name['file1'], commit3._id)
@@ -257,12 +280,16 @@ class TestLastCommit(unittest.TestCase):
 
     def test_subdir_lcd_prev_commit(self):
         commit1 = self._add_commit('Commit 1', ['file1', 'dir1/file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file1'], [commit2])
-        commit4 = self._add_commit('Commit 4', ['file1', 'dir1/file1', 'dir1/file2', 'file2'], ['file2'], [commit3])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file1'], [commit2])
+        commit4 = self._add_commit(
+            'Commit 4', ['file1', 'dir1/file1', 'dir1/file2', 'file2'], ['file2'], [commit3])
         tree = self._build_tree(commit4, '/dir1', ['file1', 'file2'])
         lcd = M.repo.LastCommit.get(tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit3.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit3.message)
         self.assertEqual(lcd.path, 'dir1')
         self.assertEqual(len(lcd.entries), 2)
         self.assertEqual(lcd.by_name['file1'], commit3._id)
@@ -270,69 +297,80 @@ class TestLastCommit(unittest.TestCase):
 
     def test_subdir_lcd_always_empty(self):
         commit1 = self._add_commit('Commit 1', ['file1', 'dir1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'file2'], ['file2'], [commit1])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'file2'], ['file2'], [commit1])
         tree = self._build_tree(commit2, '/dir1', [])
         lcd = M.repo.LastCommit.get(tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit1.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit1.message)
         self.assertEqual(lcd.path, 'dir1')
         self.assertEqual(lcd.entries, [])
 
     def test_subdir_lcd_emptied(self):
         commit1 = self._add_commit('Commit 1', ['file1', 'dir1/file1'])
-        commit2 = self._add_commit('Commit 2', ['file1'], ['dir1/file1'], [commit1])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1'], ['dir1/file1'], [commit1])
         tree = self._build_tree(commit2, '/dir1', [])
         lcd = M.repo.LastCommit.get(tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit2.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit2.message)
         self.assertEqual(lcd.path, 'dir1')
         self.assertEqual(lcd.entries, [])
 
     def test_existing_lcd_unchained(self):
         commit1 = self._add_commit('Commit 1', ['file1', 'dir1/file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['file1'], [commit2])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'dir1/file1', 'dir1/file2'], ['dir1/file2'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'dir1/file1', 'dir1/file2'], ['file1'], [commit2])
         prev_lcd = M.repo.LastCommit(
-                path='dir1',
-                commit_id=commit2._id,
-                entries=[
-                    dict(
-                        name='file1',
-                        commit_id=commit1._id),
-                    dict(
-                        name='file2',
-                        commit_id=commit2._id),
-                ],
-            )
+            path='dir1',
+            commit_id=commit2._id,
+            entries=[
+                dict(
+                    name='file1',
+                    commit_id=commit1._id),
+                dict(
+                    name='file2',
+                    commit_id=commit2._id),
+            ],
+        )
         session(prev_lcd).flush()
         tree = self._build_tree(commit3, '/dir1', ['file1', 'file2'])
         lcd = M.repo.LastCommit.get(tree)
         self.assertEqual(lcd._id, prev_lcd._id)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit2.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit2.message)
         self.assertEqual(lcd.path, 'dir1')
         self.assertEqual(lcd.entries, prev_lcd.entries)
 
     def test_existing_lcd_partial(self):
         commit1 = self._add_commit('Commit 1', ['file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'file2'], ['file2'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'file2', 'file3'], ['file3'], [commit2])
-        commit4 = self._add_commit('Commit 4', ['file1', 'file2', 'file3', 'file4'], ['file2', 'file4'], [commit3])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'file2'], ['file2'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'file2', 'file3'], ['file3'], [commit2])
+        commit4 = self._add_commit(
+            'Commit 4', ['file1', 'file2', 'file3', 'file4'], ['file2', 'file4'], [commit3])
         prev_lcd = M.repo.LastCommit(
-                path='',
-                commit_id=commit3._id,
-                entries=[
-                    dict(
-                        name='file1',
-                        commit_id=commit1._id),
-                    dict(
-                        name='file2',
-                        commit_id=commit2._id),
-                    dict(
-                        name='file3',
-                        commit_id=commit3._id),
-                ],
-            )
+            path='',
+            commit_id=commit3._id,
+            entries=[
+                dict(
+                    name='file1',
+                    commit_id=commit1._id),
+                dict(
+                    name='file2',
+                    commit_id=commit2._id),
+                dict(
+                    name='file3',
+                    commit_id=commit3._id),
+            ],
+        )
         session(prev_lcd).flush()
         lcd = M.repo.LastCommit.get(commit4.tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit4.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit4.message)
         self.assertEqual(lcd.path, '')
         self.assertEqual(len(lcd.entries), 4)
         self.assertEqual(lcd.by_name['file1'], commit1._id)
@@ -355,11 +393,14 @@ class TestLastCommit(unittest.TestCase):
 
     def test_timeout(self):
         commit1 = self._add_commit('Commit 1', ['file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'dir1/file1'], ['dir1/file1'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'dir1/file1', 'file2'], ['file2'], [commit2])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'dir1/file1'], ['dir1/file1'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'dir1/file1', 'file2'], ['file2'], [commit2])
         with h.push_config(config, lcd_timeout=-1000):
             lcd = M.repo.LastCommit.get(commit3.tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit3.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit3.message)
         self.assertEqual(lcd.commit_id, commit3._id)
         self.assertEqual(lcd.path, '')
         self.assertEqual(len(lcd.entries), 1)
@@ -367,12 +408,15 @@ class TestLastCommit(unittest.TestCase):
 
     def test_loop(self):
         commit1 = self._add_commit('Commit 1', ['file1'])
-        commit2 = self._add_commit('Commit 2', ['file1', 'dir1/file1'], ['dir1/file1'], [commit1])
-        commit3 = self._add_commit('Commit 3', ['file1', 'dir1/file1', 'file2'], ['file2'], [commit2])
+        commit2 = self._add_commit(
+            'Commit 2', ['file1', 'dir1/file1'], ['dir1/file1'], [commit1])
+        commit3 = self._add_commit(
+            'Commit 3', ['file1', 'dir1/file1', 'file2'], ['file2'], [commit2])
         commit2.parent_ids = [commit3._id]
         session(commit2).flush(commit2)
         lcd = M.repo.LastCommit.get(commit3.tree)
-        self.assertEqual(self.repo._commits[lcd.commit_id].message, commit3.message)
+        self.assertEqual(
+            self.repo._commits[lcd.commit_id].message, commit3.message)
         self.assertEqual(lcd.commit_id, commit3._id)
         self.assertEqual(lcd.path, '')
         self.assertEqual(len(lcd.entries), 3)
@@ -381,11 +425,13 @@ class TestLastCommit(unittest.TestCase):
 
 
 class TestModelCache(unittest.TestCase):
+
     def setUp(self):
         self.cache = M.repo.ModelCache()
 
     def test_normalize_query(self):
-        self.assertEqual(self.cache._normalize_query({'foo': 1, 'bar': 2}), (('bar', 2), ('foo', 1)))
+        self.assertEqual(self.cache._normalize_query(
+            {'foo': 1, 'bar': 2}), (('bar', 2), ('foo', 1)))
 
     def test_model_query(self):
         q = mock.Mock(spec_set=['query'], query='foo')
@@ -398,8 +444,10 @@ class TestModelCache(unittest.TestCase):
     @mock.patch.object(M.repo.Tree.query, 'get')
     @mock.patch.object(M.repo.LastCommit.query, 'get')
     def test_get(self, lc_get, tr_get):
-        tree = tr_get.return_value = mock.Mock(spec=['_id', 'val'], _id='foo', val='bar')
-        lcd = lc_get.return_value = mock.Mock(spec=['_id', 'val'], _id='foo', val='qux')
+        tree = tr_get.return_value = mock.Mock(
+            spec=['_id', 'val'], _id='foo', val='bar')
+        lcd = lc_get.return_value = mock.Mock(
+            spec=['_id', 'val'], _id='foo', val='qux')
 
         val = self.cache.get(M.repo.Tree, {'_id': 'foo'})
         tr_get.assert_called_with(_id='foo')
@@ -411,7 +459,8 @@ class TestModelCache(unittest.TestCase):
 
     @mock.patch.object(M.repo.Tree.query, 'get')
     def test_get_no_query(self, tr_get):
-        tree1 = tr_get.return_value = mock.Mock(spec=['_id', 'val'], _id='foo', val='bar')
+        tree1 = tr_get.return_value = mock.Mock(
+            spec=['_id', 'val'], _id='foo', val='bar')
         val = self.cache.get(M.repo.Tree, {'_id': 'foo'})
         tr_get.assert_called_once_with(_id='foo')
         self.assertEqual(val, tree1)
@@ -423,7 +472,8 @@ class TestModelCache(unittest.TestCase):
 
     @mock.patch.object(M.repo.TreesDoc.m, 'get')
     def test_get_doc(self, tr_get):
-        trees = tr_get.return_value = mock.Mock(spec=['_id', 'val'], _id='foo', val='bar')
+        trees = tr_get.return_value = mock.Mock(
+            spec=['_id', 'val'], _id='foo', val='bar')
         val = self.cache.get(M.repo.TreesDoc, {'_id': 'foo'})
         tr_get.assert_called_once_with(_id='foo')
         self.assertEqual(val, trees)
@@ -431,8 +481,10 @@ class TestModelCache(unittest.TestCase):
     def test_set(self):
         tree = mock.Mock(spec=['_id', 'test_set'], _id='foo', val='test_set')
         self.cache.set(M.repo.Tree, {'val': 'test_set'}, tree)
-        self.assertEqual(self.cache._query_cache, {M.repo.Tree: {(('val', 'test_set'),): 'foo'}})
-        self.assertEqual(self.cache._instance_cache, {M.repo.Tree: {'foo': tree}})
+        self.assertEqual(self.cache._query_cache,
+                         {M.repo.Tree: {(('val', 'test_set'),): 'foo'}})
+        self.assertEqual(self.cache._instance_cache,
+                         {M.repo.Tree: {'foo': tree}})
 
     @mock.patch('bson.ObjectId')
     def test_set_none_id(self, obj_id):
@@ -441,16 +493,21 @@ class TestModelCache(unittest.TestCase):
         self.cache.set(M.repo.Tree, {'val1': 'test_set1'}, tree)
         self.cache.set(M.repo.Tree, {'val2': 'test_set2'}, tree)
         self.assertEqual(dict(self.cache._query_cache[M.repo.Tree]), {
-                (('val1', 'test_set1'),): 'OBJID',
-                (('val2', 'test_set2'),): 'OBJID',
-            })
-        self.assertEqual(self.cache._instance_cache, {M.repo.Tree: {'OBJID': tree}})
+            (('val1', 'test_set1'),): 'OBJID',
+            (('val2', 'test_set2'),): 'OBJID',
+        })
+        self.assertEqual(self.cache._instance_cache,
+                         {M.repo.Tree: {'OBJID': tree}})
         tree._id = '_id'
-        self.assertEqual(self.cache.get(M.repo.Tree, {'val1': 'test_set1'}), tree)
-        self.assertEqual(self.cache.get(M.repo.Tree, {'val2': 'test_set2'}), tree)
+        self.assertEqual(
+            self.cache.get(M.repo.Tree, {'val1': 'test_set1'}), tree)
+        self.assertEqual(
+            self.cache.get(M.repo.Tree, {'val2': 'test_set2'}), tree)
         self.cache.set(M.repo.Tree, {'val1': 'test_set2'}, tree)
-        self.assertEqual(self.cache.get(M.repo.Tree, {'val1': 'test_set1'}), tree)
-        self.assertEqual(self.cache.get(M.repo.Tree, {'val2': 'test_set2'}), tree)
+        self.assertEqual(
+            self.cache.get(M.repo.Tree, {'val1': 'test_set1'}), tree)
+        self.assertEqual(
+            self.cache.get(M.repo.Tree, {'val2': 'test_set2'}), tree)
 
     @mock.patch('bson.ObjectId')
     def test_set_none_val(self, obj_id):
@@ -458,29 +515,31 @@ class TestModelCache(unittest.TestCase):
         self.cache.set(M.repo.Tree, {'val1': 'test_set1'}, None)
         self.cache.set(M.repo.Tree, {'val2': 'test_set2'}, None)
         self.assertEqual(dict(self.cache._query_cache[M.repo.Tree]), {
-                (('val1', 'test_set1'),): None,
-                (('val2', 'test_set2'),): None,
-            })
+            (('val1', 'test_set1'),): None,
+            (('val2', 'test_set2'),): None,
+        })
         self.assertEqual(dict(self.cache._instance_cache[M.repo.Tree]), {})
         tree1 = mock.Mock(spec=['_id', 'val'], _id='tree1', val='test_set')
-        tree2 = mock.Mock(spec=['_model_cache_id', '_id', 'val'], _model_cache_id='tree2', _id='tree1', val='test_set2')
+        tree2 = mock.Mock(spec=['_model_cache_id', '_id', 'val'],
+                          _model_cache_id='tree2', _id='tree1', val='test_set2')
         self.cache.set(M.repo.Tree, {'val1': 'test_set1'}, tree1)
         self.cache.set(M.repo.Tree, {'val2': 'test_set2'}, tree2)
         self.assertEqual(dict(self.cache._query_cache[M.repo.Tree]), {
-                (('val1', 'test_set1'),): 'tree1',
-                (('val2', 'test_set2'),): 'tree2',
-            })
+            (('val1', 'test_set1'),): 'tree1',
+            (('val2', 'test_set2'),): 'tree2',
+        })
         self.assertEqual(dict(self.cache._instance_cache[M.repo.Tree]), {
-                'tree1': tree1,
-                'tree2': tree2,
-            })
+            'tree1': tree1,
+            'tree2': tree2,
+        })
 
     def test_instance_ids(self):
         tree1 = mock.Mock(spec=['_id', 'val'], _id='id1', val='tree1')
         tree2 = mock.Mock(spec=['_id', 'val'], _id='id2', val='tree2')
         self.cache.set(M.repo.Tree, {'val': 'tree1'}, tree1)
         self.cache.set(M.repo.Tree, {'val': 'tree2'}, tree2)
-        self.assertEqual(set(self.cache.instance_ids(M.repo.Tree)), set(['id1', 'id2']))
+        self.assertEqual(set(self.cache.instance_ids(M.repo.Tree)),
+                         set(['id1', 'id2']))
         self.assertEqual(self.cache.instance_ids(M.repo.LastCommit), [])
 
     @mock.patch.object(M.repo.Tree.query, 'find')
@@ -493,13 +552,13 @@ class TestModelCache(unittest.TestCase):
         self.cache.batch_load(M.repo.Tree, {'foo': {'$in': 'bar'}})
         tr_find.assert_called_with({'foo': {'$in': 'bar'}})
         self.assertEqual(self.cache._query_cache[M.repo.Tree], {
-                (('foo', 1),): 'id1',
-                (('foo', 2),): 'id2',
-            })
+            (('foo', 1),): 'id1',
+            (('foo', 2),): 'id2',
+        })
         self.assertEqual(self.cache._instance_cache[M.repo.Tree], {
-                'id1': m1,
-                'id2': m2,
-            })
+            'id1': m1,
+            'id2': m2,
+        })
 
     @mock.patch.object(M.repo.Tree.query, 'find')
     def test_batch_load_attrs(self, tr_find):
@@ -511,13 +570,13 @@ class TestModelCache(unittest.TestCase):
         self.cache.batch_load(M.repo.Tree, {'foo': {'$in': 'bar'}}, ['qux'])
         tr_find.assert_called_with({'foo': {'$in': 'bar'}})
         self.assertEqual(self.cache._query_cache[M.repo.Tree], {
-                (('qux', 3),): 'id1',
-                (('qux', 5),): 'id2',
-            })
+            (('qux', 3),): 'id1',
+            (('qux', 5),): 'id2',
+        })
         self.assertEqual(self.cache._instance_cache[M.repo.Tree], {
-                'id1': m1,
-                'id2': m2,
-            })
+            'id1': m1,
+            'id2': m2,
+        })
 
     def test_pruning(self):
         cache = M.repo.ModelCache(max_queries=3, max_instances=2)
@@ -533,18 +592,18 @@ class TestModelCache(unittest.TestCase):
         cache.get(M.repo.Tree, {'_id': 'f00'})
         cache.set(M.repo.Tree, {'val': 'b4r'}, tree3)
         self.assertEqual(cache._query_cache, {
-                M.repo.Tree: {
-                    (('_id', 'foo'),): 'foo',
-                    (('_id', 'f00'),): 'f00',
-                    (('val', 'b4r'),): 'f00',
-                },
-            })
+            M.repo.Tree: {
+                (('_id', 'foo'),): 'foo',
+                (('_id', 'f00'),): 'f00',
+                (('val', 'b4r'),): 'f00',
+            },
+        })
         self.assertEqual(cache._instance_cache, {
-                M.repo.Tree: {
-                    'f00': tree3,
-                    'foo': tree4,
-                },
-            })
+            M.repo.Tree: {
+                'f00': tree3,
+                'foo': tree4,
+            },
+        })
 
     def test_pruning_query_vs_instance(self):
         cache = M.repo.ModelCache(max_queries=3, max_instances=2)
@@ -555,17 +614,19 @@ class TestModelCache(unittest.TestCase):
         tree4 = mock.Mock(spec=['_id', '_val'], _id='tree4', val='zaz')
         cache.set(M.repo.Tree, {'keep_query_1': 'bar'}, tree1)
         cache.set(M.repo.Tree, {'drop_query_1': 'bar'}, tree2)
-        cache.set(M.repo.Tree, {'keep_query_2': 'bar'}, tree1)  # should refresh tree1 in _instance_cache
-        cache.set(M.repo.Tree, {'drop_query_2': 'bar'}, tree3)  # should drop tree2, not tree1, from _instance_cache
+        # should refresh tree1 in _instance_cache
+        cache.set(M.repo.Tree, {'keep_query_2': 'bar'}, tree1)
+        # should drop tree2, not tree1, from _instance_cache
+        cache.set(M.repo.Tree, {'drop_query_2': 'bar'}, tree3)
         self.assertEqual(cache._query_cache[M.repo.Tree], {
-                (('drop_query_1', 'bar'),): 'tree2',
-                (('keep_query_2', 'bar'),): 'keep',
-                (('drop_query_2', 'bar'),): 'tree3',
-            })
+            (('drop_query_1', 'bar'),): 'tree2',
+            (('keep_query_2', 'bar'),): 'keep',
+            (('drop_query_2', 'bar'),): 'tree3',
+        })
         self.assertEqual(cache._instance_cache[M.repo.Tree], {
-                'keep': tree1,
-                'tree3': tree3,
-            })
+            'keep': tree1,
+            'tree3': tree3,
+        })
 
     @mock.patch('bson.ObjectId')
     def test_pruning_no_id(self, obj_id):
@@ -577,13 +638,13 @@ class TestModelCache(unittest.TestCase):
         cache.set(M.repo.Tree, {'query_2': 'bar'}, tree1)
         cache.set(M.repo.Tree, {'query_3': 'bar'}, tree1)
         self.assertEqual(cache._instance_cache[M.repo.Tree], {
-                'id1': tree1,
-            })
+            'id1': tree1,
+        })
         self.assertEqual(cache._query_cache[M.repo.Tree], {
-                (('query_1', 'bar'),): 'id1',
-                (('query_2', 'bar'),): 'id1',
-                (('query_3', 'bar'),): 'id1',
-            })
+            (('query_1', 'bar'),): 'id1',
+            (('query_2', 'bar'),): 'id1',
+            (('query_3', 'bar'),): 'id1',
+        })
 
     @mock.patch('bson.ObjectId')
     def test_pruning_none(self, obj_id):
@@ -595,18 +656,20 @@ class TestModelCache(unittest.TestCase):
         cache.set(M.repo.Tree, {'query_3': 'bar'}, None)
         self.assertEqual(cache._instance_cache[M.repo.Tree], {})
         self.assertEqual(cache._query_cache[M.repo.Tree], {
-                (('query_1', 'bar'),): None,
-                (('query_2', 'bar'),): None,
-                (('query_3', 'bar'),): None,
-            })
+            (('query_1', 'bar'),): None,
+            (('query_2', 'bar'),): None,
+            (('query_3', 'bar'),): None,
+        })
 
     @mock.patch('allura.model.repo.session')
     @mock.patch.object(M.repo.Tree.query, 'get')
     def test_pruning_query_flush(self, tr_get, session):
         cache = M.repo.ModelCache(max_queries=3, max_instances=2)
         # ensure cache doesn't store None instances
-        tree1 = mock.Mock(name='tree1', spec=['_id', '_val'], _id='tree1', val='bar')
-        tree2 = mock.Mock(name='tree2', spec=['_id', '_val'], _id='tree2', val='fuz')
+        tree1 = mock.Mock(name='tree1',
+                          spec=['_id', '_val'], _id='tree1', val='bar')
+        tree2 = mock.Mock(name='tree2',
+                          spec=['_id', '_val'], _id='tree2', val='fuz')
         tr_get.return_value = tree2
         cache.set(M.repo.Tree, {'_id': 'tree1'}, tree1)
         cache.set(M.repo.Tree, {'_id': 'tree2'}, tree2)
@@ -614,16 +677,18 @@ class TestModelCache(unittest.TestCase):
         cache.get(M.repo.Tree, {'query_2': 'tree2'})
         cache.get(M.repo.Tree, {'query_3': 'tree2'})
         self.assertEqual(cache._query_cache[M.repo.Tree], {
-                (('query_1', 'tree2'),): 'tree2',
-                (('query_2', 'tree2'),): 'tree2',
-                (('query_3', 'tree2'),): 'tree2',
-            })
+            (('query_1', 'tree2'),): 'tree2',
+            (('query_2', 'tree2'),): 'tree2',
+            (('query_3', 'tree2'),): 'tree2',
+        })
         self.assertEqual(cache._instance_cache[M.repo.Tree], {
-                'tree1': tree1,
-                'tree2': tree2,
-            })
-        self.assertEqual(session.call_args_list, [mock.call(tree1), mock.call(tree2)])
-        self.assertEqual(session.return_value.flush.call_args_list, [mock.call(tree1), mock.call(tree2)])
+            'tree1': tree1,
+            'tree2': tree2,
+        })
+        self.assertEqual(session.call_args_list,
+                         [mock.call(tree1), mock.call(tree2)])
+        self.assertEqual(session.return_value.flush.call_args_list,
+                         [mock.call(tree1), mock.call(tree2)])
         assert not session.return_value.expunge.called
 
     @mock.patch('allura.model.repo.session')
@@ -637,14 +702,14 @@ class TestModelCache(unittest.TestCase):
         cache.set(M.repo.Tree, {'_id': 'tree2'}, tree2)
         cache.set(M.repo.Tree, {'_id': 'tree3'}, tree3)
         self.assertEqual(cache._query_cache[M.repo.Tree], {
-                (('_id', 'tree1'),): 'tree1',
-                (('_id', 'tree2'),): 'tree2',
-                (('_id', 'tree3'),): 'tree3',
-            })
+            (('_id', 'tree1'),): 'tree1',
+            (('_id', 'tree2'),): 'tree2',
+            (('_id', 'tree3'),): 'tree3',
+        })
         self.assertEqual(cache._instance_cache[M.repo.Tree], {
-                'tree2': tree2,
-                'tree3': tree3,
-            })
+            'tree2': tree2,
+            'tree3': tree3,
+        })
         session.assert_called_once_with(tree1)
         session.return_value.flush.assert_called_once_with(tree1)
         session.return_value.expunge.assert_called_once_with(tree1)

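The ModelCache assertions above all reduce to two structures: _query_cache, mapping a normalized query (its items as a sorted tuple) to an _id, and _instance_cache, mapping _id to the object, each pruned oldest-first once it grows past its limit, with a re-set refreshing an instance's position. A stripped-down sketch of that shape (illustrative, not the real M.repo.ModelCache, and it ignores the flush/expunge step the last two tests check):

    from collections import OrderedDict

    class TinyModelCache(object):
        def __init__(self, max_queries=30, max_instances=20):
            self.max_queries = max_queries
            self.max_instances = max_instances
            self._query_cache = OrderedDict()     # normalized query -> _id
            self._instance_cache = OrderedDict()  # _id -> instance

        def _normalize_query(self, query):
            return tuple(sorted(query.items()))

        def set(self, query, instance):
            _id = getattr(instance, '_id', None)
            self._query_cache[self._normalize_query(query)] = _id
            if instance is not None:
                # re-inserting moves the instance to the fresh end
                self._instance_cache.pop(_id, None)
                self._instance_cache[_id] = instance
            while len(self._query_cache) > self.max_queries:
                self._query_cache.popitem(last=False)
            while len(self._instance_cache) > self.max_instances:
                self._instance_cache.popitem(last=False)

        def get(self, query):
            _id = self._query_cache.get(self._normalize_query(query))
            return self._instance_cache.get(_id)
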
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_app.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_app.py b/Allura/allura/tests/test_app.py
index 30c85c0..9389238 100644
--- a/Allura/allura/tests/test_app.py
+++ b/Allura/allura/tests/test_app.py
@@ -26,6 +26,7 @@ from allura import app
 from allura.lib.app_globals import Globals
 from allura import model as M
 
+
 def setUp():
     g._push_object(Globals())
     c._push_object(mock.Mock())
@@ -39,14 +40,15 @@ def setUp():
     app_config._id = None
     app_config.project_id = 'testproject/'
     app_config.tool_name = 'tool'
-    app_config.options = Object(mount_point = 'foo')
+    app_config.options = Object(mount_point='foo')
     c.app = mock.Mock()
     c.app.config = app_config
-    c.app.config.script_name = lambda:'/testproject/test_application/'
-    c.app.config.url = lambda:'http://testproject/test_application/'
+    c.app.config.script_name = lambda: '/testproject/test_application/'
+    c.app.config.url = lambda: 'http://testproject/test_application/'
     c.app.url = c.app.config.url()
     c.app.__version__ = '0.0'
 
+
 def test_config_options():
     options = [
         app.ConfigOption('test1', str, 'MyTestValue'),
@@ -54,6 +56,7 @@ def test_config_options():
     assert options[0].default == 'MyTestValue'
     assert options[1].default == 'MyTestValue'
 
+
 def test_sitemap():
     sm = app.SitemapEntry('test', '')[
         app.SitemapEntry('a', 'a/'),
@@ -61,9 +64,9 @@ def test_sitemap():
     sm[app.SitemapEntry(lambda app:app.config.script_name(), 'c/')]
     bound_sm = sm.bind_app(c.app)
     assert bound_sm.url == 'http://testproject/test_application/', bound_sm.url
-    assert bound_sm.children[-1].label == '/testproject/test_application/', bound_sm.children[-1].label
+    assert bound_sm.children[
+        -1].label == '/testproject/test_application/', bound_sm.children[-1].label
     assert len(sm.children) == 3
     sm.extend([app.SitemapEntry('a', 'a/')[
-                app.SitemapEntry('d', 'd/')]])
+        app.SitemapEntry('d', 'd/')]])
     assert len(sm.children) == 3
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_commands.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_commands.py b/Allura/allura/tests/test_commands.py
index 5187d14..4dda1be 100644
--- a/Allura/allura/tests/test_commands.py
+++ b/Allura/allura/tests/test_commands.py
@@ -24,32 +24,40 @@ import pkg_resources
 
 from alluratest.controller import setup_basic_test, setup_global_objects
 from allura.command import base, script, set_neighborhood_features, \
-                           create_neighborhood, show_models, taskd_cleanup
+    create_neighborhood, show_models, taskd_cleanup
 from allura import model as M
 from forgeblog import model as BM
 from allura.lib.exceptions import InvalidNBFeatureValueError
 from allura.tests import decorators as td
 
-test_config = pkg_resources.resource_filename('allura', '../test.ini') + '#main'
+test_config = pkg_resources.resource_filename(
+    'allura', '../test.ini') + '#main'
 
 
-class EmptyClass(object): pass
+class EmptyClass(object):
+    pass
+
 
 def setUp(self):
     """Method called by nose before running each test"""
-    #setup_basic_test(app_name='main_with_amqp')
+    # setup_basic_test(app_name='main_with_amqp')
     setup_basic_test()
     setup_global_objects()
 
+
 def test_script():
     cmd = script.ScriptCommand('script')
-    cmd.run([test_config, pkg_resources.resource_filename('allura', 'tests/tscript.py') ])
-    assert_raises(ValueError, cmd.run, [test_config, pkg_resources.resource_filename('allura','tests/tscript_error.py') ])
+    cmd.run(
+        [test_config, pkg_resources.resource_filename('allura', 'tests/tscript.py')])
+    assert_raises(ValueError, cmd.run,
+                  [test_config, pkg_resources.resource_filename('allura', 'tests/tscript_error.py')])
+
 
 def test_set_neighborhood_max_projects():
     neighborhood = M.Neighborhood.query.find().first()
     n_id = neighborhood._id
-    cmd = set_neighborhood_features.SetNeighborhoodFeaturesCommand('setnbfeatures')
+    cmd = set_neighborhood_features.SetNeighborhoodFeaturesCommand(
+        'setnbfeatures')
 
     # a valid number
     cmd.run([test_config, str(n_id), 'max_projects', '50'])
@@ -62,13 +70,17 @@ def test_set_neighborhood_max_projects():
     assert neighborhood.features['max_projects'] == None
 
     # check validation
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'max_projects', 'string'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'max_projects', '2.8'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'max_projects', 'string'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'max_projects', '2.8'])
+
 
 def test_set_neighborhood_private():
     neighborhood = M.Neighborhood.query.find().first()
     n_id = neighborhood._id
-    cmd = set_neighborhood_features.SetNeighborhoodFeaturesCommand('setnbfeatures')
+    cmd = set_neighborhood_features.SetNeighborhoodFeaturesCommand(
+        'setnbfeatures')
 
     # allow private projects
     cmd.run([test_config, str(n_id), 'private_projects', 'True'])
@@ -81,14 +93,19 @@ def test_set_neighborhood_private():
     assert not neighborhood.features['private_projects']
 
     # check validation
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'private_projects', 'string'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'private_projects', '1'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'private_projects', '2.8'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'private_projects', 'string'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'private_projects', '1'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'private_projects', '2.8'])
+
 
 def test_set_neighborhood_google_analytics():
     neighborhood = M.Neighborhood.query.find().first()
     n_id = neighborhood._id
-    cmd = set_neighborhood_features.SetNeighborhoodFeaturesCommand('setnbfeatures')
+    cmd = set_neighborhood_features.SetNeighborhoodFeaturesCommand(
+        'setnbfeatures')
 
     # allow private projects
     cmd.run([test_config, str(n_id), 'google_analytics', 'True'])
@@ -101,14 +118,19 @@ def test_set_neighborhood_google_analytics():
     assert not neighborhood.features['google_analytics']
 
     # check validation
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'google_analytics', 'string'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'google_analytics', '1'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'google_analytics', '2.8'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'google_analytics', 'string'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'google_analytics', '1'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'google_analytics', '2.8'])
+
 
 def test_set_neighborhood_css():
     neighborhood = M.Neighborhood.query.find().first()
     n_id = neighborhood._id
-    cmd = set_neighborhood_features.SetNeighborhoodFeaturesCommand('setnbfeatures')
+    cmd = set_neighborhood_features.SetNeighborhoodFeaturesCommand(
+        'setnbfeatures')
 
     # none
     cmd.run([test_config, str(n_id), 'css', 'none'])
@@ -126,22 +148,30 @@ def test_set_neighborhood_css():
     assert neighborhood.features['css'] == 'custom'
 
     # check validation
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'css', 'string'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'css', '1'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'css', '2.8'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'css', 'None'])
-    assert_raises(InvalidNBFeatureValueError, cmd.run, [test_config, str(n_id), 'css', 'True'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'css', 'string'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'css', '1'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'css', '2.8'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'css', 'None'])
+    assert_raises(InvalidNBFeatureValueError, cmd.run,
+                  [test_config, str(n_id), 'css', 'True'])
+
 
 def test_update_neighborhood():
     cmd = create_neighborhood.UpdateNeighborhoodCommand('update-neighborhood')
     cmd.run([test_config, 'Projects', 'True'])
-    ThreadLocalORMSession.close_all() # make sure the app_configs get freshly queried
+    # make sure the app_configs get freshly queried
+    ThreadLocalORMSession.close_all()
     nb = M.Neighborhood.query.get(name='Projects')
     assert nb.has_home_tool == True
 
     cmd = create_neighborhood.UpdateNeighborhoodCommand('update-neighborhood')
     cmd.run([test_config, 'Projects', 'False'])
-    ThreadLocalORMSession.close_all() # make sure the app_configs get freshly queried
+    # make sure the app_configs get freshly queried
+    ThreadLocalORMSession.close_all()
     nb = M.Neighborhood.query.get(name='Projects')
     assert nb.has_home_tool == False
 
@@ -155,12 +185,12 @@ class TestEnsureIndexCommand(object):
     def test_update_indexes_order(self):
         collection = Mock(name='collection')
         collection.index_information.return_value = {
-                '_id_': {'key': '_id'},
-                '_foo_bar': {'key': [('foo', 1), ('bar', 1)]},
-                }
+            '_id_': {'key': '_id'},
+            '_foo_bar': {'key': [('foo', 1), ('bar', 1)]},
+        }
         indexes = [
-                Mock(unique=False, index_spec=[('foo', 1)]),
-                ]
+            Mock(unique=False, index_spec=[('foo', 1)]),
+        ]
         cmd = show_models.EnsureIndexCommand('ensure_index')
         cmd._update_indexes(collection, indexes)
 
@@ -168,39 +198,43 @@ class TestEnsureIndexCommand(object):
         for i, call in enumerate(collection.mock_calls):
             method_name = call[0]
             collection_call_order[method_name] = i
-        assert collection_call_order['ensure_index'] < collection_call_order['drop_index'], collection.mock_calls
+        assert collection_call_order['ensure_index'] < collection_call_order[
+            'drop_index'], collection.mock_calls
 
     def test_update_indexes_unique_changes(self):
         collection = Mock(name='collection')
         # expecting these ensure_index calls, we'll make their return values normal
         # for easier assertions later
-        collection.ensure_index.side_effect = ['_foo_bar_temporary_extra_field_for_indexing',
-                                               '_foo_bar',
-                                               '_foo_baz_temporary_extra_field_for_indexing',
-                                               '_foo_baz',
-                                               '_foo_baz',
-                                               '_foo_bar',
-                                               ]
+        collection.ensure_index.side_effect = [
+            '_foo_bar_temporary_extra_field_for_indexing',
+            '_foo_bar',
+            '_foo_baz_temporary_extra_field_for_indexing',
+            '_foo_baz',
+            '_foo_baz',
+            '_foo_bar',
+        ]
         collection.index_information.return_value = {
-                '_id_': {'key': '_id'},
-                '_foo_bar': {'key': [('foo', 1), ('bar', 1)], 'unique': True},
-                '_foo_baz': {'key': [('foo', 1), ('baz', 1)]},
-                }
+            '_id_': {'key': '_id'},
+            '_foo_bar': {'key': [('foo', 1), ('bar', 1)], 'unique': True},
+            '_foo_baz': {'key': [('foo', 1), ('baz', 1)]},
+        }
         indexes = [
-                Mock(index_spec=[('foo', 1), ('bar', 1)], unique=False, ),
-                Mock(index_spec=[('foo', 1), ('baz', 1)], unique=True, ),
-                ]
+            Mock(index_spec=[('foo', 1), ('bar', 1)], unique=False, ),
+            Mock(index_spec=[('foo', 1), ('baz', 1)], unique=True, ),
+        ]
 
         cmd = show_models.EnsureIndexCommand('ensure_index')
         cmd._update_indexes(collection, indexes)
 
         assert_equal(collection.mock_calls, [
             call.index_information(),
-            call.ensure_index([('foo', 1), ('bar', 1), ('temporary_extra_field_for_indexing', 1)]),
+            call.ensure_index(
+                [('foo', 1), ('bar', 1), ('temporary_extra_field_for_indexing', 1)]),
             call.drop_index('_foo_bar'),
             call.ensure_index([('foo', 1), ('bar', 1)], unique=False),
             call.drop_index('_foo_bar_temporary_extra_field_for_indexing'),
-            call.ensure_index([('foo', 1), ('baz', 1), ('temporary_extra_field_for_indexing', 1)]),
+            call.ensure_index(
+                [('foo', 1), ('baz', 1), ('temporary_extra_field_for_indexing', 1)]),
             call.drop_index('_foo_baz'),
             call.ensure_index([('foo', 1), ('baz', 1)], unique=True),
             call.drop_index('_foo_baz_temporary_extra_field_for_indexing'),
@@ -292,7 +326,8 @@ class TestTaskdCleanupCommand(object):
         assert task1.result == 'Forsaken task'
 
         # task1 seems lost, but it just moved quickly
-        task1 = Mock(state='complete', process='host pid 1111', result='', _id=1)
+        task1 = Mock(state='complete',
+                     process='host pid 1111', result='', _id=1)
         task2 = Mock(state='busy', process='host pid 1111', result='', _id=2)
         self.cmd_class._complete_suspicious_tasks = lambda x: [1]
         self.cmd_class._busy_tasks = lambda x: [task1, task2]
@@ -341,7 +376,8 @@ class TestBackgroundCommand(object):
     def test_run_command(self, command):
         command.__name__ = 'ReindexCommand'
         base.run_command(self.cmd, 'dev.ini -p "project 3"')
-        command(command.__name__).run.assert_called_with(['dev.ini', '-p', 'project 3'])
+        command(command.__name__).run.assert_called_with(
+            ['dev.ini', '-p', 'project 3'])
 
     def test_invalid_args(self):
         M.MonQTask.query.remove()
@@ -377,9 +413,11 @@ class TestReindexCommand(object):
             '-p', 'test', '--solr', '--solr-hosts=http://blah.com/solr/forge,https://other.net/solr/forge'])
         cmd._chunked_add_artifacts(list(range(10)))
         # check constructors of first and second Solr() instantiations
-        assert_equal(set([Solr.call_args_list[0][0][0], Solr.call_args_list[1][0][0]]),
-                     set(['http://blah.com/solr/forge', 'https://other.net/solr/forge'])
-                     )
+        assert_equal(
+            set([Solr.call_args_list[0][0][0], Solr.call_args_list[1][0][0]]),
+            set(['http://blah.com/solr/forge',
+                 'https://other.net/solr/forge'])
+        )
 
     @patch('allura.command.show_models.utils')
     def test_project_regex(self, utils):
@@ -391,12 +429,14 @@ class TestReindexCommand(object):
     @patch('allura.command.show_models.add_artifacts')
     def test_chunked_add_artifacts(self, add_artifacts):
         cmd = show_models.ReindexCommand('reindex')
-        cmd.options = Mock(tasks=True, max_chunk=10*1000, ming_config=None)
+        cmd.options = Mock(tasks=True, max_chunk=10 * 1000, ming_config=None)
         ref_ids = list(range(10 * 1000 * 2 + 20))
         cmd._chunked_add_artifacts(ref_ids)
         assert_equal(len(add_artifacts.post.call_args_list), 3)
-        assert_equal(len(add_artifacts.post.call_args_list[0][0][0]), 10 * 1000)
-        assert_equal(len(add_artifacts.post.call_args_list[1][0][0]), 10 * 1000)
+        assert_equal(
+            len(add_artifacts.post.call_args_list[0][0][0]), 10 * 1000)
+        assert_equal(
+            len(add_artifacts.post.call_args_list[1][0][0]), 10 * 1000)
         assert_equal(len(add_artifacts.post.call_args_list[2][0][0]), 20)
 
     @patch('allura.command.show_models.add_artifacts')
@@ -404,7 +444,7 @@ class TestReindexCommand(object):
         def on_post(chunk, **kw):
             if len(chunk) > 1:
                 raise pymongo.errors.InvalidDocument(
-                        "BSON document too large (16906035 bytes) - the connected server supports BSON document sizes up to 16777216 bytes.")
+                    "BSON document too large (16906035 bytes) - the connected server supports BSON document sizes up to 16777216 bytes.")
         add_artifacts.post.side_effect = on_post
         cmd = show_models.ReindexCommand('reindex')
         cmd.options, args = cmd.parser.parse_args([])

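test_update_indexes_unique_changes spells out the safe order for altering an index's uniqueness: create a temporary index that still covers the keys, drop the stale definition, recreate it with the new options, then drop the temporary one, so the collection is never left without a covering index. Roughly, for a pymongo-style collection (a sketch assuming ensure_index returns the new index's name, as the mocks above do):

    def swap_index(collection, spec, old_name, unique):
        # 1. temporary index so the keys stay covered throughout
        temp_name = collection.ensure_index(
            spec + [('temporary_extra_field_for_indexing', 1)])
        # 2. drop the stale definition
        collection.drop_index(old_name)
        # 3. recreate with the new uniqueness setting
        collection.ensure_index(spec, unique=unique)
        # 4. discard the temporary index
        collection.drop_index(temp_name)
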
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/test_decorators.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/test_decorators.py b/Allura/allura/tests/test_decorators.py
index d4b70a5..0d1d338 100644
--- a/Allura/allura/tests/test_decorators.py
+++ b/Allura/allura/tests/test_decorators.py
@@ -42,6 +42,7 @@ class TestTask(TestCase):
         @task(disable_notifications=True)
         def func(s, foo=None, **kw):
             pass
+
         def mock_post(f, args, kw, delay=None):
             self.assertTrue(c.project.notifications_disabled)
             self.assertFalse('delay' in kw)


[25/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/form_fields.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/form_fields.py b/Allura/allura/lib/widgets/form_fields.py
index f59b5fd..e8d9a27 100644
--- a/Allura/allura/lib/widgets/form_fields.py
+++ b/Allura/allura/lib/widgets/form_fields.py
@@ -28,8 +28,10 @@ import ew.jinja2_ew as ew
 
 log = logging.getLogger(__name__)
 
+
 def onready(text):
-    return ew.JSScript('$(function () {%s});' % text);
+    return ew.JSScript('$(function () {%s});' % text)
+
 
 class LabelList(fev.UnicodeString):
 
@@ -46,10 +48,11 @@ class LabelList(fev.UnicodeString):
         value = super(LabelList, self)._from_python(value, state)
         return value
 
+
 class LabelEdit(ew.InputField):
-    template='jinja:allura:templates/widgets/label_edit.html'
+    template = 'jinja:allura:templates/widgets/label_edit.html'
     validator = LabelList(if_empty=[])
-    defaults=dict(
+    defaults = dict(
         ew.InputField.defaults,
         name=None,
         value=None,
@@ -90,9 +93,10 @@ class LabelEdit(ew.InputField):
             });
         ''' % dict(url=c.app.url))
 
+
 class ProjectUserSelect(ew.InputField):
-    template='jinja:allura:templates/widgets/project_user_select.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/project_user_select.html'
+    defaults = dict(
         ew.InputField.defaults,
         name=None,
         value=None,
@@ -102,13 +106,14 @@ class ProjectUserSelect(ew.InputField):
     def __init__(self, **kw):
         super(ProjectUserSelect, self).__init__(**kw)
         if not isinstance(self.value, list):
-            self.value=[self.value]
+            self.value = [self.value]
 
     def from_python(self, value, state=None):
         return value
 
     def resources(self):
-        for r in super(ProjectUserSelect, self).resources(): yield r
+        for r in super(ProjectUserSelect, self).resources():
+            yield r
         yield ew.CSSLink('css/autocomplete.css')
         yield onready('''
           $('input.project_user_select').autocomplete({
@@ -153,8 +158,8 @@ class ProjectUserCombo(ew.SingleSelectField):
 
 
 class NeighborhoodProjectSelect(ew.InputField):
-    template='jinja:allura:templates/widgets/neighborhood_project_select.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/neighborhood_project_select.html'
+    defaults = dict(
         ew.InputField.defaults,
         name=None,
         value=None,
@@ -164,14 +169,15 @@ class NeighborhoodProjectSelect(ew.InputField):
     def __init__(self, url, **kw):
         super(NeighborhoodProjectSelect, self).__init__(**kw)
         if not isinstance(self.value, list):
-            self.value=[self.value]
+            self.value = [self.value]
         self.url = url
 
     def from_python(self, value, state=None):
         return value
 
     def resources(self):
-        for r in super(NeighborhoodProjectSelect, self).resources(): yield r
+        for r in super(NeighborhoodProjectSelect, self).resources():
+            yield r
         yield ew.CSSLink('css/autocomplete.css')
         yield onready('''
           $('input.neighborhood-project-select').autocomplete({
@@ -190,22 +196,25 @@ class NeighborhoodProjectSelect(ew.InputField):
             minLength: 3
           });''' % self.url)
 
+
 class AttachmentList(ew_core.Widget):
-    template='jinja:allura:templates/widgets/attachment_list.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/attachment_list.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         attachments=None,
         edit_mode=None)
 
+
 class AttachmentAdd(ew_core.Widget):
-    template='jinja:allura:templates/widgets/attachment_add.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/attachment_add.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         action=None,
         name=None)
 
     def resources(self):
-        for r in super(AttachmentAdd, self).resources(): yield r
+        for r in super(AttachmentAdd, self).resources():
+            yield r
         yield onready('''
             $(".attachment_form_add_button").click(function (evt) {
                 $(this).hide();
@@ -214,12 +223,13 @@ class AttachmentAdd(ew_core.Widget):
             });
          ''')
 
+
 class SubmitButton(ew.SubmitButton):
-    attrs={'class':'ui-state-default ui-button ui-button-text'}
+    attrs = {'class': 'ui-state-default ui-button ui-button-text'}
 
 
 class Radio(ew.InputField):
-    template=ew_core.render.Snippet('''<input {% if value %} checked{% endif %} {{widget.j2_attrs({
+    template = ew_core.render.Snippet('''<input {% if value %} checked{% endif %} {{widget.j2_attrs({
         'id':id,
         'type':field_type,
         'name':rendered_name,
@@ -227,13 +237,13 @@ class Radio(ew.InputField):
         'readonly':readonly,
         'value':value},
         attrs)}}>''', 'jinja2')
-    defaults=dict(
+    defaults = dict(
         ew.InputField.defaults,
         field_type='radio')
 
 
 class AutoResizeTextarea(ew.TextArea):
-    defaults=dict(
+    defaults = dict(
         ew.TextArea.defaults,
         name=None,
         value=None,
@@ -245,10 +255,11 @@ class AutoResizeTextarea(ew.TextArea):
             $('textarea.auto_resize').focus(function(){$(this).autosize();});
         ''')
 
+
 class MarkdownEdit(AutoResizeTextarea):
-    template='jinja:allura:templates/widgets/markdown_edit.html'
+    template = 'jinja:allura:templates/widgets/markdown_edit.html'
     validator = fev.UnicodeString()
-    defaults=dict(
+    defaults = dict(
         AutoResizeTextarea.defaults,
         name=None,
         value=None,
@@ -258,15 +269,17 @@ class MarkdownEdit(AutoResizeTextarea):
         return value
 
     def resources(self):
-        for r in super(MarkdownEdit, self).resources(): yield r
+        for r in super(MarkdownEdit, self).resources():
+            yield r
         yield ew.JSLink('js/jquery.lightbox_me.js')
         yield ew.JSLink('js/jquery.textarea.js')
         yield ew.JSLink('js/sf_markitup.js')
         yield ew.CSSLink('css/markitup_sf.css')
 
+
 class PageList(ew_core.Widget):
-    template='jinja:allura:templates/widgets/page_list.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/page_list.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         name=None,
         limit=None,
@@ -277,12 +290,13 @@ class PageList(ew_core.Widget):
     def paginator(self, count, page, limit, zero_based_pages=True):
         page_offset = 1 if zero_based_pages else 0
         limit = 10 if limit is None else limit
+
         def page_url(page):
             params = request.GET.copy()
             params['page'] = page - page_offset
             return url(request.path, params)
         return paginate.Page(range(count), page + page_offset, int(limit),
-        url=page_url)
+                             url=page_url)
 
     def resources(self):
         yield ew.CSSLink('css/page_list.css')
@@ -290,14 +304,15 @@ class PageList(ew_core.Widget):
     @property
     def url_params(self, **kw):
         url_params = dict()
-        for k,v in request.params.iteritems():
-            if k not in ['limit','count','page']:
+        for k, v in request.params.iteritems():
+            if k not in ['limit', 'count', 'page']:
                 url_params[k] = v
         return url_params
 
+
 class PageSize(ew_core.Widget):
-    template='jinja:allura:templates/widgets/page_size.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/page_size.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         limit=None,
         name=None,
@@ -307,8 +322,8 @@ class PageSize(ew_core.Widget):
     @property
     def url_params(self, **kw):
         url_params = dict()
-        for k,v in request.params.iteritems():
-            if k not in ['limit','count','page']:
+        for k, v in request.params.iteritems():
+            if k not in ['limit', 'count', 'page']:
                 url_params[k] = v
         return url_params
 
@@ -317,15 +332,17 @@ class PageSize(ew_core.Widget):
             $('select.results_per_page').change(function () {
                 this.form.submit();});''')
 
+
 class FileChooser(ew.InputField):
-    template='jinja:allura:templates/widgets/file_chooser.html'
-    validator=fev.FieldStorageUploadConverter()
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/file_chooser.html'
+    validator = fev.FieldStorageUploadConverter()
+    defaults = dict(
         ew.InputField.defaults,
         name=None)
 
     def resources(self):
-        for r in super(FileChooser, self).resources(): yield r
+        for r in super(FileChooser, self).resources():
+            yield r
         yield ew.JSLink('js/jquery.file_chooser.js')
         yield onready('''
             var num_files = 0;
@@ -350,27 +367,30 @@ class FileChooser(ew.InputField):
                 $(holder).append(delete_link);
             });''')
 
+
 class JQueryMixin(object):
     js_widget_name = None
     js_plugin_file = None
     js_params = [
         'container_cls'
-        ]
-    defaults=dict(
-        container_cls = 'container')
+    ]
+    defaults = dict(
+        container_cls='container')
 
     def resources(self):
         for r in super(JQueryMixin, self).resources():
             yield r
-        if self.js_plugin_file is not None: yield self.js_plugin_file
+        if self.js_plugin_file is not None:
+            yield self.js_plugin_file
         opts = dict(
             (k, getattr(self, k))
-            for k in self.js_params )
+            for k in self.js_params)
         yield onready('''
 $(document).bind('clone', function () {
     $('.%s').%s(%s); });
 $(document).trigger('clone');
-            ''' % (self.container_cls, self.js_widget_name, json.dumps(opts)));
+            ''' % (self.container_cls, self.js_widget_name, json.dumps(opts)))
+
 
 class SortableRepeatedMixin(JQueryMixin):
     js_widget_name = 'SortableRepeatedField'
@@ -380,8 +400,8 @@ class SortableRepeatedMixin(JQueryMixin):
         'flist_cls',
         'stub_cls',
         'msg_cls',
-        ]
-    defaults=dict(
+    ]
+    defaults = dict(
         container_cls='sortable-repeated-field',
         field_cls='sortable-field',
         flist_cls='sortable-field-list',
@@ -390,64 +410,72 @@ class SortableRepeatedMixin(JQueryMixin):
         empty_msg='No fields have been defined',
         nonempty_msg='Drag and drop the fields to reorder',
         repetitions=0)
-    button =  ew.InputField(
+    button = ew.InputField(
         css_class='add', field_type='button', value='New Field')
 
+
 class SortableRepeatedField(SortableRepeatedMixin, ew.RepeatedField):
-    template='genshi:allura.templates.widgets.sortable_repeated_field'
-    defaults=dict(
+    template = 'genshi:allura.templates.widgets.sortable_repeated_field'
+    defaults = dict(
         ew.RepeatedField.defaults,
         **SortableRepeatedMixin.defaults)
 
+
 class SortableTable(SortableRepeatedMixin, ew.TableField):
-    template='genshi:allura.templates.widgets.sortable_table'
-    defaults=dict(
+    template = 'genshi:allura.templates.widgets.sortable_table'
+    defaults = dict(
         ew.TableField.defaults,
         **SortableRepeatedMixin.defaults)
 
+
 class StateField(JQueryMixin, ew.CompoundField):
-    template='genshi:allura.templates.widgets.state_field'
+    template = 'genshi:allura.templates.widgets.state_field'
     js_widget_name = 'StateField'
     js_plugin_file = ew.JSLink('js/state_field.js')
     js_params = JQueryMixin.js_params + [
         'selector_cls',
         'field_cls',
-        ]
-    defaults=dict(
+    ]
+    defaults = dict(
         ew.CompoundField.defaults,
-        js_params = js_params,
+        js_params=js_params,
         container_cls='state-field-container',
         selector_cls='state-field-selector',
         field_cls='state-field',
         show_label=False,
-        selector = None,
-        states = {},
-        )
+        selector=None,
+        states={},
+    )
 
     @property
     def fields(self):
         return [self.selector] + self.states.values()
 
+
 class DateField(JQueryMixin, ew.TextField):
     js_widget_name = 'datepicker'
     js_params = JQueryMixin.js_params
     container_cls = 'ui-date-field'
-    defaults=dict(
+    defaults = dict(
         ew.TextField.defaults,
-        container_cls = 'ui-date-field',
-        css_class = 'ui-date-field')
+        container_cls='ui-date-field',
+        css_class='ui-date-field')
 
     def resources(self):
-        for r in super(DateField, self).resources(): yield r
+        for r in super(DateField, self).resources():
+            yield r
         yield ew.CSSLink('css/jquery.ui.datepicker.css')
 
+
 class FieldCluster(ew.CompoundField):
-    template='genshi:allura.templates.widgets.field_cluster'
+    template = 'genshi:allura.templates.widgets.field_cluster'
+
 
 class AdminField(ew.InputField):
+
     '''Field with the correct layout/etc for an admin page'''
-    template='jinja:allura:templates/widgets/admin_field.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/admin_field.html'
+    defaults = dict(
         ew.InputField.defaults,
         field=None,
         css_class=None,
@@ -462,9 +490,10 @@ class AdminField(ew.InputField):
         for r in self.field.resources():
             yield r
 
+
 class Lightbox(ew_core.Widget):
-    template='jinja:allura:templates/widgets/lightbox.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/lightbox.html'
+    defaults = dict(
         name=None,
         trigger=None,
         content='',
@@ -487,10 +516,11 @@ class Lightbox(ew_core.Widget):
 
 
 class DisplayOnlyField(ew.HiddenField):
+
     '''
     Render a field as plain text, optionally with a hidden field to preserve the value.
     '''
-    template=ew.Snippet('''{{ (text or value or attrs.value)|e }}
+    template = ew.Snippet('''{{ (text or value or attrs.value)|e }}
         {%- if with_hidden_input is none and name or with_hidden_input -%}
         <input {{
             widget.j2_attrs({
@@ -500,9 +530,8 @@ class DisplayOnlyField(ew.HiddenField):
                 'class':css_class}, attrs)
         }}>
         {%- endif %}''', 'jinja2')
-    defaults=dict(
+    defaults = dict(
         ew.HiddenField.defaults,
         text=None,
         value=None,
         with_hidden_input=None)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/forms.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/forms.py b/Allura/allura/lib/widgets/forms.py
index e656fc0..de87746 100644
--- a/Allura/allura/lib/widgets/forms.py
+++ b/Allura/allura/lib/widgets/forms.py
@@ -38,28 +38,33 @@ from allura import model as M
 
 log = logging.getLogger(__name__)
 
-weekdays = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
+weekdays = ['Monday', 'Tuesday', 'Wednesday',
+            'Thursday', 'Friday', 'Saturday', 'Sunday']
+
 
 class _HTMLExplanation(ew.InputField):
-    template=ew.Snippet(
+    template = ew.Snippet(
         '''<label class="grid-4">&nbsp;</label>
            <div class="grid-14" style="margin:2px;">{{widget.text}}</div>
         ''',
         'jinja2')
 
+
 class NeighborhoodProjectShortNameValidator(fev.FancyValidator):
+
     def _validate_shortname(self, shortname, neighborhood, state):
         if not h.re_project_name.match(shortname):
             raise forge_exc.ProjectShortnameInvalid(
-                    'Please use only letters, numbers, and dashes 3-15 characters long.',
-                    shortname, state)
+                'Please use only letters, numbers, and dashes 3-15 characters long.',
+                shortname, state)
 
     def _validate_allowed(self, shortname, neighborhood, state):
-        p = M.Project.query.get(shortname=shortname, neighborhood_id=neighborhood._id)
+        p = M.Project.query.get(
+            shortname=shortname, neighborhood_id=neighborhood._id)
         if p:
             raise forge_exc.ProjectConflict(
-                    'This project name is taken.',
-                    shortname, state)
+                'This project name is taken.',
+                shortname, state)
 
     def to_python(self, value, state=None, check_allowed=True, neighborhood=None):
         """
@@ -70,13 +75,15 @@ class NeighborhoodProjectShortNameValidator(fev.FancyValidator):
         otherwise disallowed.
         """
         if neighborhood is None:
-            neighborhood = M.Neighborhood.query.get(name=state.full_dict['neighborhood'])
+            neighborhood = M.Neighborhood.query.get(
+                name=state.full_dict['neighborhood'])
         value = h.really_unicode(value or '').encode('utf-8').lower()
         self._validate_shortname(value, neighborhood, state)
         if check_allowed:
             self._validate_allowed(value, neighborhood, state)
         return value
 
+
 class ForgeForm(ew.SimpleForm):
     antispam = False
     template = 'jinja:allura:templates/widgets/forge_form.html'
@@ -109,7 +116,8 @@ class ForgeForm(ew.SimpleForm):
         ctx = self.context_for(field)
         display = field.display(**ctx)
         if ctx['errors'] and field.show_errors and not ignore_errors:
-            display = "%s<div class='error'>%s</div>" % (display, ctx['errors'])
+            display = "%s<div class='error'>%s</div>" % (display,
+                                                         ctx['errors'])
         return h.html.literal(display)
 
     def display_field_by_idx(self, idx, ignore_errors=False):
@@ -120,10 +128,13 @@ class ForgeForm(ew.SimpleForm):
         ctx = self.context_for(field)
         display = field.display(**ctx)
         if ctx['errors'] and field.show_errors and not ignore_errors:
-            display = "%s<div class='error'>%s</div>" % (display, ctx['errors'])
+            display = "%s<div class='error'>%s</div>" % (display,
+                                                         ctx['errors'])
         return display
 
+
 class PasswordChangeBase(ForgeForm):
+
     class fields(ew_core.NameList):
         pw = ew.PasswordField(
             label='New Password',
@@ -139,7 +150,9 @@ class PasswordChangeBase(ForgeForm):
             raise formencode.Invalid('Passwords must match', value, state)
         return d
 
+
 class PasswordChangeForm(PasswordChangeBase):
+
     class fields(ew_core.NameList):
         oldpw = ew.PasswordField(
             label='Old Password',
@@ -151,11 +164,13 @@ class PasswordChangeForm(PasswordChangeBase):
             label='New Password (again)',
             validator=fev.UnicodeString(not_empty=True))
 
+
 class PersonalDataForm(ForgeForm):
+
     class fields(ew_core.NameList):
         sex = ew.SingleSelectField(
             label='Gender',
-            options=[ew.Option(py_value=v,label=v,selected=False)
+            options=[ew.Option(py_value=v, label=v, selected=False)
                      for v in ['Male', 'Female', 'Unknown', 'Other']],
             validator=formencode.All(
                 V.OneOfValidator(['Male', 'Female', 'Unknown', 'Other']),
@@ -170,26 +185,26 @@ class PersonalDataForm(ForgeForm):
         country = ew.SingleSelectField(
             label='Country of residence',
             validator=V.MapValidator(country_names, not_empty=False),
-            options = [
+            options=[
                 ew.Option(
-                    py_value=" ", label=" -- Unknown -- ", selected=False)] +\
-                [ew.Option(py_value=c, label=n, selected=False)
-                 for c,n in sorted(country_names.items(),
-                                   key=lambda (k,v):v)],
-            attrs={'onchange':'selectTimezone(this.value)'})
+                    py_value=" ", label=" -- Unknown -- ", selected=False)] +
+            [ew.Option(py_value=c, label=n, selected=False)
+             for c, n in sorted(country_names.items(),
+                                key=lambda (k, v):v)],
+            attrs={'onchange': 'selectTimezone(this.value)'})
         city = ew.TextField(
             label='City of residence',
             attrs=dict(value=None),
             validator=fev.UnicodeString(not_empty=False))
-        timezone=ew.SingleSelectField(
+        timezone = ew.SingleSelectField(
             label='Timezone',
-            attrs={'id':'tz'},
+            attrs={'id': 'tz'},
             validator=V.OneOfValidator(common_timezones, not_empty=False),
             options=[
-                 ew.Option(
-                     py_value=" ",
-                     label=" -- Unknown -- ")] + \
-                 [ew.Option(py_value=n, label=n)
+                ew.Option(
+                    py_value=" ",
+                    label=" -- Unknown -- ")] +
+            [ew.Option(py_value=n, label=n)
                   for n in sorted(common_timezones)])
 
     def display(self, **kw):
@@ -237,11 +252,11 @@ class PersonalDataForm(ForgeForm):
         yield ew.JSScript('''
 var $allTimezones = $("#tz").clone();
 var $t = {};
-''' + \
-    reduce(_append, [
-        '$t["'+ el +'"] = ' + str([name.encode('utf-8')
-                                  for name in country_timezones[el]]) + ";\n"
-        for el in country_timezones]) + '''
+''' +
+                          reduce(_append, [
+                              '$t["' + el + '"] = ' + str([name.encode('utf-8')
+                                                           for name in country_timezones[el]]) + ";\n"
+                              for el in country_timezones]) + '''
 function selectTimezone($country){
      if($country == " "){
          $("#tz").replaceWith($allTimezones);
@@ -254,54 +269,58 @@ function selectTimezone($country){
      }
 }''')
 
+
 class AddTelNumberForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults)
+    defaults = dict(ForgeForm.defaults)
 
     class fields(ew_core.NameList):
         newnumber = ew.TextField(
             label='New telephone number',
-            attrs={'value':''},
+            attrs={'value': ''},
             validator=fev.UnicodeString(not_empty=True))
 
     def display(self, **kw):
-        initial_value = kw.get('initial_value','')
+        initial_value = kw.get('initial_value', '')
         self.fields['newnumber'].attrs['value'] = initial_value
         return super(ForgeForm, self).display(**kw)
 
+
 class AddWebsiteForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults)
+    defaults = dict(ForgeForm.defaults)
 
     class fields(ew_core.NameList):
         newwebsite = ew.TextField(
             label='New website url',
-            attrs={'value':''},
+            attrs={'value': ''},
             validator=fev.URL())
 
     def display(self, **kw):
-        initial_value = kw.get('initial_value','')
+        initial_value = kw.get('initial_value', '')
         self.fields['newwebsite'].attrs['value'] = initial_value
         return super(ForgeForm, self).display(**kw)
 
+
 class SkypeAccountForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults)
+    defaults = dict(ForgeForm.defaults)
 
     class fields(ew_core.NameList):
         skypeaccount = ew.TextField(
             label='Skype account',
-            attrs={'value':''},
+            attrs={'value': ''},
             validator=fev.UnicodeString(not_empty=False))
 
     def display(self, **kw):
-        initial_value = kw.get('initial_value','')
+        initial_value = kw.get('initial_value', '')
         self.fields['skypeaccount'].attrs['value'] = initial_value
         return super(ForgeForm, self).display(**kw)
 
+
 class RemoveTextValueForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults, submit_text=None, show_errors=False)
+    defaults = dict(ForgeForm.defaults, submit_text=None, show_errors=False)
 
     def display(self, **kw):
-        initial_value = kw.get('value','')
-        label = kw.get('label','')
+        initial_value = kw.get('value', '')
+        label = kw.get('label', '')
         description = kw.get('description')
 
         self.fields = [
@@ -312,11 +331,11 @@ class RemoveTextValueForm(ForgeForm):
                     ffw.DisplayOnlyField(
                         name='oldvalue',
                         label=initial_value,
-                        attrs={'value':initial_value},
+                        attrs={'value': initial_value},
                         show_errors=False),
                     ew.SubmitButton(
                         show_label=False,
-                        attrs={'value':'Remove'},
+                        attrs={'value': 'Remove'},
                         show_errors=False)])]
         if description:
             self.fields.append(
@@ -331,8 +350,9 @@ class RemoveTextValueForm(ForgeForm):
         d["oldvalue"] = kw.get('oldvalue', '')
         return d
 
+
 class AddSocialNetworkForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults)
+    defaults = dict(ForgeForm.defaults)
 
     @property
     def fields(self):
@@ -354,11 +374,11 @@ class AddSocialNetworkForm(ForgeForm):
 
 
 class RemoveSocialNetworkForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults, submit_text=None, show_errors=False)
+    defaults = dict(ForgeForm.defaults, submit_text=None, show_errors=False)
 
     def display(self, **kw):
-        account = kw.get('account','')
-        socialnetwork = kw.get('socialnetwork','')
+        account = kw.get('account', '')
+        socialnetwork = kw.get('socialnetwork', '')
 
         self.fields = [
             ew.RowField(
@@ -367,15 +387,15 @@ class RemoveSocialNetworkForm(ForgeForm):
                     ffw.DisplayOnlyField(
                         text='%s account' % socialnetwork,
                         name="socialnetwork",
-                        attrs={'value':socialnetwork},
+                        attrs={'value': socialnetwork},
                         show_errors=False),
                     ffw.DisplayOnlyField(
                         name="account",
-                        attrs={'value':account},
+                        attrs={'value': account},
                         show_errors=False),
                     ew.SubmitButton(
                         show_label=False,
-                        attrs={'value':'Remove'},
+                        attrs={'value': 'Remove'},
                         show_errors=False)])]
         return super(ForgeForm, self).display(**kw)
 
@@ -386,7 +406,9 @@ class RemoveSocialNetworkForm(ForgeForm):
         d["socialnetwork"] = kw.get('socialnetwork', '')
         return d
 
+
 class AddInactivePeriodForm(ForgeForm):
+
     class fields(ew_core.NameList):
         startdate = ew.TextField(
             label='Start date',
@@ -404,12 +426,13 @@ class AddInactivePeriodForm(ForgeForm):
         d = super(AddInactivePeriodForm, self).to_python(kw, state)
         if d['startdate'] > d['enddate']:
                 raise formencode.Invalid(
-                   'Invalid period: start date greater than end date.',
+                    'Invalid period: start date greater than end date.',
                     kw, state)
         return d
 
+
 class RemoveInactivePeriodForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults, submit_text=None, show_errors=False)
+    defaults = dict(ForgeForm.defaults, submit_text=None, show_errors=False)
 
     def display(self, **kw):
         startdate = kw.get('startdate')
@@ -422,25 +445,27 @@ class RemoveInactivePeriodForm(ForgeForm):
                 fields=[
                     ffw.DisplayOnlyField(
                         name='startdate',
-                        attrs={'value':startdate.strftime('%d/%m/%Y')},
+                        attrs={'value': startdate.strftime('%d/%m/%Y')},
                         show_errors=False),
                     ffw.DisplayOnlyField(
                         name='enddate',
-                        attrs={'value':enddate.strftime('%d/%m/%Y')},
+                        attrs={'value': enddate.strftime('%d/%m/%Y')},
                         show_errors=False),
                     ew.SubmitButton(
-                        attrs={'value':'Remove'},
+                        attrs={'value': 'Remove'},
                         show_errors=False)])]
         return super(ForgeForm, self).display(**kw)
 
     @ew_core.core.validator
     def to_python(self, kw, state):
         d = super(RemoveInactivePeriodForm, self).to_python(kw, state)
-        d['startdate'] = V.convertDate(kw.get('startdate',''))
-        d['enddate'] = V.convertDate(kw.get('enddate',''))
+        d['startdate'] = V.convertDate(kw.get('startdate', ''))
+        d['enddate'] = V.convertDate(kw.get('enddate', ''))
         return d
 
+
 class AddTimeSlotForm(ForgeForm):
+
     class fields(ew_core.NameList):
         weekday = ew.SingleSelectField(
             label='Weekday',
@@ -466,15 +491,16 @@ class AddTimeSlotForm(ForgeForm):
         if (d['starttime']['h'], d['starttime']['m']) > \
            (d['endtime']['h'], d['endtime']['m']):
                 raise formencode.Invalid(
-                   'Invalid period: start time greater than end time.',
+                    'Invalid period: start time greater than end time.',
                     kw, state)
         return d
 
+
 class RemoveTimeSlotForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults, submit_text=None, show_errors=False)
+    defaults = dict(ForgeForm.defaults, submit_text=None, show_errors=False)
 
     def display(self, **kw):
-        weekday = kw.get('weekday','')
+        weekday = kw.get('weekday', '')
         starttime = kw.get('starttime')
         endtime = kw.get('endtime')
 
@@ -485,32 +511,32 @@ class RemoveTimeSlotForm(ForgeForm):
                 fields=[
                     ffw.DisplayOnlyField(
                         name='weekday',
-                        attrs={'value':weekday},
+                        attrs={'value': weekday},
                         show_errors=False),
                     ffw.DisplayOnlyField(
                         name='starttime',
-                        attrs={'value':starttime.strftime('%H:%M')},
+                        attrs={'value': starttime.strftime('%H:%M')},
                         show_errors=False),
                     ffw.DisplayOnlyField(
                         name='endtime',
-                        attrs={'value':endtime.strftime('%H:%M')},
+                        attrs={'value': endtime.strftime('%H:%M')},
                         show_errors=False),
                     ew.SubmitButton(
                         show_errors=False,
-                        attrs={'value':'Remove'})])]
+                        attrs={'value': 'Remove'})])]
         return super(ForgeForm, self).display(**kw)
 
     @ew_core.core.validator
     def to_python(self, kw, state):
         d = super(RemoveTimeSlotForm, self).to_python(kw, state)
         d["weekday"] = kw.get('weekday', None)
-        d['starttime'] = V.convertTime(kw.get('starttime',''))
-        d['endtime'] = V.convertTime(kw.get('endtime',''))
+        d['starttime'] = V.convertTime(kw.get('starttime', ''))
+        d['endtime'] = V.convertTime(kw.get('endtime', ''))
         return d
 
 
 class RemoveTroveCategoryForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults, submit_text=None, show_errors=False)
+    defaults = dict(ForgeForm.defaults, submit_text=None, show_errors=False)
 
     def display(self, **kw):
         cat = kw.get('category')
@@ -525,11 +551,11 @@ class RemoveTroveCategoryForm(ForgeForm):
                         href="/categories/%s" % cat.shortname),
                     ew.SubmitButton(
                         show_errors=False,
-                        attrs={'value':'Remove'})],
+                        attrs={'value': 'Remove'})],
                 hidden_fields=[
                     ew.HiddenField(
                         name='categoryid',
-                        attrs={'value':cat.trove_cat_id})])]
+                        attrs={'value': cat.trove_cat_id})])]
         return super(ForgeForm, self).display(**kw)
 
     @ew_core.core.validator
@@ -540,19 +566,20 @@ class RemoveTroveCategoryForm(ForgeForm):
             d["categoryid"] = int(d['categoryid'])
         return d
 
+
 class AddTroveCategoryForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults)
+    defaults = dict(ForgeForm.defaults)
 
     class fields(ew_core.NameList):
         uppercategory_id = ew.HiddenField(
-            attrs={'value':''},
+            attrs={'value': ''},
             show_errors=False)
         categoryname = ew.TextField(
             label="Category name",
             validator=fev.UnicodeString(not_empty=True))
 
     def display(self, **kw):
-        upper_category = kw.get('uppercategory_id',0)
+        upper_category = kw.get('uppercategory_id', 0)
 
         self.fields['uppercategory_id'].attrs['value'] = upper_category
         return super(ForgeForm, self).display(**kw)
@@ -563,39 +590,41 @@ class AddTroveCategoryForm(ForgeForm):
         d["uppercategory_id"] = kw.get('uppercategory_id', 0)
         return d
 
+
 class AddUserSkillForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults)
+    defaults = dict(ForgeForm.defaults)
 
     class fields(ew_core.NameList):
-        selected_skill=ew.HiddenField(
-            attrs={'value':''},
+        selected_skill = ew.HiddenField(
+            attrs={'value': ''},
             show_errors=False,
             validator=fev.UnicodeString(not_empty=True))
-        level=ew.SingleSelectField(
+        level = ew.SingleSelectField(
             label="Level of knowledge",
             options=[
-                ew.Option(py_value="low",label="Low level"),
-                ew.Option(py_value="medium",label="Medium level"),
-                ew.Option(py_value="high",label="Advanced level")],
+                ew.Option(py_value="low", label="Low level"),
+                ew.Option(py_value="medium", label="Medium level"),
+                ew.Option(py_value="high", label="Advanced level")],
             validator=formencode.All(
-                V.OneOfValidator(['low','medium','high']),
+                V.OneOfValidator(['low', 'medium', 'high']),
                 fev.UnicodeString(not_empty=True)))
-        comment=ew.TextArea(
+        comment = ew.TextArea(
             label="Additional comments",
             validator=fev.UnicodeString(not_empty=False),
-            attrs={'rows':5,'cols':30})
+            attrs={'rows': 5, 'cols': 30})
 
     def display(self, **kw):
         category = kw.get('selected_skill')
 
-        self.fields["selected_skill"].attrs['value']=category
+        self.fields["selected_skill"].attrs['value'] = category
         return super(ForgeForm, self).display(**kw)
 
+
 class SelectSubCategoryForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults, submit_text="Continue")
+    defaults = dict(ForgeForm.defaults, submit_text="Continue")
 
     class fields(ew_core.NameList):
-        selected_category=ew.SingleSelectField(
+        selected_category = ew.SingleSelectField(
             name="selected_category",
             label="Available categories",
             options=[])
@@ -603,17 +632,18 @@ class SelectSubCategoryForm(ForgeForm):
     def display(self, **kw):
         categories = kw.get('categories')
 
-        self.fields['selected_category'].options= \
-            [ew.Option(py_value=el.trove_cat_id,label=el.fullname)
+        self.fields['selected_category'].options = \
+            [ew.Option(py_value=el.trove_cat_id, label=el.fullname)
              for el in categories]
-        self.fields['selected_category'].validator= \
-            validator=formencode.All(
+        self.fields['selected_category'].validator = \
+            validator = formencode.All(
                 V.OneOfValidator(categories),
                 fev.UnicodeString(not_empty=True))
         return super(ForgeForm, self).display(**kw)
 
+
 class RemoveSkillForm(ForgeForm):
-    defaults=dict(ForgeForm.defaults, submit_text=None, show_errors=False)
+    defaults = dict(ForgeForm.defaults, submit_text=None, show_errors=False)
 
     def display(self, **kw):
         skill = kw.get('skill')
@@ -627,7 +657,7 @@ class RemoveSkillForm(ForgeForm):
                 hidden_fields=[
                     ew.HiddenField(
                         name="categoryid",
-                        attrs={'value':skill['skill'].trove_cat_id},
+                        attrs={'value': skill['skill'].trove_cat_id},
                         show_errors=False)
                 ],
                 fields=[
@@ -636,7 +666,7 @@ class RemoveSkillForm(ForgeForm):
                     ffw.DisplayOnlyField(text=comment),
                     ew.SubmitButton(
                         show_label=False,
-                        attrs={'value':'Remove'},
+                        attrs={'value': 'Remove'},
                         show_errors=False)])]
         return super(ForgeForm, self).display(**kw)
 
@@ -646,11 +676,15 @@ class RemoveSkillForm(ForgeForm):
         d["categoryid"] = kw.get('categoryid', None)
         return d
 
+
 class UploadKeyForm(ForgeForm):
+
     class fields(ew_core.NameList):
         key = ew.TextArea(label='SSH Public Key')
 
+
 class RegistrationForm(ForgeForm):
+
     class fields(ew_core.NameList):
         display_name = ew.TextField(
             label='Displayed Name',
@@ -675,15 +709,18 @@ class RegistrationForm(ForgeForm):
         d = super(RegistrationForm, self).to_python(value, state)
         value['username'] = username = value['username'].lower()
         if M.User.by_username(username):
-            raise formencode.Invalid('That username is already taken. Please choose another.',
-                                    value, state)
+            raise formencode.Invalid(
+                'That username is already taken. Please choose another.',
+                value, state)
         if d['pw'] != d['pw2']:
             raise formencode.Invalid('Passwords must match', value, state)
         return d
 
+
 class AdminForm(ForgeForm):
     template = 'jinja:allura:templates/widgets/admin_form.html'
 
+
 class NeighborhoodOverviewForm(ForgeForm):
     template = 'jinja:allura:templates/widgets/neighborhood_overview_form.html'
 
@@ -695,7 +732,7 @@ class NeighborhoodOverviewForm(ForgeForm):
         show_title = ew.Checkbox(label='')
         css = ffw.AutoResizeTextarea()
         project_template = ffw.AutoResizeTextarea(
-                validator=V.JsonValidator(if_empty=''))
+            validator=V.JsonValidator(if_empty=''))
         icon = ew.FileField()
         tracking_id = ew.TextField()
         project_list_url = ew.TextField(validator=fev.URL())
@@ -724,17 +761,18 @@ class NeighborhoodOverviewForm(ForgeForm):
                            '<td class="right"><div class="%(ctx_name)s-%(inp_name)s-inp"><table class="input_inner">'\
                            '<tr><td><input type="text" class="%(inp_type)s" name="%(ctx_name)s-%(inp_name)s" '\
                            'value="%(inp_value)s"></td><td>%(inp_additional)s</td></tr></table></div></td></tr>\n' % {'ctx_id': ctx['id'],
-                                                            'ctx_name': ctx['name'],
-                                                            'inp_name': inp['name'],
-                                                            'inp_value': inp['value'],
-                                                            'label': inp['label'],
-                                                            'inp_type': inp['type'],
-                                                            'def_checked': 'checked="checked"' if empty_val else '',
-                                                            'inp_additional': additional_inputs}
+                                                                                                                      'ctx_name': ctx['name'],
+                                                                                                                      'inp_name': inp['name'],
+                                                                                                                      'inp_value': inp['value'],
+                                                                                                                      'label': inp['label'],
+                                                                                                                      'inp_type': inp['type'],
+                                                                                                                      'def_checked': 'checked="checked"' if empty_val else '',
+                                                                                                                      'inp_additional': additional_inputs}
             display += '</table>'
 
             if ctx['errors'] and field.show_errors and not ignore_errors:
-                display = "%s<div class='error'>%s</div>" % (display, ctx['errors'])
+                display = "%s<div class='error'>%s</div>" % (display,
+                                                             ctx['errors'])
 
             return h.html.literal(display)
         else:
@@ -754,7 +792,8 @@ class NeighborhoodOverviewForm(ForgeForm):
         return d
 
     def resources(self):
-        for r in super(NeighborhoodOverviewForm, self).resources(): yield r
+        for r in super(NeighborhoodOverviewForm, self).resources():
+            yield r
         yield ew.CSSLink('css/colorPicker.css')
         yield ew.CSSLink('css/jqfontselector.css')
         yield ew.CSSScript('''
@@ -800,6 +839,7 @@ table.table_class select.add_opt {width: 5em; margin:0; padding: 0;}
             });
         ''')
 
+
 class NeighborhoodAddProjectForm(ForgeForm):
     template = 'jinja:allura:templates/widgets/neighborhood_add_project.html'
     antispam = True
@@ -812,16 +852,17 @@ class NeighborhoodAddProjectForm(ForgeForm):
     class fields(ew_core.NameList):
         project_description = ew.HiddenField(label='Public Description')
         neighborhood = ew.HiddenField(label='Neighborhood')
-        private_project = ew.Checkbox(label="", attrs={'class':'unlabeled'})
+        private_project = ew.Checkbox(label="", attrs={'class': 'unlabeled'})
         project_name = ew.InputField(label='Project Name', field_type='text',
-            validator=formencode.All(
-                fev.UnicodeString(not_empty=True, max=40),
-                V.MaxBytesValidator(max=40)))
+                                     validator=formencode.All(
+                                         fev.UnicodeString(
+                                             not_empty=True, max=40),
+                                         V.MaxBytesValidator(max=40)))
         project_unixname = ew.InputField(
             label='Short Name', field_type='text',
             validator=None)  # will be set in __init__
         tools = ew.CheckboxSet(name='tools', options=[
-            ## Required for Neighborhood functional tests to pass
+            # Required for Neighborhood functional tests to pass
             ew.Option(label='Wiki', html_value='wiki', selected=True)
         ])
 
@@ -830,16 +871,17 @@ class NeighborhoodAddProjectForm(ForgeForm):
         # get the shortname validator from the provider
         provider = plugin.ProjectRegistrationProvider.get()
         self.fields.project_unixname.validator = provider.shortname_validator
-        ## Dynamically generating CheckboxSet of installable tools
+        # Dynamically generating CheckboxSet of installable tools
         from allura.lib.widgets import forms
         self.fields.tools.options = [
-                forms.ew.Option(label=tool.tool_label, html_value=ep)
-                    for ep,tool in g.entry_points["tool"].iteritems()
-                    if tool.installable and tool.status == 'production'
-            ]
+            forms.ew.Option(label=tool.tool_label, html_value=ep)
+            for ep, tool in g.entry_points["tool"].iteritems()
+            if tool.installable and tool.status == 'production'
+        ]
 
     def resources(self):
-        for r in super(NeighborhoodAddProjectForm, self).resources(): yield r
+        for r in super(NeighborhoodAddProjectForm, self).resources():
+            yield r
         yield ew.CSSLink('css/add_project.css')
         neighborhood = g.antispam.enc('neighborhood')
         project_name = g.antispam.enc('project_name')
@@ -949,7 +991,7 @@ class MoveTicketForm(ForgeForm):
     class fields(ew_core.NameList):
         tracker = ew.SingleSelectField(
             label='Tracker mount point',
-            options = [])
+            options=[])
 
     def __init__(self, *args, **kwargs):
         trackers = kwargs.pop('trackers', [])
@@ -960,9 +1002,11 @@ class MoveTicketForm(ForgeForm):
 
 
 class CsrfForm(ew.SimpleForm):
+
     @property
     def hidden_fields(self):
         return [ew.HiddenField(name='_session_id')]
+
     def context_for(self, field):
         ctx = super(CsrfForm, self).context_for(field)
         if field.name == '_session_id':

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/macros.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/macros.py b/Allura/allura/lib/widgets/macros.py
index 5f93d2a..18268cb 100644
--- a/Allura/allura/lib/widgets/macros.py
+++ b/Allura/allura/lib/widgets/macros.py
@@ -17,48 +17,53 @@
 
 import ew
 
+
 class Include(ew.Widget):
-    template='jinja:allura:templates/widgets/include.html'
-    params=['artifact', 'attrs']
-    artifact=None
+    template = 'jinja:allura:templates/widgets/include.html'
+    params = ['artifact', 'attrs']
+    artifact = None
     attrs = {
-        'style':'width:270px;float:right;background-color:#ccc'
-        }
+        'style': 'width:270px;float:right;background-color:#ccc'
+    }
+
 
 class GittipButton(ew.Widget):
-    template='jinja:allura:templates/widgets/gittip_button.html'
-    params=['username']
-    project=None
+    template = 'jinja:allura:templates/widgets/gittip_button.html'
+    params = ['username']
+    project = None
 
 
 class DownloadButton(ew.Widget):
-    template='jinja:allura:templates/widgets/download_button.html'
-    params=['project']
-    project=None
+    template = 'jinja:allura:templates/widgets/download_button.html'
+    params = ['project']
+    project = None
 
     def resources(self):
         yield ew.jinja2_ew.JSScript('''
             $(function(){$(".download-button-%s").load("%s");
-        });''' % (self.project._id,self.project.best_download_url()))
+        });''' % (self.project._id, self.project.best_download_url()))
+
 
 class NeighborhoodFeeds(ew.Widget):
     template = 'jinja:allura:templates/macro/neighborhood_feeds.html'
-    params=['feeds']
+    params = ['feeds']
     feeds = None
 
+
 class BlogPosts(ew.Widget):
     template = 'jinja:allura:templates/macro/blog_posts.html'
     params = ['posts']
     posts = None
 
+
 class ProjectAdmins(ew.Widget):
     template = 'jinja:allura:templates/macro/project_admins.html'
     params = ['users']
     users = None
 
+
 class Members(ew.Widget):
     template = 'jinja:allura:templates/macro/members.html'
     params = ['users', 'over_limit']
     users = None
     over_limit = None
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/oauth_widgets.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/oauth_widgets.py b/Allura/allura/lib/widgets/oauth_widgets.py
index 46b6520..6426beb 100644
--- a/Allura/allura/lib/widgets/oauth_widgets.py
+++ b/Allura/allura/lib/widgets/oauth_widgets.py
@@ -26,17 +26,21 @@ from allura import model as M
 from .form_fields import AutoResizeTextarea
 from .forms import ForgeForm
 
+
 class OAuthApplicationForm(ForgeForm):
-    submit_text='Register new application'
-    style='wide'
+    submit_text = 'Register new application'
+    style = 'wide'
+
     class fields(ew_core.NameList):
-        application_name =ew.TextField(label='Application Name',
-                                       validator=V.UniqueOAuthApplicationName())
-        application_description = AutoResizeTextarea(label='Application Description')
+        application_name = ew.TextField(label='Application Name',
+                                        validator=V.UniqueOAuthApplicationName())
+        application_description = AutoResizeTextarea(
+            label='Application Description')
+
 
 class OAuthRevocationForm(ForgeForm):
-    submit_text='Revoke Access'
+    submit_text = 'Revoke Access'
     fields = []
-    class fields(ew_core.NameList):
-        _id=ew.HiddenField()
 
+    class fields(ew_core.NameList):
+        _id = ew.HiddenField()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/project_list.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/project_list.py b/Allura/allura/lib/widgets/project_list.py
index a5a327f..343c1ba 100644
--- a/Allura/allura/lib/widgets/project_list.py
+++ b/Allura/allura/lib/widgets/project_list.py
@@ -23,9 +23,10 @@ from pylons import tmpl_context as c, app_globals as g
 from allura import model as M
 from allura.lib.security import Credentials
 
+
 class ProjectSummary(ew_core.Widget):
-    template='jinja:allura:templates/widgets/project_summary.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/project_summary.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         sitemap=None,
         icon=None,
@@ -42,7 +43,7 @@ class ProjectSummary(ew_core.Widget):
         response = super(ProjectSummary, self).prepare_context(context)
         value = response['value']
         if response['sitemap'] is None:
-            response['sitemap'] = [ s for s in value.sitemap() if s.url ]
+            response['sitemap'] = [s for s in value.sitemap() if s.url]
 
         if response['grid_view_tools'] != '':
             view_tools_list = response['grid_view_tools'].split(',')
@@ -55,7 +56,7 @@ class ProjectSummary(ew_core.Widget):
 
         if response['icon_url'] is None:
             if value.icon:
-                response['icon_url'] = value.url()+'icon'
+                response['icon_url'] = value.url() + 'icon'
         if response['accolades'] is None:
             response['accolades'] = value.accolades
 
@@ -102,9 +103,10 @@ class ProjectSummary(ew_core.Widget):
         });
         ''')
 
+
 class ProjectList(ew_core.Widget):
-    template='jinja:allura:templates/widgets/project_list_widget.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/project_list_widget.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         projects=[],
         project_summary=ProjectSummary(),
@@ -157,9 +159,10 @@ class ProjectList(ew_core.Widget):
         for r in self.project_summary.resources():
             yield r
 
+
 class ProjectScreenshots(ew_core.Widget):
-    template='jinja:allura:templates/widgets/project_screenshots.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/project_screenshots.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         project=None,
         edit=False)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/repo.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/repo.py b/Allura/allura/lib/widgets/repo.py
index 31c32c1..cda4ebf 100644
--- a/Allura/allura/lib/widgets/repo.py
+++ b/Allura/allura/lib/widgets/repo.py
@@ -24,9 +24,10 @@ from allura import model as M
 from allura.lib.widgets import forms as ff
 from allura.lib.widgets import form_fields as ffw
 
+
 class SCMLogWidget(ew_core.Widget):
-    template='jinja:allura:templates/widgets/repo/log.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/repo/log.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         value=None,
         limit=None,
@@ -35,32 +36,35 @@ class SCMLogWidget(ew_core.Widget):
         show_paging=True)
 
     class fields(ew_core.NameList):
-        page_list=ffw.PageList()
-        page_size=ffw.PageSize()
+        page_list = ffw.PageList()
+        page_size = ffw.PageSize()
 
     def resources(self):
         for f in self.fields:
             for r in f.resources():
                 yield r
 
+
 class SCMRevisionWidget(ew_core.Widget):
-    template='jinja:allura:templates/widgets/repo/revision.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/repo/revision.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         value=None,
         prev=ew_core.NoDefault,
         next=ew_core.NoDefault)
 
+
 class SCMTreeWidget(ew_core.Widget):
-    template='jinja:allura:templates/widgets/repo/tree_widget.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/repo/tree_widget.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         tree=None,
         list=list)
 
+
 class SCMMergeRequestWidget(ff.ForgeForm):
-    source_branches=[]
-    target_branches=[]
+    source_branches = []
+    target_branches = []
 
     @property
     def fields(self):
@@ -74,28 +78,31 @@ class SCMMergeRequestWidget(ff.ForgeForm):
                 name='target_branch',
                 label='Target Branch',
                 options=self.target_branches),
-            ffw.AutoResizeTextarea(name='description') ]
+            ffw.AutoResizeTextarea(name='description')]
         return result
 
+
 class SCMMergeRequestFilterWidget(ff.ForgeForm):
-    defaults=dict(
+    defaults = dict(
         ff.ForgeForm.defaults,
         submit_text='Filter',
         method='GET')
 
     class fields(ew_core.NameList):
-        status=ew.MultiSelectField(options=M.MergeRequest.statuses)
+        status = ew.MultiSelectField(options=M.MergeRequest.statuses)
+
 
 class SCMMergeRequestDisposeWidget(ff.ForgeForm):
 
     class fields(ew_core.NameList):
-        status=ew.SingleSelectField(
+        status = ew.SingleSelectField(
             label='Change Status',
             options=M.MergeRequest.statuses)
 
+
 class SCMCommitBrowserWidget(ew_core.Widget):
-    template='jinja:allura:templates/widgets/repo/commit_browser.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/repo/commit_browser.html'
+    defaults = dict(
         ew_core.Widget.defaults,
     )
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/search.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/search.py b/Allura/allura/lib/widgets/search.py
index e553842..32014c4 100644
--- a/Allura/allura/lib/widgets/search.py
+++ b/Allura/allura/lib/widgets/search.py
@@ -21,9 +21,10 @@ import jinja2
 
 from allura.lib.widgets import form_fields as ffw
 
+
 class SearchResults(ew_core.Widget):
-    template='jinja:allura:templates/widgets/search_results.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/search_results.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         results=None,
         limit=None,
@@ -32,8 +33,8 @@ class SearchResults(ew_core.Widget):
         search_error=None)
 
     class fields(ew_core.NameList):
-        page_list=ffw.PageList()
-        page_size=ffw.PageSize()
+        page_list = ffw.PageList()
+        page_size = ffw.PageSize()
 
     def resources(self):
         for f in self.fields:
@@ -43,7 +44,7 @@ class SearchResults(ew_core.Widget):
 
 
 class SearchHelp(ffw.Lightbox):
-    defaults=dict(
+    defaults = dict(
         ffw.Lightbox.defaults,
         name='search_help_modal',
         trigger='a.search_help_modal')
@@ -51,7 +52,8 @@ class SearchHelp(ffw.Lightbox):
     def __init__(self, comments=True, history=True):
         super(SearchHelp, self).__init__()
         # can't use g.jinja2_env since this widget gets imported too early :(
-        jinja2_env = jinja2.Environment(loader=jinja2.PackageLoader('allura', 'templates/widgets'))
+        jinja2_env = jinja2.Environment(
+            loader=jinja2.PackageLoader('allura', 'templates/widgets'))
         self.content = jinja2_env.get_template('search_help.html').render(dict(
             comments=comments,
             history=history,
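
The comment in the hunk above explains why SearchHelp builds a private jinja2 environment: the widget is imported before the application-wide g.jinja2_env exists. A minimal, self-contained sketch of that pattern, using hypothetical package and template names rather than the real Allura ones:

    import jinja2

    # Build a private environment at import time; PackageLoader reads
    # templates bundled inside the package itself.
    _env = jinja2.Environment(
        loader=jinja2.PackageLoader('mypackage', 'templates'))

    def render_help(**context):
        # get_template() compiles once and caches on the environment,
        # so repeated renders are cheap.
        return _env.get_template('help.html').render(**context)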

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/subscriptions.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/subscriptions.py b/Allura/allura/lib/widgets/subscriptions.py
index 3f68d58..687178a 100644
--- a/Allura/allura/lib/widgets/subscriptions.py
+++ b/Allura/allura/lib/widgets/subscriptions.py
@@ -28,41 +28,53 @@ from allura import model as M
 from .form_fields import SubmitButton
 
 # Discussion forms
+
+
 class _SubscriptionTable(ew.TableField):
+
     class hidden_fields(ew_core.NameList):
         subscription_id = ew.HiddenField(validator=V.Ming(M.Mailbox))
         tool_id = ew.HiddenField()
         project_id = ew.HiddenField()
         topic = ew.HiddenField()
         artifact_index_id = ew.HiddenField()
+
     class fields(ew_core.NameList):
-        project_name = ffw.DisplayOnlyField(label='Project', show_label=True, with_hidden_input=False)
-        mount_point = ffw.DisplayOnlyField(label='App', show_label=True, with_hidden_input=False)
-        topic = ffw.DisplayOnlyField(label='Topic', show_label=True, with_hidden_input=False)
-        type = ffw.DisplayOnlyField(label='Type', show_label=True, with_hidden_input=False)
-        frequency = ffw.DisplayOnlyField(label='Frequency', show_label=True, with_hidden_input=False)
-        artifact_title = ew.LinkField(label='Artifact', show_label=True, plaintext_if_no_href=True)
+        project_name = ffw.DisplayOnlyField(
+            label='Project', show_label=True, with_hidden_input=False)
+        mount_point = ffw.DisplayOnlyField(
+            label='App', show_label=True, with_hidden_input=False)
+        topic = ffw.DisplayOnlyField(
+            label='Topic', show_label=True, with_hidden_input=False)
+        type = ffw.DisplayOnlyField(
+            label='Type', show_label=True, with_hidden_input=False)
+        frequency = ffw.DisplayOnlyField(
+            label='Frequency', show_label=True, with_hidden_input=False)
+        artifact_title = ew.LinkField(
+            label='Artifact', show_label=True, plaintext_if_no_href=True)
         # unsubscribe = SubmitButton()
         subscribed = ew.Checkbox(suppress_label=True)
 
+
 class SubscriptionForm(CsrfForm):
-    defaults=dict(
+    defaults = dict(
         ew.SimpleForm.defaults,
         submit_text='Save')
+
     class fields(ew_core.NameList):
-        subscriptions=_SubscriptionTable()
+        subscriptions = _SubscriptionTable()
         email_format = ew.SingleSelectField(
-                    name='email_format',
-                    label='Email Format',
-                    options=[
-                        ew.Option(py_value='plain', label='Plain Text'),
-                        ew.Option(py_value='html', label='HTML'),
-                        ew.Option(py_value='both', label='Combined')])
+            name='email_format',
+            label='Email Format',
+            options=[
+                ew.Option(py_value='plain', label='Plain Text'),
+                ew.Option(py_value='html', label='HTML'),
+                ew.Option(py_value='both', label='Combined')])
 
 
 class SubscribeForm(ew.SimpleForm):
-    template='jinja:allura:templates/widgets/subscribe.html'
-    defaults=dict(
+    template = 'jinja:allura:templates/widgets/subscribe.html'
+    defaults = dict(
         ew.SimpleForm.defaults,
         thing='tool',
         style='text',
@@ -70,9 +82,9 @@ class SubscribeForm(ew.SimpleForm):
         value=None)
 
     class fields(ew_core.NameList):
-        subscribe=SubmitButton()
-        unsubscribe=SubmitButton()
-        shortname=ew.HiddenField()
+        subscribe = SubmitButton()
+        unsubscribe = SubmitButton()
+        shortname = ew.HiddenField()
 
     def from_python(self, value, state):
         return value

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/widgets/user_profile.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/widgets/user_profile.py b/Allura/allura/lib/widgets/user_profile.py
index eb19e27..c2e32e3 100644
--- a/Allura/allura/lib/widgets/user_profile.py
+++ b/Allura/allura/lib/widgets/user_profile.py
@@ -17,7 +17,7 @@
 
 import ew as ew_core
 import ew.jinja2_ew as ew
-from formencode import  validators as fev
+from formencode import validators as fev
 from .forms import ForgeForm
 
 
@@ -25,7 +25,6 @@ class SendMessageForm(ForgeForm):
     template = 'jinja:allura.ext.user_profile:templates/send_message_form.html'
     submit_text = 'Send Message'
 
-
     class fields(ew_core.NameList):
         subject = ew.TextField(
             validator=fev.UnicodeString(

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/zarkov_helpers.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/zarkov_helpers.py b/Allura/allura/lib/zarkov_helpers.py
index 8cda182..15d0b96 100644
--- a/Allura/allura/lib/zarkov_helpers.py
+++ b/Allura/allura/lib/zarkov_helpers.py
@@ -24,6 +24,7 @@ except ImportError:
     zmq = None
 import bson
 
+
 class ZarkovClient(object):
 
     def __init__(self, addr):
@@ -36,6 +37,7 @@ class ZarkovClient(object):
             type=type, context=context, extra=extra)
         self._sock.send(bson.BSON.encode(obj))
 
+
 def zero_fill_zarkov_result(zarkov_data, period, start_date, end_date):
     """Return a new copy of zarkov_data (a dict returned from a zarkov
     query) with the timeseries data zero-filled for missing dates.
@@ -60,9 +62,10 @@ def zero_fill_zarkov_result(zarkov_data, period, start_date, end_date):
     for query in zarkov_data.iterkeys():
         for series in zarkov_data[query].iterkeys():
             d[query][series] = zero_fill_time_series(d[query][series],
-                                    period, start_date, end_date)
+                                                     period, start_date, end_date)
     return d
 
+
 def zero_fill_time_series(time_series, period, start_date, end_date):
     """Return a copy of time_series after adding [timestamp, 0] pairs for
     each missing timestamp in the given date range.
@@ -91,10 +94,10 @@ def zero_fill_time_series(time_series, period, start_date, end_date):
                 new_series[ts] = 0
             # next month
             if date.month == 12:
-                date = date.replace(year=date.year+1, month=1)
+                date = date.replace(year=date.year + 1, month=1)
             else:
-                date = date.replace(month=date.month+1)
-    else: # daily
+                date = date.replace(month=date.month + 1)
+    else:  # daily
         days = (end_date - start_date).days + 1
         periods = range(0, days)
         for dayoffset in periods:
@@ -104,6 +107,7 @@ def zero_fill_time_series(time_series, period, start_date, end_date):
                 new_series[ts] = 0
     return sorted([[k, v] for k, v in new_series.items()])
 
+
 def to_utc_timestamp(d):
     """Return UTC unix timestamp representation of d (datetime)."""
     # http://stackoverflow.com/questions/1077285/how-to-specify-time-zone-utc-when-converting-to-unix-time-python
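
zero_fill_time_series pads a [timestamp, value] series so that charts do not silently skip days (or months) with no activity. A standalone sketch of the daily case only, assuming plain UTC unix timestamps in seconds; the real helper also handles monthly periods:

    import calendar
    from datetime import datetime, timedelta

    def to_utc_ts(d):
        # UTC datetime -> unix seconds (assumption: seconds, not millis)
        return calendar.timegm(d.timetuple())

    def zero_fill_daily(series, start_date, end_date):
        filled = dict(series)                    # existing points win
        days = (end_date - start_date).days + 1
        for offset in range(days):
            ts = to_utc_ts(start_date + timedelta(days=offset))
            filled.setdefault(ts, 0)             # add [ts, 0] for missing days
        return sorted([k, v] for k, v in filled.items())

    # Jan 1-3 with activity only on Jan 2 comes back as three points,
    # two of them zero.
    series = [[to_utc_ts(datetime(2014, 1, 2)), 5]]
    print(zero_fill_daily(series, datetime(2014, 1, 1), datetime(2014, 1, 3)))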

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/artifact.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/artifact.py b/Allura/allura/model/artifact.py
index 40ccb5e..faeb1cb 100644
--- a/Allura/allura/model/artifact.py
+++ b/Allura/allura/model/artifact.py
@@ -47,6 +47,7 @@ log = logging.getLogger(__name__)
 
 
 class Artifact(MappedClass):
+
     """
     Base class for anything you want to keep track of.
 
@@ -60,10 +61,11 @@ class Artifact(MappedClass):
     """
     class __mongometa__:
         session = artifact_orm_session
-        name='artifact'
+        name = 'artifact'
         indexes = [
             ('app_config_id', 'labels'),
         ]
+
         def before_save(data):
             if not getattr(artifact_orm_session._get(), 'skip_mod_date', False):
                 data['mod_date'] = datetime.utcnow()
@@ -76,7 +78,8 @@ class Artifact(MappedClass):
     # Artifact base schema
     _id = FieldProperty(S.ObjectId)
     mod_date = FieldProperty(datetime, if_missing=datetime.utcnow)
-    app_config_id = ForeignIdProperty('AppConfig', if_missing=lambda:c.app.config._id)
+    app_config_id = ForeignIdProperty(
+        'AppConfig', if_missing=lambda: c.app.config._id)
     plugin_verson = FieldProperty(S.Deprecated)
     tool_version = FieldProperty(S.Deprecated)
     acl = FieldProperty(ACL)
@@ -90,7 +93,7 @@ class Artifact(MappedClass):
     # the source, original ID, and any other info needed to identify where
     # the artifact came from.  But if you only have one source, a str might do.
     import_id = FieldProperty(None, if_missing=None)
-    deleted=FieldProperty(bool, if_missing=False)
+    deleted = FieldProperty(bool, if_missing=False)
 
     def __json__(self):
         """Return a JSON-encodable :class:`dict` representation of this
@@ -103,7 +106,8 @@ class Artifact(MappedClass):
             labels=list(self.labels),
             related_artifacts=[a.url() for a in self.related_artifacts()],
             discussion_thread=self.discussion_thread.__json__(),
-            discussion_thread_url=h.absurl('/rest%s' % self.discussion_thread.url()),
+            discussion_thread_url=h.absurl('/rest%s' %
+                                           self.discussion_thread.url()),
         )
 
     def parent_security_context(self):
@@ -158,18 +162,20 @@ class Artifact(MappedClass):
 
         """
         q = ArtifactReference.query.find(dict(references=self.index_id()))
-        return [ aref._id for aref in q ]
+        return [aref._id for aref in q]
 
     def related_artifacts(self):
         """Return all Artifacts that are related to this one.
 
         """
         related_artifacts = []
-        for ref_id in self.refs+self.backrefs:
+        for ref_id in self.refs + self.backrefs:
             ref = ArtifactReference.query.get(_id=ref_id)
-            if ref is None: continue
+            if ref is None:
+                continue
             artifact = ref.artifact
-            if artifact is None: continue
+            if artifact is None:
+                continue
             artifact = artifact.primary()
             # don't link to artifacts in deleted tools
             if hasattr(artifact, 'app_config') and artifact.app_config is None:
@@ -178,7 +184,7 @@ class Artifact(MappedClass):
             # artifact type strings in platform code.
             if artifact.type_s == 'Commit' and not artifact.repo:
                 ac = AppConfig.query.get(
-                        _id=ref.artifact_reference['app_config_id'])
+                    _id=ref.artifact_reference['app_config_id'])
                 app = ac.project.app_instance(ac) if ac else None
                 if app:
                     artifact.set_context(app.repo)
@@ -196,7 +202,8 @@ class Artifact(MappedClass):
 
         """
         from allura.model import Mailbox
-        if user is None: user = c.user
+        if user is None:
+            user = c.user
         Mailbox.subscribe(
             user_id=user._id,
             project_id=self.app_config.project_id,
@@ -214,7 +221,8 @@ class Artifact(MappedClass):
 
         """
         from allura.model import Mailbox
-        if user is None: user = c.user
+        if user is None:
+            user = c.user
         Mailbox.unsubscribe(
             user_id=user._id,
             project_id=self.app_config.project_id,
@@ -237,7 +245,7 @@ class Artifact(MappedClass):
         :param app_config: :class:`allura.model.project.AppConfig` instance
 
         """
-        return cls.query.find({'labels':label, 'app_config_id': app_config._id})
+        return cls.query.find({'labels': label, 'app_config_id': app_config._id})
 
     def email_link(self, subject='artifact'):
         """Return a 'mailto' URL for this Artifact, with optional subject.
@@ -337,7 +345,7 @@ class Artifact(MappedClass):
         Subclasses must implement this.
 
         """
-        raise NotImplementedError, 'url' # pragma no cover
+        raise NotImplementedError, 'url'  # pragma no cover
 
     def shorthand_id(self):
         """How to refer to this artifact within the app instance context.
@@ -347,7 +355,7 @@ class Artifact(MappedClass):
         this should have a strong correlation to the URL.
 
         """
-        return str(self._id) # pragma no cover
+        return str(self._id)  # pragma no cover
 
     def link_text(self):
         """Return the link text to use when a shortlink to this artifact
@@ -394,7 +402,8 @@ class Artifact(MappedClass):
             file_info = [file_info]
         for attach in file_info:
             if hasattr(attach, 'file'):
-                self.attach(attach.filename, attach.file, content_type=attach.type)
+                self.attach(attach.filename, attach.file,
+                            content_type=attach.type)
 
     def attach(self, filename, fp, **kw):
         """Attach a file to this Artifact.
@@ -430,22 +439,23 @@ class Artifact(MappedClass):
 
 
 class Snapshot(Artifact):
+
     """A snapshot of an :class:`Artifact <allura.model.artifact.Artifact>`, used in :class:`VersionedArtifact <allura.model.artifact.VersionedArtifact>`"""
     class __mongometa__:
         session = artifact_orm_session
-        name='artifact_snapshot'
-        unique_indexes = [ ('artifact_class', 'artifact_id', 'version') ]
-        indexes = [ ('artifact_id', 'version') ]
+        name = 'artifact_snapshot'
+        unique_indexes = [('artifact_class', 'artifact_id', 'version')]
+        indexes = [('artifact_id', 'version')]
 
     _id = FieldProperty(S.ObjectId)
     artifact_id = FieldProperty(S.ObjectId)
     artifact_class = FieldProperty(str)
     version = FieldProperty(S.Int, if_missing=0)
     author = FieldProperty(dict(
-            id=S.ObjectId,
-            username=str,
-            display_name=str,
-            logged_ip=str))
+        id=S.ObjectId,
+        username=str,
+        display_name=str,
+        logged_ip=str))
     timestamp = FieldProperty(datetime)
     data = FieldProperty(None)
 
@@ -456,7 +466,7 @@ class Snapshot(Artifact):
             original_index = original.index()
             result.update(original_index)
             result['title'] = '%s (version %d)' % (
-                    h.get_first(original_index, 'title'), self.version)
+                h.get_first(original_index, 'title'), self.version)
         result.update(
             id=self.index_id(),
             version_i=self.version,
@@ -467,7 +477,7 @@ class Snapshot(Artifact):
         return result
 
     def original(self):
-        raise NotImplemented, 'original' # pragma no cover
+        raise NotImplemented, 'original'  # pragma no cover
 
     def shorthand_id(self):
         return '%s#%s' % (self.original().shorthand_id(), self.version)
@@ -482,14 +492,16 @@ class Snapshot(Artifact):
     def __getattr__(self, name):
         return getattr(self.data, name)
 
+
 class VersionedArtifact(Artifact):
+
     """
     An :class:`Artifact <allura.model.artifact.Artifact>` that has versions.
     Associated data like attachments and discussion thread are not versioned.
     """
     class __mongometa__:
         session = artifact_orm_session
-        name='versioned_artifact'
+        name = 'versioned_artifact'
         history_class = Snapshot
 
     version = FieldProperty(S.Int, if_missing=0)
@@ -498,7 +510,8 @@ class VersionedArtifact(Artifact):
         '''Save off a snapshot of the artifact and increment the version #'''
         self.version += 1
         try:
-            ip_address = request.headers.get('X_FORWARDED_FOR', request.remote_addr)
+            ip_address = request.headers.get(
+                'X_FORWARDED_FOR', request.remote_addr)
             ip_address = ip_address.split(',')[0].strip()
         except:
             ip_address = '0.0.0.0'
@@ -523,7 +536,7 @@ class VersionedArtifact(Artifact):
             if self.version > 1:
                 g.statsUpdater.modifiedArtifact(
                     self.type_s, self.mod_date, self.project, c.user)
-            else :
+            else:
                 g.statsUpdater.newArtifact(
                     self.type_s, self.mod_date, self.project, c.user)
         return ss
@@ -544,13 +557,14 @@ class VersionedArtifact(Artifact):
     def revert(self, version):
         ss = self.get_version(version)
         old_version = self.version
-        for k,v in ss.data.iteritems():
+        for k, v in ss.data.iteritems():
             setattr(self, k, v)
         self.version = old_version
 
     def history(self):
         HC = self.__mongometa__.history_class
-        q = HC.query.find(dict(artifact_id=self._id)).sort('version', pymongo.DESCENDING)
+        q = HC.query.find(dict(artifact_id=self._id)).sort(
+            'version', pymongo.DESCENDING)
         return q
 
     @property
@@ -569,6 +583,7 @@ class VersionedArtifact(Artifact):
 
 
 class Message(Artifact):
+
     """
     A message
 
@@ -579,17 +594,17 @@ class Message(Artifact):
 
     class __mongometa__:
         session = artifact_orm_session
-        name='message'
-    type_s='Generic Message'
-
-    _id=FieldProperty(str, if_missing=h.gen_message_id)
-    slug=FieldProperty(str, if_missing=h.nonce)
-    full_slug=FieldProperty(str, if_missing=None)
-    parent_id=FieldProperty(str)
-    app_id=FieldProperty(S.ObjectId, if_missing=lambda:c.app.config._id)
-    timestamp=FieldProperty(datetime, if_missing=datetime.utcnow)
-    author_id=FieldProperty(S.ObjectId, if_missing=lambda:c.user._id)
-    text=FieldProperty(str, if_missing='')
+        name = 'message'
+    type_s = 'Generic Message'
+
+    _id = FieldProperty(str, if_missing=h.gen_message_id)
+    slug = FieldProperty(str, if_missing=h.nonce)
+    full_slug = FieldProperty(str, if_missing=None)
+    parent_id = FieldProperty(str)
+    app_id = FieldProperty(S.ObjectId, if_missing=lambda: c.app.config._id)
+    timestamp = FieldProperty(datetime, if_missing=datetime.utcnow)
+    author_id = FieldProperty(S.ObjectId, if_missing=lambda: c.user._id)
+    text = FieldProperty(str, if_missing='')
 
     @classmethod
     def make_slugs(cls, parent=None, timestamp=None):
@@ -622,26 +637,32 @@ class Message(Artifact):
     def shorthand_id(self):
         return self.slug
 
+
 class AwardFile(File):
+
     class __mongometa__:
         session = main_orm_session
         name = 'award_file'
-    award_id=FieldProperty(S.ObjectId)
+    award_id = FieldProperty(S.ObjectId)
+
 
 class Award(Artifact):
+
     class __mongometa__:
         session = main_orm_session
-        name='award'
-        indexes = [ 'short' ]
+        name = 'award'
+        indexes = ['short']
     type_s = 'Generic Award'
 
     from .project import Neighborhood
-    _id=FieldProperty(S.ObjectId)
-    created_by_neighborhood_id = ForeignIdProperty(Neighborhood, if_missing=None)
-    created_by_neighborhood = RelationProperty(Neighborhood, via='created_by_neighborhood_id')
-    short=FieldProperty(str, if_missing=h.nonce)
-    timestamp=FieldProperty(datetime, if_missing=datetime.utcnow)
-    full=FieldProperty(str, if_missing='')
+    _id = FieldProperty(S.ObjectId)
+    created_by_neighborhood_id = ForeignIdProperty(
+        Neighborhood, if_missing=None)
+    created_by_neighborhood = RelationProperty(
+        Neighborhood, via='created_by_neighborhood_id')
+    short = FieldProperty(str, if_missing=h.nonce)
+    timestamp = FieldProperty(datetime, if_missing=datetime.utcnow)
+    full = FieldProperty(str, if_missing='')
 
     def index(self):
         result = Artifact.index(self)
@@ -667,22 +688,27 @@ class Award(Artifact):
     def shorthand_id(self):
         return self.short
 
+
 class AwardGrant(Artifact):
+
     "An :class:`Award <allura.model.artifact.Award>` can be bestowed upon a project by a neighborhood"
     class __mongometa__:
         session = main_orm_session
-        name='grant'
-        indexes = [ 'short' ]
+        name = 'grant'
+        indexes = ['short']
     type_s = 'Generic Award Grant'
 
-    _id=FieldProperty(S.ObjectId)
+    _id = FieldProperty(S.ObjectId)
     award_id = ForeignIdProperty(Award, if_missing=None)
     award = RelationProperty(Award, via='award_id')
-    granted_by_neighborhood_id = ForeignIdProperty('Neighborhood', if_missing=None)
-    granted_by_neighborhood = RelationProperty('Neighborhood', via='granted_by_neighborhood_id')
+    granted_by_neighborhood_id = ForeignIdProperty(
+        'Neighborhood', if_missing=None)
+    granted_by_neighborhood = RelationProperty(
+        'Neighborhood', via='granted_by_neighborhood_id')
     granted_to_project_id = ForeignIdProperty('Project', if_missing=None)
-    granted_to_project = RelationProperty('Project', via='granted_to_project_id')
-    timestamp=FieldProperty(datetime, if_missing=datetime.utcnow)
+    granted_to_project = RelationProperty(
+        'Project', via='granted_to_project_id')
+    timestamp = FieldProperty(datetime, if_missing=datetime.utcnow)
 
     def index(self):
         result = Artifact.index(self)
@@ -700,11 +726,11 @@ class AwardGrant(Artifact):
         return AwardFile.query.get(award_id=self.award_id)
 
     def url(self):
-        slug = str(self.granted_to_project.shortname).replace('/','_')
+        slug = str(self.granted_to_project.shortname).replace('/', '_')
         return h.urlquote(slug)
 
     def longurl(self):
-        slug = str(self.granted_to_project.shortname).replace('/','_')
+        slug = str(self.granted_to_project.shortname).replace('/', '_')
         slug = self.award.longurl() + '/' + slug
         return h.urlquote(slug)
 
@@ -714,7 +740,9 @@ class AwardGrant(Artifact):
         else:
             return None
 
+
 class Feed(MappedClass):
+
     """
     Used to generate rss/atom feeds.  This does not need to be extended;
     all feed items go into the same collection
@@ -730,7 +758,8 @@ class Feed(MappedClass):
             (('project_id', pymongo.ASCENDING),
              ('app_config_id', pymongo.ASCENDING),
              ('pubdate', pymongo.DESCENDING)),
-            'author_link',  # used in ext/user_profile/user_main.py for user feeds
+            # used in ext/user_profile/user_main.py for user feeds
+            'author_link',
         ]
 
     _id = FieldProperty(S.ObjectId)
@@ -738,18 +767,19 @@ class Feed(MappedClass):
     neighborhood_id = ForeignIdProperty('Neighborhood')
     project_id = ForeignIdProperty('Project')
     app_config_id = ForeignIdProperty('AppConfig')
-    tool_name=FieldProperty(str)
-    title=FieldProperty(str)
-    link=FieldProperty(str)
+    tool_name = FieldProperty(str)
+    title = FieldProperty(str)
+    link = FieldProperty(str)
     pubdate = FieldProperty(datetime, if_missing=datetime.utcnow)
     description = FieldProperty(str)
     description_cache = FieldProperty(MarkdownCache)
-    unique_id = FieldProperty(str, if_missing=lambda:h.nonce(40))
-    author_name = FieldProperty(str, if_missing=lambda:c.user.get_pref('display_name') if hasattr(c, 'user') else None)
-    author_link = FieldProperty(str, if_missing=lambda:c.user.url() if hasattr(c, 'user') else None)
+    unique_id = FieldProperty(str, if_missing=lambda: h.nonce(40))
+    author_name = FieldProperty(str, if_missing=lambda: c.user.get_pref(
+        'display_name') if hasattr(c, 'user') else None)
+    author_link = FieldProperty(
+        str, if_missing=lambda: c.user.url() if hasattr(c, 'user') else None)
     artifact_reference = FieldProperty(S.Deprecated)
 
-
     @classmethod
     def post(cls, artifact, title=None, description=None, author=None, author_link=None, author_name=None, pubdate=None, link=None, **kw):
         """
@@ -769,12 +799,14 @@ class Feed(MappedClass):
         if author_name is None:
             author_name = author.get_pref('display_name')
         if title is None:
-            title='%s modified by %s' % (h.get_first(idx, 'title'), author_name)
-        if description is None: description = title
+            title = '%s modified by %s' % (
+                h.get_first(idx, 'title'), author_name)
+        if description is None:
+            description = title
         if pubdate is None:
             pubdate = datetime.utcnow()
         if link is None:
-            link=artifact.url()
+            link = artifact.url()
         item = cls(
             ref_id=artifact.index_id(),
             neighborhood_id=artifact.app_config.project.neighborhood_id,
@@ -796,7 +828,8 @@ class Feed(MappedClass):
     def feed(cls, q, feed_type, title, link, description,
              since=None, until=None, offset=None, limit=None):
         "Produces webhelper.feedgenerator Feed"
-        d = dict(title=title, link=h.absurl(link), description=description, language=u'en')
+        d = dict(title=title, link=h.absurl(link),
+                 description=description, language=u'en')
         if feed_type == 'atom':
             feed = FG.Atom1Feed(**d)
         elif feed_type == 'rss':
@@ -809,9 +842,11 @@ class Feed(MappedClass):
             query['pubdate']['$lte'] = until
         cur = cls.query.find(query)
         cur = cur.sort('pubdate', pymongo.DESCENDING)
-        if limit is None: limit = 10
+        if limit is None:
+            limit = 10
         query = cur.limit(limit)
-        if offset is not None: query = cur.offset(offset)
+        if offset is not None:
+            query = cur.offset(offset)
         for r in cur:
             feed.add_item(title=r.title,
                           link=h.absurl(r.link.encode('utf-8')),
@@ -824,6 +859,7 @@ class Feed(MappedClass):
 
 
 class VotableArtifact(MappedClass):
+
     """Voting support for the Artifact. Use as a mixin."""
 
     class __mongometa__:
@@ -899,11 +935,13 @@ class VotableArtifact(MappedClass):
 
 
 class MovedArtifact(Artifact):
+
     class __mongometa__:
         session = artifact_orm_session
-        name='moved_artifact'
+        name = 'moved_artifact'
 
     _id = FieldProperty(S.ObjectId)
-    app_config_id = ForeignIdProperty('AppConfig', if_missing=lambda:c.app.config._id)
+    app_config_id = ForeignIdProperty(
+        'AppConfig', if_missing=lambda: c.app.config._id)
     app_config = RelationProperty('AppConfig')
     moved_to_url = FieldProperty(str, required=True, allow_none=False)
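
Stripped of the Mongo query and pagination, Feed.feed's job is to pour stored Feed documents into a webhelpers feedgenerator object and serialize it. A minimal sketch of that assembly, with made-up item values rather than actual query results:

    from datetime import datetime
    from webhelpers import feedgenerator as FG

    d = dict(title='Recent changes', link='http://example.com/feed',
             description='Recent changes', language=u'en')
    feed = FG.Atom1Feed(**d)
    feed.add_item(title='Ticket #1 modified by admin',
                  link='http://example.com/p/test/tickets/1/',
                  description='Ticket #1 modified by admin',
                  pubdate=datetime.utcnow())
    xml = feed.writeString('utf-8')   # serialized XML, ready to return from a controller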

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/attachments.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/attachments.py b/Allura/allura/model/attachments.py
index 5401359..568f5b0 100644
--- a/Allura/allura/model/attachments.py
+++ b/Allura/allura/model/attachments.py
@@ -24,21 +24,22 @@ from allura.lib import helpers as h
 from .session import project_orm_session
 from .filesystem import File
 
+
 class BaseAttachment(File):
     thumbnail_size = (255, 255)
-    ArtifactType=None
+    ArtifactType = None
 
     class __mongometa__:
         name = 'attachment'
         polymorphic_on = 'attachment_type'
-        polymorphic_identity=None
+        polymorphic_identity = None
         session = project_orm_session
-        indexes = [ 'artifact_id', 'app_config_id' ]
+        indexes = ['artifact_id', 'app_config_id']
 
-    artifact_id=FieldProperty(S.ObjectId)
-    app_config_id=FieldProperty(S.ObjectId)
-    type=FieldProperty(str)
-    attachment_type=FieldProperty(str)
+    artifact_id = FieldProperty(S.ObjectId)
+    app_config_id = FieldProperty(S.ObjectId)
+    type = FieldProperty(str)
+    attachment_type = FieldProperty(str)
 
     @property
     def artifact(self):
@@ -76,7 +77,9 @@ class BaseAttachment(File):
             return orig, thumbnail
         else:
             # No, generic attachment
-            fp.seek(0)  # stream may have been partially consumed in a failed save_image attempt
+            # stream may have been partially consumed in a failed save_image
+            # attempt
+            fp.seek(0)
             return cls.from_stream(
                 filename, fp, content_type=content_type,
                 **original_meta)
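
The fp.seek(0) that the comment above annotates is load-bearing: a failed attempt to treat the upload as an image can leave the stream partially read, so the generic-attachment fallback must rewind before re-reading it. In miniature:

    import io

    fp = io.BytesIO(b'not really an image')
    fp.read(4)     # a failed image parse consumed part of the stream
    fp.seek(0)     # rewind before falling back to a plain file save
    assert fp.read() == b'not really an image'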


[08/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeLink/setup.py
----------------------------------------------------------------------
diff --git a/ForgeLink/setup.py b/ForgeLink/setup.py
index 1379ac2..711d2c1 100644
--- a/ForgeLink/setup.py
+++ b/ForgeLink/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgelink.version import __version__
 
@@ -25,7 +26,8 @@ setup(name='ForgeLink',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/controllers.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/controllers.py b/ForgeSVN/forgesvn/controllers.py
index 44d1a69..f4461f0 100644
--- a/ForgeSVN/forgesvn/controllers.py
+++ b/ForgeSVN/forgesvn/controllers.py
@@ -38,9 +38,9 @@ class BranchBrowser(repository.BranchBrowser, FeedController):
         latest = c.app.repo.latest(branch=self._branch)
         if is_empty or not latest:
             return dict(allow_fork=False, log=[], is_empty=is_empty)
-        redirect(c.app.repo.url_for_commit(c.app.default_branch_name) + 'tree/')
+        redirect(c.app.repo.url_for_commit(c.app.default_branch_name)
+                 + 'tree/')
 
     @expose()
     def _lookup(self, rev, *remainder):
         return repository.CommitBrowser(rev), remainder
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/model/svn.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/model/svn.py b/ForgeSVN/forgesvn/model/svn.py
index b55c397..9823f1e 100644
--- a/ForgeSVN/forgesvn/model/svn.py
+++ b/ForgeSVN/forgesvn/model/svn.py
@@ -46,13 +46,15 @@ from allura.model.repository import zipdir
 
 log = logging.getLogger(__name__)
 
+
 class Repository(M.Repository):
-    tool_name='SVN'
-    repo_id='svn'
-    type_s='SVN Repository'
+    tool_name = 'SVN'
+    repo_id = 'svn'
+    type_s = 'SVN Repository'
+
     class __mongometa__:
-        name='svn-repository'
-    branches = FieldProperty([dict(name=str,object_id=str)])
+        name = 'svn-repository'
+    branches = FieldProperty([dict(name=str, object_id=str)])
     _refresh_precompute = False
 
     @LazyProperty
@@ -68,13 +70,17 @@ class Repository(M.Repository):
         tpl = string.Template(tg.config.get('scm.clone.%s.%s' % (category, self.tool)) or
                               tg.config.get('scm.clone.%s' % self.tool))
         return tpl.substitute(dict(username=username,
-                                   source_url=self.clone_url(category, username)+c.app.config.options.get('checkout_url'),
+                                   source_url=self.clone_url(
+                                       category, username) + c.app.config.options.get(
+                                       'checkout_url'),
                                    dest_path=self.suggested_clone_dest_path()))
 
-    def compute_diffs(self): return
+    def compute_diffs(self):
+        return
 
     def latest(self, branch=None):
-        if self._impl is None: return None
+        if self._impl is None:
+            return None
         return self._impl.commit('HEAD')
 
     def tarball_filename(self, revision, path=None):
@@ -88,6 +94,7 @@ class Repository(M.Repository):
 
 
 class SVNCalledProcessError(Exception):
+
     def __init__(self, cmd, returncode, stdout, stderr):
         self.cmd = cmd
         self.returncode = returncode
@@ -113,7 +120,9 @@ def svn_path_exists(path, rev=None):
 
 
 class SVNLibWrapper(object):
+
     """Wrapper around pysvn, used for instrumentation."""
+
     def __init__(self, client):
         self.client = client
 
@@ -185,10 +194,10 @@ class SVNImplementation(M.RepositoryImplementation):
         if os.path.exists(fullname):
             shutil.rmtree(fullname)
         subprocess.call(['svnadmin', 'create', self._repo.name],
-                                 stdin=subprocess.PIPE,
-                                 stdout=subprocess.PIPE,
-                                 stderr=subprocess.PIPE,
-                                 cwd=self._repo.fs_path)
+                        stdin=subprocess.PIPE,
+                        stdout=subprocess.PIPE,
+                        stderr=subprocess.PIPE,
+                        cwd=self._repo.fs_path)
         if not skip_special_files:
             self._setup_special_files()
         self._repo.set_status('ready')
@@ -197,17 +206,18 @@ class SVNImplementation(M.RepositoryImplementation):
             tmp_working_dir = tempfile.mkdtemp(prefix='allura-svn-r1-',
                                                dir=tg.config.get('scm.svn.tmpdir', g.tmpdir))
             log.info('tmp dir = %s', tmp_working_dir)
-            self._repo._impl._svn.checkout('file://'+fullname, tmp_working_dir)
-            os.mkdir(tmp_working_dir+'/trunk')
-            os.mkdir(tmp_working_dir+'/tags')
-            os.mkdir(tmp_working_dir+'/branches')
-            self._repo._impl._svn.add(tmp_working_dir+'/trunk')
-            self._repo._impl._svn.add(tmp_working_dir+'/tags')
-            self._repo._impl._svn.add(tmp_working_dir+'/branches')
-            self._repo._impl._svn.checkin([tmp_working_dir+'/trunk',
-                                           tmp_working_dir+'/tags',
-                                           tmp_working_dir+'/branches'],
-                                        'Initial commit')
+            self._repo._impl._svn.checkout(
+                'file://' + fullname, tmp_working_dir)
+            os.mkdir(tmp_working_dir + '/trunk')
+            os.mkdir(tmp_working_dir + '/tags')
+            os.mkdir(tmp_working_dir + '/branches')
+            self._repo._impl._svn.add(tmp_working_dir + '/trunk')
+            self._repo._impl._svn.add(tmp_working_dir + '/tags')
+            self._repo._impl._svn.add(tmp_working_dir + '/branches')
+            self._repo._impl._svn.checkin([tmp_working_dir + '/trunk',
+                                           tmp_working_dir + '/tags',
+                                           tmp_working_dir + '/branches'],
+                                          'Initial commit')
             shutil.rmtree(tmp_working_dir)
             log.info('deleted %s', tmp_working_dir)
 
@@ -261,13 +271,15 @@ class SVNImplementation(M.RepositoryImplementation):
             # make sure new repo has a pre-revprop-change hook,
             # otherwise the sync will fail
             set_hook('pre-revprop-change')
-            self.check_call(['svnsync', '--non-interactive', '--allow-non-empty',
-              'initialize', self._url, source_url])
+            self.check_call(
+                ['svnsync', '--non-interactive', '--allow-non-empty',
+                 'initialize', self._url, source_url])
             clear_hook('pre-revprop-change')
         else:
             set_hook('pre-revprop-change')
             self.check_call(['svnsync', 'init', self._url, source_url])
-            self.check_call(['svnsync', '--non-interactive', 'sync', self._url])
+            self.check_call(
+                ['svnsync', '--non-interactive', 'sync', self._url])
             clear_hook('pre-revprop-change')
 
         log.info('... %r cloned', self._repo)
@@ -287,12 +299,13 @@ class SVNImplementation(M.RepositoryImplementation):
         """
         opts = self._repo.app.config.options
         if not svn_path_exists('file://{0}{1}/{2}'.format(self._repo.fs_path,
-                self._repo.name, opts['checkout_url'])):
+                                                          self._repo.name, opts['checkout_url'])):
             opts['checkout_url'] = ''
 
         if (not opts['checkout_url'] and
-                svn_path_exists('file://{0}{1}/trunk'.format(self._repo.fs_path,
-                    self._repo.name))):
+                svn_path_exists(
+                    'file://{0}{1}/trunk'.format(self._repo.fs_path,
+                                                 self._repo.name))):
             opts['checkout_url'] = 'trunk'
 
     def commit(self, rev):
@@ -319,7 +332,7 @@ class SVNImplementation(M.RepositoryImplementation):
 
     def new_commits(self, all_commits=False):
         head_revno = self.head
-        oids = [ self._oid(revno) for revno in range(1, head_revno+1) ]
+        oids = [self._oid(revno) for revno in range(1, head_revno + 1)]
         if all_commits:
             return oids
         # Find max commit id -- everything greater than that will be "unknown"
@@ -327,22 +340,24 @@ class SVNImplementation(M.RepositoryImplementation):
         q = M.repo.Commit.query.find(
             dict(
                 type='commit',
-                _id={'$gt':prefix},
-                ),
+                _id={'$gt': prefix},
+            ),
             dict(_id=True)
-            )
+        )
         seen_oids = set()
         for d in q.ming_cursor.cursor:
             oid = d['_id']
-            if not oid.startswith(prefix): break
+            if not oid.startswith(prefix):
+                break
             seen_oids.add(oid)
         return [
-            oid for oid in oids if oid not in seen_oids ]
+            oid for oid in oids if oid not in seen_oids]
 
     def refresh_commit_info(self, oid, seen_object_ids, lazy=True):
         from allura.model.repo import CommitDoc, DiffInfoDoc
         ci_doc = CommitDoc.m.get(_id=oid)
-        if ci_doc and lazy: return False
+        if ci_doc and lazy:
+            return False
         revno = self._revno(oid)
         rev = self._revision(oid)
         try:
@@ -352,7 +367,8 @@ class SVNImplementation(M.RepositoryImplementation):
                 limit=1,
                 discover_changed_paths=True)[0]
         except pysvn.ClientError:
-            log.info('ClientError processing %r %r, treating as empty', oid, self._repo, exc_info=True)
+            log.info('ClientError processing %r %r, treating as empty',
+                     oid, self._repo, exc_info=True)
             log_entry = Object(date='', message='', changed_paths=[])
         log_date = None
         if hasattr(log_entry, 'date'):
@@ -360,7 +376,7 @@ class SVNImplementation(M.RepositoryImplementation):
         user = Object(
             name=h.really_unicode(log_entry.get('author', '--none--')),
             email='',
-           date=log_date)
+            date=log_date)
         args = dict(
             tree_id=None,
             committed=user,
@@ -369,7 +385,7 @@ class SVNImplementation(M.RepositoryImplementation):
             parent_ids=[],
             child_ids=[])
         if revno > 1:
-            args['parent_ids'] = [ self._oid(revno-1) ]
+            args['parent_ids'] = [self._oid(revno - 1)]
         if ci_doc:
             ci_doc.update(**args)
             ci_doc.m.save()
@@ -378,7 +394,8 @@ class SVNImplementation(M.RepositoryImplementation):
             try:
                 ci_doc.m.insert(safe=True)
             except DuplicateKeyError:
-                if lazy: return False
+                if lazy:
+                    return False
         # Save diff info
         di = DiffInfoDoc.make(dict(_id=ci_doc._id, differences=[]))
         for path in log_entry.changed_paths:
@@ -417,21 +434,22 @@ class SVNImplementation(M.RepositoryImplementation):
             else:
                 lhs_id = None
             di.differences.append(dict(
-                    name=h.really_unicode(path.path),
-                    lhs_id=lhs_id,
-                    rhs_id=rhs_id))
+                name=h.really_unicode(path.path),
+                lhs_id=lhs_id,
+                rhs_id=rhs_id))
         di.m.save()
         return True
 
     def compute_tree_new(self, commit, tree_path='/'):
         from allura.model import repo as RM
-        tree_path = '/' + tree_path.strip('/')  # always leading slash, never trailing
+        # always leading slash, never trailing
+        tree_path = '/' + tree_path.strip('/')
         tree_id = self._tree_oid(commit._id, tree_path)
         tree = RM.Tree.query.get(_id=tree_id)
         if tree:
             return tree_id
         log.debug('Computing tree for %s: %s',
-                 self._revno(commit._id), tree_path)
+                  self._revno(commit._id), tree_path)
         rev = self._revision(commit._id)
         try:
             infos = self._svn.info2(
@@ -449,34 +467,35 @@ class SVNImplementation(M.RepositoryImplementation):
         for path, info in infos[1:]:
             if info.kind == pysvn.node_kind.dir:
                 tree_ids.append(Object(
-                        id=self._tree_oid(commit._id, path),
-                        name=path))
+                    id=self._tree_oid(commit._id, path),
+                    name=path))
             elif info.kind == pysvn.node_kind.file:
                 blob_ids.append(Object(
-                        id=self._tree_oid(commit._id, path),
-                        name=path))
+                    id=self._tree_oid(commit._id, path),
+                    name=path))
             else:
                 assert False
             lcd_entries.append(dict(
-                    name=path,
-                    commit_id=self._oid(info.last_changed_rev.number),
-                ))
+                name=path,
+                commit_id=self._oid(info.last_changed_rev.number),
+            ))
         tree, is_new = RM.Tree.upsert(tree_id,
-                tree_ids=tree_ids,
-                blob_ids=blob_ids,
-                other_ids=[],
-            )
+                                      tree_ids=tree_ids,
+                                      blob_ids=blob_ids,
+                                      other_ids=[],
+                                      )
         if is_new:
             commit_id = self._oid(infos[0][1].last_changed_rev.number)
             path = tree_path.strip('/')
             RM.TreesDoc.m.update_partial(
-                    {'_id': commit._id},
-                    {'$addToSet': {'tree_ids': tree_id}},
-                    upsert=True)
+                {'_id': commit._id},
+                {'$addToSet': {'tree_ids': tree_id}},
+                upsert=True)
             RM.LastCommitDoc.m.update_partial(
-                    {'commit_id': commit_id, 'path': path},
-                    {'commit_id': commit_id, 'path': path, 'entries': lcd_entries},
-                    upsert=True)
+                {'commit_id': commit_id, 'path': path},
+                {'commit_id': commit_id, 'path':
+                 path, 'entries': lcd_entries},
+                upsert=True)
         return tree_id
 
     def _tree_oid(self, commit_id, path):
@@ -533,7 +552,8 @@ class SVNImplementation(M.RepositoryImplementation):
         while revno > exclude:
             rev = pysvn.Revision(pysvn.opt_revision_kind.number, revno)
             try:
-                logs = self._svn.log(url, revision_start=rev, peg_revision=rev, limit=page_size,
+                logs = self._svn.log(
+                    url, revision_start=rev, peg_revision=rev, limit=page_size,
                     discover_changed_paths=True)
             except pysvn.ClientError as e:
                 if 'Unable to connect' in e.message:
@@ -548,16 +568,18 @@ class SVNImplementation(M.RepositoryImplementation):
                 else:
                     yield self._map_log(ci, url, path)
             if len(logs) < page_size:
-                return  # we didn't get a full page, don't bother calling SVN again
+                # we didn't get a full page, don't bother calling SVN again
+                return
             revno = ci.revision.number - 1
 
     def _check_changed_path(self, changed_path, path):
         if (changed_path['copyfrom_path'] and
-                    changed_path['path'] and
-                    path and
-                    (len(changed_path['path']) < len(path)) and
-                    path.startswith(changed_path['path'])):
-                changed_path['copyfrom_path'] = changed_path['copyfrom_path'] + path[len(changed_path['path']):]
+                changed_path['path'] and
+                path and
+                (len(changed_path['path']) < len(path)) and
+                path.startswith(changed_path['path'])):
+                changed_path['copyfrom_path'] = changed_path['copyfrom_path'] + \
+                    path[len(changed_path['path']):]
                 changed_path['path'] = path
         return changed_path
 
@@ -565,7 +587,8 @@ class SVNImplementation(M.RepositoryImplementation):
         revno = ci.revision.number
         rev = pysvn.Revision(pysvn.opt_revision_kind.number, revno)
         try:
-            size = int(self._svn.list(url, revision=rev, peg_revision=rev)[0][0].size)
+            size = int(
+                self._svn.list(url, revision=rev, peg_revision=rev)[0][0].size)
         except pysvn.ClientError:
             size = None
         rename_details = {}
@@ -579,23 +602,23 @@ class SVNImplementation(M.RepositoryImplementation):
                 )
                 break
         return {
-                'id': revno,
-                'message': h.really_unicode(ci.get('message', '--none--')),
-                'authored': {
-                        'name': h.really_unicode(ci.get('author', '--none--')),
-                        'email': '',
-                        'date': datetime.utcfromtimestamp(ci.date),
-                    },
-                'committed': {
-                        'name': h.really_unicode(ci.get('author', '--none--')),
-                        'email': '',
-                        'date': datetime.utcfromtimestamp(ci.date),
-                    },
-                'refs': ['HEAD'] if revno == self.head else [],
-                'parents': [revno-1] if revno > 1 else [],
-                'size': size,
-                'rename_details': rename_details,
-            }
+            'id': revno,
+            'message': h.really_unicode(ci.get('message', '--none--')),
+            'authored': {
+                'name': h.really_unicode(ci.get('author', '--none--')),
+                'email': '',
+                'date': datetime.utcfromtimestamp(ci.date),
+            },
+            'committed': {
+                'name': h.really_unicode(ci.get('author', '--none--')),
+                'email': '',
+                'date': datetime.utcfromtimestamp(ci.date),
+            },
+            'refs': ['HEAD'] if revno == self.head else [],
+            'parents': [revno - 1] if revno > 1 else [],
+            'size': size,
+            'rename_details': rename_details,
+        }
 
     def open_blob(self, blob):
         data = self._svn.cat(
@@ -607,18 +630,21 @@ class SVNImplementation(M.RepositoryImplementation):
         try:
             rev = self._revision(blob.commit._id)
             data = self._svn.list(
-                   self._url + blob.path(),
-                   revision=rev,
-                   peg_revision=rev,
-                   dirent_fields=pysvn.SVN_DIRENT_SIZE)
+                self._url + blob.path(),
+                revision=rev,
+                peg_revision=rev,
+                dirent_fields=pysvn.SVN_DIRENT_SIZE)
         except pysvn.ClientError:
-            log.info('ClientError getting filesize %r %r, returning 0', blob.path(), self._repo, exc_info=True)
+            log.info('ClientError getting filesize %r %r, returning 0',
+                     blob.path(), self._repo, exc_info=True)
             return 0
 
         try:
             size = data[0][0]['size']
         except (IndexError, KeyError):
-            log.info('Error getting filesize: bad data from svn client %r %r, returning 0', blob.path(), self._repo, exc_info=True)
+            log.info(
+                'Error getting filesize: bad data from svn client %r %r, returning 0',
+                blob.path(), self._repo, exc_info=True)
             size = 0
 
         return size
@@ -629,7 +655,8 @@ class SVNImplementation(M.RepositoryImplementation):
         # the hook should also call the user-defined post-commit-user hook
         text = self.post_receive_template.substitute(
             url=self._repo.refresh_url())
-        fn = os.path.join(self._repo.fs_path, self._repo.name, 'hooks', 'post-commit')
+        fn = os.path.join(self._repo.fs_path, self._repo.name,
+                          'hooks', 'post-commit')
         with open(fn, 'wb') as fp:
             fp.write(text)
         os.chmod(fn, 0755)
@@ -661,7 +688,8 @@ class SVNImplementation(M.RepositoryImplementation):
         if len(paths) == 1:
             tree_path = '/' + os.path.dirname(paths[0].strip('/'))
         else:
-            tree_path = '/' + os.path.commonprefix(paths).strip('/')  # always leading slash, never trailing
+            # always leading slash, never trailing
+            tree_path = '/' + os.path.commonprefix(paths).strip('/')
         paths = [path.strip('/') for path in paths]
         rev = self._revision(commit._id)
         try:
@@ -689,7 +717,8 @@ class SVNImplementation(M.RepositoryImplementation):
                 limit=1,
                 discover_changed_paths=True)[0]
         except pysvn.ClientError:
-            log.info('ClientError processing %r %r, treating as empty', oid, self._repo, exc_info=True)
+            log.info('ClientError processing %r %r, treating as empty',
+                     oid, self._repo, exc_info=True)
             log_entry = Object(date='', message='', changed_paths=[])
         return [p.path for p in log_entry.changed_paths]
 
@@ -702,7 +731,8 @@ class SVNImplementation(M.RepositoryImplementation):
                 idx = path.index('tags')
             elif 'branches' in path:
                 idx = path.index('branches')
-            if idx is not None and idx < len(path) - 1:  # e.g. path/tags/tag-1.0/...
+            # e.g. path/tags/tag-1.0/...
+            if idx is not None and idx < len(path) - 1:
                 return '/'.join(path[:idx + 2])  # path/tags/tag-1.0
             if 'trunk' in path:
                 idx = path.index('trunk')
@@ -720,8 +750,10 @@ class SVNImplementation(M.RepositoryImplementation):
             os.makedirs(self._repo.tarball_path)
         archive_name = self._repo.tarball_filename(commit, path)
         dest = os.path.join(self._repo.tarball_path, archive_name)
-        filename = os.path.join(self._repo.tarball_path, '%s%s' % (archive_name, '.zip'))
-        tmpfilename = os.path.join(self._repo.tarball_path, '%s%s' % (archive_name, '.tmp'))
+        filename = os.path.join(self._repo.tarball_path, '%s%s' %
+                                (archive_name, '.zip'))
+        tmpfilename = os.path.join(self._repo.tarball_path, '%s%s' %
+                                   (archive_name, '.tmp'))
         rmtree(dest, ignore_errors=True)
         path = os.path.join(self._url, path)
         try:
@@ -730,7 +762,8 @@ class SVNImplementation(M.RepositoryImplementation):
             locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
             self._svn.export(path,
                              dest,
-                             revision=pysvn.Revision(pysvn.opt_revision_kind.number, commit),
+                             revision=pysvn.Revision(
+                                 pysvn.opt_revision_kind.number, commit),
                              ignore_externals=True)
             zipdir(dest, tmpfilename)
             os.rename(tmpfilename, filename)
@@ -744,9 +777,11 @@ class SVNImplementation(M.RepositoryImplementation):
 
     def is_file(self, path, rev=None):
         url = '/'.join([self._url, path.strip('/')])
-        rev = pysvn.Revision(pysvn.opt_revision_kind.number, self._revno(self.rev_parse(rev)))
+        rev = pysvn.Revision(pysvn.opt_revision_kind.number,
+                             self._revno(self.rev_parse(rev)))
         try:
-            info = self._svn.list(url, revision=rev, peg_revision=rev, dirent_fields=pysvn.SVN_DIRENT_KIND)[0][0]
+            info = self._svn.list(
+                url, revision=rev, peg_revision=rev, dirent_fields=pysvn.SVN_DIRENT_KIND)[0][0]
             return info.kind == pysvn.node_kind.file
         except pysvn.ClientError:
             return False
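
The clone_from changes earlier in this file wrap a fairly standard svnsync mirroring sequence; the pre-revprop-change hook matters because svnsync copies revision properties, which Subversion rejects unless that hook exits successfully. Roughly, with hypothetical paths and URLs:

    import os
    import stat
    import subprocess

    repo_path = '/tmp/mirror-repo'                  # hypothetical
    source_url = 'http://svn.example.com/repo'      # hypothetical
    dest_url = 'file://' + repo_path

    subprocess.check_call(['svnadmin', 'create', repo_path])

    # svnsync records bookkeeping in revision properties; allow that.
    hook = os.path.join(repo_path, 'hooks', 'pre-revprop-change')
    with open(hook, 'w') as f:
        f.write('#!/bin/sh\nexit 0\n')
    os.chmod(hook, os.stat(hook).st_mode | stat.S_IEXEC)

    subprocess.check_call(['svnsync', 'init', dest_url, source_url])
    subprocess.check_call(['svnsync', '--non-interactive', 'sync', dest_url])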

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/svn_main.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/svn_main.py b/ForgeSVN/forgesvn/svn_main.py
index bee61b3..dd72214 100644
--- a/ForgeSVN/forgesvn/svn_main.py
+++ b/ForgeSVN/forgesvn/svn_main.py
@@ -45,23 +45,25 @@ from .model.svn import svn_path_exists
 
 log = logging.getLogger(__name__)
 
+
 class ForgeSVNApp(RepositoryApp):
+
     '''This is the SVN app for PyForge'''
     __version__ = version.__version__
     config_options = RepositoryApp.config_options + [
         ConfigOption('checkout_url', str, '')
-        ]
+    ]
     permissions_desc = dict(RepositoryApp.permissions_desc, **{
         'write': 'Repo commit access.',
         'admin': 'Set permissions, checkout url, and viewable files. Import a remote repo.',
     })
-    tool_label='SVN'
-    tool_description="""
+    tool_label = 'SVN'
+    tool_description = """
         Enterprise-class centralized version control for the masses.
     """
-    ordinal=4
-    forkable=False
-    default_branch_name='HEAD'
+    ordinal = 4
+    forkable = False
+    default_branch_name = 'HEAD'
 
     def __init__(self, project, config):
         super(ForgeSVNApp, self).__init__(project, config)
@@ -99,16 +101,20 @@ class ForgeSVNApp(RepositoryApp):
     def admin_menu(self):
         links = []
         links.append(SitemapEntry(
-                'Checkout URL',
-                c.project.url()+'admin/'+self.config.options.mount_point+'/' + 'checkout_url',
-                className='admin_modal'))
+            'Checkout URL',
+            c.project.url() + 'admin/' +
+            self.config.options.mount_point +
+            '/' + 'checkout_url',
+            className='admin_modal'))
         links.append(SitemapEntry(
-                'Import Repo',
-                c.project.url()+'admin/'+self.config.options.mount_point+'/' + 'importer/'))
+            'Import Repo',
+            c.project.url() + 'admin/' + self.config.options.mount_point + '/' + 'importer/'))
         links += super(ForgeSVNApp, self).admin_menu()
         return links
 
+
 class SVNRepoAdminController(RepoAdminController):
+
     def __init__(self, app):
         super(SVNRepoAdminController, self).__init__(app)
         self.importer = SVNImportController(self.app)
@@ -131,11 +137,12 @@ class SVNRepoAdminController(RepoAdminController):
             self.app.config.options['checkout_url'] = post_data['checkout_url']
             flash("Checkout URL successfully changed")
         else:
-            flash("%s is not a valid path for this repository" % post_data['checkout_url'], "error")
+            flash("%s is not a valid path for this repository" %
+                  post_data['checkout_url'], "error")
 
 
 class SVNImportController(BaseController):
-    import_form=widgets.ImportForm()
+    import_form = widgets.ImportForm()
 
     def __init__(self, app):
         self.app = app
@@ -154,8 +161,8 @@ class SVNImportController(BaseController):
     def do_import(self, checkout_url=None, **kwargs):
         if self.app.repo.is_empty():
             with h.push_context(
-                self.app.config.project_id,
-                app_config_id=self.app.config._id):
+                    self.app.config.project_id,
+                    app_config_id=self.app.config._id):
                 allura.tasks.repo_tasks.reclone.post(
                     cloned_from_path=None,
                     cloned_from_name=None,
@@ -172,8 +179,9 @@ class SVNImportController(BaseController):
 
 
 def svn_timers():
-    return Timer('svn_lib.{method_name}', SM.svn.SVNLibWrapper, 'checkout', 'add',
-                 'checkin', 'info2', 'log', 'cat', 'list')
+    return Timer(
+        'svn_lib.{method_name}', SM.svn.SVNLibWrapper, 'checkout', 'add',
+        'checkin', 'info2', 'log', 'cat', 'list')
 
 
 def forgesvn_timers():

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/tests/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/tests/__init__.py b/ForgeSVN/forgesvn/tests/__init__.py
index 8ee62fd..d152a9e 100644
--- a/ForgeSVN/forgesvn/tests/__init__.py
+++ b/ForgeSVN/forgesvn/tests/__init__.py
@@ -18,7 +18,7 @@
 #       under the License.
 
 
-## Make our own SVN tool test decorator
+# Make our own SVN tool test decorator
 from allura.tests.decorators import with_tool
 
 with_svn = with_tool('test', 'SVN', 'src', 'SVN')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/tests/functional/test_auth.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/tests/functional/test_auth.py b/ForgeSVN/forgesvn/tests/functional/test_auth.py
index 178c8f0..7394c1b 100644
--- a/ForgeSVN/forgesvn/tests/functional/test_auth.py
+++ b/ForgeSVN/forgesvn/tests/functional/test_auth.py
@@ -23,6 +23,7 @@ from datadiff.tools import assert_equal
 from allura.tests import TestController
 from forgesvn.tests import with_svn
 
+
 class TestSVNAuth(TestController):
 
     @with_svn
@@ -40,7 +41,9 @@ class TestSVNAuth(TestController):
         assert_equal(r.body, 'Cannot find repo at /p/test/blah')
 
         r = self.app.get('/auth/refresh_repo/p/test/src/')
-        assert_equal(r.body, '<Repository /tmp/svn/p/test/src> refresh queued.\n')
+        assert_equal(r.body,
+                     '<Repository /tmp/svn/p/test/src> refresh queued.\n')
+
 
 class TestSVNUserPermissions(TestController):
     allow = dict(allow_read=True, allow_write=True, allow_create=True)
@@ -49,7 +52,8 @@ class TestSVNUserPermissions(TestController):
 
     @with_svn
     def test_list_repos(self):
-        r = self.app.get('/auth/repo_permissions', params=dict(username='test-admin'), status=200)
+        r = self.app.get('/auth/repo_permissions',
+                         params=dict(username='test-admin'), status=200)
         assert_equal(json.loads(r.body), {"allow_write": [
             '/svn/test/src',
         ]})

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/tests/functional/test_controllers.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/tests/functional/test_controllers.py b/ForgeSVN/forgesvn/tests/functional/test_controllers.py
index 2cc0b9c..ca02196 100644
--- a/ForgeSVN/forgesvn/tests/functional/test_controllers.py
+++ b/ForgeSVN/forgesvn/tests/functional/test_controllers.py
@@ -33,7 +33,9 @@ from alluratest.controller import TestController
 from forgesvn.tests import with_svn
 from allura.tests.decorators import with_tool
 
+
 class SVNTestController(TestController):
+
     def setUp(self):
         TestController.setUp(self)
         self.setup_with_tools()
@@ -95,7 +97,7 @@ class TestRootController(SVNTestController):
 
     def test_commit_browser_data(self):
         resp = self.app.get('/src/commit_browser_data')
-        data = json.loads(resp.body);
+        data = json.loads(resp.body)
         assert data['max_row'] == 5
         assert data['next_column'] == 1
         for val in data['built_tree'].values():
@@ -112,7 +114,8 @@ class TestRootController(SVNTestController):
             title = channel.find('title').text
             assert_equal(title, 'test SVN changes')
             description = channel.find('description').text
-            assert_equal(description, 'Recent changes to SVN repository in test project')
+            assert_equal(description,
+                         'Recent changes to SVN repository in test project')
             link = channel.find('link').text
             assert_equal(link, 'http://localhost/p/test/src/')
             commit = channel.find('item')
@@ -144,8 +147,10 @@ class TestRootController(SVNTestController):
 
     def test_file(self):
         resp = self.app.get('/src/1/tree/README')
-        assert 'README' in resp.html.find('h2', {'class':'dark title'}).contents[2]
-        content = str(resp.html.find('div', {'class':'clip grid-19 codebrowser'}))
+        assert 'README' in resp.html.find(
+            'h2', {'class': 'dark title'}).contents[2]
+        content = str(
+            resp.html.find('div', {'class': 'clip grid-19 codebrowser'}))
         assert 'This is readme' in content, content
         assert '<span id="l1" class="code_block">' in resp
         assert 'var hash = window.location.hash.substring(1);' in resp
@@ -233,30 +238,37 @@ class TestRootController(SVNTestController):
         form = r.html.find('form', 'tarball')
         assert_equal(form.button.text, 'Download Snapshot')
         assert_equal(form.get('action'), '/p/test/svn-tags/19/tarball')
-        assert_equal(form.find('input', attrs=dict(name='path')).get('value'), '/tags/tag-1.0')
+        assert_equal(
+            form.find('input', attrs=dict(name='path')).get('value'), '/tags/tag-1.0')
 
-        r = self.app.get('/p/test/svn-tags/19/tarball_status?path=/tags/tag-1.0')
+        r = self.app.get(
+            '/p/test/svn-tags/19/tarball_status?path=/tags/tag-1.0')
         assert_equal(r.json['status'], None)
-        r = self.app.post('/p/test/svn-tags/19/tarball', dict(path='/tags/tag-1.0')).follow()
+        r = self.app.post('/p/test/svn-tags/19/tarball',
+                          dict(path='/tags/tag-1.0')).follow()
         assert 'Generating snapshot...' in r
         M.MonQTask.run_ready()
-        r = self.app.get('/p/test/svn-tags/19/tarball_status?path=/tags/tag-1.0')
+        r = self.app.get(
+            '/p/test/svn-tags/19/tarball_status?path=/tags/tag-1.0')
         assert_equal(r.json['status'], 'complete')
 
         r = self.app.get('/p/test/svn-tags/19/tarball_status?path=/trunk')
         assert_equal(r.json['status'], None)
-        r = self.app.post('/p/test/svn-tags/19/tarball', dict(path='/trunk/')).follow()
+        r = self.app.post('/p/test/svn-tags/19/tarball',
+                          dict(path='/trunk/')).follow()
         assert 'Generating snapshot...' in r
         M.MonQTask.run_ready()
         r = self.app.get('/p/test/svn-tags/19/tarball_status?path=/trunk')
         assert_equal(r.json['status'], 'complete')
 
-        r = self.app.get('/p/test/svn-tags/19/tarball_status?path=/branches/aaa/')
+        r = self.app.get(
+            '/p/test/svn-tags/19/tarball_status?path=/branches/aaa/')
         assert_equal(r.json['status'], None)
 
         # All of the following also should be ready because...
         # ...this is essentially the same as trunk snapshot
-        r = self.app.get('/p/test/svn-tags/19/tarball_status?path=/trunk/some/path/')
+        r = self.app.get(
+            '/p/test/svn-tags/19/tarball_status?path=/trunk/some/path/')
         assert_equal(r.json['status'], 'complete')
         r = self.app.get('/p/test/svn-tags/19/tarball_status')
         assert_equal(r.json['status'], 'complete')
@@ -267,7 +279,8 @@ class TestRootController(SVNTestController):
         r = self.app.get('/p/test/svn-tags/19/tarball_status?path=/branches/')
         assert_equal(r.json['status'], 'complete')
         # ...this is essentially the same as tag snapshot
-        r = self.app.get('/p/test/svn-tags/19/tarball_status?path=/tags/tag-1.0/dir')
+        r = self.app.get(
+            '/p/test/svn-tags/19/tarball_status?path=/tags/tag-1.0/dir')
         assert_equal(r.json['status'], 'complete')
 
 
@@ -297,6 +310,7 @@ class TestImportController(SVNTestController):
 
 
 class SVNTestRenames(TestController):
+
     def setUp(self):
         TestController.setUp(self)
         self.setup_with_tools()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/tests/model/test_repository.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/tests/model/test_repository.py b/ForgeSVN/forgesvn/tests/model/test_repository.py
index 9990bdb..babafb3 100644
--- a/ForgeSVN/forgesvn/tests/model/test_repository.py
+++ b/ForgeSVN/forgesvn/tests/model/test_repository.py
@@ -47,6 +47,7 @@ from forgesvn.model.svn import svn_path_exists
 from forgesvn.tests import with_svn
 from allura.tests.decorators import with_tool
 
+
 class TestNewRepo(unittest.TestCase):
 
     def setUp(self):
@@ -62,9 +63,9 @@ class TestNewRepo(unittest.TestCase):
         self.repo = SM.Repository(
             name='testsvn',
             fs_path=repo_dir,
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         self.repo.refresh()
         self.rev = self.repo.commit('HEAD')
         ThreadLocalORMSession.flush_all()
@@ -96,7 +97,8 @@ class TestNewRepo(unittest.TestCase):
         self.rev.tree.by_name['README']
         assert self.rev.tree.is_blob('README') == True
         assert self.rev.tree['a']['b']['c'].ls() == []
-        self.assertRaises(KeyError, lambda:self.rev.tree['a']['b']['d'])
+        self.assertRaises(KeyError, lambda: self.rev.tree['a']['b']['d'])
+
 
 class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
 
@@ -114,16 +116,16 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
         self.repo = SM.Repository(
             name='testsvn',
             fs_path=repo_dir,
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         self.repo.refresh()
         self.svn_tags = SM.Repository(
             name='testsvn-trunk-tags-branches',
             fs_path=repo_dir,
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         self.svn_tags.refresh()
         ThreadLocalORMSession.flush_all()
         ThreadLocalORMSession.close_all()
@@ -131,10 +133,10 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
     def test_init(self):
         repo = SM.Repository(
             name='testsvn',
-            fs_path=g.tmpdir+'/',
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            fs_path=g.tmpdir + '/',
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         dirname = os.path.join(repo.fs_path, repo.name)
         if os.path.exists(dirname):
             shutil.rmtree(dirname)
@@ -144,10 +146,10 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
     def test_fork(self):
         repo = SM.Repository(
             name='testsvn',
-            fs_path=g.tmpdir+'/',
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            fs_path=g.tmpdir + '/',
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         repo_path = pkg_resources.resource_filename(
             'forgesvn', 'tests/data/testsvn')
         dirname = os.path.join(repo.fs_path, repo.name)
@@ -155,12 +157,16 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
             shutil.rmtree(dirname)
         repo.init()
         repo._impl.clone_from('file://' + repo_path)
-        assert not os.path.exists(os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
-        assert os.path.exists(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
-        assert os.access(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
+        assert not os.path.exists(
+            os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
+        assert os.path.exists(
+            os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
+        assert os.access(
+            os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
         with open(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit')) as f:
             c = f.read()
-        self.assertIn('curl -s http://localhost/auth/refresh_repo/p/test/src/\n', c)
+        self.assertIn(
+            'curl -s http://localhost/auth/refresh_repo/p/test/src/\n', c)
         self.assertIn('exec $DIR/post-commit-user "$@"\n', c)
 
         repo.refresh(notify=False)
@@ -182,8 +188,8 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
             stdout = combo[2]
             obj.check_call.return_value = stdout, ''
             expected = (source_url.startswith('file://') and
-                    tg.config['scm.svn.hotcopy'] and
-                    stdout != 'version 1.6')
+                        tg.config['scm.svn.hotcopy'] and
+                        stdout != 'version 1.6')
             result = func(obj, source_url)
             assert result == expected
 
@@ -191,10 +197,10 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
     def test_clone(self, post_event):
         repo = SM.Repository(
             name='testsvn',
-            fs_path=g.tmpdir+'/',
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            fs_path=g.tmpdir + '/',
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         repo_path = pkg_resources.resource_filename(
             'forgesvn', 'tests/data/testsvn')
         dirname = os.path.join(repo.fs_path, repo.name)
@@ -202,12 +208,16 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
             shutil.rmtree(dirname)
         repo.init()
         repo._impl.clone_from('file://' + repo_path)
-        assert not os.path.exists(os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
-        assert os.path.exists(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
-        assert os.access(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
+        assert not os.path.exists(
+            os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
+        assert os.path.exists(
+            os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
+        assert os.access(
+            os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
         with open(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit')) as f:
             c = f.read()
-        self.assertIn('curl -s http://localhost/auth/refresh_repo/p/test/src/\n', c)
+        self.assertIn(
+            'curl -s http://localhost/auth/refresh_repo/p/test/src/\n', c)
         self.assertIn('exec $DIR/post-commit-user "$@"\n', c)
 
         repo.refresh(notify=False)
@@ -277,9 +287,9 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
              'rename_details': {},
              'id': 3,
              'authored':
-                 {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
-                  'name': u'rick446',
-                  'email': ''},
+             {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
+              'name': u'rick446',
+              'email': ''},
              'size': 0},
             {'parents': [1],
              'refs': [],
@@ -337,7 +347,7 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
              'refs': [],
              'size': 15,
              'rename_details': {}},
-            ])
+        ])
 
     def test_is_file(self):
         assert self.repo.is_file('/README')
@@ -348,9 +358,9 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
         self.assertEqual(entry.diffs, entry.paged_diffs())
         self.assertEqual(entry.diffs, entry.paged_diffs(start=0))
         added_expected = entry.diffs.added[1:3]
-        expected =  dict(
-                copied=[], changed=[], removed=[],
-                added=added_expected, total=4)
+        expected = dict(
+            copied=[], changed=[], removed=[],
+            added=added_expected, total=4)
         actual = entry.paged_diffs(start=1, end=3)
         self.assertEqual(expected, actual)
 
@@ -411,20 +421,26 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
         with mock.patch('forgesvn.model.svn.pysvn') as pysvn:
             svn_path_exists('dummy')
             pysvn.Client.return_value.info2.assert_called_once_with(
-                    'dummy',
-                    revision=pysvn.Revision.return_value,
-                    recurse=False)
+                'dummy',
+                revision=pysvn.Revision.return_value,
+                recurse=False)
 
     @onlyif(os.path.exists(tg.config.get('scm.repos.tarball.zip_binary', '/usr/bin/zip')), 'zip binary is missing')
     def test_tarball(self):
         tmpdir = tg.config['scm.repos.tarball.root']
-        assert_equal(self.repo.tarball_path, os.path.join(tmpdir, 'svn/t/te/test/testsvn'))
-        assert_equal(self.repo.tarball_url('1'), 'file:///svn/t/te/test/testsvn/test-src-1.zip')
+        assert_equal(self.repo.tarball_path,
+                     os.path.join(tmpdir, 'svn/t/te/test/testsvn'))
+        assert_equal(self.repo.tarball_url('1'),
+                     'file:///svn/t/te/test/testsvn/test-src-1.zip')
         self.repo.tarball('1')
-        assert os.path.isfile(os.path.join(tmpdir, "svn/t/te/test/testsvn/test-src-1.zip"))
-        tarball_zip = ZipFile(os.path.join(tmpdir, 'svn/t/te/test/testsvn/test-src-1.zip'), 'r')
-        assert_equal(tarball_zip.namelist(), ['test-src-1/', 'test-src-1/README'])
-        shutil.rmtree(self.repo.tarball_path.encode('utf-8'), ignore_errors=True)
+        assert os.path.isfile(
+            os.path.join(tmpdir, "svn/t/te/test/testsvn/test-src-1.zip"))
+        tarball_zip = ZipFile(
+            os.path.join(tmpdir, 'svn/t/te/test/testsvn/test-src-1.zip'), 'r')
+        assert_equal(tarball_zip.namelist(),
+                     ['test-src-1/', 'test-src-1/README'])
+        shutil.rmtree(self.repo.tarball_path.encode('utf-8'),
+                      ignore_errors=True)
 
     @onlyif(os.path.exists(tg.config.get('scm.repos.tarball.zip_binary', '/usr/bin/zip')), 'zip binary is missing')
     def test_tarball_aware_of_tags(self):
@@ -434,7 +450,8 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
                               'test-svn-tags-19-tags-tag-1.0/README'])
         h.set_context('test', 'svn-tags', neighborhood='Projects')
         tmpdir = tg.config['scm.repos.tarball.root']
-        tarball_path = os.path.join(tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
+        tarball_path = os.path.join(
+            tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
         fn = tarball_path + 'test-svn-tags-19-tags-tag-1.0.zip'
         self.svn_tags.tarball(rev, '/tags/tag-1.0/')
         assert os.path.isfile(fn), fn
@@ -469,7 +486,8 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
                                  'test-svn-tags-19-branches-aaa/README'])
         h.set_context('test', 'svn-tags', neighborhood='Projects')
         tmpdir = tg.config['scm.repos.tarball.root']
-        tarball_path = os.path.join(tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
+        tarball_path = os.path.join(
+            tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
         fn = tarball_path + 'test-svn-tags-19-branches-aaa.zip'
         self.svn_tags.tarball(rev, '/branches/aaa/')
         assert os.path.isfile(fn), fn
@@ -505,7 +523,8 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
                                 'test-svn-tags-19-trunk/README'])
         h.set_context('test', 'svn-tags', neighborhood='Projects')
         tmpdir = tg.config['scm.repos.tarball.root']
-        tarball_path = os.path.join(tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
+        tarball_path = os.path.join(
+            tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
         fn = tarball_path + 'test-svn-tags-19-trunk.zip'
         self.svn_tags.tarball(rev, '/trunk/')
         assert os.path.isfile(fn), fn
@@ -532,7 +551,8 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
         assert os.path.isfile(fn), fn
         snapshot = ZipFile(fn, 'r')
         assert_equal(snapshot.namelist(), ['test-src-1/', 'test-src-1/README'])
-        shutil.rmtree(os.path.join(tmpdir, 'svn/t/te/test/testsvn/'), ignore_errors=True)
+        shutil.rmtree(os.path.join(tmpdir, 'svn/t/te/test/testsvn/'),
+                      ignore_errors=True)
         shutil.rmtree(tarball_path, ignore_errors=True)
 
     def test_is_empty(self):
@@ -541,15 +561,16 @@ class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
             repo2 = SM.Repository(
                 name='test',
                 fs_path=d.path,
-                url_path = '/test/',
-                tool = 'svn',
-                status = 'creating')
+                url_path='/test/',
+                tool='svn',
+                status='creating')
             repo2.init()
             assert repo2.is_empty()
             repo2.refresh()
             ThreadLocalORMSession.flush_all()
             assert repo2.is_empty()
 
+
 class TestSVNRev(unittest.TestCase):
 
     def setUp(self):
@@ -565,9 +586,9 @@ class TestSVNRev(unittest.TestCase):
         self.repo = SM.Repository(
             name='testsvn',
             fs_path=repo_dir,
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         self.repo.refresh()
         self.rev = self.repo.commit(1)
         ThreadLocalORMSession.flush_all()
@@ -584,9 +605,9 @@ class TestSVNRev(unittest.TestCase):
 
     def test_diff(self):
         diffs = (self.rev.diffs.added
-                 +self.rev.diffs.removed
-                 +self.rev.diffs.changed
-                 +self.rev.diffs.copied)
+                 + self.rev.diffs.removed
+                 + self.rev.diffs.changed
+                 + self.rev.diffs.copied)
         for d in diffs:
             print d
 
@@ -605,7 +626,8 @@ class TestSVNRev(unittest.TestCase):
         assert_equal(commits, [4, 2])
         commits = list(self.repo.log(3, 'a/b/c/', id_only=True))
         assert_equal(commits, [2])
-        assert_equal(list(self.repo.log(self.repo.head, 'does/not/exist', id_only=True)), [])
+        assert_equal(
+            list(self.repo.log(self.repo.head, 'does/not/exist', id_only=True)), [])
 
     def test_notification_email(self):
         setup_global_objects()
@@ -615,9 +637,9 @@ class TestSVNRev(unittest.TestCase):
         self.repo = SM.Repository(
             name='testsvn',
             fs_path=repo_dir,
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         self.repo.refresh()
         ThreadLocalORMSession.flush_all()
         send_notifications(self.repo, [self.repo.rev_to_commit_id(1)])
@@ -629,36 +651,37 @@ class TestSVNRev(unittest.TestCase):
 
 
 class _Test(unittest.TestCase):
-    idgen = ( 'obj_%d' % i for i in count())
+    idgen = ('obj_%d' % i for i in count())
 
     def _make_tree(self, object_id, **kwargs):
         t, isnew = M.repo.Tree.upsert(object_id)
         repo = getattr(self, 'repo', None)
         t.repo = repo
-        for k,v in kwargs.iteritems():
+        for k, v in kwargs.iteritems():
             if isinstance(v, basestring):
                 obj = M.repo.Blob(
                     t, k, self.idgen.next())
                 t.blob_ids.append(Object(
-                        name=k, id=obj._id))
+                    name=k, id=obj._id))
             else:
                 obj = self._make_tree(self.idgen.next(), **v)
                 t.tree_ids.append(Object(
-                        name=k, id=obj._id))
+                    name=k, id=obj._id))
         session(t).flush()
         return t
 
     def _make_commit(self, object_id, **tree_parts):
         ci, isnew = M.repo.Commit.upsert(object_id)
         if isnew:
-            ci.committed.email=c.user.email_addresses[0]
-            ci.authored.email=c.user.email_addresses[0]
+            ci.committed.email = c.user.email_addresses[0]
+            ci.authored.email = c.user.email_addresses[0]
             dt = datetime.utcnow()
             # BSON datetime resolution is to 1 millisecond, not 1 microsecond
             # like Python. Round this now so it'll match the value that's
             # pulled from MongoDB in the tests.
-            ci.authored.date = dt.replace(microsecond=dt.microsecond/1000 * 1000)
-            ci.message='summary\n\nddescription'
+            ci.authored.date = dt.replace(
+                microsecond=dt.microsecond / 1000 * 1000)
+            ci.message = 'summary\n\nddescription'
             ci.set_context(self.repo)
             ci.tree_id = 't_' + object_id
             ci.tree = self._make_tree(ci.tree_id, **tree_parts)
@@ -677,7 +700,9 @@ class _Test(unittest.TestCase):
         ThreadLocalORMSession.close_all()
         self.prefix = tg.config.get('scm.repos.root', '/')
 
+
 class _TestWithRepo(_Test):
+
     def setUp(self):
         super(_TestWithRepo, self).setUp()
         h.set_context('test', neighborhood='Projects')
@@ -690,43 +715,47 @@ class _TestWithRepo(_Test):
             lambda *a, **kw: M.RepositoryImplementation.url_for_commit(
                 self.repo._impl, *a, **kw))
         self.repo._impl._repo = self.repo
-        self.repo._impl.all_commit_ids = lambda *a,**kw: []
+        self.repo._impl.all_commit_ids = lambda *a, **kw: []
         self.repo._impl.commit().symbolic_ids = None
         ThreadLocalORMSession.flush_all()
         # ThreadLocalORMSession.close_all()
 
+
 class _TestWithRepoAndCommit(_TestWithRepo):
+
     def setUp(self):
         super(_TestWithRepoAndCommit, self).setUp()
         self.ci, isnew = self._make_commit('foo')
         ThreadLocalORMSession.flush_all()
         # ThreadLocalORMSession.close_all()
 
+
 class TestRepo(_TestWithRepo):
 
     def test_create(self):
         assert self.repo.fs_path == os.path.join(self.prefix, 'svn/p/test/')
         assert self.repo.url_path == '/p/test/'
-        assert self.repo.full_fs_path == os.path.join(self.prefix, 'svn/p/test/test1')
+        assert self.repo.full_fs_path == os.path.join(
+            self.prefix, 'svn/p/test/test1')
 
     def test_passthrough(self):
         argless = ['init']
         for fn in argless:
             getattr(self.repo, fn)()
             getattr(self.repo._impl, fn).assert_called_with()
-        unary = [ 'commit', 'open_blob' ]
+        unary = ['commit', 'open_blob']
         for fn in unary:
             getattr(self.repo, fn)('foo')
             getattr(self.repo._impl, fn).assert_called_with('foo')
 
     def test_shorthand_for_commit(self):
         self.assertEqual(
-            self.repo.shorthand_for_commit('a'*40),
+            self.repo.shorthand_for_commit('a' * 40),
             '[aaaaaa]')
 
     def test_url_for_commit(self):
         self.assertEqual(
-            self.repo.url_for_commit('a'*40),
+            self.repo.url_for_commit('a' * 40),
             '/p/test/test1/ci/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/')
 
     @mock.patch('allura.model.repository.g.post_event')
@@ -764,11 +793,13 @@ class TestRepo(_TestWithRepo):
         session(M.MergeRequest).clear()
         assert self.repo.merge_requests_by_statuses('open').count() == 1
         assert self.repo.merge_requests_by_statuses('closed').count() == 1
-        assert self.repo.merge_requests_by_statuses('open', 'closed').count() == 2
+        assert self.repo.merge_requests_by_statuses(
+            'open', 'closed').count() == 2
 
     def test_guess_type(self):
         assert self.repo.guess_type('foo.txt') == ('text/plain', None)
-        assert self.repo.guess_type('foo.gbaer') == ('application/octet-stream', None)
+        assert self.repo.guess_type('foo.gbaer') == (
+            'application/octet-stream', None)
         assert self.repo.guess_type('foo.html') == ('text/html', None)
         assert self.repo.guess_type('.gitignore') == ('text/plain', None)
 
@@ -781,15 +812,19 @@ class TestRepo(_TestWithRepo):
         ci.committed.email = committer_email
         ci.author_url = '/u/test-committer/'
         self.repo._impl.commit = mock.Mock(return_value=ci)
-        self.repo._impl.new_commits = mock.Mock(return_value=['foo%d' % i for i in range(100) ])
-        self.repo._impl.all_commit_ids = mock.Mock(return_value=['foo%d' % i for i in range(100) ])
-        self.repo.symbolics_for_commit = mock.Mock(return_value=[['master', 'branch'], []])
+        self.repo._impl.new_commits = mock.Mock(
+            return_value=['foo%d' % i for i in range(100)])
+        self.repo._impl.all_commit_ids = mock.Mock(
+            return_value=['foo%d' % i for i in range(100)])
+        self.repo.symbolics_for_commit = mock.Mock(
+            return_value=[['master', 'branch'], []])
+
         def refresh_commit_info(oid, seen, lazy=False):
             M.repo.CommitDoc(dict(
-                    authored=dict(
-                        name=committer_name,
-                        email=committer_email),
-                    _id=oid)).m.insert()
+                authored=dict(
+                    name=committer_name,
+                    email=committer_email),
+                _id=oid)).m.insert()
         self.repo._impl.refresh_commit_info = refresh_commit_info
         _id = lambda oid: getattr(oid, '_id', str(oid))
         self.repo.shorthand_for_commit = lambda oid: '[' + _id(oid) + ']'
@@ -809,9 +844,11 @@ class TestRepo(_TestWithRepo):
     def test_refresh_private(self):
         ci = mock.Mock()
         self.repo._impl.commit = mock.Mock(return_value=ci)
-        self.repo._impl.new_commits = mock.Mock(return_value=['foo%d' % i for i in range(100) ])
+        self.repo._impl.new_commits = mock.Mock(
+            return_value=['foo%d' % i for i in range(100)])
 
-        # make unreadable by *anonymous, so additional notification logic executes
+        # make unreadable by *anonymous, so additional notification logic
+        # executes
         self.repo.acl = []
         c.project.acl = []
 
@@ -822,7 +859,7 @@ class TestRepo(_TestWithRepo):
         old_app_instance = M.Project.app_instance
         try:
             M.Project.app_instance = mock.Mock(return_value=ming.base.Object(
-                    config=ming.base.Object(_id=None)))
+                config=ming.base.Object(_id=None)))
             with self.repo.push_upstream_context():
                 assert c.project.shortname == 'test'
         finally:
@@ -833,11 +870,12 @@ class TestRepo(_TestWithRepo):
         old_app_instance = M.Project.app_instance
         try:
             M.Project.app_instance = mock.Mock(return_value=ming.base.Object(
-                    config=ming.base.Object(_id=None)))
+                config=ming.base.Object(_id=None)))
             self.repo.pending_upstream_merges()
         finally:
             M.Project.app_instance = old_app_instance
 
+
 class TestMergeRequest(_TestWithRepoAndCommit):
 
     def setUp(self):
@@ -846,8 +884,8 @@ class TestMergeRequest(_TestWithRepoAndCommit):
         h.set_context('test', 'test2', neighborhood='Projects')
         self.repo2 = M.Repository(name='test2', tool='svn')
         self.repo2._impl = mock.Mock(spec=M.RepositoryImplementation())
-        self.repo2._impl.log = lambda *a,**kw:(['foo'], [])
-        self.repo2._impl.all_commit_ids = lambda *a,**kw: []
+        self.repo2._impl.log = lambda *a, **kw: (['foo'], [])
+        self.repo2._impl.all_commit_ids = lambda *a, **kw: []
         self.repo2._impl._repo = self.repo2
         self.repo2.init_as_clone('/p/test/', 'test1', '/p/test/test1/')
         ThreadLocalORMSession.flush_all()
@@ -868,19 +906,24 @@ class TestMergeRequest(_TestWithRepoAndCommit):
         assert_equal(mr.creator_name,  u.get_pref('display_name'))
         assert_equal(mr.creator_url,  u.url())
         assert_equal(mr.downstream_url,  '/p/test/test2/')
-        assert_equal(mr.downstream_repo_url,  'http://svn.localhost/p/test/test2/')
+        assert_equal(mr.downstream_repo_url,
+                     'http://svn.localhost/p/test/test2/')
         with mock.patch('forgesvn.model.svn.SVNLibWrapper') as _svn,\
-             mock.patch('forgesvn.model.svn.SVNImplementation._map_log') as _map_log:
+                mock.patch('forgesvn.model.svn.SVNImplementation._map_log') as _map_log:
             mr.app.repo._impl.head = 1
             _svn().log.return_value = [mock.Mock(revision=mock.Mock(number=2))]
             _map_log.return_value = 'bar'
             assert_equal(mr.commits,  ['bar'])
-            # can't do assert_called_once_with because pysvn.Revision doesn't compare nicely
+            # can't do assert_called_once_with because pysvn.Revision doesn't
+            # compare nicely
             assert_equal(_svn().log.call_count, 1)
-            assert_equal(_svn().log.call_args[0], ('file:///tmp/svn/p/test/test2',))
+            assert_equal(_svn().log.call_args[0],
+                         ('file:///tmp/svn/p/test/test2',))
             assert_equal(_svn().log.call_args[1]['revision_start'].number, 2)
             assert_equal(_svn().log.call_args[1]['limit'], 25)
-            _map_log.assert_called_once_with(_svn().log.return_value[0], 'file:///tmp/svn/p/test/test2', None)
+            _map_log.assert_called_once_with(
+                _svn().log.return_value[0], 'file:///tmp/svn/p/test/test2', None)
+
 
 class TestRepoObject(_TestWithRepoAndCommit):
 
@@ -891,7 +934,8 @@ class TestRepoObject(_TestWithRepoAndCommit):
         assert isnew0 and not isnew1
 
     def test_artifact_methods(self):
-        assert self.ci.index_id() == 'allura/model/repo/Commit#foo', self.ci.index_id()
+        assert self.ci.index_id(
+        ) == 'allura/model/repo/Commit#foo', self.ci.index_id()
         assert self.ci.primary() is self.ci, self.ci.primary()
 
 
@@ -946,17 +990,18 @@ class TestCommit(_TestWithRepo):
         M.repo_refresh.refresh_commit_trees(self.ci, {})
         M.repo_refresh.compute_diffs(self.repo._id, {}, self.ci)
         # self.ci.compute_diffs()
-        assert_equal(self.ci.diffs.added, [ 'a', 'a/a', 'a/a/a', 'a/a/b', 'a/b' ])
+        assert_equal(self.ci.diffs.added,
+                     ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
         assert (self.ci.diffs.copied
                 == self.ci.diffs.changed
                 == self.ci.diffs.removed
                 == [])
         ci, isnew = self._make_commit('bar')
-        ci.parent_ids = [ 'foo' ]
+        ci.parent_ids = ['foo']
         self._make_log(ci)
         M.repo_refresh.refresh_commit_trees(ci, {})
         M.repo_refresh.compute_diffs(self.repo._id, {}, ci)
-        assert_equal(ci.diffs.removed, [ 'a', 'a/a', 'a/a/a', 'a/a/b', 'a/b' ])
+        assert_equal(ci.diffs.removed, ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
         assert (ci.diffs.copied
                 == ci.diffs.changed
                 == ci.diffs.added
@@ -968,12 +1013,12 @@ class TestCommit(_TestWithRepo):
                     a='',
                     b='',),
                 b=''))
-        ci.parent_ids = [ 'foo' ]
+        ci.parent_ids = ['foo']
         self._make_log(ci)
         M.repo_refresh.refresh_commit_trees(ci, {})
         M.repo_refresh.compute_diffs(self.repo._id, {}, ci)
-        assert_equal(ci.diffs.added, [ 'b', 'b/a', 'b/a/a', 'b/a/b', 'b/b' ])
-        assert_equal(ci.diffs.removed, [ 'a', 'a/a', 'a/a/a', 'a/a/b', 'a/b' ])
+        assert_equal(ci.diffs.added, ['b', 'b/a', 'b/a/a', 'b/a/b', 'b/b'])
+        assert_equal(ci.diffs.removed, ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
         assert (ci.diffs.copied
                 == ci.diffs.changed
                 == [])
@@ -986,7 +1031,8 @@ class TestCommit(_TestWithRepo):
                 u'/b/a/b': u'Luke Skywalker',
                 u'/b/b': u'Death Star will destroy you',
                 u'/b/c': u'Luke Skywalker',  # moved from /b/a/b
-                u'/b/a/z': u'Death Star will destroy you\nALL',  # moved from /b/b and modified
+                # moved from /b/b and modified
+                u'/b/a/z': u'Death Star will destroy you\nALL',
             }
             from cStringIO import StringIO
             return StringIO(blobs.get(blob.path(), ''))
@@ -995,7 +1041,8 @@ class TestCommit(_TestWithRepo):
         self.repo._impl.commit = mock.Mock(return_value=self.ci)
         M.repo_refresh.refresh_commit_trees(self.ci, {})
         M.repo_refresh.compute_diffs(self.repo._id, {}, self.ci)
-        assert_equal(self.ci.diffs.added, ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
+        assert_equal(self.ci.diffs.added,
+                     ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
         assert (self.ci.diffs.copied
                 == self.ci.diffs.changed
                 == self.ci.diffs.removed
@@ -1061,9 +1108,9 @@ class TestRename(unittest.TestCase):
         self.repo = SM.Repository(
             name='testsvn-rename',
             fs_path=repo_dir,
-            url_path = '/test/',
-            tool = 'svn',
-            status = 'creating')
+            url_path='/test/',
+            tool='svn',
+            status='creating')
         self.repo.refresh()
         self.rev = self.repo.commit('HEAD')
         ThreadLocalORMSession.flush_all()
@@ -1079,7 +1126,8 @@ class TestRename(unittest.TestCase):
         )
 
     def test_check_changed_path(self):
-        changed_path = {'copyfrom_path':'/test/path', 'path':'/test/path2'}
-        result = self.repo._impl._check_changed_path(changed_path, '/test/path2/file.txt')
-        assert_equal({'path': '/test/path2/file.txt', 'copyfrom_path': '/test/path/file.txt'}, result)
-
+        changed_path = {'copyfrom_path': '/test/path', 'path': '/test/path2'}
+        result = self.repo._impl._check_changed_path(
+            changed_path, '/test/path2/file.txt')
+        assert_equal({'path': '/test/path2/file.txt',
+                     'copyfrom_path': '/test/path/file.txt'}, result)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/tests/model/test_svnimplementation.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/tests/model/test_svnimplementation.py b/ForgeSVN/forgesvn/tests/model/test_svnimplementation.py
index fbd92b8..b9c75d3 100644
--- a/ForgeSVN/forgesvn/tests/model/test_svnimplementation.py
+++ b/ForgeSVN/forgesvn/tests/model/test_svnimplementation.py
@@ -40,7 +40,7 @@ class TestSVNImplementation(object):
     @patch('allura.model.repo.Tree.upsert')
     @patch('allura.model.repo.Tree.query.get')
     def _test_compute_tree_new(self, path, tree_get, tree_upsert, treesdoc_partial, lcd_partial):
-        repo = Mock(fs_path=g.tmpdir+'/')
+        repo = Mock(fs_path=g.tmpdir + '/')
         repo.name = 'code'
         impl = SVNImplementation(repo)
         impl._svn.info2 = Mock()
@@ -52,11 +52,11 @@ class TestSVNImplementation(object):
 
         tree_id = impl.compute_tree_new(commit, path)
 
-        assert_equal(impl._svn.info2.call_args[0][0], 'file://'+g.tmpdir+'/code/trunk/foo')
+        assert_equal(impl._svn.info2.call_args[0]
+                     [0], 'file://' + g.tmpdir + '/code/trunk/foo')
         treesdoc_partial.assert_called()
         lcd_partial.assert_called()
 
-
     def test_last_commit_ids(self):
         self._test_last_commit_ids('/trunk/foo/')
         self._test_last_commit_ids('/trunk/foo')
@@ -64,7 +64,7 @@ class TestSVNImplementation(object):
         self._test_last_commit_ids('trunk/foo')
 
     def _test_last_commit_ids(self, path):
-        repo = Mock(fs_path=g.tmpdir+'/')
+        repo = Mock(fs_path=g.tmpdir + '/')
         repo.name = 'code'
         repo._id = '5057636b9c1040636b81e4b1'
         impl = SVNImplementation(repo)
@@ -76,11 +76,12 @@ class TestSVNImplementation(object):
         entries = impl.last_commit_ids(commit, [path])
 
         assert_equal(entries, {path.strip('/'): '5057636b9c1040636b81e4b1:1'})
-        assert_equal(impl._svn.info2.call_args[0][0], 'file://'+g.tmpdir+'/code/trunk')
+        assert_equal(impl._svn.info2.call_args[0]
+                     [0], 'file://' + g.tmpdir + '/code/trunk')
 
     @patch('forgesvn.model.svn.svn_path_exists')
     def test__path_to_root(self, path_exists):
-        repo = Mock(fs_path=g.tmpdir+'/')
+        repo = Mock(fs_path=g.tmpdir + '/')
         repo.name = 'code'
         repo._id = '5057636b9c1040636b81e4b1'
         impl = SVNImplementation(repo)
@@ -91,15 +92,20 @@ class TestSVNImplementation(object):
         assert_equal(impl._path_to_root('/some/path/'), '')
         assert_equal(impl._path_to_root('some/path'), '')
         # tags
-        assert_equal(impl._path_to_root('/some/path/tags/1.0/some/dir'), 'some/path/tags/1.0')
-        assert_equal(impl._path_to_root('/some/path/tags/1.0/'), 'some/path/tags/1.0')
+        assert_equal(impl._path_to_root('/some/path/tags/1.0/some/dir'),
+                     'some/path/tags/1.0')
+        assert_equal(impl._path_to_root('/some/path/tags/1.0/'),
+                     'some/path/tags/1.0')
         assert_equal(impl._path_to_root('/some/path/tags/'), '')
         # branches
-        assert_equal(impl._path_to_root('/some/path/branches/b1/dir'), 'some/path/branches/b1')
-        assert_equal(impl._path_to_root('/some/path/branches/b1/'), 'some/path/branches/b1')
+        assert_equal(impl._path_to_root('/some/path/branches/b1/dir'),
+                     'some/path/branches/b1')
+        assert_equal(impl._path_to_root('/some/path/branches/b1/'),
+                     'some/path/branches/b1')
         assert_equal(impl._path_to_root('/some/path/branches/'), '')
         # trunk
-        assert_equal(impl._path_to_root('/some/path/trunk/some/dir/'), 'some/path/trunk')
+        assert_equal(impl._path_to_root('/some/path/trunk/some/dir/'),
+                     'some/path/trunk')
         assert_equal(impl._path_to_root('/some/path/trunk'), 'some/path/trunk')
         # with fallback to trunk
         path_exists.return_value = True

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/tests/test_svn_app.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/tests/test_svn_app.py b/ForgeSVN/forgesvn/tests/test_svn_app.py
index a5ffed7..7fd8545 100644
--- a/ForgeSVN/forgesvn/tests/test_svn_app.py
+++ b/ForgeSVN/forgesvn/tests/test_svn_app.py
@@ -25,6 +25,7 @@ from alluratest.controller import setup_basic_test, setup_global_objects
 from allura.lib import helpers as h
 from forgesvn.tests import with_svn
 
+
 class TestSVNApp(unittest.TestCase):
 
     def setUp(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/forgesvn/tests/test_tasks.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/forgesvn/tests/test_tasks.py b/ForgeSVN/forgesvn/tests/test_tasks.py
index bd2f723..fb48d56 100644
--- a/ForgeSVN/forgesvn/tests/test_tasks.py
+++ b/ForgeSVN/forgesvn/tests/test_tasks.py
@@ -35,6 +35,7 @@ from allura.tasks import repo_tasks
 
 from forgesvn.tests import with_svn
 
+
 class TestRepoTasks(unittest.TestCase):
 
     def setUp(self):
@@ -76,5 +77,6 @@ class TestRepoTasks(unittest.TestCase):
     def test_uninstall(self):
         with mock.patch.object(shutil, 'rmtree') as f:
             repo_tasks.uninstall()
-            f.assert_called_with(os.path.join(tg.config['scm.repos.root'], 'svn/p/test/src'),
-                                 ignore_errors=True)
+            f.assert_called_with(
+                os.path.join(tg.config['scm.repos.root'], 'svn/p/test/src'),
+                ignore_errors=True)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeSVN/setup.py
----------------------------------------------------------------------
diff --git a/ForgeSVN/setup.py b/ForgeSVN/setup.py
index d3b1154..a14fdd3 100644
--- a/ForgeSVN/setup.py
+++ b/ForgeSVN/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgesvn.version import __version__
 
@@ -34,7 +35,8 @@ setup(name='ForgeSVN',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeShortUrl/forgeshorturl/main.py
----------------------------------------------------------------------
diff --git a/ForgeShortUrl/forgeshorturl/main.py b/ForgeShortUrl/forgeshorturl/main.py
index 691d25d..72c7b1e 100644
--- a/ForgeShortUrl/forgeshorturl/main.py
+++ b/ForgeShortUrl/forgeshorturl/main.py
@@ -134,6 +134,7 @@ class ForgeShortUrlApp(Application):
 
 
 class RootController(BaseController):
+
     def __init__(self):
         c.short_url_lightbox = W.short_url_lightbox
 
@@ -203,11 +204,11 @@ class ShortURLAdminController(DefaultAdminController):
         validators.NotEmpty(),
         validators.Regex(
             r'^[-_a-zA-Z0-9]+$',
-            messages={'invalid': 'must include only letters, numbers, dashes and underscores.'}
+            messages={'invalid':
+                      'must include only letters, numbers, dashes and underscores.'}
         )
     )
 
-
     def __init__(self, app):
         self.app = app
 
@@ -253,7 +254,7 @@ class ShortURLAdminController(DefaultAdminController):
                     redirect(request.referer)
                 else:
                     msg = ('update short url %s from %s to %s'
-                            % (short_url, shorturl.full_url, full_url))
+                           % (short_url, shorturl.full_url, full_url))
                     flash("Short url updated")
 
             else:
@@ -273,5 +274,5 @@ class ShortURLAdminController(DefaultAdminController):
             M.AuditLog.log(msg)
             redirect(request.referer)
         return dict(
-                app=self.app,
-                url_len=len(ShortUrl.build_short_url(c.app, short_name='')))
+            app=self.app,
+            url_len=len(ShortUrl.build_short_url(c.app, short_name='')))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeShortUrl/forgeshorturl/model/shorturl.py
----------------------------------------------------------------------
diff --git a/ForgeShortUrl/forgeshorturl/model/shorturl.py b/ForgeShortUrl/forgeshorturl/model/shorturl.py
index 41617bd..b42d30d 100644
--- a/ForgeShortUrl/forgeshorturl/model/shorturl.py
+++ b/ForgeShortUrl/forgeshorturl/model/shorturl.py
@@ -54,7 +54,7 @@ class ShortUrl(M.Artifact):
         except pymongo.errors.DuplicateKeyError:
             session(u).expunge(u)
             u = cls.query.get(short_name=shortname,
-                    app_config_id=c.app.config._id)
+                              app_config_id=c.app.config._id)
         return u
 
     def index(self):
@@ -74,11 +74,11 @@ class ShortUrl(M.Artifact):
     @classmethod
     def build_short_url(cls, app, short_name):
         return config['short_url.url_pattern'].format(
-                base_url=config['base_url'],
-                nbhd=app.project.neighborhood.url_prefix.strip('/'),
-                project=app.project.shortname,
-                mount_point=app.config.options.mount_point,
-                short_name=short_name)
+            base_url=config['base_url'],
+            nbhd=app.project.neighborhood.url_prefix.strip('/'),
+            project=app.project.shortname,
+            mount_point=app.config.options.mount_point,
+            short_name=short_name)
 
     def short_url(self):
         return self.build_short_url(self.app, self.short_name)
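
A quick illustration of the build_short_url change above: the method only
fills a configurable format string with the neighborhood prefix, project
shortname, mount point and short name. Using the placeholder pattern from the
functional tests further down (all values here are made-up examples):

    # Pattern and values are illustrative, mirroring the test configuration.
    url_pattern = '{base_url}:{nbhd}:{project}:{mount_point}:{short_name}'
    short_url = url_pattern.format(
        base_url='b',
        nbhd='p',          # neighborhood url_prefix with '/' stripped
        project='test',
        mount_point='url',
        short_name='g')
    print(short_url)       # -> b:p:test:url:g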

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeShortUrl/forgeshorturl/tests/functional/test.py
----------------------------------------------------------------------
diff --git a/ForgeShortUrl/forgeshorturl/tests/functional/test.py b/ForgeShortUrl/forgeshorturl/tests/functional/test.py
index 997050f..67931aa 100644
--- a/ForgeShortUrl/forgeshorturl/tests/functional/test.py
+++ b/ForgeShortUrl/forgeshorturl/tests/functional/test.py
@@ -28,6 +28,7 @@ from forgeshorturl.model import ShortUrl
 
 
 class TestRootController(TestController):
+
     def setUp(self):
         super(TestRootController, self).setUp()
         self.setup_with_tools()
@@ -109,32 +110,38 @@ class TestRootController(TestController):
     def test_shorturl_chars_restrictions(self):
         d = dict(short_url='', full_url='http://sf.net/')
         r = self.app.post('/admin/url/add', params=d)
-        assert ShortUrl.query.find(dict(app_config_id=c.app.config._id)).count() == 0
+        assert ShortUrl.query.find(
+            dict(app_config_id=c.app.config._id)).count() == 0
         assert 'Please enter a value' in self.webflash(r)
         d = dict(short_url='g*', full_url='http://sf.net/')
         r = self.app.post('/admin/url/add', params=d)
-        assert ShortUrl.query.find(dict(app_config_id=c.app.config._id)).count() == 0
-        assert 'Short url: must include only letters, numbers, dashes and underscores.' in self.webflash(r)
+        assert ShortUrl.query.find(
+            dict(app_config_id=c.app.config._id)).count() == 0
+        assert 'Short url: must include only letters, numbers, dashes and underscores.' in self.webflash(
+            r)
 
     def test_shorturl_remove(self):
         self.app.post('/admin/url/add',
-                params=dict(short_url='g', full_url='http://google.com/'))
-        assert ShortUrl.query.find(dict(app_config_id=c.app.config._id)).count() == 1
+                      params=dict(short_url='g', full_url='http://google.com/'))
+        assert ShortUrl.query.find(
+            dict(app_config_id=c.app.config._id)).count() == 1
         self.app.post('/admin/url/remove', params=dict(shorturl='g'))
-        assert ShortUrl.query.find(dict(app_config_id=c.app.config._id)).count() == 0
+        assert ShortUrl.query.find(
+            dict(app_config_id=c.app.config._id)).count() == 0
 
     def test_shorturl_permissions(self):
         self.app.post('/admin/url/add',
-                params=dict(short_url='g', full_url='http://google.com/'),
-                extra_environ=dict(username='test-user'), status=403)
+                      params=dict(short_url='g',
+                                  full_url='http://google.com/'),
+                      extra_environ=dict(username='test-user'), status=403)
         self.app.post('/admin/url/remove', params=dict(shorturl='g'),
-                extra_environ=dict(username='test-user'), status=403)
+                      extra_environ=dict(username='test-user'), status=403)
 
     def test_build_short_url(self):
         with h.push_config(config, **{
                 'short_url.url_pattern': '{base_url}:{nbhd}:{project}:{mount_point}:{short_name}',
                 'base_url': 'b',
-            }):
+        }):
             nbhd = mock.Mock(url_prefix='/n/')
             project = mock.Mock(shortname='p', neighborhood=nbhd)
             app = mock.Mock(project=project)
@@ -155,5 +162,5 @@ class TestRootController(TestController):
         with h.push_config(config, **{
                 'short_url.url_pattern': '{base_url}:{nbhd}:{project}:{mount_point}:{short_name}',
                 'base_url': 'b',
-            }):
+        }):
             assert_equal(surl.short_url(), 'b:p:test:url:test')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeShortUrl/forgeshorturl/widgets/short_url.py
----------------------------------------------------------------------
diff --git a/ForgeShortUrl/forgeshorturl/widgets/short_url.py b/ForgeShortUrl/forgeshorturl/widgets/short_url.py
index 51f14f4..0094402 100644
--- a/ForgeShortUrl/forgeshorturl/widgets/short_url.py
+++ b/ForgeShortUrl/forgeshorturl/widgets/short_url.py
@@ -20,7 +20,7 @@ from allura.lib.widgets import form_fields as ffw
 
 class ShortUrlFormWidget(ffw.Lightbox):
     defaults = dict(
-            ffw.Lightbox.defaults,
-            name='short-url-modal',
-            trigger='a.add-short-url, a.update-short-url',
-            content_template='forgeshorturl:templates/form.html')
+        ffw.Lightbox.defaults,
+        name='short-url-modal',
+        trigger='a.add-short-url, a.update-short-url',
+        content_template='forgeshorturl:templates/form.html')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/command/fix_discussion.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/command/fix_discussion.py b/ForgeTracker/forgetracker/command/fix_discussion.py
index c1f0e7d..95bddb4 100644
--- a/ForgeTracker/forgetracker/command/fix_discussion.py
+++ b/ForgeTracker/forgetracker/command/fix_discussion.py
@@ -27,6 +27,7 @@ from forgetracker.model import Ticket
 
 
 class FixDiscussion(base.Command):
+
     """Fixes trackers that had used buggy 'ticket move' feature before it was fixed.
 
     See [#5727] for details.
@@ -59,41 +60,44 @@ class FixDiscussion(base.Command):
                 ]})
                 if projects.count() > 1:
                     raise exc.ForgeError('Multiple projects has a shortname %s. '
-                            'Use project _id instead.' % p_name_or_id)
+                                         'Use project _id instead.' % p_name_or_id)
                 project = projects.first()
             if not project:
                 raise exc.NoSuchProjectError('The project %s '
-                        'could not be found' % p_name_or_id)
+                                             'could not be found' % p_name_or_id)
 
             self.fix_for_project(project)
         else:
-            base.log.info('Checking discussion instances for each tracker in all projects')
+            base.log.info(
+                'Checking discussion instances for each tracker in all projects')
             for project in M.Project.query.find():
                 self.fix_for_project(project)
 
     def fix_for_project(self, project):
         c.project = project
-        base.log.info('Checking discussion instances for each tracker in project %s' % project.shortname)
+        base.log.info(
+            'Checking discussion instances for each tracker in project %s' %
+            project.shortname)
         trackers = [ac for ac in project.app_configs
-                       if ac.tool_name.lower() == 'tickets']
+                    if ac.tool_name.lower() == 'tickets']
         for tracker in trackers:
             base.log.info('Found tracker %s' % tracker)
             for ticket in Ticket.query.find({'app_config_id': tracker._id}):
                 base.log.info('Processing ticket %s [#%s] %s'
-                        % (ticket._id, ticket.ticket_num, ticket.summary))
+                              % (ticket._id, ticket.ticket_num, ticket.summary))
                 if ticket.discussion_thread.discussion.app_config_id != tracker._id:
                     # Some tickets were moved from this tracker,
                     # and Discussion instance for entire tracker was moved too.
                     # Should move it back.
                     base.log.info("Some tickets were moved from this tracker. "
-                            "Moving tracker's discussion instance back.")
+                                  "Moving tracker's discussion instance back.")
                     ticket.discussion_thread.discussion.app_config_id = tracker._id
 
                 if ticket.discussion_thread.discussion_id != tracker.discussion_id:
                     # Ticket was moved from another tracker.
                     # Should bind his comment thread to tracker's Discussion
                     base.log.info("Ticket was moved from another tracker. "
-                            "Bind ticket's comment thread to tracker's Discussion instance.")
+                                  "Bind ticket's comment thread to tracker's Discussion instance.")
                     ticket.discussion_thread.discussion_id = tracker.discussion_id
                     for post in ticket.discussion_thread.posts:
                         post.discussion_id = tracker.discussion_id

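For readers skimming the diff: FixDiscussion walks every tickets tool in a project and repairs threads that the old 'ticket move' feature left pointing at the wrong Discussion. A minimal sketch of the repair step the hunk above reformats (illustrative only, reusing the attribute names the command itself touches):

    def rebind_ticket(ticket, tracker):
        # The tracker-wide Discussion instance was dragged along with a moved
        # ticket; point it back at this tracker.
        if ticket.discussion_thread.discussion.app_config_id != tracker._id:
            ticket.discussion_thread.discussion.app_config_id = tracker._id
        # The ticket itself came from another tracker; rebind its thread and
        # every post to this tracker's Discussion.
        if ticket.discussion_thread.discussion_id != tracker.discussion_id:
            ticket.discussion_thread.discussion_id = tracker.discussion_id
            for post in ticket.discussion_thread.posts:
                post.discussion_id = tracker.discussion_id
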
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/config/resources.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/config/resources.py b/ForgeTracker/forgetracker/config/resources.py
index efcc766..c62d4c4 100644
--- a/ForgeTracker/forgetracker/config/resources.py
+++ b/ForgeTracker/forgetracker/config/resources.py
@@ -17,8 +17,9 @@
 
 import pkg_resources
 
+
 def register_ew_resources(manager):
     manager.register_directory(
         'tracker_js', pkg_resources.resource_filename('forgetracker', 'widgets/resources/js'))
     manager.register_directory(
-        'tracker_css', pkg_resources.resource_filename('forgetracker', 'widgets/resources/css'))
\ No newline at end of file
+        'tracker_css', pkg_resources.resource_filename('forgetracker', 'widgets/resources/css'))


[09/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/google/test_extractor.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/google/test_extractor.py b/ForgeImporters/forgeimporters/tests/google/test_extractor.py
index 0752755..3635ac5 100644
--- a/ForgeImporters/forgeimporters/tests/google/test_extractor.py
+++ b/ForgeImporters/forgeimporters/tests/google/test_extractor.py
@@ -30,6 +30,7 @@ from forgeimporters import base
 
 
 class TestGoogleCodeProjectExtractor(TestCase):
+
     def setUp(self):
         self._p_urlopen = mock.patch.object(base.ProjectExtractor, 'urlopen')
         # self._p_soup = mock.patch.object(google, 'BeautifulSoup')
@@ -50,24 +51,30 @@ class TestGoogleCodeProjectExtractor(TestCase):
                     raise
 
     def test_init(self):
-        extractor = google.GoogleCodeProjectExtractor('my-project', 'project_info')
+        extractor = google.GoogleCodeProjectExtractor(
+            'my-project', 'project_info')
 
-        self.urlopen.assert_called_once_with('http://code.google.com/p/my-project/')
+        self.urlopen.assert_called_once_with(
+            'http://code.google.com/p/my-project/')
         self.soup.assert_called_once_with(self.urlopen.return_value)
         self.assertEqual(extractor.page, self.soup.return_value)
 
     def test_get_page(self):
-        extractor = google.GoogleCodeProjectExtractor('my-project', 'project_info')
+        extractor = google.GoogleCodeProjectExtractor(
+            'my-project', 'project_info')
         self.assertEqual(1, self.urlopen.call_count)
         page = extractor.get_page('project_info')
         self.assertEqual(1, self.urlopen.call_count)
-        self.assertEqual(page, extractor._page_cache['http://code.google.com/p/my-project/'])
+        self.assertEqual(
+            page, extractor._page_cache['http://code.google.com/p/my-project/'])
         page = extractor.get_page('project_info')
         self.assertEqual(1, self.urlopen.call_count)
-        self.assertEqual(page, extractor._page_cache['http://code.google.com/p/my-project/'])
+        self.assertEqual(
+            page, extractor._page_cache['http://code.google.com/p/my-project/'])
         page = extractor.get_page('source_browse')
         self.assertEqual(2, self.urlopen.call_count)
-        self.assertEqual(page, extractor._page_cache['http://code.google.com/p/my-project/source/browse/'])
+        self.assertEqual(
+            page, extractor._page_cache['http://code.google.com/p/my-project/source/browse/'])
         parser = mock.Mock(return_value='parsed')
         page = extractor.get_page('url', parser=parser)
         self.assertEqual(page, 'parsed')
@@ -76,10 +83,11 @@ class TestGoogleCodeProjectExtractor(TestCase):
     def test_get_page_url(self):
         extractor = google.GoogleCodeProjectExtractor('my-project')
         self.assertEqual(extractor.get_page_url('project_info'),
-                'http://code.google.com/p/my-project/')
+                         'http://code.google.com/p/my-project/')
 
     def test_get_short_description(self):
-        extractor = google.GoogleCodeProjectExtractor('my-project', 'project_info')
+        extractor = google.GoogleCodeProjectExtractor(
+            'my-project', 'project_info')
         extractor.page.find.return_value.text = 'My Super Project'
 
         extractor.get_short_description(self.project)
@@ -92,22 +100,25 @@ class TestGoogleCodeProjectExtractor(TestCase):
     def test_get_icon(self, M, File):
         File.return_value.type = 'image/png'
         File.return_value.file = 'data'
-        extractor = google.GoogleCodeProjectExtractor('my-project', 'project_info')
+        extractor = google.GoogleCodeProjectExtractor(
+            'my-project', 'project_info')
         extractor.page.find.return_value.get.return_value = 'http://example.com/foo/bar/my-logo.png'
 
         extractor.get_icon(self.project)
 
         extractor.page.find.assert_called_once_with(itemprop='image')
-        File.assert_called_once_with('http://example.com/foo/bar/my-logo.png', 'my-logo.png')
+        File.assert_called_once_with(
+            'http://example.com/foo/bar/my-logo.png', 'my-logo.png')
         M.ProjectFile.save_image.assert_called_once_with(
             'my-logo.png', 'data', 'image/png', square=True,
-            thumbnail_size=(48,48), thumbnail_meta={
+            thumbnail_size=(48, 48), thumbnail_meta={
                 'project_id': self.project._id, 'category': 'icon'})
 
     @mock.patch.object(google, 'M')
     def test_get_license(self, M):
         self.project.trove_license = []
-        extractor = google.GoogleCodeProjectExtractor('my-project', 'project_info')
+        extractor = google.GoogleCodeProjectExtractor(
+            'my-project', 'project_info')
         extractor.page.find.return_value.findNext.return_value.find.return_value.text = '  New BSD License  '
         trove = M.TroveCategory.query.get.return_value
 
@@ -115,27 +126,31 @@ class TestGoogleCodeProjectExtractor(TestCase):
 
         extractor.page.find.assert_called_once_with(text='Code license')
         extractor.page.find.return_value.findNext.assert_called_once_with()
-        extractor.page.find.return_value.findNext.return_value.find.assert_called_once_with('a')
+        extractor.page.find.return_value.findNext.return_value.find.assert_called_once_with(
+            'a')
         self.assertEqual(self.project.trove_license, [trove._id])
-        M.TroveCategory.query.get.assert_called_once_with(fullname='BSD License')
+        M.TroveCategory.query.get.assert_called_once_with(
+            fullname='BSD License')
 
         M.TroveCategory.query.get.reset_mock()
         extractor.page.find.return_value.findNext.return_value.find.return_value.text = 'non-existant license'
         extractor.get_license(self.project)
-        M.TroveCategory.query.get.assert_called_once_with(fullname='Other/Proprietary License')
+        M.TroveCategory.query.get.assert_called_once_with(
+            fullname='Other/Proprietary License')
 
     def _make_extractor(self, html):
         from BeautifulSoup import BeautifulSoup
         with mock.patch.object(base.ProjectExtractor, 'urlopen'):
-            extractor = google.GoogleCodeProjectExtractor('allura-google-importer')
+            extractor = google.GoogleCodeProjectExtractor(
+                'allura-google-importer')
         extractor.page = BeautifulSoup(html)
         extractor.get_page = lambda pagename: extractor.page
-        extractor.url="http://test/source/browse"
+        extractor.url = "http://test/source/browse"
         return extractor
 
     def test_get_repo_type_happy_path(self):
         extractor = self._make_extractor(
-                '<span id="crumb_root">\nsvn/&nbsp;</span>')
+            '<span id="crumb_root">\nsvn/&nbsp;</span>')
         self.assertEqual('svn', extractor.get_repo_type())
 
     def test_get_repo_type_no_crumb_root(self):
@@ -143,18 +158,19 @@ class TestGoogleCodeProjectExtractor(TestCase):
         with self.assertRaises(Exception) as cm:
             extractor.get_repo_type()
         self.assertEqual(str(cm.exception),
-                "Couldn't detect repo type: no #crumb_root in "
-                "http://test/source/browse")
+                         "Couldn't detect repo type: no #crumb_root in "
+                         "http://test/source/browse")
 
     def test_get_repo_type_unknown_repo_type(self):
         extractor = self._make_extractor(
-                '<span id="crumb_root">cvs</span>')
+            '<span id="crumb_root">cvs</span>')
         with self.assertRaises(Exception) as cm:
             extractor.get_repo_type()
         self.assertEqual(str(cm.exception), "Unknown repo type: cvs")
 
     def test_empty_issue(self):
-        empty_issue = open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/empty-issue.html')).read()
+        empty_issue = open(pkg_resources.resource_filename(
+            'forgeimporters', 'tests/data/google/empty-issue.html')).read()
         gpe = self._make_extractor(empty_issue)
         self.assertIsNone(gpe.get_issue_owner())
         self.assertEqual(gpe.get_issue_status(), '')
@@ -164,74 +180,82 @@ class TestGoogleCodeProjectExtractor(TestCase):
 
     @without_module('html2text')
     def test_get_issue_basic_fields(self):
-        test_issue = open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read()
+        test_issue = open(pkg_resources.resource_filename(
+            'forgeimporters', 'tests/data/google/test-issue.html')).read()
         gpe = self._make_extractor(test_issue)
         self.assertEqual(gpe.get_issue_creator().name, 'john...@gmail.com')
-        self.assertEqual(gpe.get_issue_creator().url, 'http://code.google.com/u/101557263855536553789/')
+        self.assertEqual(gpe.get_issue_creator().url,
+                         'http://code.google.com/u/101557263855536553789/')
         self.assertEqual(gpe.get_issue_owner().name, 'john...@gmail.com')
-        self.assertEqual(gpe.get_issue_owner().url, 'http://code.google.com/u/101557263855536553789/')
+        self.assertEqual(gpe.get_issue_owner().url,
+                         'http://code.google.com/u/101557263855536553789/')
         self.assertEqual(gpe.get_issue_status(), 'Started')
         self._p_soup.stop()
         self.assertEqual(gpe.get_issue_summary(), 'Test "Issue"')
         assert_equal(gpe.get_issue_description(),
-                'Test \\*Issue\\* for testing\n'
-                '\n'
-                '&nbsp; 1\\. Test List\n'
-                '&nbsp; 2\\. Item\n'
-                '\n'
-                '\\*\\*Testing\\*\\*\n'
-                '\n'
-                ' \\* Test list 2\n'
-                ' \\* Item\n'
-                '\n'
-                '\\# Test Section\n'
-                '\n'
-                '&nbsp;&nbsp;&nbsp; p = source\\.test\\_issue\\.post\\(\\)\n'
-                '&nbsp;&nbsp;&nbsp; p\\.count = p\\.count \\*5 \\#\\* 6\n'
-                '&nbsp;&nbsp;&nbsp; if p\\.count &gt; 5:\n'
-                '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
-                '\n'
-                'References: [issue 1](#1), [r2]\n'
-                '\n'
-                'That\'s all'
-            )
-        self.assertEqual(gpe.get_issue_created_date(), 'Thu Aug  8 15:33:52 2013')
+                     'Test \\*Issue\\* for testing\n'
+                     '\n'
+                     '&nbsp; 1\\. Test List\n'
+                     '&nbsp; 2\\. Item\n'
+                     '\n'
+                     '\\*\\*Testing\\*\\*\n'
+                     '\n'
+                     ' \\* Test list 2\n'
+                     ' \\* Item\n'
+                     '\n'
+                     '\\# Test Section\n'
+                     '\n'
+                     '&nbsp;&nbsp;&nbsp; p = source\\.test\\_issue\\.post\\(\\)\n'
+                     '&nbsp;&nbsp;&nbsp; p\\.count = p\\.count \\*5 \\#\\* 6\n'
+                     '&nbsp;&nbsp;&nbsp; if p\\.count &gt; 5:\n'
+                     '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
+                     '\n'
+                     'References: [issue 1](#1), [r2]\n'
+                     '\n'
+                     'That\'s all'
+                     )
+        self.assertEqual(gpe.get_issue_created_date(),
+                         'Thu Aug  8 15:33:52 2013')
         self.assertEqual(gpe.get_issue_stars(), 1)
 
     @skipif(module_not_available('html2text'))
     def test_get_issue_basic_fields_html2text(self):
-        test_issue = open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read()
+        test_issue = open(pkg_resources.resource_filename(
+            'forgeimporters', 'tests/data/google/test-issue.html')).read()
         gpe = self._make_extractor(test_issue)
         self.assertEqual(gpe.get_issue_creator().name, 'john...@gmail.com')
-        self.assertEqual(gpe.get_issue_creator().url, 'http://code.google.com/u/101557263855536553789/')
+        self.assertEqual(gpe.get_issue_creator().url,
+                         'http://code.google.com/u/101557263855536553789/')
         self.assertEqual(gpe.get_issue_owner().name, 'john...@gmail.com')
-        self.assertEqual(gpe.get_issue_owner().url, 'http://code.google.com/u/101557263855536553789/')
+        self.assertEqual(gpe.get_issue_owner().url,
+                         'http://code.google.com/u/101557263855536553789/')
         self.assertEqual(gpe.get_issue_status(), 'Started')
         self._p_soup.stop()
         self.assertEqual(gpe.get_issue_summary(), 'Test "Issue"')
         assert_equal(gpe.get_issue_description(),
-                'Test \\*Issue\\* for testing\n'
-                '\n'
-                '&nbsp; 1. Test List\n'
-                '&nbsp; 2. Item\n'
-                '\n'
-                '\\*\\*Testing\\*\\*\n'
-                '\n'
-                ' \\* Test list 2\n'
-                ' \\* Item\n'
-                '\n'
-                '\\# Test Section\n'
-                '\n'
-                '&nbsp;&nbsp;&nbsp; p = source.test\\_issue.post\\(\\)\n'
-                '&nbsp;&nbsp;&nbsp; p.count = p.count \\*5 \\#\\* 6\n'
-                '&nbsp;&nbsp;&nbsp; if p.count &gt; 5:\n'
-                '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
-                '\n'
-                'References: [issue 1](#1), [r2]\n'
-                '\n'
-                'That\'s all'
-            )
-        self.assertEqual(gpe.get_issue_created_date(), 'Thu Aug  8 15:33:52 2013')
+                     'Test \\*Issue\\* for testing\n'
+                     '\n'
+                     '&nbsp; 1. Test List\n'
+                     '&nbsp; 2. Item\n'
+                     '\n'
+                     '\\*\\*Testing\\*\\*\n'
+                     '\n'
+                     ' \\* Test list 2\n'
+                     ' \\* Item\n'
+                     '\n'
+                     '\\# Test Section\n'
+                     '\n'
+                     '&nbsp;&nbsp;&nbsp; p = source.test\\_issue.post\\(\\)\n'
+                     '&nbsp;&nbsp;&nbsp; p.count = p.count \\*5 \\#\\* 6\n'
+                     '&nbsp;&nbsp;&nbsp; if p.count &gt; 5:\n'
+                     '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
+                     '\n'
+                     'References: [issue 1](#1), [r2]\n'
+                     '\n'
+                     'That\'s all'
+                     )
+        self.assertEqual(gpe.get_issue_created_date(),
+                         'Thu Aug  8 15:33:52 2013')
         self.assertEqual(gpe.get_issue_stars(), 1)
 
     def test_get_issue_summary(self):
@@ -253,168 +277,180 @@ class TestGoogleCodeProjectExtractor(TestCase):
         self.assertEqual(gpe.get_issue_summary(), u'My Summary')
 
     def test_get_issue_mod_date(self):
-        test_issue = open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read()
+        test_issue = open(pkg_resources.resource_filename(
+            'forgeimporters', 'tests/data/google/test-issue.html')).read()
         gpe = self._make_extractor(test_issue)
         self.assertEqual(gpe.get_issue_mod_date(), 'Thu Aug  8 15:36:57 2013')
 
     def test_get_issue_labels(self):
-        test_issue = open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read()
+        test_issue = open(pkg_resources.resource_filename(
+            'forgeimporters', 'tests/data/google/test-issue.html')).read()
         gpe = self._make_extractor(test_issue)
         self.assertEqual(gpe.get_issue_labels(), [
-                'Type-Defect',
-                'Priority-Medium',
-                'Milestone-Release1.0',
-                'OpSys-All',
-                'Component-Logic',
-                'Performance',
-                'Security',
-                'OpSys-Windows',
-                'OpSys-OSX',
-            ])
+            'Type-Defect',
+            'Priority-Medium',
+            'Milestone-Release1.0',
+            'OpSys-All',
+            'Component-Logic',
+            'Performance',
+            'Security',
+            'OpSys-Windows',
+            'OpSys-OSX',
+        ])
 
     @mock.patch.object(base, 'StringIO')
     def test_get_issue_attachments(self, StringIO):
-        self.urlopen.return_value.info.return_value = {'content-type': 'text/plain; foo'}
-        test_issue = open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read()
+        self.urlopen.return_value.info.return_value = {
+            'content-type': 'text/plain; foo'}
+        test_issue = open(pkg_resources.resource_filename(
+            'forgeimporters', 'tests/data/google/test-issue.html')).read()
         gpe = self._make_extractor(test_issue)
         attachments = gpe.get_issue_attachments()
         self.assertEqual(len(attachments), 1)
         self.assertEqual(attachments[0].filename, 'at1.txt')
-        self.assertEqual(attachments[0].url, 'http://allura-google-importer.googlecode.com/issues/attachment?aid=70000000&name=at1.txt&token=3REU1M3JUUMt0rJUg7ldcELt6LA%3A1376059941255')
+        self.assertEqual(
+            attachments[0].url, 'http://allura-google-importer.googlecode.com/issues/attachment?aid=70000000&name=at1.txt&token=3REU1M3JUUMt0rJUg7ldcELt6LA%3A1376059941255')
         self.assertEqual(attachments[0].type, 'text/plain')
 
     @without_module('html2text')
     @mock.patch.object(base, 'StringIO')
     def test_iter_comments(self, StringIO):
-        test_issue = open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read()
+        test_issue = open(pkg_resources.resource_filename(
+            'forgeimporters', 'tests/data/google/test-issue.html')).read()
         gpe = self._make_extractor(test_issue)
         comments = list(gpe.iter_comments())
         self.assertEqual(len(comments), 4)
         expected = [
-                {
-                    'author.name': 'john...@gmail.com',
-                    'author.url': 'http://code.google.com/u/101557263855536553789/',
-                    'created_date': 'Thu Aug  8 15:35:15 2013',
-                    'body': 'Test \\*comment\\* is a comment',
-                    'updates': {'Status:': 'Started', 'Labels:': '-OpSys-Linux OpSys-Windows'},
-                    'attachments': ['at2.txt'],
-                },
-                {
-                    'author.name': 'john...@gmail.com',
-                    'author.url': 'http://code.google.com/u/101557263855536553789/',
-                    'created_date': 'Thu Aug  8 15:35:34 2013',
-                    'body': 'Another comment with references: [issue 2](#2), [r1]',
-                    'updates': {},
-                    'attachments': [],
-                },
-                {
-                    'author.name': 'john...@gmail.com',
-                    'author.url': 'http://code.google.com/u/101557263855536553789/',
-                    'created_date': 'Thu Aug  8 15:36:39 2013',
-                    'body': 'Last comment',
-                    'updates': {},
-                    'attachments': ['at4.txt', 'at1.txt'],
-                },
-                {
-                    'author.name': 'john...@gmail.com',
-                    'author.url': 'http://code.google.com/u/101557263855536553789/',
-                    'created_date': 'Thu Aug  8 15:36:57 2013',
-                    'body': 'Oh, I forgot one \\(with an inter\\-project reference to [issue other\\-project:1](https://code.google.com/p/other-project/issues/detail?id=1)\\)',
-                    'updates': {'Labels:': 'OpSys-OSX'},
-                    'attachments': [],
-                },
-            ]
+            {
+                'author.name': 'john...@gmail.com',
+                'author.url': 'http://code.google.com/u/101557263855536553789/',
+                'created_date': 'Thu Aug  8 15:35:15 2013',
+                'body': 'Test \\*comment\\* is a comment',
+                'updates': {'Status:': 'Started', 'Labels:': '-OpSys-Linux OpSys-Windows'},
+                'attachments': ['at2.txt'],
+            },
+            {
+                'author.name': 'john...@gmail.com',
+                'author.url': 'http://code.google.com/u/101557263855536553789/',
+                'created_date': 'Thu Aug  8 15:35:34 2013',
+                'body': 'Another comment with references: [issue 2](#2), [r1]',
+                'updates': {},
+                'attachments': [],
+            },
+            {
+                'author.name': 'john...@gmail.com',
+                'author.url': 'http://code.google.com/u/101557263855536553789/',
+                'created_date': 'Thu Aug  8 15:36:39 2013',
+                'body': 'Last comment',
+                'updates': {},
+                'attachments': ['at4.txt', 'at1.txt'],
+            },
+            {
+                'author.name': 'john...@gmail.com',
+                'author.url': 'http://code.google.com/u/101557263855536553789/',
+                'created_date': 'Thu Aug  8 15:36:57 2013',
+                'body': 'Oh, I forgot one \\(with an inter\\-project reference to [issue other\\-project:1](https://code.google.com/p/other-project/issues/detail?id=1)\\)',
+                'updates': {'Labels:': 'OpSys-OSX'},
+                'attachments': [],
+            },
+        ]
         for actual, expected in zip(comments, expected):
             self.assertEqual(actual.author.name, expected['author.name'])
             self.assertEqual(actual.author.url, expected['author.url'])
             self.assertEqual(actual.created_date, expected['created_date'])
             self.assertEqual(actual.body, expected['body'])
             self.assertEqual(actual.updates, expected['updates'])
-            self.assertEqual([a.filename for a in actual.attachments], expected['attachments'])
+            self.assertEqual(
+                [a.filename for a in actual.attachments], expected['attachments'])
 
     @skipif(module_not_available('html2text'))
     @mock.patch.object(base, 'StringIO')
     def test_iter_comments_html2text(self, StringIO):
-        test_issue = open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read()
+        test_issue = open(pkg_resources.resource_filename(
+            'forgeimporters', 'tests/data/google/test-issue.html')).read()
         gpe = self._make_extractor(test_issue)
         comments = list(gpe.iter_comments())
         self.assertEqual(len(comments), 4)
         expected = [
-                {
-                    'author.name': 'john...@gmail.com',
-                    'author.url': 'http://code.google.com/u/101557263855536553789/',
-                    'created_date': 'Thu Aug  8 15:35:15 2013',
-                    'body': 'Test \\*comment\\* is a comment',
-                    'updates': {'Status:': 'Started', 'Labels:': '-OpSys-Linux OpSys-Windows'},
-                    'attachments': ['at2.txt'],
-                },
-                {
-                    'author.name': 'john...@gmail.com',
-                    'author.url': 'http://code.google.com/u/101557263855536553789/',
-                    'created_date': 'Thu Aug  8 15:35:34 2013',
-                    'body': 'Another comment with references: [issue 2](#2), [r1]',
-                    'updates': {},
-                    'attachments': [],
-                },
-                {
-                    'author.name': 'john...@gmail.com',
-                    'author.url': 'http://code.google.com/u/101557263855536553789/',
-                    'created_date': 'Thu Aug  8 15:36:39 2013',
-                    'body': 'Last comment',
-                    'updates': {},
-                    'attachments': ['at4.txt', 'at1.txt'],
-                },
-                {
-                    'author.name': 'john...@gmail.com',
-                    'author.url': 'http://code.google.com/u/101557263855536553789/',
-                    'created_date': 'Thu Aug  8 15:36:57 2013',
-                    'body': 'Oh, I forgot one \\(with an inter-project reference to [issue other-project:1](https://code.google.com/p/other-project/issues/detail?id=1)\\)',
-                    'updates': {'Labels:': 'OpSys-OSX'},
-                    'attachments': [],
-                },
-            ]
+            {
+                'author.name': 'john...@gmail.com',
+                'author.url': 'http://code.google.com/u/101557263855536553789/',
+                'created_date': 'Thu Aug  8 15:35:15 2013',
+                'body': 'Test \\*comment\\* is a comment',
+                'updates': {'Status:': 'Started', 'Labels:': '-OpSys-Linux OpSys-Windows'},
+                'attachments': ['at2.txt'],
+            },
+            {
+                'author.name': 'john...@gmail.com',
+                'author.url': 'http://code.google.com/u/101557263855536553789/',
+                'created_date': 'Thu Aug  8 15:35:34 2013',
+                'body': 'Another comment with references: [issue 2](#2), [r1]',
+                'updates': {},
+                'attachments': [],
+            },
+            {
+                'author.name': 'john...@gmail.com',
+                'author.url': 'http://code.google.com/u/101557263855536553789/',
+                'created_date': 'Thu Aug  8 15:36:39 2013',
+                'body': 'Last comment',
+                'updates': {},
+                'attachments': ['at4.txt', 'at1.txt'],
+            },
+            {
+                'author.name': 'john...@gmail.com',
+                'author.url': 'http://code.google.com/u/101557263855536553789/',
+                'created_date': 'Thu Aug  8 15:36:57 2013',
+                'body': 'Oh, I forgot one \\(with an inter-project reference to [issue other-project:1](https://code.google.com/p/other-project/issues/detail?id=1)\\)',
+                'updates': {'Labels:': 'OpSys-OSX'},
+                'attachments': [],
+            },
+        ]
         for actual, expected in zip(comments, expected):
             self.assertEqual(actual.author.name, expected['author.name'])
             self.assertEqual(actual.author.url, expected['author.url'])
             self.assertEqual(actual.created_date, expected['created_date'])
             self.assertEqual(actual.body, expected['body'])
             self.assertEqual(actual.updates, expected['updates'])
-            self.assertEqual([a.filename for a in actual.attachments], expected['attachments'])
+            self.assertEqual(
+                [a.filename for a in actual.attachments], expected['attachments'])
 
     def test_get_issue_ids(self):
         extractor = google.GoogleCodeProjectExtractor(None)
-        extractor.get_page = mock.Mock(side_effect=((1, 2, 3),(2, 3, 4), ()))
+        extractor.get_page = mock.Mock(side_effect=((1, 2, 3), (2, 3, 4), ()))
         self.assertItemsEqual(extractor.get_issue_ids(start=10), (1, 2, 3, 4))
         self.assertEqual(extractor.get_page.call_count, 3)
         extractor.get_page.assert_has_calls([
-                mock.call('issues_csv', parser=google.csv_parser, start=10),
-                mock.call('issues_csv', parser=google.csv_parser, start=110),
-                mock.call('issues_csv', parser=google.csv_parser, start=210),
-            ])
+            mock.call('issues_csv', parser=google.csv_parser, start=10),
+            mock.call('issues_csv', parser=google.csv_parser, start=110),
+            mock.call('issues_csv', parser=google.csv_parser, start=210),
+        ])
 
     @mock.patch.object(google.GoogleCodeProjectExtractor, 'get_page')
     @mock.patch.object(google.GoogleCodeProjectExtractor, 'get_issue_ids')
     def test_iter_issue_ids(self, get_issue_ids, get_page):
         get_issue_ids.side_effect = [set([1, 2]), set([2, 3, 4])]
-        issue_ids = [i for i,e in list(google.GoogleCodeProjectExtractor.iter_issues('foo'))]
+        issue_ids = [i for i,
+                     e in list(google.GoogleCodeProjectExtractor.iter_issues('foo'))]
         self.assertEqual(issue_ids, [1, 2, 3, 4])
         get_issue_ids.assert_has_calls([
-                mock.call(start=0),
-                mock.call(start=-8),
-            ])
+            mock.call(start=0),
+            mock.call(start=-8),
+        ])
 
     @mock.patch.object(google.GoogleCodeProjectExtractor, '__init__')
     @mock.patch.object(google.GoogleCodeProjectExtractor, 'get_issue_ids')
     def test_iter_issue_ids_raises(self, get_issue_ids, __init__):
         get_issue_ids.side_effect = [set([1, 2, 3, 4, 5])]
         __init__.side_effect = [
-                None,
-                None,
-                HTTPError('fourohfour', 404, 'fourohfour', {}, mock.Mock()),  # should skip but keep going
-                None,
-                HTTPError('fubar', 500, 'fubar', {}, mock.Mock()),  # should be re-raised
-                None,
-            ]
+            None,
+            None,
+            # should skip but keep going
+            HTTPError('fourohfour', 404, 'fourohfour', {}, mock.Mock()),
+            None,
+            # should be re-raised
+            HTTPError('fubar', 500, 'fubar', {}, mock.Mock()),
+            None,
+        ]
         issue_ids = []
         try:
             for issue_id, extractor in google.GoogleCodeProjectExtractor.iter_issues('foo'):
@@ -425,7 +461,9 @@ class TestGoogleCodeProjectExtractor(TestCase):
             assert False, 'Missing expected raised exception'
         self.assertEqual(issue_ids, [1, 3])
 
+
 class TestUserLink(TestCase):
+
     def test_plain(self):
         tag = mock.Mock()
         tag.text.strip.return_value = 'name'
@@ -484,10 +522,11 @@ class TestComment(TestCase):
         self.assertEqual(comment.updates, {
             u'Summary:': u'Make PyChess keyboard accessible',
             u'Status:': u'Accepted',
-            })
+        })
 
 
 class TestAsMarkdown(TestCase):
+
     def soup(self, tag):
         return BeautifulSoup(u'<pre>%s</pre>' % tag).first()
 

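The test_get_page assertions above depend on the extractor caching fetched pages by URL, so repeated lookups of the same logical page never hit the network twice. A rough, self-contained sketch of that pattern, assuming a page-name-to-URL map covering the two URLs the tests check (not the real GoogleCodeProjectExtractor):

    class CachingExtractor(object):
        """Illustrative page cache mirroring what test_get_page exercises."""

        PAGE_URLS = {  # assumed mapping; the tests only touch these two pages
            'project_info': 'http://code.google.com/p/%s/',
            'source_browse': 'http://code.google.com/p/%s/source/browse/',
        }

        def __init__(self, project_name, fetch):
            self.project_name = project_name
            self.fetch = fetch            # stand-in for the extractor's urlopen helper
            self._page_cache = {}

        def get_page_url(self, page_name):
            return self.PAGE_URLS[page_name] % self.project_name

        def get_page(self, page_name, parser=None):
            url = self.get_page_url(page_name)
            if url not in self._page_cache:      # each URL is fetched at most once
                raw = self.fetch(url)
                self._page_cache[url] = parser(raw) if parser else raw
            return self._page_cache[url]
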
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/google/test_tracker.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/google/test_tracker.py b/ForgeImporters/forgeimporters/tests/google/test_tracker.py
index 7003e32..cfb8d97 100644
--- a/ForgeImporters/forgeimporters/tests/google/test_tracker.py
+++ b/ForgeImporters/forgeimporters/tests/google/test_tracker.py
@@ -30,6 +30,7 @@ from forgeimporters.google import tracker
 
 
 class TestTrackerImporter(TestCase):
+
     @mock.patch.object(tracker, 'g')
     @mock.patch.object(tracker, 'c')
     @mock.patch.object(tracker, 'ThreadLocalORMSession')
@@ -47,55 +48,57 @@ class TestTrackerImporter(TestCase):
         app = project.install_app.return_value
         app.config.options.mount_point = 'mount_point'
         app.config.options.import_id = {
-                'source': 'Google Code',
-                'project_name': 'project_name',
-            }
+            'source': 'Google Code',
+            'project_name': 'project_name',
+        }
         app.config.options.get = lambda *a: getattr(app.config.options, *a)
         app.url = 'foo'
-        issues = gpe.iter_issues.return_value = [(50, mock.Mock()), (100, mock.Mock())]
+        issues = gpe.iter_issues.return_value = [
+            (50, mock.Mock()), (100, mock.Mock())]
         tickets = TM.Ticket.side_effect = [mock.Mock(), mock.Mock()]
 
         importer.import_tool(project, user, project_name='project_name',
-                mount_point='mount_point', mount_label='mount_label')
+                             mount_point='mount_point', mount_label='mount_label')
 
-        project.install_app.assert_called_once_with('tickets', 'mount_point', 'mount_label',
-                EnableVoting=True,
-                open_status_names='New Accepted Started',
-                closed_status_names='Fixed Verified Invalid Duplicate WontFix Done',
-                import_id={
-                        'source': 'Google Code',
-                        'project_name': 'project_name',
-                    }
-            )
+        project.install_app.assert_called_once_with(
+            'tickets', 'mount_point', 'mount_label',
+            EnableVoting=True,
+            open_status_names='New Accepted Started',
+            closed_status_names='Fixed Verified Invalid Duplicate WontFix Done',
+            import_id={
+                'source': 'Google Code',
+                'project_name': 'project_name',
+            }
+        )
         gpe.iter_issues.assert_called_once_with('project_name')
         self.assertEqual(importer.process_fields.call_args_list, [
-                mock.call(tickets[0], issues[0][1]),
-                mock.call(tickets[1], issues[1][1]),
-            ])
+            mock.call(tickets[0], issues[0][1]),
+            mock.call(tickets[1], issues[1][1]),
+        ])
         self.assertEqual(importer.process_labels.call_args_list, [
-                mock.call(tickets[0], issues[0][1]),
-                mock.call(tickets[1], issues[1][1]),
-            ])
+            mock.call(tickets[0], issues[0][1]),
+            mock.call(tickets[1], issues[1][1]),
+        ])
         self.assertEqual(importer.process_comments.call_args_list, [
-                mock.call(tickets[0], issues[0][1]),
-                mock.call(tickets[1], issues[1][1]),
-            ])
+            mock.call(tickets[0], issues[0][1]),
+            mock.call(tickets[1], issues[1][1]),
+        ])
         self.assertEqual(tlos.flush_all.call_args_list, [
-                mock.call(),
-                mock.call(),
-            ])
+            mock.call(),
+            mock.call(),
+        ])
         self.assertEqual(session.return_value.flush.call_args_list, [
-                mock.call(tickets[0]),
-                mock.call(tickets[1]),
-            ])
+            mock.call(tickets[0]),
+            mock.call(tickets[1]),
+        ])
         self.assertEqual(session.return_value.expunge.call_args_list, [
-                mock.call(tickets[0]),
-                mock.call(tickets[1]),
-            ])
+            mock.call(tickets[0]),
+            mock.call(tickets[1]),
+        ])
         self.assertEqual(app.globals.last_ticket_num, 100)
         M.AuditLog.log.assert_called_once_with(
-                'import tool mount_point from project_name on Google Code',
-                project=project, user=user, url='foo')
+            'import tool mount_point from project_name on Google Code',
+            project=project, user=user, url='foo')
         g.post_event.assert_called_once_with('project_updated')
         app.globals.invalidate_bin_counts.assert_called_once_with()
 
@@ -108,10 +111,12 @@ class TestTrackerImporter(TestCase):
         user = mock.Mock()
 
         importer = tracker.GoogleCodeTrackerImporter()
-        self.assertRaises(ValueError, importer.import_tool, project, user, project_name='project_name',
-                mount_point='mount_point', mount_label='mount_label')
+        self.assertRaises(
+            ValueError, importer.import_tool, project, user, project_name='project_name',
+            mount_point='mount_point', mount_label='mount_label')
 
-        h.make_app_admin_only.assert_called_once_with(project.install_app.return_value)
+        h.make_app_admin_only.assert_called_once_with(
+            project.install_app.return_value)
 
     def test_custom_fields(self):
         importer = tracker.GoogleCodeTrackerImporter()
@@ -121,31 +126,31 @@ class TestTrackerImporter(TestCase):
         importer.custom_field('Priority')
         importer.custom_field('Type')
         self.assertEqual(importer.custom_fields, {
-                'Foo': {
-                        'type': 'string',
-                        'label': 'Foo',
-                        'name': '_foo',
-                        'options': set(),
-                    },
-                'Milestone': {
-                        'type': 'milestone',
-                        'label': 'Milestone',
-                        'name': '_milestone',
-                        'options': set(),
-                    },
-                'Priority': {
-                        'type': 'select',
-                        'label': 'Priority',
-                        'name': '_priority',
-                        'options': set(),
-                    },
-                'Type': {
-                        'type': 'select',
-                        'label': 'Type',
-                        'name': '_type',
-                        'options': set(),
-                    },
-            })
+            'Foo': {
+                'type': 'string',
+                'label': 'Foo',
+                'name': '_foo',
+                'options': set(),
+            },
+            'Milestone': {
+                'type': 'milestone',
+                'label': 'Milestone',
+                'name': '_milestone',
+                'options': set(),
+            },
+            'Priority': {
+                'type': 'select',
+                'label': 'Priority',
+                'name': '_priority',
+                'options': set(),
+            },
+            'Type': {
+                'type': 'select',
+                'label': 'Type',
+                'name': '_type',
+                'options': set(),
+            },
+        })
         importer.custom_fields = {'Foo': {}}
         importer.custom_field('Foo')
         self.assertEqual(importer.custom_fields, {'Foo': {}})
@@ -153,33 +158,36 @@ class TestTrackerImporter(TestCase):
     def test_process_fields(self):
         ticket = mock.Mock()
         issue = mock.Mock(
-                get_issue_summary=lambda:'summary',
-                get_issue_description=lambda:'my *description* fool',
-                get_issue_status=lambda:'status',
-                get_issue_created_date=lambda:'created_date',
-                get_issue_mod_date=lambda:'mod_date',
-                get_issue_creator=lambda:'creator',
-                get_issue_owner=lambda:'owner',
-            )
+            get_issue_summary=lambda: 'summary',
+            get_issue_description=lambda: 'my *description* fool',
+            get_issue_status=lambda: 'status',
+            get_issue_created_date=lambda: 'created_date',
+            get_issue_mod_date=lambda: 'mod_date',
+            get_issue_creator=lambda: 'creator',
+            get_issue_owner=lambda: 'owner',
+        )
         importer = tracker.GoogleCodeTrackerImporter()
         with mock.patch.object(tracker, 'dateutil') as dt:
             dt.parser.parse.side_effect = lambda s: s
             importer.process_fields(ticket, issue)
             self.assertEqual(ticket.summary, 'summary')
-            self.assertEqual(ticket.description, '*Originally created by:* creator\n*Originally owned by:* owner\n\nmy *description* fool')
+            self.assertEqual(ticket.description,
+                             '*Originally created by:* creator\n*Originally owned by:* owner\n\nmy *description* fool')
             self.assertEqual(ticket.status, 'status')
             self.assertEqual(ticket.created_date, 'created_date')
             self.assertEqual(ticket.mod_date, 'mod_date')
             self.assertEqual(dt.parser.parse.call_args_list, [
-                    mock.call('created_date'),
-                    mock.call('mod_date'),
-                ])
+                mock.call('created_date'),
+                mock.call('mod_date'),
+            ])
 
     def test_process_labels(self):
         ticket = mock.Mock(custom_fields={}, labels=[])
-        issue = mock.Mock(get_issue_labels=lambda:['Foo-Bar', 'Baz', 'Foo-Qux'])
+        issue = mock.Mock(get_issue_labels=lambda:
+                          ['Foo-Bar', 'Baz', 'Foo-Qux'])
         importer = tracker.GoogleCodeTrackerImporter()
-        importer.custom_field = mock.Mock(side_effect=lambda n: {'name': '_%s' % n.lower(), 'options': set()})
+        importer.custom_field = mock.Mock(
+            side_effect=lambda n: {'name': '_%s' % n.lower(), 'options': set()})
         importer.process_labels(ticket, issue)
         self.assertEqual(ticket.labels, ['Baz'])
         self.assertEqual(ticket.custom_fields, {'_foo': 'Bar, Qux'})
@@ -193,94 +201,99 @@ class TestTrackerImporter(TestCase):
         ticket = mock.Mock()
         issue = mock.Mock()
         comments = issue.iter_comments.return_value = [
-                mock.Mock(
-                    author=_author(1),
-                    body='text1',
-                    annotated_text='annotated1',
-                    attachments='attachments1',
-                    created_date='Mon Jul 15 00:00:00 2013',
-                ),
-                mock.Mock(
-                    author=_author(2),
-                    body='text2',
-                    annotated_text='annotated2',
-                    attachments='attachments2',
-                    created_date='Mon Jul 16 00:00:00 2013',
-                ),
-            ]
-        comments[0].updates.items.return_value = [('Foo:', 'Bar'), ('Baz:', 'Qux')]
+            mock.Mock(
+                author=_author(1),
+                body='text1',
+                annotated_text='annotated1',
+                attachments='attachments1',
+                created_date='Mon Jul 15 00:00:00 2013',
+            ),
+            mock.Mock(
+                author=_author(2),
+                body='text2',
+                annotated_text='annotated2',
+                attachments='attachments2',
+                created_date='Mon Jul 16 00:00:00 2013',
+            ),
+        ]
+        comments[0].updates.items.return_value = [
+            ('Foo:', 'Bar'), ('Baz:', 'Qux')]
         comments[1].updates.items.return_value = []
         posts = ticket.discussion_thread.add_post.side_effect = [
-                mock.Mock(),
-                mock.Mock(),
-            ]
+            mock.Mock(),
+            mock.Mock(),
+        ]
         importer = tracker.GoogleCodeTrackerImporter()
         importer.process_comments(ticket, issue)
         self.assertEqual(ticket.discussion_thread.add_post.call_args_list[0], mock.call(
-                text='annotated1',
-                timestamp=datetime(2013, 7, 15),
-                ignore_security=True,
-            ))
-        posts[0].add_multiple_attachments.assert_called_once_with('attachments1')
+            text='annotated1',
+            timestamp=datetime(2013, 7, 15),
+            ignore_security=True,
+        ))
+        posts[0].add_multiple_attachments.assert_called_once_with(
+            'attachments1')
         self.assertEqual(ticket.discussion_thread.add_post.call_args_list[1], mock.call(
-                text='annotated2',
-                timestamp=datetime(2013, 7, 16),
-                ignore_security=True,
-            ))
-        posts[1].add_multiple_attachments.assert_called_once_with('attachments2')
+            text='annotated2',
+            timestamp=datetime(2013, 7, 16),
+            ignore_security=True,
+        ))
+        posts[1].add_multiple_attachments.assert_called_once_with(
+            'attachments2')
 
     @mock.patch.object(tracker, 'c')
     def test_postprocess_custom_fields(self, c):
         importer = tracker.GoogleCodeTrackerImporter()
         importer.open_milestones = set(['m2', 'm3'])
         importer.custom_fields = {
-                'Foo': {
-                    'name': '_foo',
-                    'type': 'string',
-                    'options': set(['foo', 'bar']),
-                },
-                'Milestone': {
-                    'name': '_milestone',
-                    'type': 'milestone',
-                    'options': set(['m3', 'm1', 'm2']),
-                },
-                'Priority': {
-                    'name': '_priority',
-                    'type': 'select',
-                    'options': set(['foo', 'bar']),
-                },
-            }
+            'Foo': {
+                'name': '_foo',
+                'type': 'string',
+                'options': set(['foo', 'bar']),
+            },
+            'Milestone': {
+                'name': '_milestone',
+                'type': 'milestone',
+                'options': set(['m3', 'm1', 'm2']),
+            },
+            'Priority': {
+                'name': '_priority',
+                'type': 'select',
+                'options': set(['foo', 'bar']),
+            },
+        }
         custom_fields = importer.postprocess_custom_fields()
         self.assertItemsEqual(custom_fields, [
-                {
-                    'name': '_foo',
-                    'type': 'string',
-                    'options': '',
-                },
-                {
-                    'name': '_milestone',
-                    'type': 'milestone',
-                    'options': '',
-                    'milestones': [
+            {
+                'name': '_foo',
+                'type': 'string',
+                'options': '',
+            },
+            {
+                'name': '_milestone',
+                'type': 'milestone',
+                'options': '',
+                'milestones': [
                         {'name': 'm1', 'due_date': None, 'complete': True},
-                        {'name': 'm2', 'due_date': None, 'complete': False},
-                        {'name': 'm3', 'due_date': None, 'complete': False},
-                    ],
-                },
-                {
-                    'name': '_priority',
-                    'type': 'select',
-                    'options': 'foo bar',
-                },
-            ])
+                    {'name': 'm2', 'due_date': None, 'complete': False},
+                    {'name': 'm3', 'due_date': None, 'complete': False},
+                ],
+            },
+            {
+                'name': '_priority',
+                'type': 'select',
+                'options': 'foo bar',
+            },
+        ])
 
 
 class TestGoogleCodeTrackerImportController(TestController, TestCase):
+
     def setUp(self):
         """Mount Google Code importer on the Tracker admin controller"""
         super(TestGoogleCodeTrackerImportController, self).setUp()
         from forgetracker.tracker_main import TrackerAdminController
-        TrackerAdminController._importer = tracker.GoogleCodeTrackerImportController()
+        TrackerAdminController._importer = tracker.GoogleCodeTrackerImportController(
+        )
 
     @with_tracker
     def test_index(self):
@@ -293,15 +306,18 @@ class TestGoogleCodeTrackerImportController(TestController, TestCase):
     @patch('forgeimporters.base.import_tool')
     def test_create(self, import_tool):
         params = dict(gc_project_name='test',
-                mount_label='mylabel',
-                mount_point='mymount',
-                )
+                      mount_label='mylabel',
+                      mount_point='mymount',
+                      )
         r = self.app.post('/p/test/admin/bugs/_importer/create', params,
-                status=302)
+                          status=302)
         self.assertEqual(r.location, 'http://localhost/p/test/admin/')
-        self.assertEqual(u'mymount', import_tool.post.call_args[1]['mount_point'])
-        self.assertEqual(u'mylabel', import_tool.post.call_args[1]['mount_label'])
-        self.assertEqual(u'test', import_tool.post.call_args[1]['project_name'])
+        self.assertEqual(
+            u'mymount', import_tool.post.call_args[1]['mount_point'])
+        self.assertEqual(
+            u'mylabel', import_tool.post.call_args[1]['mount_label'])
+        self.assertEqual(
+            u'test', import_tool.post.call_args[1]['project_name'])
 
     @with_tracker
     @patch('forgeimporters.base.import_tool')
@@ -310,10 +326,10 @@ class TestGoogleCodeTrackerImportController(TestController, TestCase):
         project.set_tool_data('GoogleCodeTrackerImporter', pending=1)
         ThreadLocalORMSession.flush_all()
         params = dict(gc_project_name='test',
-                mount_label='mylabel',
-                mount_point='mymount',
-                )
+                      mount_label='mylabel',
+                      mount_point='mymount',
+                      )
         r = self.app.post('/p/test/admin/bugs/_importer/create', params,
-                status=302).follow()
+                          status=302).follow()
         self.assertIn('Please wait and try again', r)
         self.assertEqual(import_tool.post.call_count, 0)

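test_import_tool above also pins down the importer's per-ticket session handling: each imported issue gets its own Ticket, is processed, then flushed and expunged individually so a long import does not keep every ticket in memory, and the tool's last_ticket_num ends up at the highest imported issue number. A stripped-down sketch of that loop with stand-in helper names (not the actual GoogleCodeTrackerImporter code):

    def import_issues(app, issues, session, make_ticket, process):
        max_num = 0
        for issue_num, issue in issues:          # e.g. the extractor's iter_issues()
            ticket = make_ticket(app, issue_num)
            process(ticket, issue)               # fields, labels, comments
            session.flush(ticket)                # persist this ticket now...
            session.expunge(ticket)              # ...and drop it from the session
            max_num = max(max_num, issue_num)
        app.globals.last_ticket_num = max_num
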
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/test_base.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/test_base.py b/ForgeImporters/forgeimporters/tests/test_base.py
index ade7b7a..91d9dff 100644
--- a/ForgeImporters/forgeimporters/tests/test_base.py
+++ b/ForgeImporters/forgeimporters/tests/test_base.py
@@ -32,6 +32,7 @@ from forgeimporters import base
 
 
 class TestProjectExtractor(TestCase):
+
     @mock.patch('forgeimporters.base.h.urlopen')
     @mock.patch('forgeimporters.base.urllib2.Request')
     def test_urlopen(self, Request, urlopen):
@@ -39,7 +40,7 @@ class TestProjectExtractor(TestCase):
         Request.assert_called_once_with('myurl', data='foo')
         req = Request.return_value
         req.add_header.assert_called_once_with(
-                'User-Agent', 'Allura Data Importer (https://forge-allura.apache.org/p/allura/)')
+            'User-Agent', 'Allura Data Importer (https://forge-allura.apache.org/p/allura/)')
         urlopen.assert_called_once_with(req, retries=3, codes=(408,))
         self.assertEqual(r, urlopen.return_value)
 
@@ -53,16 +54,17 @@ def test_import_tool(g, c, object_from_path):
     object_from_path.return_value = importer = mock.Mock()
     importer.return_value.source = 'source'
     importer.return_value.tool_label = 'label'
-    base.import_tool('forgeimporters.base.ToolImporter', project_name='project_name',
-            mount_point='mount_point', mount_label='mount_label')
+    base.import_tool(
+        'forgeimporters.base.ToolImporter', project_name='project_name',
+        mount_point='mount_point', mount_label='mount_label')
     importer.return_value.import_tool.assert_called_once_with(c.project,
-            c.user, project_name='project_name', mount_point='mount_point',
-            mount_label='mount_label')
+                                                              c.user, project_name='project_name', mount_point='mount_point',
+                                                              mount_label='mount_label')
     g.post_event.assert_called_once_with(
-            'import_tool_task_succeeded',
-            'source',
-            'label',
-        )
+        'import_tool_task_succeeded',
+        'source',
+        'label',
+    )
 
 
 @mock.patch.object(base.traceback, 'format_exc')
@@ -72,20 +74,21 @@ def test_import_tool_failed(g, ToolImporter, format_exc):
     format_exc.return_value = 'my traceback'
 
     importer = mock.Mock(source='importer_source',
-            tool_label='importer_tool_label')
+                         tool_label='importer_tool_label')
     importer.import_tool.side_effect = RuntimeError('my error')
     ToolImporter.return_value = importer
 
-    assert_raises(RuntimeError, base.import_tool, 'forgeimporters.base.ToolImporter',
-            project_name='project_name')
+    assert_raises(
+        RuntimeError, base.import_tool, 'forgeimporters.base.ToolImporter',
+        project_name='project_name')
     g.post_event.assert_called_once_with(
-            'import_tool_task_failed',
-            error=str(importer.import_tool.side_effect),
-            traceback='my traceback',
-            importer_source='importer_source',
-            importer_tool_label='importer_tool_label',
-            project_name='project_name',
-        )
+        'import_tool_task_failed',
+        error=str(importer.import_tool.side_effect),
+        traceback='my traceback',
+        importer_source='importer_source',
+        importer_tool_label='importer_tool_label',
+        project_name='project_name',
+    )
 
 
 def ep(name, source=None, importer=None, **kw):
@@ -101,12 +104,15 @@ def ep(name, source=None, importer=None, **kw):
 
 
 class TestProjectImporter(TestCase):
+
     @mock.patch.object(base.h, 'iter_entry_points')
     def test_tool_importers(self, iep):
-        eps = iep.return_value = [ep('ep1', 'foo'), ep('ep2', 'bar'), ep('ep3', 'foo')]
+        eps = iep.return_value = [
+            ep('ep1', 'foo'), ep('ep2', 'bar'), ep('ep3', 'foo')]
         pi = base.ProjectImporter(mock.Mock(name='neighborhood'))
         pi.source = 'foo'
-        self.assertEqual(pi.tool_importers, {'ep1': eps[0].lv, 'ep3': eps[2].lv})
+        self.assertEqual(pi.tool_importers,
+                         {'ep1': eps[0].lv, 'ep3': eps[2].lv})
         iep.assert_called_once_with('allura.importers')
 
     @mock.patch.object(base.ToolImporter, 'by_name')
@@ -123,15 +129,17 @@ class TestProjectImporter(TestCase):
         pi.after_project_create = mock.Mock()
         pi.neighborhood.register_project.return_value.script_name = 'script_name/'
         kw = {
-                'project_name': 'project_name',
-                'project_shortname': 'shortname',
-                'tools': ['tool'],
-            }
+            'project_name': 'project_name',
+            'project_shortname': 'shortname',
+            'tools': ['tool'],
+        }
         with mock.patch.dict(base.config, {'site_name': 'foo'}):
             pi.process(**kw)
-        pi.neighborhood.register_project.assert_called_once_with('shortname', project_name='project_name')
+        pi.neighborhood.register_project.assert_called_once_with(
+            'shortname', project_name='project_name')
         pi.after_project_create.assert_called_once_with(c.project, **kw)
-        import_tool.post.assert_called_once_with('forgeimporters.base.ToolImporter', **kw)
+        import_tool.post.assert_called_once_with(
+            'forgeimporters.base.ToolImporter', **kw)
         M.AuditLog.log.assert_called_once_with('import project from Source')
         self.assertEqual(flash.call_count, 1)
         redirect.assert_called_once_with('script_name/admin/overview')
@@ -160,30 +168,34 @@ class TestProjectImporter(TestCase):
         self.assertEqual(c.show_login_overlay, False)
 
 
-
 TA1 = mock.Mock(tool_label='foo', tool_description='foo_desc')
 TA2 = mock.Mock(tool_label='qux', tool_description='qux_desc')
 TA3 = mock.Mock(tool_label='baz', tool_description='baz_desc')
 
+
 class TI1Controller(object):
+
     @expose()
     def index(self, *a, **kw):
         return 'test importer 1 controller webpage'
 
+
 class TI1(base.ToolImporter):
     target_app = TA1
     controller = TI1Controller
 
+
 class TI2(base.ToolImporter):
     target_app = TA2
     tool_label = 'bar'
     tool_description = 'bar_desc'
 
+
 class TI3(base.ToolImporter):
     target_app = [TA2, TA2]
 
-class TestToolImporter(TestCase):
 
+class TestToolImporter(TestCase):
 
     @mock.patch.object(base.h, 'iter_entry_points')
     def test_by_name(self, iep):
@@ -201,15 +213,15 @@ class TestToolImporter(TestCase):
     @mock.patch.object(base.h, 'iter_entry_points')
     def test_by_app(self, iep):
         eps = iep.return_value = [
-                ep('importer1', importer=TI1),
-                ep('importer2', importer=TI2),
-                ep('importer3', importer=TI3),
-            ]
+            ep('importer1', importer=TI1),
+            ep('importer2', importer=TI2),
+            ep('importer3', importer=TI3),
+        ]
         importers = base.ToolImporter.by_app(TA2)
         self.assertEqual(set(importers.keys()), set([
-                'importer2',
-                'importer3',
-            ]))
+            'importer2',
+            'importer3',
+        ]))
         self.assertIsInstance(importers['importer2'], TI2)
         self.assertIsInstance(importers['importer3'], TI3)
 
@@ -225,6 +237,7 @@ class TestToolImporter(TestCase):
 
 
 class TestToolsValidator(TestCase):
+
     def setUp(self):
         self.tv = base.ToolsValidator('good-source')
 
@@ -251,24 +264,28 @@ class TestToolsValidator(TestCase):
 
     @mock.patch.object(base.ToolImporter, 'by_name')
     def test_multiple(self, by_name):
-        eps = by_name.side_effect = [ep('ep1', 'bad-source').lv, ep('ep2', 'good-source').lv, ep('ep3', 'bad-source').lv]
+        eps = by_name.side_effect = [
+            ep('ep1', 'bad-source').lv, ep('ep2', 'good-source').lv, ep('ep3', 'bad-source').lv]
         with self.assertRaises(Invalid) as cm:
             self.tv.to_python(['value1', 'value2', 'value3'])
-        self.assertEqual(cm.exception.msg, 'Invalid tools selected: value1, value3')
+        self.assertEqual(cm.exception.msg,
+                         'Invalid tools selected: value1, value3')
         self.assertEqual(by_name.call_args_list, [
-                mock.call('value1'),
-                mock.call('value2'),
-                mock.call('value3'),
-            ])
+            mock.call('value1'),
+            mock.call('value2'),
+            mock.call('value3'),
+        ])
 
     @mock.patch.object(base.ToolImporter, 'by_name')
     def test_valid(self, by_name):
-        eps = by_name.side_effect = [ep('ep1', 'good-source').lv, ep('ep2', 'good-source').lv, ep('ep3', 'bad-source').lv]
-        self.assertEqual(self.tv.to_python(['value1', 'value2']), ['value1', 'value2'])
+        eps = by_name.side_effect = [
+            ep('ep1', 'good-source').lv, ep('ep2', 'good-source').lv, ep('ep3', 'bad-source').lv]
+        self.assertEqual(
+            self.tv.to_python(['value1', 'value2']), ['value1', 'value2'])
         self.assertEqual(by_name.call_args_list, [
-                mock.call('value1'),
-                mock.call('value2'),
-            ])
+            mock.call('value1'),
+            mock.call('value2'),
+        ])
 
 
 class TestProjectToolsImportController(TestController):
@@ -309,23 +326,26 @@ class TestProjectToolsImportController(TestController):
 
 def test_get_importer_upload_path():
     project = mock.Mock(
-            shortname='prefix/shortname',
-            is_nbhd_project=False,
-            is_user_project=False,
-            is_root=False,
-            url=lambda: 'n_url/',
-            neighborhood=mock.Mock(url_prefix='p/'),
-        )
+        shortname='prefix/shortname',
+        is_nbhd_project=False,
+        is_user_project=False,
+        is_root=False,
+        url=lambda: 'n_url/',
+        neighborhood=mock.Mock(url_prefix='p/'),
+    )
     with h.push_config(config, importer_upload_path='path/{nbhd}/{project}'):
         assert_equal(base.get_importer_upload_path(project), 'path/p/prefix')
         project.is_nbhd_project = True
         assert_equal(base.get_importer_upload_path(project), 'path/p/n_url')
         project.is_nbhd_project = False
         project.is_user_project = True
-        assert_equal(base.get_importer_upload_path(project), 'path/p/shortname')
+        assert_equal(base.get_importer_upload_path(project),
+                     'path/p/shortname')
         project.is_user_project = False
         project.is_root = True
-        assert_equal(base.get_importer_upload_path(project), 'path/p/prefix/shortname')
+        assert_equal(base.get_importer_upload_path(project),
+                     'path/p/prefix/shortname')
+
 
 @mock.patch.object(base, 'os')
 @mock.patch.object(base, 'get_importer_upload_path')
@@ -342,7 +362,8 @@ def test_save_importer_upload(giup, os):
     fp.write.assert_called_once_with('data')
 
     os.makedirs.side_effect = OSError(errno.EACCES, 'foo')
-    assert_raises(OSError, base.save_importer_upload, 'project', 'file', 'data')
+    assert_raises(OSError, base.save_importer_upload,
+                  'project', 'file', 'data')
 
 
 class TestFile(object):
@@ -350,9 +371,9 @@ class TestFile(object):
     @mock.patch.object(base, 'ProjectExtractor')
     def test_type(self, PE):
         PE().page = {
-                'content-type': 'image/png',
-                'data': 'data',
-            }
+            'content-type': 'image/png',
+            'data': 'data',
+        }
         f = base.File('http://example.com/barbaz.jpg')
         assert_equal(f.type, 'image/jpeg')
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/trac/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/__init__.py b/ForgeImporters/forgeimporters/trac/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/trac/__init__.py
+++ b/ForgeImporters/forgeimporters/trac/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/trac/project.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/project.py b/ForgeImporters/forgeimporters/trac/project.py
index 71ef5f4..1dba980 100644
--- a/ForgeImporters/forgeimporters/trac/project.py
+++ b/ForgeImporters/forgeimporters/trac/project.py
@@ -37,6 +37,7 @@ class TracProjectForm(base.ProjectImportForm):
 
 
 class TracProjectImporter(base.ProjectImporter):
+
     """
     Project importer for Trac.
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/trac/tests/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/tests/__init__.py b/ForgeImporters/forgeimporters/trac/tests/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/trac/tests/__init__.py
+++ b/ForgeImporters/forgeimporters/trac/tests/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/tests/test_tickets.py b/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
index eb97946..7ddb729 100644
--- a/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
+++ b/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
@@ -36,17 +36,18 @@ from forgeimporters.trac.tickets import (
     TracTicketImporter,
     TracTicketImportController,
     TracImportSupport,
-    )
+)
 
 
 class TestTracTicketImporter(TestCase):
+
     @patch('forgeimporters.trac.tickets.session')
     @patch('forgeimporters.trac.tickets.g')
     @patch('forgeimporters.trac.tickets.AuditLog')
     @patch('forgeimporters.trac.tickets.TracImportSupport')
     @patch('forgeimporters.trac.tickets.export')
     def test_import_tool(self, export, ImportSupport, AuditLog, g, session):
-        user_map = {"orig_user":"new_user"}
+        user_map = {"orig_user": "new_user"}
         importer = TracTicketImporter()
         app = Mock(name='ForgeTrackerApp')
         app.config.options.mount_point = 'bugs'
@@ -57,31 +58,31 @@ class TestTracTicketImporter(TestCase):
         user = Mock(name='User', _id='id')
         export.return_value = []
         res = importer.import_tool(project, user,
-                mount_point='bugs',
-                mount_label='Bugs',
-                trac_url='http://example.com/trac/url',
-                user_map=json.dumps(user_map),
-                )
+                                   mount_point='bugs',
+                                   mount_label='Bugs',
+                                   trac_url='http://example.com/trac/url',
+                                   user_map=json.dumps(user_map),
+                                   )
         self.assertEqual(res, app)
         project.install_app.assert_called_once_with(
-                'Tickets', mount_point='bugs', mount_label='Bugs',
-                open_status_names='new assigned accepted reopened',
-                closed_status_names='closed',
-                import_id={
-                        'source': 'Trac',
-                        'trac_url': 'http://example.com/trac/url/',
-                    })
+            'Tickets', mount_point='bugs', mount_label='Bugs',
+            open_status_names='new assigned accepted reopened',
+            closed_status_names='closed',
+            import_id={
+                'source': 'Trac',
+                'trac_url': 'http://example.com/trac/url/',
+            })
         export.assert_called_once_with('http://example.com/trac/url/')
         ImportSupport.return_value.perform_import.assert_called_once_with(
-                json.dumps(export.return_value),
-                json.dumps({
-                    "user_map": user_map,
-                    "usernames_match": False,
-                    }),
-                )
+            json.dumps(export.return_value),
+            json.dumps({
+                "user_map": user_map,
+                "usernames_match": False,
+            }),
+        )
         AuditLog.log.assert_called_once_with(
-                'import tool bugs from http://example.com/trac/url/',
-                project=project, user=user, url='foo')
+            'import tool bugs from http://example.com/trac/url/',
+            project=project, user=user, url='foo')
         g.post_event.assert_called_once_with('project_updated')
 
     @patch('forgeimporters.trac.tickets.session')
@@ -96,21 +97,23 @@ class TestTracTicketImporter(TestCase):
         export.side_effect = ValueError
 
         self.assertRaises(ValueError, importer.import_tool, project, user,
-                mount_point='bugs',
-                mount_label='Bugs',
-                trac_url='http://example.com/trac/url',
-                user_map=None,
-                )
+                          mount_point='bugs',
+                          mount_label='Bugs',
+                          trac_url='http://example.com/trac/url',
+                          user_map=None,
+                          )
 
         h.make_app_admin_only.assert_called_once_with(app)
 
 
 class TestTracTicketImportController(TestController, TestCase):
+
     def setUp(self):
         """Mount Trac import controller on the Tracker admin controller"""
         super(TestTracTicketImportController, self).setUp()
         from forgetracker.tracker_main import TrackerAdminController
-        self.importer = TrackerAdminController._importer = TracTicketImportController()
+        self.importer = TrackerAdminController._importer = TracTicketImportController(
+        )
 
     @with_tracker
     def test_index(self):
@@ -123,17 +126,22 @@ class TestTracTicketImportController(TestController, TestCase):
     @patch('forgeimporters.base.import_tool')
     def test_create(self, import_tool):
         params = dict(trac_url='http://example.com/trac/url',
-                mount_label='mylabel',
-                mount_point='mymount',
-                )
+                      mount_label='mylabel',
+                      mount_point='mymount',
+                      )
         r = self.app.post('/p/test/admin/bugs/_importer/create', params,
-                upload_files=[('user_map', 'myfile', '{"orig_user": "new_user"}')],
-                status=302)
+                          upload_files=[
+                              ('user_map', 'myfile', '{"orig_user": "new_user"}')],
+                          status=302)
         self.assertEqual(r.location, 'http://localhost/p/test/admin/')
-        self.assertEqual(u'mymount', import_tool.post.call_args[1]['mount_point'])
-        self.assertEqual(u'mylabel', import_tool.post.call_args[1]['mount_label'])
-        self.assertEqual('{"orig_user": "new_user"}', import_tool.post.call_args[1]['user_map'])
-        self.assertEqual(u'http://example.com/trac/url', import_tool.post.call_args[1]['trac_url'])
+        self.assertEqual(
+            u'mymount', import_tool.post.call_args[1]['mount_point'])
+        self.assertEqual(
+            u'mylabel', import_tool.post.call_args[1]['mount_label'])
+        self.assertEqual('{"orig_user": "new_user"}',
+                         import_tool.post.call_args[1]['user_map'])
+        self.assertEqual(u'http://example.com/trac/url',
+                         import_tool.post.call_args[1]['trac_url'])
 
     @with_tracker
     @patch('forgeimporters.base.import_tool')
@@ -142,58 +150,62 @@ class TestTracTicketImportController(TestController, TestCase):
         project.set_tool_data('TracTicketImporter', pending=1)
         ThreadLocalORMSession.flush_all()
         params = dict(trac_url='http://example.com/trac/url',
-                mount_label='mylabel',
-                mount_point='mymount',
-                )
+                      mount_label='mylabel',
+                      mount_point='mymount',
+                      )
         r = self.app.post('/p/test/admin/bugs/_importer/create', params,
-                upload_files=[('user_map', 'myfile', '{"orig_user": "new_user"}')],
-                status=302).follow()
+                          upload_files=[
+                              ('user_map', 'myfile', '{"orig_user": "new_user"}')],
+                          status=302).follow()
         self.assertIn('Please wait and try again', r)
         self.assertEqual(import_tool.post.call_count, 0)
 
 
 class TestTracImportSupport(TestCase):
+
     def test_link_processing(self):
         import_support = TracImportSupport()
         import_support.get_slug_by_id = lambda ticket, comment: '123'
         cases = {
-                'test link [[2496]](http://testlink.com)':
-                "test link [\[2496\]](http://testlink.com)",
+            'test link [[2496]](http://testlink.com)':
+            "test link [\[2496\]](http://testlink.com)",
 
-                'test ticket ([#201](http://site.net/apps/trac/project/ticket/201))':
-                'test ticket ([#201](201))',
+            'test ticket ([#201](http://site.net/apps/trac/project/ticket/201))':
+            'test ticket ([#201](201))',
 
-                'Replying to [someuser](http://site.net/apps/trac/project/ticket/204#comment:1)':
-                'Replying to [someuser](204/#123)',
+            'Replying to [someuser](http://site.net/apps/trac/project/ticket/204#comment:1)':
+            'Replying to [someuser](204/#123)',
 
-                '**description** modified ([diff](http://site.net/apps/trac/project/ticket/205?action=diff&version=1))':
-                '**description** modified ([diff](205))',
+            '**description** modified ([diff](http://site.net/apps/trac/project/ticket/205?action=diff&version=1))':
+            '**description** modified ([diff](205))',
 
-                'Fixed in [r1000](http://site.net/apps/trac/project/changeset/1000)':
-                'Fixed in [r1000](r1000)',
+            'Fixed in [r1000](http://site.net/apps/trac/project/changeset/1000)':
+            'Fixed in [r1000](r1000)',
 
-                '[[Double brackets]](1) the [[whole way]](2).':
-                '[\[Double brackets\]](1) the [\[whole way\]](2).',
+            '[[Double brackets]](1) the [[whole way]](2).':
+            '[\[Double brackets\]](1) the [\[whole way\]](2).',
 
-                '#200 unchanged':
-                '#200 unchanged',
-            }
+            '#200 unchanged':
+            '#200 unchanged',
+        }
         for input, expected in cases.items():
             actual = import_support.link_processing(input)
             self.assertEqual(actual, expected)
 
 
 class TestTracImportSupportFunctional(TestRestApiBase, TestCase):
+
     @with_tracker
     def test_links(self):
-        doc_text = open(os.path.dirname(__file__) + '/data/trac-export.json').read()
+        doc_text = open(os.path.dirname(__file__)
+                        + '/data/trac-export.json').read()
 
         TracImportSupport().perform_import(doc_text,
-                '{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
+                                           '{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
 
         r = self.app.get('/p/test/bugs/204/')
         ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
-                                    ticket_num=204)
+                                     ticket_num=204)
         slug = ticket.discussion_thread.post_class().query.find(dict(
             discussion_id=ticket.discussion_thread.discussion_id,
             thread_id=ticket.discussion_thread._id,
@@ -205,21 +217,24 @@ class TestTracImportSupportFunctional(TestRestApiBase, TestCase):
 
     @with_tracker
     def test_slug(self):
-        doc_text = open(os.path.dirname(__file__) + '/data/trac-export.json').read()
+        doc_text = open(os.path.dirname(__file__)
+                        + '/data/trac-export.json').read()
 
         TracImportSupport().perform_import(doc_text,
-                '{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
+                                           '{"user_map": {"hinojosa4": "test-admin", "ma_boehm": "test-user"}}')
 
         ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
-                                    ticket_num=204)
+                                     ticket_num=204)
         comments = ticket.discussion_thread.post_class().query.find(dict(
             discussion_id=ticket.discussion_thread.discussion_id,
             thread_id=ticket.discussion_thread._id,
             status={'$in': ['ok', 'pending']})).sort('timestamp').all()
 
         import_support = TracImportSupport()
-        self.assertEqual(import_support.get_slug_by_id('204', '1'), comments[0].slug)
-        self.assertEqual(import_support.get_slug_by_id('204', '2'), comments[1].slug)
+        self.assertEqual(
+            import_support.get_slug_by_id('204', '1'), comments[0].slug)
+        self.assertEqual(
+            import_support.get_slug_by_id('204', '2'), comments[1].slug)
 
     @with_tracker
     @skipif(module_not_available('html2text'))
@@ -233,14 +248,16 @@ class TestTracImportSupportFunctional(TestRestApiBase, TestCase):
             te.csvopen = lambda s: csv_fp
         with patch('allura.scripts.trac_export.urlopen', return_value=html_fp):
             json_data = {
-                    'class': 'PROJECT',
-                    'trackers': {'default': {'artifacts': list(te)}},
-                }
+                'class': 'PROJECT',
+                'trackers': {'default': {'artifacts': list(te)}},
+            }
         TracImportSupport().perform_import(
-                json.dumps(json_data, cls=DateJSONEncoder),
-                '{"user_map": {}}')
+            json.dumps(json_data, cls=DateJSONEncoder),
+            '{"user_map": {}}')
         ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
-                                    ticket_num=390)
-        self.assertIn('To reproduce:  \n\\- open an mzML file', ticket.description)
-        self.assertIn('duplicate of:  \n\\- [#316](316)', ticket.discussion_thread.find_posts()[0].text)
+                                     ticket_num=390)
+        self.assertIn('To reproduce:  \n\\- open an mzML file',
+                      ticket.description)
+        self.assertIn('duplicate of:  \n\\- [#316](316)',
+                      ticket.discussion_thread.find_posts()[0].text)
         self.assertIn('will crash TOPPView.', ticket.description)
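
test_import_tool above stacks five @patch decorators; mock applies them bottom-up, so the mock for the decorator nearest the function arrives first in the argument list (export, then ImportSupport, and so on). A tiny self-contained illustration of that ordering, using throwaway names:

import mock

class Thing(object):
    a = 1
    b = 2

# The decorator closest to the function is applied first, so its mock is the
# first extra argument: mock_a corresponds to 'a', mock_b to 'b'.
@mock.patch.object(Thing, 'b')
@mock.patch.object(Thing, 'a')
def check_order(mock_a, mock_b):
    assert Thing.a is mock_a
    assert Thing.b is mock_b

check_order()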

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/trac/tickets.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/tickets.py b/ForgeImporters/forgeimporters/trac/tickets.py
index 4fd8fdf..c2c28c1 100644
--- a/ForgeImporters/forgeimporters/trac/tickets.py
+++ b/ForgeImporters/forgeimporters/trac/tickets.py
@@ -24,15 +24,15 @@ from ming.orm import session
 from pylons import tmpl_context as c
 from pylons import app_globals as g
 from tg import (
-        expose,
-        flash,
-        redirect,
-        validate,
-        )
+    expose,
+    flash,
+    redirect,
+    validate,
+)
 from tg.decorators import (
-        with_trailing_slash,
-        without_trailing_slash,
-        )
+    with_trailing_slash,
+    without_trailing_slash,
+)
 
 from allura.controllers import BaseController
 from allura.lib.decorators import require_post
@@ -40,14 +40,14 @@ from allura.lib import validators as v
 from allura.lib import helpers as h
 from allura.model import AuditLog
 from allura.scripts.trac_export import (
-        export,
-        DateJSONEncoder,
-        )
+    export,
+    DateJSONEncoder,
+)
 
 from forgeimporters.base import (
-        ToolImporter,
-        ToolImportForm,
-        )
+    ToolImporter,
+    ToolImportForm,
+)
 from forgetracker.tracker_main import ForgeTrackerApp
 from forgetracker.import_support import ImportSupport
 from forgetracker import model as TM
@@ -59,6 +59,7 @@ class TracTicketImportForm(ToolImportForm):
 
 
 class TracTicketImportController(BaseController):
+
     def __init__(self):
         self.importer = TracTicketImporter()
 
@@ -70,7 +71,7 @@ class TracTicketImportController(BaseController):
     @expose('jinja:forgeimporters.trac:templates/tickets/index.html')
     def index(self, **kw):
         return dict(importer=self.importer,
-                target_app=self.target_app)
+                    target_app=self.target_app)
 
     @without_trailing_slash
     @expose()
@@ -79,15 +80,16 @@ class TracTicketImportController(BaseController):
     def create(self, trac_url, mount_point, mount_label, user_map=None, **kw):
         if self.importer.enforce_limit(c.project):
             self.importer.post(
-                    project_name=trac_url,
-                    mount_point=mount_point,
-                    mount_label=mount_label,
-                    trac_url=trac_url,
-                    user_map=user_map)
+                project_name=trac_url,
+                mount_point=mount_point,
+                mount_label=mount_label,
+                trac_url=trac_url,
+                user_map=user_map)
             flash('Ticket import has begun. Your new tracker will be available '
-                    'when the import is complete.')
+                  'when the import is complete.')
         else:
-            flash('There are too many imports pending at this time.  Please wait and try again.', 'error')
+            flash(
+                'There are too many imports pending at this time.  Please wait and try again.', 'error')
         redirect(c.project.url() + 'admin/')
 
 
@@ -99,39 +101,39 @@ class TracTicketImporter(ToolImporter):
     tool_description = 'Import your tickets from Trac'
 
     def import_tool(self, project, user, project_name=None, mount_point=None,
-            mount_label=None, trac_url=None, user_map=None, **kw):
+                    mount_label=None, trac_url=None, user_map=None, **kw):
         """ Import Trac tickets into a new Allura Tracker tool.
 
         """
         trac_url = trac_url.rstrip('/') + '/'
         mount_point = mount_point or 'tickets'
         app = project.install_app(
-                'Tickets',
-                mount_point=mount_point,
-                mount_label=mount_label or 'Tickets',
-                open_status_names='new assigned accepted reopened',
-                closed_status_names='closed',
-                import_id={
-                        'source': self.source,
-                        'trac_url': trac_url,
-                    },
-            )
+            'Tickets',
+            mount_point=mount_point,
+            mount_label=mount_label or 'Tickets',
+            open_status_names='new assigned accepted reopened',
+            closed_status_names='closed',
+            import_id={
+                'source': self.source,
+                'trac_url': trac_url,
+            },
+        )
         session(app.config).flush(app.config)
         session(app.globals).flush(app.globals)
         try:
             with h.push_config(c, app=app):
                 TracImportSupport().perform_import(
-                        json.dumps(export(trac_url), cls=DateJSONEncoder),
-                        json.dumps({
-                            'user_map': json.loads(user_map) if user_map else {},
-                            'usernames_match': self.usernames_match(trac_url),
-                            }),
-                        )
+                    json.dumps(export(trac_url), cls=DateJSONEncoder),
+                    json.dumps({
+                        'user_map': json.loads(user_map) if user_map else {},
+                        'usernames_match': self.usernames_match(trac_url),
+                    }),
+                )
             AuditLog.log(
                 'import tool %s from %s' % (
-                        app.config.options.mount_point,
-                        trac_url,
-                    ),
+                    app.config.options.mount_point,
+                    trac_url,
+                ),
                 project=project, user=user, url=app.url,
             )
             g.post_event('project_updated')
@@ -156,6 +158,7 @@ class TracTicketImporter(ToolImporter):
 
 
 class TracImportSupport(ImportSupport):
+
     """Provides Trac-specific ticket and comment text processing."""
 
     def ticket_link(self, m):
@@ -176,7 +179,7 @@ class TracImportSupport(ImportSupport):
             status={'$in': ['ok', 'pending']})).sort('timestamp')
 
         if comment <= comments.count():
-            return comments.all()[comment-1].slug
+            return comments.all()[comment - 1].slug
 
     def comment_link(self, m):
         """Convert a Trac-style comment url to it's equivalent Allura url."""
@@ -205,9 +208,11 @@ class TracImportSupport(ImportSupport):
             * Escape double-brackets
 
         """
-        comment_pattern = re.compile('\[(\S*\s*\S*)\]\(\S*/(\d+\n*\d*)#comment:(\d+)\)')
+        comment_pattern = re.compile(
+            '\[(\S*\s*\S*)\]\(\S*/(\d+\n*\d*)#comment:(\d+)\)')
         ticket_pattern = re.compile('(?<=\])\(\S*ticket/(\d+)(?:\?[^)]*)?\)')
-        changeset_pattern = re.compile(r'(?<=\])\(\S*/changeset/(\d+)(?:\?[^]]*)?\)')
+        changeset_pattern = re.compile(
+            r'(?<=\])\(\S*/changeset/(\d+)(?:\?[^]]*)?\)')
         brackets_pattern = re.compile('\[\[([^]]*)\]\]')
 
         text = comment_pattern.sub(self.comment_link, text)
@@ -223,4 +228,3 @@ class TracImportSupport(ImportSupport):
     def description_processing(self, description_text):
         """Modify ticket description before ticket is created."""
         return self.link_processing(description_text)
-
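
For readers decoding the regexes in link_processing above: the ticket pattern rewrites an absolute Trac ticket URL inside a Markdown link down to the bare ticket number. A runnable sketch, with a literal replacement standing in for the ticket_link callback used in the real code:

import re

# Same ticket_pattern as above; r'(\1)' stands in for the ticket_link method,
# which produces the relative Allura-style link target.
ticket_pattern = re.compile(r'(?<=\])\(\S*ticket/(\d+)(?:\?[^)]*)?\)')

text = 'test ticket ([#201](http://site.net/apps/trac/project/ticket/201))'
print(ticket_pattern.sub(r'(\1)', text))
# -> test ticket ([#201](201))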

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeLink/forgelink/link_main.py
----------------------------------------------------------------------
diff --git a/ForgeLink/forgelink/link_main.py b/ForgeLink/forgelink/link_main.py
index d37db1b..0f2292a 100644
--- a/ForgeLink/forgelink/link_main.py
+++ b/ForgeLink/forgelink/link_main.py
@@ -40,25 +40,26 @@ log = logging.getLogger(__name__)
 
 
 class ForgeLinkApp(Application):
+
     '''This is the Link app for PyForge'''
     __version__ = version.__version__
-    permissions = [ 'configure', 'read' ]
+    permissions = ['configure', 'read']
     permissions_desc = {
         'read': 'View link.',
     }
     config_options = Application.config_options + [
         ConfigOption('url', str, None)
     ]
-    searchable=True
-    exportable=True
-    tool_label='External Link'
-    default_mount_label='Link name'
-    default_mount_point='link'
-    ordinal=1
-    icons={
-        24:'images/ext_24.png',
-        32:'images/ext_32.png',
-        48:'images/ext_48.png'
+    searchable = True
+    exportable = True
+    tool_label = 'External Link'
+    default_mount_label = 'Link name'
+    default_mount_point = 'link'
+    ordinal = 1
+    icons = {
+        24: 'images/ext_24.png',
+        32: 'images/ext_32.png',
+        48: 'images/ext_48.png'
     }
 
     def __init__(self, project, config):
@@ -71,7 +72,7 @@ class ForgeLinkApp(Application):
     @h.exceptionless([], log)
     def sitemap(self):
         menu_id = self.config.options.mount_label
-        return [SitemapEntry(menu_id, '.')[self.sidebar_menu()] ]
+        return [SitemapEntry(menu_id, '.')[self.sidebar_menu()]]
 
     def sidebar_menu(self):
         return []
@@ -89,14 +90,15 @@ class ForgeLinkApp(Application):
         self.config.acl = [
             M.ACE.allow(role_anon, 'read'),
             M.ACE.allow(role_admin, 'configure'),
-            ]
+        ]
 
     def uninstall(self, project):
         "Remove all the tool's artifacts from the database"
         super(ForgeLinkApp, self).uninstall(project)
 
     def bulk_export(self, f):
-        json.dump(RootRestController(self).link_json(), f, cls=jsonify.GenericJSON, indent=2)
+        json.dump(RootRestController(self).link_json(),
+                  f, cls=jsonify.GenericJSON, indent=2)
 
 
 class RootController(BaseController):
@@ -125,7 +127,7 @@ class LinkAdminController(DefaultAdminController):
     @expose()
     def index(self, **kw):
         flash('External link URL updated.')
-        redirect(c.project.url()+'admin/tools')
+        redirect(c.project.url() + 'admin/tools')
 
 
 class RootRestController(BaseController):
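
bulk_export above serializes the tool's data with json.dump and an encoder class passed via cls= (jsonify.GenericJSON). The underlying mechanism is just a json.JSONEncoder subclass; a self-contained sketch with a hypothetical date-aware encoder, not the encoder the app actually uses:

import datetime
import json

class DateEncoder(json.JSONEncoder):
    # Teach json.dump how to handle values the stock encoder rejects.
    def default(self, obj):
        if isinstance(obj, (datetime.date, datetime.datetime)):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)

with open('link.json', 'w') as f:
    json.dump({'url': 'http://example.com', 'exported': datetime.date(2014, 1, 10)},
              f, cls=DateEncoder, indent=2)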

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeLink/forgelink/tests/functional/test_rest.py
----------------------------------------------------------------------
diff --git a/ForgeLink/forgelink/tests/functional/test_rest.py b/ForgeLink/forgelink/tests/functional/test_rest.py
index 05a55a2..d7bb7da 100644
--- a/ForgeLink/forgelink/tests/functional/test_rest.py
+++ b/ForgeLink/forgelink/tests/functional/test_rest.py
@@ -38,10 +38,12 @@ class TestLinkApi(TestRestApiBase):
         r = self.api_get(u'/rest/p/test/link'.encode('utf-8'))
         assert_equal(r.json['url'], None)
 
-        r = self.api_post(u'/rest/p/test/link'.encode('utf-8'), url='http://google.com')
+        r = self.api_post(u'/rest/p/test/link'.encode('utf-8'),
+                          url='http://google.com')
         assert_equal(r.json['url'], 'http://google.com')
 
-        self.api_post(u'/rest/p/test/link'.encode('utf-8'), url='http://yahoo.com')
+        self.api_post(u'/rest/p/test/link'.encode('utf-8'),
+                      url='http://yahoo.com')
         r = self.api_get(u'/rest/p/test/link'.encode('utf-8'))
         assert_equal(r.json['url'], 'http://yahoo.com')
 
@@ -50,13 +52,15 @@ class TestLinkApi(TestRestApiBase):
         assert_equal(r.json['url'], 'http://yahoo.com')
 
     def test_rest_link_get_permissions(self):
-        self.app.get('/rest/p/test/link', extra_environ={'username': '*anonymous'}, status=200)
+        self.app.get('/rest/p/test/link',
+                     extra_environ={'username': '*anonymous'}, status=200)
         p = M.Project.query.get(shortname='test')
         acl = p.app_instance('link').config.acl
         anon = M.ProjectRole.by_name('*anonymous')._id
         anon_read = M.ACE.allow(anon, 'read')
         acl.remove(anon_read)
-        self.app.get('/rest/p/test/link', extra_environ={'username': '*anonymous'}, status=401)
+        self.app.get('/rest/p/test/link',
+                     extra_environ={'username': '*anonymous'}, status=401)
 
     def test_rest_link_post_permissions(self):
         self.app.post('/rest/p/test/link',
@@ -74,7 +78,3 @@ class TestLinkApi(TestRestApiBase):
                       status=200)
         r = self.api_get(u'/rest/p/test/link'.encode('utf-8'))
         assert_equal(r.json['url'], 'http://yahoo.com')
-
-
-
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeLink/forgelink/tests/functional/test_root.py
----------------------------------------------------------------------
diff --git a/ForgeLink/forgelink/tests/functional/test_root.py b/ForgeLink/forgelink/tests/functional/test_root.py
index a8f6778..ca57244 100644
--- a/ForgeLink/forgelink/tests/functional/test_root.py
+++ b/ForgeLink/forgelink/tests/functional/test_root.py
@@ -20,6 +20,7 @@ from alluratest.controller import TestController
 
 
 class TestRootController(TestController):
+
     def test_root_index_no_url(self):
         response = self.app.get('/link/index')
         assert 'Link is not configured' in response
@@ -57,4 +58,3 @@ class TestRootController(TestController):
         response = self.app.get('/link/help')
         # HACK: support for remote redirects is limited in follow()
         assert 'http://www.google.de/search?q=help' in response
-
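
The functional tests above exercise the full WSGI stack: assuming self.app in these test helpers wraps a WebTest-style TestApp, asserting a substring directly against the response checks the body. A minimal sketch against a throwaway WSGI app (the app and strings are invented for illustration):

from webtest import TestApp

def fake_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Link is not configured']

app = TestApp(fake_app)
response = app.get('/link/index')
assert 'Link is not configured' in response   # TestResponse supports 'in' checks
assert response.status_int == 200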


[23/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/notification.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/notification.py b/Allura/allura/model/notification.py
index 5743ffe..10689f1 100644
--- a/Allura/allura/model/notification.py
+++ b/Allura/allura/model/notification.py
@@ -58,9 +58,11 @@ from .auth import User
 
 log = logging.getLogger(__name__)
 
-MAILBOX_QUIESCENT=None # Re-enable with [#1384]: timedelta(minutes=10)
+MAILBOX_QUIESCENT = None  # Re-enable with [#1384]: timedelta(minutes=10)
+
 
 class Notification(MappedClass):
+
     '''
     Temporarily store notifications that will be emailed or displayed as a web flash.
     This does not contain any recipient information.
@@ -74,31 +76,33 @@ class Notification(MappedClass):
     _id = FieldProperty(str, if_missing=h.gen_message_id)
 
     # Classify notifications
-    neighborhood_id = ForeignIdProperty('Neighborhood', if_missing=lambda:c.project.neighborhood._id)
-    project_id = ForeignIdProperty('Project', if_missing=lambda:c.project._id)
-    app_config_id = ForeignIdProperty('AppConfig', if_missing=lambda:c.app.config._id)
-    tool_name = FieldProperty(str, if_missing=lambda:c.app.config.tool_name)
+    neighborhood_id = ForeignIdProperty(
+        'Neighborhood', if_missing=lambda: c.project.neighborhood._id)
+    project_id = ForeignIdProperty('Project', if_missing=lambda: c.project._id)
+    app_config_id = ForeignIdProperty(
+        'AppConfig', if_missing=lambda: c.app.config._id)
+    tool_name = FieldProperty(str, if_missing=lambda: c.app.config.tool_name)
     ref_id = ForeignIdProperty('ArtifactReference')
     topic = FieldProperty(str)
 
     # Notification Content
-    in_reply_to=FieldProperty(str)
-    references=FieldProperty([str])
-    from_address=FieldProperty(str)
-    reply_to_address=FieldProperty(str)
-    subject=FieldProperty(str)
-    text=FieldProperty(str)
-    link=FieldProperty(str)
-    author_id=ForeignIdProperty('User')
-    feed_meta=FieldProperty(S.Deprecated)
+    in_reply_to = FieldProperty(str)
+    references = FieldProperty([str])
+    from_address = FieldProperty(str)
+    reply_to_address = FieldProperty(str)
+    subject = FieldProperty(str)
+    text = FieldProperty(str)
+    link = FieldProperty(str)
+    author_id = ForeignIdProperty('User')
+    feed_meta = FieldProperty(S.Deprecated)
     artifact_reference = FieldProperty(S.Deprecated)
     pubdate = FieldProperty(datetime, if_missing=datetime.utcnow)
 
     ref = RelationProperty('ArtifactReference')
 
     view = jinja2.Environment(
-            loader=jinja2.PackageLoader('allura', 'templates'),
-            auto_reload=asbool(config.get('auto_reload_templates', True)),
+        loader=jinja2.PackageLoader('allura', 'templates'),
+        auto_reload=asbool(config.get('auto_reload_templates', True)),
     )
 
     @classmethod
@@ -155,19 +159,24 @@ class Notification(MappedClass):
                     attach.file.seek(0, 2)
                     bytecount = attach.file.tell()
                     attach.file.seek(0)
-                    text = "%s %s (%s; %s) " % (text, attach.filename, h.do_filesizeformat(bytecount), attach.type)
+                    text = "%s %s (%s; %s) " % (
+                        text, attach.filename, h.do_filesizeformat(bytecount), attach.type)
 
             subject = post.subject or ''
             if post.parent_id and not subject.lower().startswith('re:'):
                 subject = 'Re: ' + subject
             author = post.author()
             msg_id = artifact.url() + post._id
-            parent_msg_id = artifact.url() + post.parent_id if post.parent_id else artifact.message_id()
+            parent_msg_id = artifact.url() + \
+                post.parent_id if post.parent_id else artifact.message_id(
+                )
             d = dict(
                 _id=msg_id,
-                from_address=str(author._id) if author != User.anonymous() else None,
+                from_address=str(
+                    author._id) if author != User.anonymous() else None,
                 reply_to_address='"%s" <%s>' % (
-                    subject_prefix, getattr(artifact, 'email_address', u'noreply@in.sf.net')),
+                    subject_prefix, getattr(
+                        artifact, 'email_address', u'noreply@in.sf.net')),
                 subject=subject_prefix + subject,
                 text=text,
                 in_reply_to=parent_msg_id,
@@ -181,7 +190,7 @@ class Notification(MappedClass):
             return n
         else:
             subject = kwargs.pop('subject', '%s modified by %s' % (
-                    h.get_first(idx, 'title'),c.user.get_pref('display_name')))
+                h.get_first(idx, 'title'), c.user.get_pref('display_name')))
             reply_to = '"%s" <%s>' % (
                 h.get_first(idx, 'title'),
                 getattr(artifact, 'email_address', u'noreply@in.sf.net'))
@@ -206,23 +215,27 @@ class Notification(MappedClass):
             d['text'] = ''
         try:
             ''' Add addional text to the notification e-mail based on the artifact type '''
-            template = cls.view.get_template('mail/' + artifact.type_s + '.txt')
-            d['text'] += template.render(dict(c=c, g=g, config=config, data=artifact, post=post, h=h))
+            template = cls.view.get_template(
+                'mail/' + artifact.type_s + '.txt')
+            d['text'] += template.render(dict(c=c, g=g,
+                                         config=config, data=artifact, post=post, h=h))
         except jinja2.TemplateNotFound:
             pass
         except:
             ''' Catch any errors loading or rendering the template,
             but the notification still gets sent if there is an error
             '''
-            log.warn('Could not render notification template %s' % artifact.type_s, exc_info=True)
+            log.warn('Could not render notification template %s' %
+                     artifact.type_s, exc_info=True)
 
         assert d['reply_to_address'] is not None
         project = c.project
         if d.get('project_id', c.project._id) != c.project._id:
             project = Project.query.get(_id=d['project_id'])
         if project.notifications_disabled:
-            log.info('Notifications disabled for project %s, not sending %s(%r)',
-                     project.shortname, topic, artifact)
+            log.info(
+                'Notifications disabled for project %s, not sending %s(%r)',
+                project.shortname, topic, artifact)
             return None
         n = cls(ref_id=artifact.index_id(),
                 topic=topic,
@@ -264,10 +277,12 @@ class Notification(MappedClass):
     def send_direct(self, user_id):
         user = User.query.get(_id=ObjectId(user_id), disabled=False)
         artifact = self.ref.artifact
-        log.debug('Sending direct notification %s to user %s', self._id, user_id)
+        log.debug('Sending direct notification %s to user %s',
+                  self._id, user_id)
         # Don't send if user disabled
         if not user:
-            log.debug("Skipping notification - enabled user %s not found" % user_id)
+            log.debug("Skipping notification - enabled user %s not found" %
+                      user_id)
             return
         # Don't send if user doesn't have read perms to the artifact
         if user and artifact and \
@@ -275,9 +290,10 @@ class Notification(MappedClass):
             log.debug("Skipping notification - User %s doesn't have read "
                       "access to artifact %s" % (user_id, str(self.ref_id)))
             log.debug("User roles [%s]; artifact ACL [%s]; PSC ACL [%s]",
-                    ', '.join([str(r) for r in security.Credentials.get().user_roles(user_id=user_id, project_id=artifact.project._id).reaching_ids]),
-                    ', '.join([str(a) for a in artifact.acl]),
-                    ', '.join([str(a) for a in artifact.parent_security_context().acl]))
+                      ', '.join([str(r) for r in security.Credentials.get().user_roles(
+                          user_id=user_id, project_id=artifact.project._id).reaching_ids]),
+                      ', '.join([str(a) for a in artifact.acl]),
+                      ', '.join([str(a) for a in artifact.parent_security_context().acl]))
             return
         allura.tasks.mail_tasks.sendmail.post(
             destinations=[str(user_id)],
@@ -293,23 +309,27 @@ class Notification(MappedClass):
     @classmethod
     def send_digest(self, user_id, from_address, subject, notifications,
                     reply_to_address=None):
-        if not notifications: return
+        if not notifications:
+            return
         user = User.query.get(_id=ObjectId(user_id), disabled=False)
         if not user:
-            log.debug("Skipping notification - enabled user %s not found " % user_id)
+            log.debug("Skipping notification - enabled user %s not found " %
+                      user_id)
             return
         # Filter out notifications for which the user doesn't have read
         # permissions to the artifact.
         artifact = self.ref.artifact
+
         def perm_check(notification):
             return not (user and artifact) or \
-                    security.has_access(artifact, 'read', user)()
+                security.has_access(artifact, 'read', user)()
         notifications = filter(perm_check, notifications)
 
-        log.debug('Sending digest of notifications [%s] to user %s', ', '.join([n._id for n in notifications]), user_id)
+        log.debug('Sending digest of notifications [%s] to user %s', ', '.join(
+            [n._id for n in notifications]), user_id)
         if reply_to_address is None:
             reply_to_address = from_address
-        text = [ 'Digest of %s' % subject ]
+        text = ['Digest of %s' % subject]
         for n in notifications:
             text.append('From: %s' % n.from_address)
             text.append('Subject: %s' % (n.subject or '(no subject)'))
@@ -328,9 +348,11 @@ class Notification(MappedClass):
 
     @classmethod
     def send_summary(self, user_id, from_address, subject, notifications):
-        if not notifications: return
-        log.debug('Sending summary of notifications [%s] to user %s', ', '.join([n._id for n in notifications]), user_id)
-        text = [ 'Digest of %s' % subject ]
+        if not notifications:
+            return
+        log.debug('Sending summary of notifications [%s] to user %s', ', '.join(
+            [n._id for n in notifications]), user_id)
+        text = ['Digest of %s' % subject]
         for n in notifications:
             text.append('From: %s' % n.from_address)
             text.append('Subject: %s' % (n.subject or '(no subject)'))
@@ -349,6 +371,7 @@ class Notification(MappedClass):
 
 
 class Mailbox(MappedClass):
+
     '''
     Holds a queue of notifications for an artifact, or a user (webflash messages)
     for a subscriber.
@@ -361,19 +384,21 @@ class Mailbox(MappedClass):
         unique_indexes = [
             ('user_id', 'project_id', 'app_config_id',
              'artifact_index_id', 'topic', 'is_flash'),
-            ]
+        ]
         indexes = [
             ('project_id', 'artifact_index_id'),
             ('is_flash', 'user_id'),
             ('type', 'next_scheduled'),  # for q_digest
             ('type', 'queue_empty'),  # for q_direct
-            ('project_id', 'app_config_id', 'artifact_index_id', 'topic'), # for deliver()
+            # for deliver()
+            ('project_id', 'app_config_id', 'artifact_index_id', 'topic'),
         ]
 
     _id = FieldProperty(S.ObjectId)
-    user_id = ForeignIdProperty('User', if_missing=lambda:c.user._id)
-    project_id = ForeignIdProperty('Project', if_missing=lambda:c.project._id)
-    app_config_id = ForeignIdProperty('AppConfig', if_missing=lambda:c.app.config._id)
+    user_id = ForeignIdProperty('User', if_missing=lambda: c.user._id)
+    project_id = ForeignIdProperty('Project', if_missing=lambda: c.project._id)
+    app_config_id = ForeignIdProperty(
+        'AppConfig', if_missing=lambda: c.app.config._id)
 
     # Subscription filters
     artifact_title = FieldProperty(str)
@@ -385,9 +410,9 @@ class Mailbox(MappedClass):
     is_flash = FieldProperty(bool, if_missing=False)
     type = FieldProperty(S.OneOf('direct', 'digest', 'summary', 'flash'))
     frequency = FieldProperty(dict(
-            n=int,unit=S.OneOf('day', 'week', 'month')))
+        n=int, unit=S.OneOf('day', 'week', 'month')))
     next_scheduled = FieldProperty(datetime, if_missing=datetime.utcnow)
-    last_modified = FieldProperty(datetime, if_missing=datetime(2000,1,1))
+    last_modified = FieldProperty(datetime, if_missing=datetime(2000, 1, 1))
 
     # a list of notification _id values
     queue = FieldProperty([str])
@@ -398,17 +423,20 @@ class Mailbox(MappedClass):
 
     @classmethod
     def subscribe(
-        cls,
-        user_id=None, project_id=None, app_config_id=None,
-        artifact=None, topic=None,
-        type='direct', n=1, unit='day'):
-        if user_id is None: user_id = c.user._id
-        if project_id is None: project_id = c.project._id
-        if app_config_id is None: app_config_id = c.app.config._id
+            cls,
+            user_id=None, project_id=None, app_config_id=None,
+            artifact=None, topic=None,
+            type='direct', n=1, unit='day'):
+        if user_id is None:
+            user_id = c.user._id
+        if project_id is None:
+            project_id = c.project._id
+        if app_config_id is None:
+            app_config_id = c.app.config._id
         tool_already_subscribed = cls.query.get(user_id=user_id,
-            project_id=project_id,
-            app_config_id=app_config_id,
-            artifact_index_id=None)
+                                                project_id=project_id,
+                                                app_config_id=app_config_id,
+                                                artifact_index_id=None)
         if tool_already_subscribed:
             return
         if artifact is None:
@@ -421,13 +449,14 @@ class Mailbox(MappedClass):
             artifact_url = artifact.url()
             artifact_index_id = i['id']
             artifact_already_subscribed = cls.query.get(user_id=user_id,
-                project_id=project_id,
-                app_config_id=app_config_id,
-                artifact_index_id=artifact_index_id)
+                                                        project_id=project_id,
+                                                        app_config_id=app_config_id,
+                                                        artifact_index_id=artifact_index_id)
             if artifact_already_subscribed:
                 return
-        d = dict(user_id=user_id, project_id=project_id, app_config_id=app_config_id,
-                 artifact_index_id=artifact_index_id, topic=topic)
+        d = dict(
+            user_id=user_id, project_id=project_id, app_config_id=app_config_id,
+            artifact_index_id=artifact_index_id, topic=topic)
         sess = session(cls)
         try:
             mbox = cls(
@@ -446,44 +475,51 @@ class Mailbox(MappedClass):
             mbox.frequency.unit = unit
             sess.flush(mbox)
         if not artifact_index_id:
-            # Unsubscribe from individual artifacts when subscribing to the tool
+            # Unsubscribe from individual artifacts when subscribing to the
+            # tool
             for other_mbox in cls.query.find(dict(
-                user_id=user_id, project_id=project_id, app_config_id=app_config_id)):
+                    user_id=user_id, project_id=project_id, app_config_id=app_config_id)):
                 if other_mbox is not mbox:
                     other_mbox.delete()
 
     @classmethod
     def unsubscribe(
-        cls,
-        user_id=None, project_id=None, app_config_id=None,
-        artifact_index_id=None, topic=None):
-        if user_id is None: user_id = c.user._id
-        if project_id is None: project_id = c.project._id
-        if app_config_id is None: app_config_id = c.app.config._id
+            cls,
+            user_id=None, project_id=None, app_config_id=None,
+            artifact_index_id=None, topic=None):
+        if user_id is None:
+            user_id = c.user._id
+        if project_id is None:
+            project_id = c.project._id
+        if app_config_id is None:
+            app_config_id = c.app.config._id
         cls.query.remove(dict(
-                user_id=user_id,
-                project_id=project_id,
-                app_config_id=app_config_id,
-                artifact_index_id=artifact_index_id,
-                topic=topic))
+            user_id=user_id,
+            project_id=project_id,
+            app_config_id=app_config_id,
+            artifact_index_id=artifact_index_id,
+            topic=topic))
 
     @classmethod
     def subscribed(
-        cls, user_id=None, project_id=None, app_config_id=None,
-        artifact=None, topic=None):
-        if user_id is None: user_id = c.user._id
-        if project_id is None: project_id = c.project._id
-        if app_config_id is None: app_config_id = c.app.config._id
+            cls, user_id=None, project_id=None, app_config_id=None,
+            artifact=None, topic=None):
+        if user_id is None:
+            user_id = c.user._id
+        if project_id is None:
+            project_id = c.project._id
+        if app_config_id is None:
+            app_config_id = c.app.config._id
         if artifact is None:
             artifact_index_id = None
         else:
             i = artifact.index()
             artifact_index_id = i['id']
         return cls.query.find(dict(
-                user_id=user_id,
-                project_id=project_id,
-                app_config_id=app_config_id,
-                artifact_index_id=artifact_index_id)).count() != 0
+            user_id=user_id,
+            project_id=project_id,
+            app_config_id=app_config_id,
+            artifact_index_id=artifact_index_id)).count() != 0
 
     @classmethod
     def deliver(cls, nid, artifact_index_id, topic):
@@ -492,20 +528,21 @@ class Mailbox(MappedClass):
         to the appropriate mailboxes.
         '''
         d = {
-            'project_id':c.project._id,
-            'app_config_id':c.app.config._id,
-            'artifact_index_id':{'$in':[None, artifact_index_id]},
-            'topic':{'$in':[None, topic]}
-            }
+            'project_id': c.project._id,
+            'app_config_id': c.app.config._id,
+            'artifact_index_id': {'$in': [None, artifact_index_id]},
+            'topic': {'$in': [None, topic]}
+        }
         mboxes = cls.query.find(d).all()
-        log.debug('Delivering notification %s to mailboxes [%s]', nid, ', '.join([str(m._id) for m in mboxes]))
+        log.debug('Delivering notification %s to mailboxes [%s]', nid, ', '.join(
+            [str(m._id) for m in mboxes]))
         for mbox in mboxes:
             try:
                 mbox.query.update(
-                    {'$push':dict(queue=nid),
-                     '$set':dict(last_modified=datetime.utcnow(),
-                                 queue_empty=False),
-                    })
+                    {'$push': dict(queue=nid),
+                     '$set': dict(last_modified=datetime.utcnow(),
+                                  queue_empty=False),
+                     })
                 # Make sure the mbox doesn't stick around to be flush()ed
                 session(mbox).expunge(mbox)
             except:
@@ -528,26 +565,29 @@ class Mailbox(MappedClass):
             queue_empty=False,
         )
         if MAILBOX_QUIESCENT:
-            q_direct['last_modified']={'$lt':now - MAILBOX_QUIESCENT}
+            q_direct['last_modified'] = {'$lt': now - MAILBOX_QUIESCENT}
         q_digest = dict(
             type={'$in': ['digest', 'summary']},
-            next_scheduled={'$lt':now})
+            next_scheduled={'$lt': now})
 
         def find_and_modify_direct_mbox():
             return cls.query.find_and_modify(
                 query=q_direct,
                 update={'$set': dict(
-                            queue=[],
-                            queue_empty=True,
-                        )},
+                    queue=[],
+                    queue_empty=True,
+                )},
                 new=False)
 
         for mbox in take_while_true(find_and_modify_direct_mbox):
             try:
                 mbox.fire(now)
             except:
-                log.exception('Error firing mbox: %s with queue: [%s]', str(mbox._id), ', '.join(mbox.queue))
-                raise  # re-raise so we don't keep (destructively) trying to process mboxes
+                log.exception(
+                    'Error firing mbox: %s with queue: [%s]', str(mbox._id), ', '.join(mbox.queue))
+                # re-raise so we don't keep (destructively) trying to process
+                # mboxes
+                raise
 
         for mbox in cls.query.find(q_digest):
             next_scheduled = now
@@ -571,12 +611,14 @@ class Mailbox(MappedClass):
         '''
         Send all notifications that this mailbox has enqueued.
         '''
-        notifications = Notification.query.find(dict(_id={'$in':self.queue}))
+        notifications = Notification.query.find(dict(_id={'$in': self.queue}))
         notifications = notifications.all()
         if len(notifications) != len(self.queue):
-            log.error('Mailbox queue error: Mailbox %s queued [%s], found [%s]', str(self._id), ', '.join(self.queue), ', '.join([n._id for n in notifications]))
+            log.error('Mailbox queue error: Mailbox %s queued [%s], found [%s]', str(
+                self._id), ', '.join(self.queue), ', '.join([n._id for n in notifications]))
         else:
-            log.debug('Firing mailbox %s notifications [%s], found [%s]', str(self._id), ', '.join(self.queue), ', '.join([n._id for n in notifications]))
+            log.debug('Firing mailbox %s notifications [%s], found [%s]', str(
+                self._id), ', '.join(self.queue), ', '.join([n._id for n in notifications]))
         if self.type == 'direct':
             ngroups = defaultdict(list)
             for n in notifications:
@@ -586,7 +628,8 @@ class Mailbox(MappedClass):
                         # Messages must be sent individually so they can be replied
                         # to individually
                     else:
-                        key = (n.subject, n.from_address, n.reply_to_address, n.author_id)
+                        key = (n.subject, n.from_address,
+                               n.reply_to_address, n.author_id)
                         ngroups[key].append(n)
                 except:
                     # log error but keep trying to deliver other notifications,
@@ -633,18 +676,19 @@ class MailFooter(object):
     @classmethod
     def standard(cls, notification):
         return cls._render('mail/footer.txt',
-            notification=notification,
-            prefix=config.get('forgemail.url', 'https://sourceforge.net'))
+                           notification=notification,
+                           prefix=config.get('forgemail.url', 'https://sourceforge.net'))
 
     @classmethod
     def monitored(cls, toaddr, app_url, setting_url):
         return cls._render('mail/monitor_email_footer.txt',
-            email=toaddr,
-            app_url=app_url,
-            setting_url=setting_url)
+                           email=toaddr,
+                           app_url=app_url,
+                           setting_url=setting_url)
 
 
 class SiteNotification(MappedClass):
+
     """
     Storage for site-wide notification.
     """
@@ -656,7 +700,8 @@ class SiteNotification(MappedClass):
     _id = FieldProperty(S.ObjectId)
     content = FieldProperty(str, if_missing='')
     active = FieldProperty(bool, if_missing=True)
-    impressions = FieldProperty(int, if_missing=lambda:config.get('site_notification.impressions', 0))
+    impressions = FieldProperty(
+        int, if_missing=lambda: config.get('site_notification.impressions', 0))
 
     @classmethod
     def current(cls):

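The Mailbox.fire_ready() hunk above drains each ready 'direct' mailbox with an atomic find_and_modify, so the pre-update document still carries the queued notification ids while the stored copy is already emptied. A minimal pymongo sketch of the same claim-and-drain pattern (the db handle, the mailbox collection name, and the deliver() helper are assumptions, not Allura code):

    from datetime import datetime, timedelta

    def drain_direct_mailboxes(db, deliver, quiescent=timedelta(minutes=1)):
        # Claim one quiesced 'direct' mailbox at a time and empty its queue
        # atomically so concurrent workers never deliver the same batch twice.
        now = datetime.utcnow()
        query = {'type': 'direct',
                 'queue_empty': False,
                 'last_modified': {'$lt': now - quiescent}}
        while True:
            # new=False returns the pre-update document, which still holds the
            # queued notification ids; the stored document is already emptied.
            mbox = db.mailbox.find_and_modify(
                query=query,
                update={'$set': {'queue': [], 'queue_empty': True}},
                new=False)
            if mbox is None:
                break
            deliver(mbox['queue'])
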
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/oauth.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/oauth.py b/Allura/allura/model/oauth.py
index 531fceb..670847a 100644
--- a/Allura/allura/model/oauth.py
+++ b/Allura/allura/model/oauth.py
@@ -31,17 +31,19 @@ from .types import MarkdownCache
 
 log = logging.getLogger(__name__)
 
+
 class OAuthToken(MappedClass):
+
     class __mongometa__:
         session = main_orm_session
-        name='oauth_token'
-        indexes = [ 'api_key' ]
-        polymorphic_on='type'
-        polymorphic_identity=None
+        name = 'oauth_token'
+        indexes = ['api_key']
+        polymorphic_on = 'type'
+        polymorphic_identity = None
 
     _id = FieldProperty(S.ObjectId)
-    type=FieldProperty(str)
-    api_key = FieldProperty(str, if_missing=lambda:h.nonce(20))
+    type = FieldProperty(str)
+    api_key = FieldProperty(str, if_missing=lambda: h.nonce(20))
     secret_key = FieldProperty(str, if_missing=h.cryptographic_nonce)
 
     def to_string(self):
@@ -50,19 +52,20 @@ class OAuthToken(MappedClass):
     def as_token(self):
         return oauth.Token(self.api_key, self.secret_key)
 
+
 class OAuthConsumerToken(OAuthToken):
+
     class __mongometa__:
-        polymorphic_identity='consumer'
-        name='oauth_consumer_token'
-        unique_indexes = [ 'name' ]
+        polymorphic_identity = 'consumer'
+        name = 'oauth_consumer_token'
+        unique_indexes = ['name']
 
     type = FieldProperty(str, if_missing='consumer')
-    user_id = ForeignIdProperty('User', if_missing=lambda:c.user._id)
+    user_id = ForeignIdProperty('User', if_missing=lambda: c.user._id)
     name = FieldProperty(str)
     description = FieldProperty(str)
     description_cache = FieldProperty(MarkdownCache)
 
-
     user = RelationProperty('User')
 
     @property
@@ -76,37 +79,43 @@ class OAuthConsumerToken(OAuthToken):
 
     @classmethod
     def for_user(cls, user=None):
-        if user is None: user = c.user
+        if user is None:
+            user = c.user
         return cls.query.find(dict(user_id=user._id)).all()
 
+
 class OAuthRequestToken(OAuthToken):
+
     class __mongometa__:
-        polymorphic_identity='request'
+        polymorphic_identity = 'request'
 
     type = FieldProperty(str, if_missing='request')
     consumer_token_id = ForeignIdProperty('OAuthConsumerToken')
-    user_id = ForeignIdProperty('User', if_missing=lambda:c.user._id)
+    user_id = ForeignIdProperty('User', if_missing=lambda: c.user._id)
     callback = FieldProperty(str)
     validation_pin = FieldProperty(str)
 
     consumer_token = RelationProperty('OAuthConsumerToken')
 
+
 class OAuthAccessToken(OAuthToken):
+
     class __mongometa__:
-        polymorphic_identity='access'
+        polymorphic_identity = 'access'
 
     type = FieldProperty(str, if_missing='access')
     consumer_token_id = ForeignIdProperty('OAuthConsumerToken')
     request_token_id = ForeignIdProperty('OAuthToken')
-    user_id = ForeignIdProperty('User', if_missing=lambda:c.user._id)
+    user_id = ForeignIdProperty('User', if_missing=lambda: c.user._id)
     is_bearer = FieldProperty(bool, if_missing=False)
 
     user = RelationProperty('User')
-    consumer_token = RelationProperty('OAuthConsumerToken', via='consumer_token_id')
+    consumer_token = RelationProperty(
+        'OAuthConsumerToken', via='consumer_token_id')
     request_token = RelationProperty('OAuthToken', via='request_token_id')
 
     @classmethod
     def for_user(cls, user=None):
-        if user is None: user = c.user
+        if user is None:
+            user = c.user
         return cls.query.find(dict(user_id=user._id, type='access')).all()
-

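The OAuthToken hierarchy above uses Ming's polymorphic mapping: the base class declares polymorphic_on = 'type' and each subclass sets a polymorphic_identity, so a query through the base class materializes whichever subclass matches each document's stored 'type' value. A stripped-down sketch of that pattern, assuming a configured Ming ORM session and a throwaway collection name:

    from ming import schema as S
    from ming.orm import FieldProperty
    from ming.orm.declarative import MappedClass

    class Token(MappedClass):
        class __mongometa__:
            session = main_orm_session   # assumed: a configured Ming ORM session
            name = 'token_sketch'        # assumed collection name for this sketch
            polymorphic_on = 'type'      # discriminator field stored in each document
            polymorphic_identity = None

        _id = FieldProperty(S.ObjectId)
        type = FieldProperty(str)

    class AccessToken(Token):
        class __mongometa__:
            # documents whose 'type' field equals 'access' load as AccessToken
            polymorphic_identity = 'access'

        type = FieldProperty(str, if_missing='access')
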
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/openid_model.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/openid_model.py b/Allura/allura/model/openid_model.py
index 9c401c9..adcf179 100644
--- a/Allura/allura/model/openid_model.py
+++ b/Allura/allura/model/openid_model.py
@@ -27,14 +27,16 @@ from ming.orm.declarative import MappedClass
 from .session import main_doc_session, main_orm_session
 from .session import project_doc_session, project_orm_session
 
+
 class OpenIdAssociation(MappedClass):
+
     class __mongometa__:
-        name='oid_store_assoc'
+        name = 'oid_store_assoc'
         session = main_orm_session
 
-    _id = FieldProperty(str) # server url
+    _id = FieldProperty(str)  # server url
     assocs = FieldProperty([dict(
-                key=str, value=str)])
+        key=str, value=str)])
 
     # Mimic openid.store.memstore.ServerAssocs
     def set_assoc(self, assoc):
@@ -54,7 +56,7 @@ class OpenIdAssociation(MappedClass):
         old_len = len(self.assocs)
         self.assocs = [
             a for a in self.assocs
-            if a['key'] != handle ]
+            if a['key'] != handle]
         return old_len != len(self.assocs)
 
     def best_assoc(self):
@@ -70,19 +72,22 @@ class OpenIdAssociation(MappedClass):
 
     def cleanup_assocs(self):
         old_len = len(self.assocs)
-        self.assocs = [ a for a in self.assocs
-                        if Association.deserialize(a['value']).getExpiresIn() != 0 ]
+        self.assocs = [a for a in self.assocs
+                       if Association.deserialize(a['value']).getExpiresIn() != 0]
         new_len = len(self.assocs)
         return (old_len - new_len), new_len
 
+
 class OpenIdNonce(MappedClass):
+
     class __mongometa__:
-        name='oid_store_nonce'
+        name = 'oid_store_nonce'
         session = main_orm_session
 
-    _id = FieldProperty(str) # Nonce value
+    _id = FieldProperty(str)  # Nonce value
     timestamp = FieldProperty(datetime, if_missing=datetime.utcnow)
-        
+
+
 class OpenIdStore(object):
 
     def _get_assocs(self, server_url):
@@ -90,7 +95,7 @@ class OpenIdStore(object):
         if assoc is None:
             assoc = OpenIdAssociation(_id=server_url)
         return assoc
-    
+
     def storeAssociation(self, server_url, association):
         assocs = self._get_assocs(server_url)
         assocs.set_assoc(deepcopy(association))
@@ -120,6 +125,5 @@ class OpenIdStore(object):
         now = datetime.utcnow()
         cutoff = now - timedelta(seconds=nonce.SKEW)
         num_removed = OpenIdNonce.query.remove(dict(
-                timestamp={'$lt': cutoff}))
+            timestamp={'$lt': cutoff}))
         return num_removed
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/project.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/project.py b/Allura/allura/model/project.py
index 36e29ab..1a52092 100644
--- a/Allura/allura/model/project.py
+++ b/Allura/allura/model/project.py
@@ -56,26 +56,30 @@ from filesystem import File
 
 log = logging.getLogger(__name__)
 
+
 class ProjectFile(File):
+
     class __mongometa__:
         session = main_orm_session
         indexes = [('project_id', 'category')]
 
-    project_id=FieldProperty(S.ObjectId)
-    category=FieldProperty(str)
-    caption=FieldProperty(str)
-    sort=FieldProperty(int)
+    project_id = FieldProperty(S.ObjectId)
+    category = FieldProperty(str)
+    caption = FieldProperty(str)
+    sort = FieldProperty(int)
+
 
 class ProjectCategory(MappedClass):
+
     class __mongometa__:
         session = main_orm_session
-        name='project_category'
+        name = 'project_category'
 
-    _id=FieldProperty(S.ObjectId)
+    _id = FieldProperty(S.ObjectId)
     parent_id = FieldProperty(S.ObjectId, if_missing=None)
-    name=FieldProperty(str)
-    label=FieldProperty(str, if_missing='')
-    description=FieldProperty(str, if_missing='')
+    name = FieldProperty(str)
+    label = FieldProperty(str, if_missing='')
+    description = FieldProperty(str, if_missing='')
 
     @property
     def parent_category(self):
@@ -85,13 +89,15 @@ class ProjectCategory(MappedClass):
     def subcategories(self):
         return self.query.find(dict(parent_id=self._id)).all()
 
+
 class TroveCategory(MappedClass):
+
     class __mongometa__:
         session = main_orm_session
-        name='trove_category'
-        indexes = [ 'trove_cat_id', 'trove_parent_id', 'shortname', 'fullpath' ]
+        name = 'trove_category'
+        indexes = ['trove_cat_id', 'trove_parent_id', 'shortname', 'fullpath']
 
-    _id=FieldProperty(S.ObjectId)
+    _id = FieldProperty(S.ObjectId)
     trove_cat_id = FieldProperty(int, if_missing=None)
     trove_parent_id = FieldProperty(int, if_missing=None)
     shortname = FieldProperty(str, if_missing='')
@@ -145,16 +151,20 @@ class TroveCategory(MappedClass):
             fullpath=self.fullpath,
         )
 
+
 class ProjectMapperExtension(MapperExtension):
+
     def after_insert(self, obj, st, sess):
         g.zarkov_event('project_create', project=obj)
 
+
 class Project(MappedClass, ActivityNode, ActivityObject):
-    _perms_base = [ 'read', 'update', 'admin', 'create']
-    _perms_init = _perms_base + [ 'register' ]
+    _perms_base = ['read', 'update', 'admin', 'create']
+    _perms_init = _perms_base + ['register']
+
     class __mongometa__:
         session = main_orm_session
-        name='project'
+        name = 'project'
         indexes = [
             'name',
             'neighborhood_id',
@@ -164,55 +174,55 @@ class Project(MappedClass, ActivityNode, ActivityObject):
             ('deleted', 'shortname', 'neighborhood_id'),
             ('neighborhood_id', 'is_nbhd_project', 'deleted')]
         unique_indexes = [('neighborhood_id', 'shortname')]
-        extensions = [ ProjectMapperExtension ]
+        extensions = [ProjectMapperExtension]
 
     # Project schema
-    _id=FieldProperty(S.ObjectId)
+    _id = FieldProperty(S.ObjectId)
     parent_id = FieldProperty(S.ObjectId, if_missing=None)
     neighborhood_id = ForeignIdProperty(Neighborhood)
     shortname = FieldProperty(str)
-    name=FieldProperty(str)
-    show_download_button=FieldProperty(S.Deprecated)
-    short_description=FieldProperty(str, if_missing='')
-    summary=FieldProperty(str, if_missing='')
-    description=FieldProperty(str, if_missing='')
+    name = FieldProperty(str)
+    show_download_button = FieldProperty(S.Deprecated)
+    short_description = FieldProperty(str, if_missing='')
+    summary = FieldProperty(str, if_missing='')
+    description = FieldProperty(str, if_missing='')
     description_cache = FieldProperty(MarkdownCache)
-    homepage_title=FieldProperty(str, if_missing='')
-    external_homepage=FieldProperty(str, if_missing='')
-    support_page=FieldProperty(str, if_missing='')
-    support_page_url=FieldProperty(str, if_missing='')
-    socialnetworks=FieldProperty([dict(socialnetwork=str,accounturl=str)])
-    removal=FieldProperty(str, if_missing='')
-    moved_to_url=FieldProperty(str, if_missing='')
+    homepage_title = FieldProperty(str, if_missing='')
+    external_homepage = FieldProperty(str, if_missing='')
+    support_page = FieldProperty(str, if_missing='')
+    support_page_url = FieldProperty(str, if_missing='')
+    socialnetworks = FieldProperty([dict(socialnetwork=str, accounturl=str)])
+    removal = FieldProperty(str, if_missing='')
+    moved_to_url = FieldProperty(str, if_missing='')
     removal_changed_date = FieldProperty(datetime, if_missing=datetime.utcnow)
-    export_controlled=FieldProperty(bool, if_missing=False)
-    export_control_type=FieldProperty(str, if_missing=None)
-    database=FieldProperty(S.Deprecated)
-    database_uri=FieldProperty(S.Deprecated)
-    is_root=FieldProperty(bool)
+    export_controlled = FieldProperty(bool, if_missing=False)
+    export_control_type = FieldProperty(str, if_missing=None)
+    database = FieldProperty(S.Deprecated)
+    database_uri = FieldProperty(S.Deprecated)
+    is_root = FieldProperty(bool)
     acl = FieldProperty(ACL(permissions=_perms_init))
-    neighborhood_invitations=FieldProperty([S.ObjectId])
+    neighborhood_invitations = FieldProperty([S.ObjectId])
     neighborhood = RelationProperty(Neighborhood)
     app_configs = RelationProperty('AppConfig')
     category_id = FieldProperty(S.ObjectId, if_missing=None)
     deleted = FieldProperty(bool, if_missing=False)
     labels = FieldProperty([str])
     last_updated = FieldProperty(datetime, if_missing=None)
-    tool_data = FieldProperty({str:{str:None}}) # entry point: prefs dict
+    tool_data = FieldProperty({str: {str: None}})  # entry point: prefs dict
     ordinal = FieldProperty(int, if_missing=0)
     database_configured = FieldProperty(bool, if_missing=True)
     _extra_tool_status = FieldProperty([str])
-    trove_root_database=FieldProperty([S.ObjectId])
-    trove_developmentstatus=FieldProperty([S.ObjectId])
-    trove_audience=FieldProperty([S.ObjectId])
-    trove_license=FieldProperty([S.ObjectId])
-    trove_os=FieldProperty([S.ObjectId])
-    trove_language=FieldProperty([S.ObjectId])
-    trove_topic=FieldProperty([S.ObjectId])
-    trove_natlanguage=FieldProperty([S.ObjectId])
-    trove_environment=FieldProperty([S.ObjectId])
+    trove_root_database = FieldProperty([S.ObjectId])
+    trove_developmentstatus = FieldProperty([S.ObjectId])
+    trove_audience = FieldProperty([S.ObjectId])
+    trove_license = FieldProperty([S.ObjectId])
+    trove_os = FieldProperty([S.ObjectId])
+    trove_language = FieldProperty([S.ObjectId])
+    trove_topic = FieldProperty([S.ObjectId])
+    trove_natlanguage = FieldProperty([S.ObjectId])
+    trove_environment = FieldProperty([S.ObjectId])
     tracking_id = FieldProperty(str, if_missing='')
-    is_nbhd_project=FieldProperty(bool, if_missing=False)
+    is_nbhd_project = FieldProperty(bool, if_missing=False)
 
     # transient properties
     notifications_disabled = False
@@ -249,13 +259,13 @@ class Project(MappedClass, ActivityNode, ActivityObject):
             result.append(SitemapEntry('Child Projects'))
             result += [
                 SitemapEntry(p.name or p.script_name, p.script_name)
-                for p in sps ]
+                for p in sps]
         return result
 
     def troves_by_type(self, trove_type):
         troves = getattr(self, 'trove_%s' % trove_type)
         if troves:
-            return TroveCategory.query.find({'_id':{'$in': troves}}).all()
+            return TroveCategory.query.find({'_id': {'$in': troves}}).all()
         else:
             return []
 
@@ -266,7 +276,7 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         troves = {}
         for attr in dir(self):
             if attr.startswith('trove_'):
-                trove_type = attr.replace('trove_','')
+                trove_type = attr.replace('trove_', '')
                 nice_name = dict(
                     natlanguage='translation',
                     root_database='database',
@@ -301,7 +311,7 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         if url.startswith('//'):
             try:
                 return request.scheme + ':' + url
-            except TypeError: # pragma no cover
+            except TypeError:  # pragma no cover
                 return 'http:' + url
         else:
             return url
@@ -312,8 +322,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
 
     def get_screenshots(self):
         return ProjectFile.query.find(dict(
-                project_id=self._id,
-                category='screenshot')).sort('sort').all()
+            project_id=self._id,
+            category='screenshot')).sort('sort').all()
 
     @LazyProperty
     def icon(self):
@@ -327,20 +337,23 @@ class Project(MappedClass, ActivityNode, ActivityObject):
 
     @property
     def parent_project(self):
-        if self.is_root: return None
+        if self.is_root:
+            return None
         return self.query.get(_id=self.parent_id)
 
     def _get_private(self):
         """Return True if this project is private, else False."""
         role_anon = ProjectRole.anonymous(project=self)
         return ACE.allow(role_anon._id, 'read') not in self.acl
+
     def _set_private(self, val):
         """Set whether this project is private or not."""
         new_val = bool(val)
         role_anon = ProjectRole.anonymous(project=self)
         ace = ACE.allow(role_anon._id, 'read')
         curr_val = ace not in self.acl
-        if new_val == curr_val: return
+        if new_val == curr_val:
+            return
         if new_val:
             self.acl.remove(ace)
         else:
@@ -358,12 +371,14 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         '''
         user = None
         if self.is_user_project:
-            user = plugin.AuthenticationProvider.get(request).user_by_project_shortname(self.shortname[2:])
+            user = plugin.AuthenticationProvider.get(
+                request).user_by_project_shortname(self.shortname[2:])
         return user
 
     @LazyProperty
     def root_project(self):
-        if self.is_root: return self
+        if self.is_root:
+            return self
         return self.parent_project.root_project
 
     @LazyProperty
@@ -373,9 +388,10 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         projects = set([self])
         while True:
             new_projects = set(
-                self.query.find(dict(parent_id={'$in':[p._id for p in projects]})))
+                self.query.find(dict(parent_id={'$in': [p._id for p in projects]})))
             new_projects.update(projects)
-            if new_projects == projects: break
+            if new_projects == projects:
+                break
             projects = new_projects
         return projects
 
@@ -395,17 +411,17 @@ class Project(MappedClass, ActivityNode, ActivityObject):
     def menus(cls, projects):
         '''Return a dict[project_id] = sitemap of sitemaps, efficiently'''
         from allura.app import SitemapEntry
-        pids = [ p._id for p in projects ]
+        pids = [p._id for p in projects]
         project_index = dict((p._id, p) for p in projects)
         entry_index = dict((pid, []) for pid in pids)
         q_subprojects = cls.query.find(dict(
-                parent_id={'$in': pids},
-                deleted=False))
+            parent_id={'$in': pids},
+            deleted=False))
         for sub in q_subprojects:
             entry_index[sub.parent_id].append(
                 dict(ordinal=sub.ordinal, entry=SitemapEntry(sub.name, sub.url())))
         q_app_configs = AppConfig.query.find(dict(
-                project_id={'$in': pids}))
+            project_id={'$in': pids}))
         for ac in q_app_configs:
             App = ac.load()
             project = project_index[ac.project_id]
@@ -413,13 +429,14 @@ class Project(MappedClass, ActivityNode, ActivityObject):
             if app.is_visible_to(c.user):
                 for sm in app.main_menu():
                     entry = sm.bind_app(app)
-                    entry.ui_icon='tool-%s' % ac.tool_name
+                    entry.ui_icon = 'tool-%s' % ac.tool_name
                     ordinal = ac.options.get('ordinal', 0)
-                    entry_index[ac.project_id].append({'ordinal':ordinal,'entry':entry})
+                    entry_index[ac.project_id].append(
+                        {'ordinal': ordinal, 'entry': entry})
 
         sitemaps = dict((pid, []) for pid in pids)
         for pid, entries in entry_index.iteritems():
-            entries.sort(key=lambda e:e['ordinal'])
+            entries.sort(key=lambda e: e['ordinal'])
             sitemap = sitemaps[pid]
             for e in entries:
                 sitemap.append(e['entry'])
@@ -433,7 +450,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         for icon in ProjectFile.query.find(dict(
                 project_id={'$in': result.keys()},
                 category='icon')):
-            result[icon.project_id] = project_index[icon.project_id].url() + 'icon'
+            result[icon.project_id] = project_index[
+                icon.project_id].url() + 'icon'
         return result
 
     @classmethod
@@ -467,7 +485,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
             ordinal = sub.ordinal + delta_ordinal
             if ordinal > max_ordinal:
                 max_ordinal = ordinal
-            entries.append({'ordinal':sub.ordinal + delta_ordinal,'entry':SitemapEntry(sub.name, sub.url())})
+            entries.append({'ordinal': sub.ordinal + delta_ordinal,
+                           'entry': SitemapEntry(sub.name, sub.url())})
         for ac in self.app_configs + [a.config for a in new_tools]:
             if excluded_tools and ac.tool_name in excluded_tools:
                 continue
@@ -476,7 +495,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
                 App = ac.load()
             # If so, we don't want it listed
             except KeyError as e:
-                log.exception('AppConfig %s references invalid tool %s', ac._id, ac.tool_name)
+                log.exception('AppConfig %s references invalid tool %s',
+                              ac._id, ac.tool_name)
                 continue
             app = App(self, ac)
             if app.is_visible_to(c.user):
@@ -485,19 +505,23 @@ class Project(MappedClass, ActivityNode, ActivityObject):
                     entry.tool_name = ac.tool_name
                     entry.ui_icon = 'tool-%s' % entry.tool_name.lower()
                     if not self.is_nbhd_project and (entry.tool_name.lower() in anchored_tools.keys()):
-                        ordinal = anchored_tools.keys().index(entry.tool_name.lower())
+                        ordinal = anchored_tools.keys().index(
+                            entry.tool_name.lower())
                     elif ac.tool_name == 'admin':
                         ordinal = 100
                     else:
-                        ordinal = int(ac.options.get('ordinal', 0)) + delta_ordinal
+                        ordinal = int(ac.options.get('ordinal', 0)) + \
+                            delta_ordinal
                     if self.is_nbhd_project and entry.label == 'Admin':
                         entry.matching_urls.append('%s_admin/' % self.url())
                     if ordinal > max_ordinal:
                         max_ordinal = ordinal
-                    entries.append({'ordinal':ordinal,'entry':entry})
+                    entries.append({'ordinal': ordinal, 'entry': entry})
 
         if self == self.neighborhood.neighborhood_project and h.has_access(self.neighborhood, 'admin'):
-            entries.append({'ordinal': max_ordinal + 1,'entry':SitemapEntry('Moderate', "%s_moderate/" % self.neighborhood.url(), ui_icon="tool-admin")})
+            entries.append(
+                {'ordinal': max_ordinal + 1, 'entry': SitemapEntry('Moderate',
+                                                                   "%s_moderate/" % self.neighborhood.url(), ui_icon="tool-admin")})
             max_ordinal += 1
 
         entries = sorted(entries, key=lambda e: e['ordinal'])
@@ -512,7 +536,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
             for tool, label in anchored_tools.iteritems():
                 if (tool not in installed_tools) and (self.app_instance(tool) is None):
                     try:
-                        new_tools.append(self.install_app(tool, tool, label, i))
+                        new_tools.append(
+                            self.install_app(tool, tool, label, i))
                     except Exception:
                         log.error('%s is not available' % tool, exc_info=True)
                 i += 1
@@ -528,7 +553,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         grouped_nav = OrderedDict()
         # count how many tools of each type we have
         counts = Counter([e.tool_name.lower() for e in sitemap if e.tool_name])
-        grouping_threshold = self.get_tool_data('allura', 'grouping_threshold', 1)
+        grouping_threshold = self.get_tool_data(
+            'allura', 'grouping_threshold', 1)
         for e in sitemap:
             # if it's not a tool, add to navbar and continue
             if not e.tool_name:
@@ -543,7 +569,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
                 if tool_name not in grouped_nav:
                     child = deepcopy(e)
                     # change label to be the tool name (type)
-                    e.label = g.entry_points['tool'][tool_name].tool_label + u' \u25be'
+                    e.label = g.entry_points['tool'][
+                        tool_name].tool_label + u' \u25be'
                     # add tool url to list of urls that will match this nav entry
                     # have to do this before changing the url to the list page
                     e.matching_urls.append(e.url)
@@ -552,7 +579,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
                     e.children.append(child)
                     grouped_nav[tool_name] = e
                 else:
-                    # add tool url to list of urls that will match this nav entry
+                    # add tool url to list of urls that will match this nav
+                    # entry
                     grouped_nav[tool_name].matching_urls.append(e.url)
                     if len(grouped_nav[tool_name].children) < 10:
                         grouped_nav[tool_name].children.append(e)
@@ -571,7 +599,7 @@ class Project(MappedClass, ActivityNode, ActivityObject):
 
     @property
     def subprojects(self):
-        q = self.query.find(dict(shortname={'$gt':self.shortname},
+        q = self.query.find(dict(shortname={'$gt': self.shortname},
                                  neighborhood_id=self.neighborhood._id)).sort('shortname')
         for project in q:
             if project.shortname.startswith(self.shortname + '/'):
@@ -590,7 +618,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
 
     @property
     def named_roles(self):
-        roles_ids = [r['_id'] for r in g.credentials.project_roles(self.root_project._id).named]
+        roles_ids = [r['_id']
+                     for r in g.credentials.project_roles(self.root_project._id).named]
         roles = sorted(
             ProjectRole.query.find({'_id': {'$in': roles_ids}}),
             key=lambda r: r.name.lower())
@@ -604,10 +633,12 @@ class Project(MappedClass, ActivityNode, ActivityObject):
             except fe.Invalid as e:
                 raise exceptions.ToolError(str(e))
         if ordinal is None:
-            ordinal = int(self.ordered_mounts(include_hidden=True)[-1]['ordinal']) + 1
+            ordinal = int(self.ordered_mounts(include_hidden=True)
+                          [-1]['ordinal']) + 1
         options = App.default_options()
         options['mount_point'] = mount_point
-        options['mount_label'] = mount_label or App.default_mount_label or mount_point
+        options[
+            'mount_label'] = mount_label or App.default_mount_label or mount_point
         options['ordinal'] = int(ordinal)
         options.update(override_options)
         cfg = AppConfig(
@@ -622,7 +653,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
 
     def uninstall_app(self, mount_point):
         app = self.app_instance(mount_point)
-        if app is None: return
+        if app is None:
+            return
         if self.support_page == app.config.options.mount_point:
             self.support_page = ''
         with h.push_config(c, project=self, app=app):
@@ -636,15 +668,15 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         if app_config is None:
             return None
         App = app_config.load()
-        if App is None: # pragma no cover
+        if App is None:  # pragma no cover
             return None
         else:
             return App(self, app_config)
 
     def app_config(self, mount_point):
         return AppConfig.query.find({
-                'project_id':self._id,
-                'options.mount_point':mount_point}).first()
+            'project_id': self._id,
+            'options.mount_point': mount_point}).first()
 
     def app_config_by_tool_type(self, tool_type):
         for ac in self.app_configs:
@@ -654,7 +686,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
     def new_subproject(self, name, install_apps=True, user=None, project_name=None):
         provider = plugin.ProjectRegistrationProvider.get()
         try:
-            provider.shortname_validator.to_python(name, check_allowed=False, neighborhood=self.neighborhood)
+            provider.shortname_validator.to_python(
+                name, check_allowed=False, neighborhood=self.neighborhood)
         except exceptions.Invalid as e:
             raise exceptions.ToolError, 'Mount point "%s" is invalid' % name
         return provider.register_subproject(self, name, user or c.user, install_apps, project_name=project_name)
@@ -668,7 +701,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         self.install_anchored_tools()
 
         for sub in self.direct_subprojects:
-            result.append({'ordinal': int(sub.ordinal + i), 'sub': sub, 'rank': 1})
+            result.append(
+                {'ordinal': int(sub.ordinal + i), 'sub': sub, 'rank': 1})
         for ac in self.app_configs:
             App = g.entry_points['tool'].get(ac.tool_name)
             if include_hidden or App and not App.hidden:
@@ -676,8 +710,10 @@ class Project(MappedClass, ActivityNode, ActivityObject):
                     ordinal = anchored_tools.keys().index(ac.tool_name.lower())
                 else:
                     ordinal = int(ac.options.get('ordinal', 0)) + i
-                rank = 0 if ac.options.get('mount_point', None) == 'home' else 1
-                result.append({'ordinal': int(ordinal), 'ac': ac, 'rank': rank})
+                rank = 0 if ac.options.get(
+                    'mount_point', None) == 'home' else 1
+                result.append(
+                    {'ordinal': int(ordinal), 'ac': ac, 'rank': rank})
         return sorted(result, key=lambda e: (e['ordinal'], e['rank']))
 
     def first_mount_visible(self, user):
@@ -698,7 +734,7 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         project.'''
         ordered_mounts = self.ordered_mounts(include_hidden=include_hidden)
         return int(ordered_mounts[-1]['ordinal']) + 1 \
-               if ordered_mounts else 0
+            if ordered_mounts else 0
 
     def delete(self):
         # Cascade to subprojects
@@ -710,18 +746,19 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         MappedClass.delete(self)
 
     def breadcrumbs(self):
-        entry = ( self.name, self.url() )
+        entry = (self.name, self.url())
         if self.parent_project:
-            return self.parent_project.breadcrumbs() + [ entry ]
+            return self.parent_project.breadcrumbs() + [entry]
         else:
-            return [ (self.neighborhood.name, self.neighborhood.url())] + [ entry ]
+            return [(self.neighborhood.name, self.neighborhood.url())] + [entry]
 
     def users(self):
         '''Find all the users who have named roles for this project'''
         named_roles = security.RoleCache(
             g.credentials,
             g.credentials.project_roles(project_id=self.root_project._id).named)
-        uids = [uid for uid in named_roles.userids_that_reach if uid is not None]
+        uids = [
+            uid for uid in named_roles.userids_that_reach if uid is not None]
         return list(User.query.find({'_id': {'$in': uids}, 'disabled': False}))
 
     def users_with_role(self, *role_names):
@@ -746,26 +783,29 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         u = User.by_username(username)
         if not u:
             return None
-        named_roles = g.credentials.project_roles(project_id=self.root_project._id).named
+        named_roles = g.credentials.project_roles(
+            project_id=self.root_project._id).named
         for r in named_roles.roles_that_reach:
-            if r.get('user_id') == u._id: return u
+            if r.get('user_id') == u._id:
+                return u
         return None
 
     def configure_project(
-        self,
-        users=None, apps=None,
-        is_user_project=False,
-        is_private_project=False):
+            self,
+            users=None, apps=None,
+            is_user_project=False,
+            is_private_project=False):
         from allura import model as M
 
         self.notifications_disabled = True
-        if users is None: users = [ c.user ]
+        if users is None:
+            users = [c.user]
         if apps is None:
             apps = []
             if is_user_project:
                 apps += [('Wiki', 'wiki', 'Wiki'),
-                        ('profile', 'profile', 'Profile'),
-                       ]
+                         ('profile', 'profile', 'Profile'),
+                         ]
             apps += [
                 ('admin', 'admin', 'Admin'),
                 ('search', 'search', 'Search'),
@@ -774,25 +814,30 @@ class Project(MappedClass, ActivityNode, ActivityObject):
                 apps.append(('activity', 'activity', 'Activity'))
         with h.push_config(c, project=self, user=users[0]):
             # Install default named roles (#78)
-            root_project_id=self.root_project._id
-            role_admin = M.ProjectRole.upsert(name='Admin', project_id=root_project_id)
-            role_developer = M.ProjectRole.upsert(name='Developer', project_id=root_project_id)
-            role_member = M.ProjectRole.upsert(name='Member', project_id=root_project_id)
-            role_auth = M.ProjectRole.upsert(name='*authenticated', project_id=root_project_id)
-            role_anon = M.ProjectRole.upsert(name='*anonymous', project_id=root_project_id)
+            root_project_id = self.root_project._id
+            role_admin = M.ProjectRole.upsert(
+                name='Admin', project_id=root_project_id)
+            role_developer = M.ProjectRole.upsert(
+                name='Developer', project_id=root_project_id)
+            role_member = M.ProjectRole.upsert(
+                name='Member', project_id=root_project_id)
+            role_auth = M.ProjectRole.upsert(
+                name='*authenticated', project_id=root_project_id)
+            role_anon = M.ProjectRole.upsert(
+                name='*anonymous', project_id=root_project_id)
             # Setup subroles
-            role_admin.roles = [ role_developer._id ]
-            role_developer.roles = [ role_member._id ]
+            role_admin.roles = [role_developer._id]
+            role_developer.roles = [role_member._id]
             self.acl = [
                 ACE.allow(role_developer._id, 'read'),
-                ACE.allow(role_member._id, 'read') ]
+                ACE.allow(role_member._id, 'read')]
             self.acl += [
                 M.ACE.allow(role_admin._id, perm)
-                for perm in self.permissions ]
+                for perm in self.permissions]
             self.private = is_private_project
             for user in users:
                 pr = ProjectRole.by_user(user, project=self, upsert=True)
-                pr.roles = [ role_admin._id ]
+                pr.roles = [role_admin._id]
             session(self).flush(self)
             # Setup apps
             for i, (ep_name, mount_point, label) in enumerate(apps):
@@ -818,7 +863,8 @@ class Project(MappedClass, ActivityNode, ActivityObject):
 
     def social_account(self, socialnetwork):
         try:
-            account = (sn for sn in self.socialnetworks if sn.socialnetwork == socialnetwork).next()
+            account = (
+                sn for sn in self.socialnetworks if sn.socialnetwork == socialnetwork).next()
         except StopIteration:
             return None
         else:
@@ -832,7 +878,7 @@ class Project(MappedClass, ActivityNode, ActivityObject):
             self.socialnetworks.append(dict(
                 socialnetwork=socialnetwork,
                 accounturl=accounturl
-                ))
+            ))
 
     def bulk_export_path(self):
         shortname = self.shortname
@@ -843,9 +889,9 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         elif not self.is_root:
             shortname = self.shortname.split('/')[0]
         return config['bulk_export_path'].format(
-                nbhd=self.neighborhood.url_prefix.strip('/'),
-                project=shortname,
-                c=c,
+            nbhd=self.neighborhood.url_prefix.strip('/'),
+            project=shortname,
+            c=c,
         )
 
     def bulk_export_filename(self):
@@ -878,7 +924,6 @@ class Project(MappedClass, ActivityNode, ActivityObject):
         else:
             return 'busy'
 
-
     def __json__(self):
         return dict(
             shortname=self.shortname,
@@ -894,18 +939,23 @@ class Project(MappedClass, ActivityNode, ActivityObject):
             moved_to_url=self.moved_to_url,
             preferred_support_tool=self.support_page,
             preferred_support_url=self.support_page_url,
-            developers=[u.__json__() for u in self.users_with_role('Developer')],
+            developers=[u.__json__()
+                        for u in self.users_with_role('Developer')],
             tools=[dict(name=t.tool_name, mount_point=t.options.mount_point, label=t.options.mount_label)
                    for t in self.app_configs if h.has_access(t, 'read')],
             labels=list(self.labels),
             categories={
-                n: [t.__json__() for t in ts] for n, ts in self.all_troves().items()},
+                n: [t.__json__(
+                ) for t in ts] for n, ts in self.all_troves().items()},
             icon_url=h.absurl(self.url() + 'icon') if self.icon else None,
-            screenshots = [
+            screenshots=[
                 dict(
-                    url = h.absurl(self.url() + 'screenshot/' + urllib.quote(ss.filename)),
-                    thumbnail_url = h.absurl(self.url() + 'screenshot/' + urllib.quote(ss.filename) + '/thumb'),
-                    caption = ss.caption,
+                    url=h.absurl(self.url() + 'screenshot/' +
+                                 urllib.quote(ss.filename)),
+                    thumbnail_url=h.absurl(
+                        self.url(
+                        ) + 'screenshot/' + urllib.quote(ss.filename) + '/thumb'),
+                    caption=ss.caption,
                 )
                 for ss in self.get_screenshots()
             ]
@@ -913,6 +963,7 @@ class Project(MappedClass, ActivityNode, ActivityObject):
 
 
 class AppConfig(MappedClass):
+
     """
     Configuration information for an instantiated :class:`Application <allura.app.Application>`
     in a project
@@ -923,22 +974,22 @@ class AppConfig(MappedClass):
 
     class __mongometa__:
         session = project_orm_session
-        name='config'
+        name = 'config'
         indexes = [
             'project_id',
             'options.import_id',
             ('options.mount_point', 'project_id')]
 
     # AppConfig schema
-    _id=FieldProperty(S.ObjectId)
-    project_id=ForeignIdProperty(Project)
-    discussion_id=ForeignIdProperty('Discussion')
-    tool_name=FieldProperty(str)
-    version=FieldProperty(str)
-    options=FieldProperty(None)
+    _id = FieldProperty(S.ObjectId)
+    project_id = ForeignIdProperty(Project)
+    discussion_id = ForeignIdProperty('Discussion')
+    tool_name = FieldProperty(str)
+    version = FieldProperty(str)
+    options = FieldProperty(None)
     project = RelationProperty(Project, via='project_id')
     discussion = RelationProperty('Discussion', via='discussion_id')
-    tool_data = FieldProperty({str:{str:None}}) # entry point: prefs dict
+    tool_data = FieldProperty({str: {str: None}})  # entry point: prefs dict
 
     acl = FieldProperty(ACL())
 
@@ -975,10 +1026,11 @@ class AppConfig(MappedClass):
 
     def breadcrumbs(self):
         return self.project.breadcrumbs() + [
-            (self.options.mount_point, self.url()) ]
+            (self.options.mount_point, self.url())]
 
     def __json__(self):
         return dict(
             _id=self._id,
-            options=self.options._deinstrument(),  # strip away the ming instrumentation
+            # strip away the ming instrumentation
+            options=self.options._deinstrument(),
         )

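grouped_navbar_entries() above counts tools by type and, once a type is more common than the project's grouping_threshold (default 1, per get_tool_data above), collapses those entries into a single dropdown. A simplified standalone sketch of that grouping step; the entry structure and the exact threshold comparison are assumptions:

    from collections import Counter, OrderedDict

    def group_nav(entries, grouping_threshold=1):
        # Group navbar entries by tool type once a type is "too common".
        counts = Counter(e['tool_name'] for e in entries if e.get('tool_name'))
        grouped = OrderedDict()
        navbar = []
        for e in entries:
            name = e.get('tool_name')
            if not name or counts[name] <= grouping_threshold:
                navbar.append(e)               # non-tools and rare tools stay as-is
            elif name not in grouped:
                parent = dict(label=name, children=[e])
                grouped[name] = parent         # first instance becomes the dropdown
                navbar.append(parent)
            else:
                grouped[name]['children'].append(e)
        return navbar

    # With the default threshold of 1, two 'wiki' tools collapse into one
    # dropdown while a lone 'tickets' tool remains a plain link.
    nav = group_nav([
        dict(tool_name='wiki', label='Docs'),
        dict(tool_name='wiki', label='Notes'),
        dict(tool_name='tickets', label='Bugs'),
    ])
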
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/repo.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/repo.py b/Allura/allura/model/repo.py
index 9228076..9396151 100644
--- a/Allura/allura/model/repo.py
+++ b/Allura/allura/model/repo.py
@@ -49,14 +49,15 @@ log = logging.getLogger(__name__)
 
 # Some schema types
 SUser = dict(name=str, email=str, date=datetime)
-SObjType=S.OneOf('blob', 'tree', 'submodule')
+SObjType = S.OneOf('blob', 'tree', 'submodule')
 
 # Used for when we're going to batch queries using $in
 QSIZE = 100
 README_RE = re.compile('^README(\.[^.]*)?$', re.IGNORECASE)
-VIEWABLE_EXTENSIONS = ['.php','.py','.js','.java','.html','.htm','.yaml','.sh',
-    '.rb','.phtml','.txt','.bat','.ps1','.xhtml','.css','.cfm','.jsp','.jspx',
-    '.pl','.php4','.php3','.rhtml','.svg','.markdown','.json','.ini','.tcl','.vbs','.xsl']
+VIEWABLE_EXTENSIONS = [
+    '.php', '.py', '.js', '.java', '.html', '.htm', '.yaml', '.sh',
+    '.rb', '.phtml', '.txt', '.bat', '.ps1', '.xhtml', '.css', '.cfm', '.jsp', '.jspx',
+    '.pl', '.php4', '.php3', '.rhtml', '.svg', '.markdown', '.json', '.ini', '.tcl', '.vbs', '.xsl']
 PYPELINE_EXTENSIONS = utils.MARKDOWN_EXTENSIONS + ['.rst']
 
 DIFF_SIMILARITY_THRESHOLD = .5  # used for determining file renames
@@ -73,7 +74,7 @@ CommitDoc = collection(
     Field('message', str),
     Field('parent_ids', [str], index=True),
     Field('child_ids', [str], index=True),
-    Field('repo_ids', [ S.ObjectId() ], index=True))
+    Field('repo_ids', [S.ObjectId()], index=True))
 
 # Basic tree information (also see TreesDoc)
 TreeDoc = collection(
@@ -109,7 +110,7 @@ DiffInfoDoc = collection(
     Field('_id', str),
     Field(
         'differences',
-        [ dict(name=str, lhs_id=str, rhs_id=str)]))
+        [dict(name=str, lhs_id=str, rhs_id=str)]))
 
 # List of commit runs (a run is a linear series of single-parent commits)
 # CommitRunDoc.commit_ids = [ CommitDoc._id, ... ]
@@ -120,9 +121,10 @@ CommitRunDoc = collection(
     Field('commit_ids', [str], index=True),
     Field('commit_times', [datetime]))
 
+
 class RepoObject(object):
 
-    def __repr__(self): # pragma no cover
+    def __repr__(self):  # pragma no cover
         return '<%s %s>' % (
             self.__class__.__name__, self._id)
 
@@ -143,20 +145,22 @@ class RepoObject(object):
     def upsert(cls, id, **kwargs):
         isnew = False
         r = cls.query.get(_id=id)
-        if r is not None: return r, isnew
+        if r is not None:
+            return r, isnew
         try:
             r = cls(_id=id, **kwargs)
             session(r).flush(r)
             isnew = True
-        except pymongo.errors.DuplicateKeyError: # pragma no cover
+        except pymongo.errors.DuplicateKeyError:  # pragma no cover
             session(r).expunge(r)
             r = cls.query.get(_id=id)
         return r, isnew
 
+
 class Commit(RepoObject, ActivityObject):
     type_s = 'Commit'
     # Ephemeral attrs
-    repo=None
+    repo = None
 
     def __init__(self, **kw):
         for k, v in kw.iteritems():
@@ -193,12 +197,14 @@ class Commit(RepoObject, ActivityObject):
     @LazyProperty
     def author_url(self):
         u = User.by_email_address(self.authored.email)
-        if u: return u.url()
+        if u:
+            return u.url()
 
     @LazyProperty
     def committer_url(self):
         u = User.by_email_address(self.committed.email)
-        if u: return u.url()
+        if u:
+            return u.url()
 
     @LazyProperty
     def tree(self):
@@ -226,8 +232,10 @@ class Commit(RepoObject, ActivityObject):
         return h.text.truncate(first_line, 50)
 
     def shorthand_id(self):
-        if self.repo is None: self.repo = self.guess_repo()
-        if self.repo is None: return repr(self)
+        if self.repo is None:
+            self.repo = self.guess_repo()
+        if self.repo is None:
+            return repr(self)
         return self.repo.shorthand_for_commit(self._id)
 
     @LazyProperty
@@ -262,13 +270,16 @@ class Commit(RepoObject, ActivityObject):
             ancestor = ancestor.get_parent()
 
     def url(self):
-        if self.repo is None: self.repo = self.guess_repo()
-        if self.repo is None: return '#'
+        if self.repo is None:
+            self.repo = self.guess_repo()
+        if self.repo is None:
+            return '#'
         return self.repo.url_for_commit(self)
 
     def guess_repo(self):
         import traceback
-        log.error('guess_repo: should not be called: %s' % ''.join(traceback.format_stack()))
+        log.error('guess_repo: should not be called: %s' %
+                  ''.join(traceback.format_stack()))
         for ac in c.project.app_configs:
             try:
                 app = c.project.app_instance(ac)
@@ -290,11 +301,13 @@ class Commit(RepoObject, ActivityObject):
     def context(self):
         result = dict(prev=None, next=None)
         if self.parent_ids:
-            result['prev'] = self.query.find(dict(_id={'$in': self.parent_ids })).all()
+            result['prev'] = self.query.find(
+                dict(_id={'$in': self.parent_ids})).all()
             for ci in result['prev']:
                 ci.set_context(self.repo)
         if self.child_ids:
-            result['next'] = self.query.find(dict(_id={'$in': self.child_ids })).all()
+            result['next'] = self.query.find(
+                dict(_id={'$in': self.child_ids})).all()
             for ci in result['next']:
                 ci.set_context(self.repo)
         return result
@@ -463,23 +476,24 @@ class Commit(RepoObject, ActivityObject):
             author_url=self.author_url,
             shortlink=self.shorthand_id(),
             summary=self.summary
-            )
+        )
+
 
 class Tree(RepoObject):
     # Ephemeral attrs
-    repo=None
-    commit=None
-    parent=None
-    name=None
+    repo = None
+    commit = None
+    parent = None
+    name = None
 
     def compute_hash(self):
         '''Compute a hash based on the contents of the tree.  Note that this
         hash does not necessarily correspond to any actual DVCS hash.
         '''
         lines = (
-            [ 'tree' + x.name + x.id for x in self.tree_ids ]
-            + [ 'blob' + x.name + x.id for x in self.blob_ids ]
-            + [ x.type + x.name + x.id for x in self.other_ids ])
+            ['tree' + x.name + x.id for x in self.tree_ids]
+            + ['blob' + x.name + x.id for x in self.blob_ids]
+            + [x.type + x.name + x.id for x in self.other_ids])
         sha_obj = sha1()
         for line in sorted(lines):
             sha_obj.update(line)
@@ -495,9 +509,11 @@ class Tree(RepoObject):
             raise KeyError, name
         obj = cache.get(Tree, dict(_id=obj['id']))
         if obj is None:
-            oid = self.repo.compute_tree_new(self.commit, self.path() + name + '/')
+            oid = self.repo.compute_tree_new(
+                self.commit, self.path() + name + '/')
             obj = cache.get(Tree, dict(_id=oid))
-        if obj is None: raise KeyError, name
+        if obj is None:
+            raise KeyError, name
         obj.set_context(self, name)
         return obj
 
@@ -562,7 +578,8 @@ class Tree(RepoObject):
         commit_infos = {c._id: c.info for c in commits}
         by_name = lambda n: n.name
         tree_names = sorted([n.name for n in self.tree_ids])
-        blob_names = sorted([n.name for n in chain(self.blob_ids, self.other_ids)])
+        blob_names = sorted(
+            [n.name for n in chain(self.blob_ids, self.other_ids)])
 
         results = []
         for type, names in (('DIR', tree_names), ('BLOB', blob_names)):
@@ -571,21 +588,22 @@ class Tree(RepoObject):
                 if not commit_info:
                     commit_info = defaultdict(str)
                 elif 'id' in commit_info:
-                    commit_info['href'] = self.repo.url_for_commit(commit_info['id'])
+                    commit_info['href'] = self.repo.url_for_commit(
+                        commit_info['id'])
                 results.append(dict(
-                        kind=type,
-                        name=name,
-                        href=name,
-                        last_commit=dict(
-                                author=commit_info['author'],
-                                author_email=commit_info['author_email'],
-                                author_url=commit_info['author_url'],
-                                date=commit_info.get('date'),
-                                href=commit_info.get('href',''),
-                                shortlink=commit_info['shortlink'],
-                                summary=commit_info['summary'],
-                            ),
-                    ))
+                    kind=type,
+                    name=name,
+                    href=name,
+                    last_commit=dict(
+                        author=commit_info['author'],
+                        author_email=commit_info['author_email'],
+                        author_url=commit_info['author_url'],
+                        date=commit_info.get('date'),
+                        href=commit_info.get('href', ''),
+                        shortlink=commit_info['shortlink'],
+                        summary=commit_info['summary'],
+                    ),
+                ))
         return results
 
     def path(self):
@@ -616,7 +634,9 @@ class Tree(RepoObject):
         x = self.by_name[name]
         return Blob(self, name, x.id)
 
+
 class Blob(object):
+
     '''Lightweight object representing a file in the repo'''
 
     def __init__(self, tree, name, _id):
@@ -678,10 +698,10 @@ class Blob(object):
     @property
     def has_html_view(self):
         if (self.content_type.startswith('text/') or
-            self.extension in VIEWABLE_EXTENSIONS or
-            self.extension in PYPELINE_EXTENSIONS or
-            self.extension in self.repo._additional_viewable_extensions or
-            utils.is_text_file(self.text)):
+                self.extension in VIEWABLE_EXTENSIONS or
+                self.extension in PYPELINE_EXTENSIONS or
+                self.extension in self.repo._additional_viewable_extensions or
+                utils.is_text_file(self.text)):
             return True
         return False
 
@@ -726,7 +746,9 @@ class Blob(object):
         differ = SequenceMatcher(v0, v1)
         return differ.get_opcodes()
 
+
 class LastCommit(RepoObject):
+
     def __repr__(self):
         return '<LastCommit /%s %s>' % (self.path, self.commit_id)
 
@@ -736,7 +758,8 @@ class LastCommit(RepoObject):
             rev = commit.repo.log(commit._id, path, id_only=True).next()
             return commit.repo.rev_to_commit_id(rev)
         except StopIteration:
-            log.error('Tree node not recognized by SCM: %s @ %s', path, commit._id)
+            log.error('Tree node not recognized by SCM: %s @ %s',
+                      path, commit._id)
             return commit._id
 
     @classmethod
@@ -779,10 +802,13 @@ class LastCommit(RepoObject):
         prev_lcd = None
         prev_lcd_cid = cls._prev_commit_id(tree.commit, path)
         if prev_lcd_cid:
-            prev_lcd = model_cache.get(cls, {'path': path, 'commit_id': prev_lcd_cid})
+            prev_lcd = model_cache.get(
+                cls, {'path': path, 'commit_id': prev_lcd_cid})
         entries = {}
-        nodes = set([node.name for node in chain(tree.tree_ids, tree.blob_ids, tree.other_ids)])
-        changed = set([node for node in nodes if os.path.join(path, node) in tree.commit.changed_paths])
+        nodes = set(
+            [node.name for node in chain(tree.tree_ids, tree.blob_ids, tree.other_ids)])
+        changed = set(
+            [node for node in nodes if os.path.join(path, node) in tree.commit.changed_paths])
         unchanged = [os.path.join(path, node) for node in nodes - changed]
         if prev_lcd:
             # get unchanged entries from previously computed LCD
@@ -796,16 +822,19 @@ class LastCommit(RepoObject):
                 # and possibly try again later
                 entries = {}
             # paths are fully-qualified; shorten them back to just node names
-            entries = {os.path.basename(path):commit_id for path,commit_id in entries.iteritems()}
+            entries = {
+                os.path.basename(path): commit_id for path, commit_id in entries.iteritems()}
         # update with the nodes changed in this tree's commit
         entries.update({node: tree.commit._id for node in changed})
-        # convert to a list of dicts, since mongo doesn't handle arbitrary keys well (i.e., . and $ not allowed)
-        entries = [{'name':name, 'commit_id':value} for name,value in entries.iteritems()]
+        # convert to a list of dicts, since mongo doesn't handle arbitrary keys
+        # well (i.e., . and $ not allowed)
+        entries = [{'name': name, 'commit_id': value}
+                   for name, value in entries.iteritems()]
         lcd = cls(
-                commit_id=tree.commit._id,
-                path=path,
-                entries=entries,
-            )
+            commit_id=tree.commit._id,
+            path=path,
+            entries=entries,
+        )
         model_cache.set(cls, {'path': path, 'commit_id': tree.commit._id}, lcd)
         return lcd
 
@@ -819,9 +848,11 @@ mapper(LastCommit, LastCommitDoc, repository_orm_session)
 
 
 class ModelCache(object):
+
     '''
     Cache model instances based on query params passed to get.
     '''
+
     def __init__(self, max_instances=None, max_queries=None):
         '''
         By default, each model type can have 2000 instances and
@@ -840,14 +871,15 @@ class ModelCache(object):
             max_instances_default = max_instances
         if isinstance(max_queries, int):
             max_queries_default = max_queries
-        self._max_instances = defaultdict(lambda:max_instances_default)
-        self._max_queries = defaultdict(lambda:max_queries_default)
+        self._max_instances = defaultdict(lambda: max_instances_default)
+        self._max_queries = defaultdict(lambda: max_queries_default)
         if hasattr(max_instances, 'items'):
             self._max_instances.update(max_instances)
         if hasattr(max_queries, 'items'):
             self._max_queries.update(max_queries)
 
-        self._query_cache = defaultdict(OrderedDict)  # keyed by query, holds _id
+        # keyed by query, holds _id
+        self._query_cache = defaultdict(OrderedDict)
         self._instance_cache = defaultdict(OrderedDict)  # keyed by _id
         self._synthetic_ids = defaultdict(set)
         self._synthetic_id_queries = defaultdict(set)
@@ -864,7 +896,8 @@ class ModelCache(object):
         elif hasattr(cls, 'm'):
             return cls.m
         else:
-            raise AttributeError('%s has neither "query" nor "m" attribute' % cls)
+            raise AttributeError(
+                '%s has neither "query" nor "m" attribute' % cls)
 
     def get(self, cls, query):
         _query = self._normalize_query(query)
@@ -886,9 +919,9 @@ class ModelCache(object):
         _query = self._normalize_query(query)
         if val is not None:
             _id = getattr(val, '_model_cache_id',
-                    getattr(val, '_id',
-                        self._query_cache[cls].get(_query,
-                            None)))
+                          getattr(val, '_id',
+                                  self._query_cache[cls].get(_query,
+                                                             None)))
             if _id is None:
                 _id = val._model_cache_id = bson.ObjectId()
                 self._synthetic_ids[cls].add(_id)
@@ -924,7 +957,8 @@ class ModelCache(object):
                 instance = self._instance_cache[cls][_id]
                 self._try_flush(instance, expunge=False)
         if self.num_instances(cls) > self._max_instances[cls]:
-            instance = self._remove_least_recently_used(self._instance_cache[cls])
+            instance = self._remove_least_recently_used(
+                self._instance_cache[cls])
             self._try_flush(instance, expunge=True)
 
     def _try_flush(self, instance, expunge=False):
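
The ModelCache hunks above are pure reformatting; the behaviour they preserve is a per-class, size-bounded cache whose least recently used entry is evicted (and flushed) once an instance or query limit is exceeded. A minimal standard-library sketch of that eviction pattern (not the actual ModelCache API; no flushing or synthetic ids):

    from collections import OrderedDict, defaultdict

    class BoundedLRUCache(object):
        """Keep at most max_items entries per class, dropping the least
        recently used entry when the limit is exceeded."""

        def __init__(self, max_items=2000):
            self._max_items = max_items
            self._cache = defaultdict(OrderedDict)  # {cls: OrderedDict(key -> value)}

        def get(self, cls, key):
            bucket = self._cache[cls]
            if key in bucket:
                # re-insert so the entry counts as most recently used
                value = bucket.pop(key)
                bucket[key] = value
                return value
            return None

        def set(self, cls, key, value):
            bucket = self._cache[cls]
            bucket.pop(key, None)
            bucket[key] = value
            if len(bucket) > self._max_items:
                # evict the oldest (least recently used) entry
                bucket.popitem(last=False)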


[20/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/test_admin.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/test_admin.py b/Allura/allura/tests/functional/test_admin.py
index 5919bb9..a7fb5bf 100644
--- a/Allura/allura/tests/functional/test_admin.py
+++ b/Allura/allura/tests/functional/test_admin.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 import re
-import os, allura
+import os
+import allura
 import shutil
 import pkg_resources
 import StringIO
@@ -51,6 +52,7 @@ from forgetracker.tracker_main import ForgeTrackerApp
 from forgewiki.model import Page
 from forgewiki.wiki_main import ForgeWikiApp
 
+
 @contextmanager
 def audits(*messages):
     M.AuditLog.query.remove()
@@ -59,11 +61,13 @@ def audits(*messages):
     if not messages:
         for e in entries:
             print e.message
-        import pdb; pdb.set_trace()
+        import pdb
+        pdb.set_trace()
     for message in messages:
         assert M.AuditLog.query.find(dict(
             message=re.compile(message))).count(), 'Could not find "%s"' % message
 
+
 class TestProjectAdmin(TestController):
 
     def get_available_tools(self):
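
The audits() helper reformatted above clears the audit log, runs the with-block, then asserts that an AuditLog entry matches each given regex. The same assert-after-yield pattern, reduced to a standalone sketch against a plain list (hypothetical names, not Allura's models):

    import re
    from contextlib import contextmanager

    LOG = []  # stand-in for the audit-log collection queried above

    @contextmanager
    def expect_messages(*patterns):
        """Clear the log, run the body, then require each regex to match."""
        del LOG[:]
        yield
        for pattern in patterns:
            assert any(re.search(pattern, message) for message in LOG), \
                'Could not find "%s"' % pattern

    # usage:
    # with expect_messages('install tool test-tool'):
    #     LOG.append('install tool test-tool')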
@@ -74,15 +78,16 @@ class TestProjectAdmin(TestController):
     def test_admin_controller(self):
         self.app.get('/admin/')
         with audits(
-            'change summary to Milkshakes are for crazy monkeys',
-            'change project name to My Test Project',
-            u'change short description to (\u00bf A Test Project \?){45}'):
+                'change summary to Milkshakes are for crazy monkeys',
+                'change project name to My Test Project',
+                u'change short description to (\u00bf A Test Project \?){45}'):
             self.app.post('/admin/update', params=dict(
-                    name='My Test Project',
-                    shortname='test',
-                    summary='Milkshakes are for crazy monkeys',
-                    short_description=u'\u00bf A Test Project ?'.encode('utf-8') * 45,
-                    labels='aaa,bbb'))
+                name='My Test Project',
+                shortname='test',
+                summary='Milkshakes are for crazy monkeys',
+                short_description=u'\u00bf A Test Project ?'.encode(
+                        'utf-8') * 45,
+                labels='aaa,bbb'))
         r = self.app.get('/admin/overview')
         assert 'A Test Project ?\xc2\xbf A' in r
         assert 'Test Subproject' not in r
@@ -93,84 +98,84 @@ class TestProjectAdmin(TestController):
         # Add a subproject
         with audits('create subproject test-subproject'):
             self.app.post('/admin/update_mounts', params={
-                    'new.install':'install',
-                    'new.ep_name':'',
-                    'new.ordinal':'1',
-                    'new.mount_point':'test-subproject',
-                    'new.mount_label':'Test Subproject'})
+                'new.install': 'install',
+                'new.ep_name': '',
+                'new.ordinal': '1',
+                'new.mount_point': 'test-subproject',
+                'new.mount_label': 'Test Subproject'})
         r = self.app.get('/admin/overview')
         assert 'Test Subproject' in r
         # Rename a subproject
         with audits('update subproject test/test-subproject'):
             self.app.post('/admin/update_mounts', params={
-                    'subproject-0.shortname':'test/test-subproject',
-                    'subproject-0.name':'Tst Sbprj',
-                    'subproject-0.ordinal':'100',
-                    })
+                'subproject-0.shortname': 'test/test-subproject',
+                'subproject-0.name': 'Tst Sbprj',
+                'subproject-0.ordinal': '100',
+            })
         r = self.app.get('/admin/overview')
         assert 'Tst Sbprj' in r
         # Remove a subproject
         with audits('delete subproject test/test-subproject'):
             self.app.post('/admin/update_mounts', params={
-                    'subproject-0.delete':'on',
-                    'subproject-0.shortname':'test/test-subproject',
-                    'new.ep_name':'',
-                    })
+                'subproject-0.delete': 'on',
+                'subproject-0.shortname': 'test/test-subproject',
+                'new.ep_name': '',
+            })
 
         # Add a tool
         with audits('install tool test-tool'):
             r = self.app.post('/admin/update_mounts', params={
-                    'new.install':'install',
-                    'new.ep_name':'Wiki',
-                    'new.ordinal':'1',
-                    'new.mount_point':'test-tool',
-                    'new.mount_label':'Test Tool'})
+                'new.install': 'install',
+                'new.ep_name': 'Wiki',
+                'new.ordinal': '1',
+                'new.mount_point': 'test-tool',
+                'new.mount_label': 'Test Tool'})
         assert 'error' not in self.webflash(r)
         # check tool in the nav
         r = self.app.get('/p/test/test-tool/').follow()
-        active_link = r.html.findAll('li',{'class':'selected'})
+        active_link = r.html.findAll('li', {'class': 'selected'})
         assert_equals(len(active_link), 1)
         assert active_link[0].contents[1]['href'] == '/p/test/test-tool/'
         with audits('install tool test-tool2'):
             r = self.app.post('/admin/update_mounts', params={
-                    'new.install':'install',
-                    'new.ep_name':'Wiki',
-                    'new.ordinal':'1',
-                    'new.mount_point':'test-tool2',
-                    'new.mount_label':'Test Tool2'})
+                'new.install': 'install',
+                'new.ep_name': 'Wiki',
+                'new.ordinal': '1',
+                'new.mount_point': 'test-tool2',
+                'new.mount_label': 'Test Tool2'})
         assert 'error' not in self.webflash(r)
         # check the nav - tools of same type are grouped
         r = self.app.get('/p/test/test-tool/Home/')
-        active_link = r.html.findAll('li',{'class':'selected'})
+        active_link = r.html.findAll('li', {'class': 'selected'})
         assert len(active_link) == 2
         assert active_link[0].contents[1]['href'] == '/p/test/_list/wiki'
-        assert r.html.findAll('a', {'href':'/p/test/test-tool2/'})
-        assert r.html.findAll('a', {'href':'/p/test/test-tool/'})
+        assert r.html.findAll('a', {'href': '/p/test/test-tool2/'})
+        assert r.html.findAll('a', {'href': '/p/test/test-tool/'})
 
         # check can't create dup tool
         r = self.app.post('/admin/update_mounts', params={
-                'new.install':'install',
-                'new.ep_name':'Wiki',
-                'new.ordinal':'1',
-                'new.mount_point':'test-tool',
-                'new.mount_label':'Test Tool'})
+            'new.install': 'install',
+            'new.ep_name': 'Wiki',
+            'new.ordinal': '1',
+            'new.mount_point': 'test-tool',
+            'new.mount_label': 'Test Tool'})
         assert 'error' in self.webflash(r)
         # Rename a tool
         with audits('update tool test-tool'):
             self.app.post('/admin/update_mounts', params={
-                    'tool-0.mount_point':'test-tool',
-                    'tool-0.mount_label':'Tst Tuul',
-                    'tool-0.ordinal':'200',
-                    })
+                'tool-0.mount_point': 'test-tool',
+                'tool-0.mount_label': 'Tst Tuul',
+                'tool-0.ordinal': '200',
+            })
         r = self.app.get('/admin/overview')
         assert 'Tst Tuul' in r
         # Remove a tool
         with audits('uninstall tool test-tool'):
             self.app.post('/admin/update_mounts', params={
-                    'tool-0.delete':'on',
-                    'tool-0.mount_point':'test-tool',
-                    'new.ep_name':'',
-                    })
+                'tool-0.delete': 'on',
+                'tool-0.mount_point': 'test-tool',
+                'new.ep_name': '',
+            })
 
         # Check the audit log
         r = self.app.get('/admin/audit/')
@@ -178,12 +183,14 @@ class TestProjectAdmin(TestController):
 
     @td.with_wiki
     def test_block_user_empty_data(self):
-        r = self.app.post('/admin/wiki/block_user', params={'username': '', 'perm': '', 'reason': ''})
+        r = self.app.post('/admin/wiki/block_user',
+                          params={'username': '', 'perm': '', 'reason': ''})
         assert_equals(r.json, dict(error='Enter username'))
 
     @td.with_wiki
     def test_unblock_user_empty_data(self):
-        r = self.app.post('/admin/wiki/unblock_user', params={'user_id': '', 'perm': ''})
+        r = self.app.post('/admin/wiki/unblock_user',
+                          params={'user_id': '', 'perm': ''})
         assert_equals(r.json, dict(error='Select user to unblock'))
 
     @td.with_wiki
@@ -192,8 +199,10 @@ class TestProjectAdmin(TestController):
         assert '<input type="checkbox" name="user_id"' not in r
 
         user = M.User.by_username('test-admin')
-        r = self.app.post('/admin/wiki/block_user', params={'username': 'test-admin', 'perm': 'read', 'reason': 'Comment'})
-        assert_equals(r.json, dict(user_id=str(user._id), username='test-admin', reason='Comment'))
+        r = self.app.post('/admin/wiki/block_user',
+                          params={'username': 'test-admin', 'perm': 'read', 'reason': 'Comment'})
+        assert_equals(
+            r.json, dict(user_id=str(user._id), username='test-admin', reason='Comment'))
         user = M.User.by_username('test-admin')
         admin_role = M.ProjectRole.by_user(user)
         app = M.Project.query.get(shortname='test').app_instance('wiki')
@@ -204,7 +213,8 @@ class TestProjectAdmin(TestController):
 
     @td.with_wiki
     def test_unblock_user(self):
-        r = self.app.post('/admin/wiki/block_user', params={'username': 'test-admin', 'perm': 'read'})
+        r = self.app.post('/admin/wiki/block_user',
+                          params={'username': 'test-admin', 'perm': 'read'})
         user = M.User.by_username('test-admin')
         admin_role = M.ProjectRole.by_user(user)
         app = M.Project.query.get(shortname='test').app_instance('wiki')
@@ -213,7 +223,8 @@ class TestProjectAdmin(TestController):
         assert '<input type="checkbox" name="user_id" value="%s">test-admin' % user._id in r
         app = M.Project.query.get(shortname='test').app_instance('wiki')
         assert M.ACL.contains(ace, app.acl) is not None
-        r = self.app.post('/admin/wiki/unblock_user', params={'user_id': str(user._id), 'perm': 'read'})
+        r = self.app.post('/admin/wiki/unblock_user',
+                          params={'user_id': str(user._id), 'perm': 'read'})
         assert_equals(r.json, dict(unblocked=[str(user._id)]))
         assert M.ACL.contains(ace, app.acl) is None
         r = self.app.get('/admin/wiki/permissions')
@@ -221,8 +232,10 @@ class TestProjectAdmin(TestController):
 
     @td.with_wiki
     def test_block_unblock_multiple_users(self):
-        self.app.post('/admin/wiki/block_user', params={'username': 'test-admin', 'perm': 'read', 'reason': 'Spammer'})
-        self.app.post('/admin/wiki/block_user', params={'username': 'test-user', 'perm': 'read'})
+        self.app.post('/admin/wiki/block_user',
+                      params={'username': 'test-admin', 'perm': 'read', 'reason': 'Spammer'})
+        self.app.post('/admin/wiki/block_user',
+                      params={'username': 'test-user', 'perm': 'read'})
         admin = M.User.by_username('test-admin')
         user = M.User.by_username('test-user')
         admin_role = M.ProjectRole.by_user(admin)
@@ -236,8 +249,10 @@ class TestProjectAdmin(TestController):
         assert '<input type="checkbox" name="user_id" value="%s">test-admin (Spammer)' % admin._id in r
         assert '<input type="checkbox" name="user_id" value="%s">test-user' % user._id in r
 
-        self.app.post('/admin/wiki/unblock_user', params={'user_id': str(user._id), 'perm': 'read'})
-        self.app.post('/admin/wiki/unblock_user', params={'user_id': str(admin._id), 'perm': 'read'})
+        self.app.post('/admin/wiki/unblock_user',
+                      params={'user_id': str(user._id), 'perm': 'read'})
+        self.app.post('/admin/wiki/unblock_user',
+                      params={'user_id': str(admin._id), 'perm': 'read'})
         app = M.Project.query.get(shortname='test').app_instance('wiki')
         assert M.ACL.contains(deny_admin, app.acl) is None
         assert M.ACL.contains(deny_user, app.acl) is None
@@ -246,8 +261,10 @@ class TestProjectAdmin(TestController):
 
     @td.with_wiki
     def test_blocked_users_remains_after_saving_all_permissions(self):
-        self.app.post('/admin/wiki/block_user', params={'username': 'test-user', 'perm': 'read', 'reason': 'Comment'})
-        self.app.post('/admin/wiki/block_user', params={'username': 'test-user', 'perm': 'post', 'reason': 'Comment'})
+        self.app.post('/admin/wiki/block_user',
+                      params={'username': 'test-user', 'perm': 'read', 'reason': 'Comment'})
+        self.app.post('/admin/wiki/block_user',
+                      params={'username': 'test-user', 'perm': 'post', 'reason': 'Comment'})
         user = M.User.by_username('test-user')
         user_role = M.ProjectRole.by_user(user)
         app = M.Project.query.get(shortname='test').app_instance('wiki')
@@ -268,39 +285,40 @@ class TestProjectAdmin(TestController):
 
     def test_tool_permissions(self):
         BUILTIN_APPS = ['activity', 'blog', 'discussion', 'git', 'link',
-                'shorturl', 'svn', 'tickets', 'userstats', 'wiki']
+                        'shorturl', 'svn', 'tickets', 'userstats', 'wiki']
         self.app.get('/admin/')
         project = M.Project.query.get(shortname='test')
         for i, ep in enumerate(pkg_resources.iter_entry_points('allura')):
             App = ep.load()
             tool = ep.name
             cfg = M.AppConfig(
-                    project_id=project._id,
-                    tool_name=tool,
-                    options={'mount_point': '', 'mount_label': ''})
+                project_id=project._id,
+                tool_name=tool,
+                options={'mount_point': '', 'mount_label': ''})
             app = App(project, cfg)
             if not app.installable or ep.name.lower() not in BUILTIN_APPS:
                 continue
             with audits('install tool test-%d' % i):
                 self.app.post('/admin/update_mounts', params={
-                        'new.install':'install',
-                        'new.ep_name':tool,
-                        'new.ordinal':str(i),
-                        'new.mount_point':'test-%d' % i,
-                        'new.mount_label':tool })
+                    'new.install': 'install',
+                    'new.ep_name': tool,
+                    'new.ordinal': str(i),
+                    'new.mount_point': 'test-%d' % i,
+                    'new.mount_label': tool})
             r = self.app.get('/admin/test-%d/permissions' % i)
             cards = [
                 tag for tag in r.html.findAll('input')
                 if (
                     tag.get('type') == 'hidden' and
                     tag['name'].startswith('card-') and
-                    tag['name'].endswith('.id')) ]
+                    tag['name'].endswith('.id'))]
             assert len(cards) == len(app.permissions), cards
 
     def test_tool_list(self):
         r = self.app.get('/admin/tools')
-        new_ep_opts = r.html.findAll('a',{'class':"install_trig"})
-        tool_strings = [ ' '.join(opt.find('span').string.strip().split()) for opt in new_ep_opts ]
+        new_ep_opts = r.html.findAll('a', {'class': "install_trig"})
+        tool_strings = [' '.join(opt.find('span').string.strip().split())
+                        for opt in new_ep_opts]
         expected_tools = [
             'Wiki',
             'Tickets',
@@ -318,76 +336,82 @@ class TestProjectAdmin(TestController):
             available_tools = self.get_available_tools()
             assert_in('Wiki', available_tools)
             r = self.app.post('/admin/update_mounts/', params={
-                    'new.install': 'install',
-                    'new.ep_name': 'Wiki',
-                    'new.ordinal': '1',
-                    'new.mount_point': 'wiki',
-                    'new.mount_label': 'Wiki'})
+                'new.install': 'install',
+                'new.ep_name': 'Wiki',
+                'new.ordinal': '1',
+                'new.mount_point': 'wiki',
+                'new.mount_label': 'Wiki'})
             available_tools = self.get_available_tools()
             assert_not_in('Wiki', available_tools)
             r = self.app.post('/admin/update_mounts/', params={
-                    'new.install': 'install',
-                    'new.ep_name': 'Wiki',
-                    'new.ordinal': '1',
-                    'new.mount_point': 'wiki2',
-                    'new.mount_label': 'Wiki 2'})
+                'new.install': 'install',
+                'new.ep_name': 'Wiki',
+                'new.ordinal': '1',
+                'new.mount_point': 'wiki2',
+                'new.mount_label': 'Wiki 2'})
             assert 'error' in self.webflash(r)
             assert 'limit exceeded' in self.webflash(r)
 
     def test_grouping_threshold(self):
         r = self.app.get('/admin/tools')
-        grouping_threshold = r.html.find('input',{'name':'grouping_threshold'})
+        grouping_threshold = r.html.find(
+            'input', {'name': 'grouping_threshold'})
         assert_equals(grouping_threshold['value'], '1')
         r = self.app.post('/admin/configure_tool_grouping', params={
-                'grouping_threshold': '2',
-            }).follow()
-        grouping_threshold = r.html.find('input',{'name':'grouping_threshold'})
+            'grouping_threshold': '2',
+        }).follow()
+        grouping_threshold = r.html.find(
+            'input', {'name': 'grouping_threshold'})
         assert_equals(grouping_threshold['value'], '2')
         r = self.app.get('/admin/tools')
-        grouping_threshold = r.html.find('input',{'name':'grouping_threshold'})
+        grouping_threshold = r.html.find(
+            'input', {'name': 'grouping_threshold'})
         assert_equals(grouping_threshold['value'], '2')
 
     def test_project_icon(self):
         file_name = 'neo-icon-set-454545-256x350.png'
-        file_path = os.path.join(allura.__path__[0],'nf','allura','images',file_name)
+        file_path = os.path.join(
+            allura.__path__[0], 'nf', 'allura', 'images', file_name)
         file_data = file(file_path).read()
         upload = ('icon', file_name, file_data)
 
         self.app.get('/admin/')
         with audits('update project icon'):
             self.app.post('/admin/update', params=dict(
-                    name='Test Project',
-                    shortname='test',
-                    short_description='A Test Project'),
-                    upload_files=[upload])
+                name='Test Project',
+                shortname='test',
+                short_description='A Test Project'),
+                upload_files=[upload])
         r = self.app.get('/p/test/icon')
         image = PIL.Image.open(StringIO.StringIO(r.body))
-        assert image.size == (48,48)
+        assert image.size == (48, 48)
 
         r = self.app.get('/p/test/icon?foo=bar')
 
     def test_project_screenshot(self):
         file_name = 'neo-icon-set-454545-256x350.png'
-        file_path = os.path.join(allura.__path__[0],'nf','allura','images',file_name)
+        file_path = os.path.join(
+            allura.__path__[0], 'nf', 'allura', 'images', file_name)
         file_data = file(file_path).read()
         upload = ('screenshot', file_name, file_data)
 
         self.app.get('/admin/')
         with audits('add screenshot'):
             self.app.post('/admin/add_screenshot', params=dict(
-                    caption='test me'),
-                    upload_files=[upload])
+                caption='test me'),
+                upload_files=[upload])
         p_nbhd = M.Neighborhood.query.get(name='Projects')
-        project = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
+        project = M.Project.query.get(
+            shortname='test', neighborhood_id=p_nbhd._id)
         filename = project.get_screenshots()[0].filename
-        r = self.app.get('/p/test/screenshot/'+filename)
+        r = self.app.get('/p/test/screenshot/' + filename)
         uploaded = PIL.Image.open(file_path)
         screenshot = PIL.Image.open(StringIO.StringIO(r.body))
         assert uploaded.size == screenshot.size
-        r = self.app.get('/p/test/screenshot/'+filename+'/thumb')
+        r = self.app.get('/p/test/screenshot/' + filename + '/thumb')
         thumb = PIL.Image.open(StringIO.StringIO(r.body))
-        assert thumb.size == (150,150)
-        #FIX: home pages don't currently support screenshots (now that they're a wiki);
+        assert thumb.size == (150, 150)
+        # FIX: home pages don't currently support screenshots (now that they're a wiki);
         # reinstate this code (or appropriate) when we have a macro for that
         #r = self.app.get('/p/test/home/')
         #assert '/p/test/screenshot/'+filename in r
@@ -407,22 +431,22 @@ class TestProjectAdmin(TestController):
     def test_sort_screenshots(self):
         for file_name in ('admin_24.png', 'admin_32.png'):
             file_path = os.path.join(allura.__path__[0], 'nf', 'allura',
-                    'images', file_name)
+                                     'images', file_name)
             file_data = file(file_path).read()
             upload = ('screenshot', file_name, file_data)
             self.app.post('/admin/add_screenshot', params=dict(
-                    caption=file_name),
-                    upload_files=[upload])
+                caption=file_name),
+                upload_files=[upload])
 
         p_nbhd = M.Neighborhood.query.get(name='Projects')
         project = M.Project.query.get(shortname='test',
-                neighborhood_id=p_nbhd._id)
+                                      neighborhood_id=p_nbhd._id)
         # first uploaded is first by default
         screenshots = project.get_screenshots()
         assert_equals(screenshots[0].filename, 'admin_24.png')
         # reverse order
         params = dict((str(ss._id), len(screenshots) - 1 - i)
-                for i, ss in enumerate(screenshots))
+                      for i, ss in enumerate(screenshots))
         self.app.post('/admin/sort_screenshots', params)
         assert_equals(project.get_screenshots()[0].filename, 'admin_32.png')
 
@@ -430,40 +454,46 @@ class TestProjectAdmin(TestController):
         # create a subproject
         with audits('create subproject sub-del-undel'):
             self.app.post('/admin/update_mounts', params={
-                    'new.install':'install',
-                    'new.ep_name':'',
-                    'new.ordinal':'1',
-                    'new.mount_point':'sub-del-undel',
-                    'new.mount_label':'sub-del-undel'})
+                'new.install': 'install',
+                'new.ep_name': '',
+                'new.ordinal': '1',
+                'new.mount_point': 'sub-del-undel',
+                'new.mount_label': 'sub-del-undel'})
         r = self.app.get('/p/test/admin/overview')
         assert 'This project has been deleted and is not visible to non-admin users' not in r
-        assert r.html.find('input',{'name':'removal','value':''}).has_key('checked')
-        assert not r.html.find('input',{'name':'removal','value':'deleted'}).has_key('checked')
+        assert r.html.find(
+            'input', {'name': 'removal', 'value': ''}).has_key('checked')
+        assert not r.html.find(
+            'input', {'name': 'removal', 'value': 'deleted'}).has_key('checked')
         with audits('delete project'):
             self.app.post('/admin/update', params=dict(
-                    name='Test Project',
-                    shortname='test',
-                    removal='deleted',
-                    short_description='A Test Project',
-                    delete='on'))
+                name='Test Project',
+                shortname='test',
+                removal='deleted',
+                short_description='A Test Project',
+                delete='on'))
         r = self.app.get('/p/test/admin/overview')
         assert 'This project has been deleted and is not visible to non-admin users' in r
-        assert not r.html.find('input',{'name':'removal','value':''}).has_key('checked')
-        assert r.html.find('input',{'name':'removal','value':'deleted'}).has_key('checked')
+        assert not r.html.find(
+            'input', {'name': 'removal', 'value': ''}).has_key('checked')
+        assert r.html.find(
+            'input', {'name': 'removal', 'value': 'deleted'}).has_key('checked')
         # make sure subprojects get deleted too
         r = self.app.get('/p/test/sub-del-undel/admin/overview')
         assert 'This project has been deleted and is not visible to non-admin users' in r
         with audits('undelete project'):
             self.app.post('/admin/update', params=dict(
-                    name='Test Project',
-                    shortname='test',
-                    removal='',
-                    short_description='A Test Project',
-                    undelete='on'))
+                name='Test Project',
+                shortname='test',
+                removal='',
+                short_description='A Test Project',
+                undelete='on'))
         r = self.app.get('/p/test/admin/overview')
         assert 'This project has been deleted and is not visible to non-admin users' not in r
-        assert r.html.find('input',{'name':'removal','value':''}).has_key('checked')
-        assert not r.html.find('input',{'name':'removal','value':'deleted'}).has_key('checked')
+        assert r.html.find(
+            'input', {'name': 'removal', 'value': ''}).has_key('checked')
+        assert not r.html.find(
+            'input', {'name': 'removal', 'value': 'deleted'}).has_key('checked')
 
     def test_project_delete_not_allowed(self):
         # turn off project delete option
@@ -474,39 +504,42 @@ class TestProjectAdmin(TestController):
             # create a subproject
             with audits('create subproject sub-no-del'):
                 self.app.post('/admin/update_mounts', params={
-                        'new.install':'install',
-                        'new.ep_name':'',
-                        'new.ordinal':'1',
-                        'new.mount_point':'sub-no-del',
-                        'new.mount_label':'sub-no-del'})
+                    'new.install': 'install',
+                    'new.ep_name': '',
+                    'new.ordinal': '1',
+                    'new.mount_point': 'sub-no-del',
+                    'new.mount_label': 'sub-no-del'})
             # root project doesn't have delete option
             r = self.app.get('/p/test/admin/overview')
-            assert not r.html.find('input',{'name':'removal','value':'deleted'})
+            assert not r.html.find(
+                'input', {'name': 'removal', 'value': 'deleted'})
             # subprojects can still be deleted
             r = self.app.get('/p/test/sub-no-del/admin/overview')
-            assert r.html.find('input',{'name':'removal','value':'deleted'})
+            assert r.html.find(
+                'input', {'name': 'removal', 'value': 'deleted'})
             # attempt to delete root project won't do anything
             self.app.post('/admin/update', params=dict(
-                    name='Test Project',
-                    shortname='test',
-                    removal='deleted',
-                    short_description='A Test Project',
-                    delete='on'))
+                name='Test Project',
+                shortname='test',
+                removal='deleted',
+                short_description='A Test Project',
+                delete='on'))
             r = self.app.get('/p/test/admin/overview')
             assert 'This project has been deleted and is not visible to non-admin users' not in r
             # make sure subproject delete works
             with audits(
-                'change project removal status to deleted',
-                'delete project'):
+                    'change project removal status to deleted',
+                    'delete project'):
                 self.app.post('/p/test/sub-no-del/admin/update', params=dict(
-                        name='sub1',
-                        shortname='sub1',
-                        removal='deleted',
-                        short_description='A Test Project',
-                        delete='on'))
+                    name='sub1',
+                    shortname='sub1',
+                    removal='deleted',
+                    short_description='A Test Project',
+                    delete='on'))
             r = self.app.get('/p/test/sub-no-del/admin/overview')
             assert 'This project has been deleted and is not visible to non-admin users' in r
-            assert r.html.find('input',{'name':'removal','value':'deleted'}).has_key('checked')
+            assert r.html.find(
+                'input', {'name': 'removal', 'value': 'deleted'}).has_key('checked')
         finally:
             if old_allow_project_delete == ():
                 del config['allow_project_delete']
@@ -563,14 +596,14 @@ class TestProjectAdmin(TestController):
 
         with audits('updated "admin" permission: "Admin" => "Admin, Developer" for wiki'):
             self.app.post('/admin/wiki/update', params={
-                        'card-0.new': opt_developer['value'],
-                        'card-0.value': opt_admin['value'],
-                        'card-0.id': 'admin'})
+                'card-0.new': opt_developer['value'],
+                'card-0.value': opt_admin['value'],
+                'card-0.id': 'admin'})
 
         with audits('updated "admin" permission: "Admin, Developer" => "Admin" for wiki'):
             self.app.post('/admin/wiki/update', params={
-                        'card-0.value': opt_admin['value'],
-                        'card-0.id': 'admin'})
+                'card-0.value': opt_admin['value'],
+                'card-0.id': 'admin'})
 
     def test_project_permissions(self):
         r = self.app.get('/admin/permissions/')
@@ -582,11 +615,12 @@ class TestProjectAdmin(TestController):
         assert opt_developer.name == 'option'
         with audits('updated "admin" permissions: "Admin" => "Admin,Developer"'):
             r = self.app.post('/admin/permissions/update', params={
-                    'card-0.new': opt_developer['value'],
-                    'card-0.value': opt_admin['value'],
-                    'card-0.id': 'admin'})
+                'card-0.new': opt_developer['value'],
+                'card-0.value': opt_admin['value'],
+                'card-0.id': 'admin'})
         r = self.app.get('/admin/permissions/')
-        assigned_ids = [t['value'] for t in r.html.findAll('input', {'name': 'card-0.value'})]
+        assigned_ids = [t['value']
+                        for t in r.html.findAll('input', {'name': 'card-0.value'})]
         assert len(assigned_ids) == 2
         assert opt_developer['value'] in assigned_ids
         assert opt_admin['value'] in assigned_ids
@@ -594,11 +628,11 @@ class TestProjectAdmin(TestController):
     def test_subproject_permissions(self):
         with audits('create subproject test-subproject'):
             self.app.post('/admin/update_mounts', params={
-                    'new.install':'install',
-                    'new.ep_name':'',
-                    'new.ordinal':'1',
-                    'new.mount_point':'test-subproject',
-                    'new.mount_label':'Test Subproject'})
+                'new.install': 'install',
+                'new.ep_name': '',
+                'new.ordinal': '1',
+                'new.mount_point': 'test-subproject',
+                'new.mount_label': 'Test Subproject'})
         r = self.app.get('/test-subproject/admin/permissions/')
         assert len(r.html.findAll('input', {'name': 'card-0.value'})) == 0
         select = r.html.find('select', {'name': 'card-0.new'})
@@ -608,34 +642,40 @@ class TestProjectAdmin(TestController):
         assert opt_developer.name == 'option'
         with audits('updated "admin" permissions: "" => "Admin,Developer"'):
             r = self.app.post('/test-subproject/admin/permissions/update', params={
-                    'card-0.new': opt_developer['value'],
-                    'card-0.value': opt_admin['value'],
-                    'card-0.id': 'admin'})
+                'card-0.new': opt_developer['value'],
+                'card-0.value': opt_admin['value'],
+                'card-0.id': 'admin'})
         r = self.app.get('/test-subproject/admin/permissions/')
-        assigned_ids = [t['value'] for t in r.html.findAll('input', {'name': 'card-0.value'})]
+        assigned_ids = [t['value']
+                        for t in r.html.findAll('input', {'name': 'card-0.value'})]
         assert len(assigned_ids) == 2
         assert opt_developer['value'] in assigned_ids
         assert opt_admin['value'] in assigned_ids
 
     def test_project_groups(self):
         r = self.app.get('/admin/groups/')
-        dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
+        dev_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
         developer_id = dev_holder['data-group']
         with audits('add user test-user to Developer'):
             r = self.app.post('/admin/groups/add_user', params={
-                    'role_id': developer_id,
-                    'username': 'test-user'})
+                'role_id': developer_id,
+                'username': 'test-user'})
         r = self.app.get('/admin/groups/')
-        dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
-        users = dev_holder.find('ul',{'class':'users'}).findAll('li',{'class':'deleter'})
+        dev_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
+        users = dev_holder.find('ul', {'class': 'users'}).findAll(
+            'li', {'class': 'deleter'})
         assert 'test-user' in users[0]['data-user']
         # Make sure we can open role page for builtin role
-        r = self.app.get('/admin/groups/' + developer_id + '/', validate_chunk=True)
+        r = self.app.get('/admin/groups/' + developer_id +
+                         '/', validate_chunk=True)
 
     def test_new_admin_subscriptions(self):
         """Newly added admin must be subscribed to all the tools in the project"""
         r = self.app.get('/admin/groups/')
-        admin_holder = r.html.find('table', {'id': 'usergroup_admin'}).findAll('tr')[1]
+        admin_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[1]
         admin_id = admin_holder['data-group']
         with audits('add user test-user to Admin'):
             self.app.post('/admin/groups/add_user', params={
@@ -645,13 +685,15 @@ class TestProjectAdmin(TestController):
         p = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
         uid = M.User.by_username('test-user')._id
         for ac in p.app_configs:
-            sub = M.Mailbox.subscribed(user_id=uid, project_id=p._id, app_config_id=ac._id)
+            sub = M.Mailbox.subscribed(
+                user_id=uid, project_id=p._id, app_config_id=ac._id)
             assert sub, 'New admin not subscribed to app %s' % ac
 
     def test_new_user_subscriptions(self):
         """Newly added user must not be subscribed to all the tools in the project if he is not admin"""
         r = self.app.get('/admin/groups/')
-        dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
+        dev_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
         developer_id = dev_holder['data-group']
         with audits('add user test-user to Developer'):
             self.app.post('/admin/groups/add_user', params={
@@ -661,33 +703,37 @@ class TestProjectAdmin(TestController):
         p = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
         uid = M.User.by_username('test-user')._id
         for ac in p.app_configs:
-            sub = M.Mailbox.subscribed(user_id=uid, project_id=p._id, app_config_id=ac._id)
+            sub = M.Mailbox.subscribed(
+                user_id=uid, project_id=p._id, app_config_id=ac._id)
             assert not sub, 'New user subscribed to app %s' % ac
 
     def test_subroles(self):
         """Make sure subroles are preserved during group updates."""
         def check_roles(r):
-            dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
-            mem_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[3]
+            dev_holder = r.html.find(
+                'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
+            mem_holder = r.html.find(
+                'table', {'id': 'usergroup_admin'}).findAll('tr')[3]
             assert 'All users in Admin group' in str(dev_holder)
             assert 'All users in Developer group' in str(mem_holder)
 
         r = self.app.get('/admin/groups/')
 
-        admin_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[1]
+        admin_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[1]
         admin_id = admin_holder['data-group']
         # test that subroles are intact after user added
         with audits('add user test-user to Admin'):
             r = self.app.post('/admin/groups/add_user', params={
-                    'role_id': admin_id,
-                    'username': 'test-user'})
+                'role_id': admin_id,
+                'username': 'test-user'})
         r = self.app.get('/admin/groups/')
         check_roles(r)
         # test that subroles are intact after user deleted
         with audits('remove user test-user from Admin'):
             r = self.app.post('/admin/groups/remove_user', params={
-                    'role_id': admin_id,
-                    'username': 'test-user'})
+                'role_id': admin_id,
+                'username': 'test-user'})
         r = self.app.get('/admin/groups/')
         check_roles(r)
 
@@ -695,56 +741,66 @@ class TestProjectAdmin(TestController):
         """Must always have at least one user with the Admin role (and anon
         doesn't count)."""
         r = self.app.get('/admin/groups/')
-        admin_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[1]
+        admin_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[1]
         admin_id = admin_holder['data-group']
-        users = admin_holder.find('ul',{'class':'users'}).findAll('li',{'class':'deleter'})
+        users = admin_holder.find('ul', {'class': 'users'}).findAll(
+            'li', {'class': 'deleter'})
         assert len(users) == 1
         r = self.app.post('/admin/groups/remove_user', params={
-                'role_id': admin_id,
-                'username': 'admin1'})
-        assert r.json['error'] == 'You must have at least one user with the Admin role.'
+            'role_id': admin_id,
+            'username': 'admin1'})
+        assert r.json[
+            'error'] == 'You must have at least one user with the Admin role.'
         r = self.app.get('/admin/groups/')
-        admin_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[1]
-        users = admin_holder.find('ul',{'class':'users'}).findAll('li',{'class':'deleter'})
+        admin_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[1]
+        users = admin_holder.find('ul', {'class': 'users'}).findAll(
+            'li', {'class': 'deleter'})
         assert len(users) == 1
 
     def test_cannot_add_anon_to_group(self):
         r = self.app.get('/admin/groups/')
-        dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
+        dev_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
         developer_id = dev_holder['data-group']
         r = self.app.post('/admin/groups/add_user', params={
-                'role_id': developer_id,
-                'username': ''})
+            'role_id': developer_id,
+            'username': ''})
         assert r.json['error'] == 'You must choose a user to add.'
         r = self.app.get('/admin/groups/')
-        dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
-        users = dev_holder.find('ul',{'class':'users'}).findAll('li',{'class':'deleter'})
+        dev_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
+        users = dev_holder.find('ul', {'class': 'users'}).findAll(
+            'li', {'class': 'deleter'})
         # no user was added
         assert len(users) == 0
         assert M.ProjectRole.query.find(dict(
-                name='*anonymous', user_id=None,
-                roles={'$ne': []})).count() == 0
+            name='*anonymous', user_id=None,
+            roles={'$ne': []})).count() == 0
 
     def test_project_multi_groups(self):
         r = self.app.get('/admin/groups/')
         user_id = M.User.by_username('test-admin')._id
-        admin_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[1]
+        admin_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[1]
         admin_id = admin_holder['data-group']
         with audits('add user test-user to Admin'):
             r = self.app.post('/admin/groups/add_user', params={
-                    'role_id': admin_id,
-                    'username': 'test-user'})
-            assert 'error' not in r.json
-        r = self.app.post('/admin/groups/add_user', params={
                 'role_id': admin_id,
                 'username': 'test-user'})
-        assert r.json['error'] == 'Test User (test-user) is already in the group Admin.'
+            assert 'error' not in r.json
+        r = self.app.post('/admin/groups/add_user', params={
+            'role_id': admin_id,
+            'username': 'test-user'})
+        assert r.json[
+            'error'] == 'Test User (test-user) is already in the group Admin.'
         r = self.app.get('/admin/groups/')
         assert 'test-user' in str(r), r.showbrowser()
         with audits('remove user test-user from Admin'):
             r = self.app.post('/admin/groups/remove_user', params={
-                    'role_id': admin_id,
-                    'username': 'test-user'})
+                'role_id': admin_id,
+                'username': 'test-user'})
         r = self.app.get('/admin/groups/')
         assert 'test-user' not in str(r), r.showbrowser()
 
@@ -752,12 +808,15 @@ class TestProjectAdmin(TestController):
     def test_new_group(self):
         r = self.app.get('/admin/groups/new', validate_chunk=True)
         with audits('create group Developer'):
-            r = self.app.post('/admin/groups/create', params={'name': 'Developer'})
+            r = self.app.post('/admin/groups/create',
+                              params={'name': 'Developer'})
         assert 'error' in self.webflash(r)
         with audits('create group RoleNew1'):
-            r = self.app.post('/admin/groups/create', params={'name': 'RoleNew1'})
+            r = self.app.post('/admin/groups/create',
+                              params={'name': 'RoleNew1'})
         r = self.app.get('/admin/groups/')
-        role_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[4]
+        role_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[4]
         assert 'RoleNew1' in str(role_holder)
         role_id = role_holder['data-group']
         r = self.app.get('/admin/groups/' + role_id + '/', validate_chunk=True)
@@ -767,120 +826,147 @@ class TestProjectAdmin(TestController):
         assert 'already exists' in self.webflash(r)
 
         with audits('update group name RoleNew1=>rleNew2'):
-            r = self.app.post('/admin/groups/' + str(role_id) + '/update', params={'_id': role_id, 'name': 'rleNew2'}).follow()
+            r = self.app.post('/admin/groups/' + str(role_id) + '/update',
+                              params={'_id': role_id, 'name': 'rleNew2'}).follow()
         assert 'RoleNew1' not in r
         assert 'rleNew2' in r
 
         # add test-user to role
-        role_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[4]
+        role_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[4]
         rleNew2_id = role_holder['data-group']
         with audits('add user test-user to rleNew2'):
             r = self.app.post('/admin/groups/add_user', params={
-                    'role_id': rleNew2_id,
-                    'username': 'test-user'})
+                'role_id': rleNew2_id,
+                'username': 'test-user'})
 
         with audits('delete group rleNew2'):
             r = self.app.post('/admin/groups/delete_group', params={
-                    'group_name': 'rleNew2'})
+                'group_name': 'rleNew2'})
         assert 'deleted' in self.webflash(r)
         r = self.app.get('/admin/groups/', status=200)
-        roles = [str(t) for t in r.html.findAll('td',{'class':'group'})]
+        roles = [str(t) for t in r.html.findAll('td', {'class': 'group'})]
         assert 'RoleNew1' not in roles
         assert 'rleNew2' not in roles
 
-        # make sure can still access homepage after one of user's roles were deleted
-        r = self.app.get('/p/test/wiki/', extra_environ=dict(username='test-user')).follow()
+        # make sure can still access homepage after one of user's roles were
+        # deleted
+        r = self.app.get('/p/test/wiki/',
+                         extra_environ=dict(username='test-user')).follow()
         assert r.status == '200 OK'
 
     def test_change_perms(self):
         r = self.app.get('/admin/groups/')
-        dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
-        mem_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[3]
+        dev_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
+        mem_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[3]
         mem_id = mem_holder['data-group']
         # neither group has update permission
         assert dev_holder.findAll('ul')[1].findAll('li')[2]['class'] == "no"
         assert mem_holder.findAll('ul')[1].findAll('li')[2]['class'] == "no"
         # add update permission to Member
         r = self.app.post('/admin/groups/change_perm', params={
-                'role_id': mem_id,
-                'permission': 'create',
-                'allow': 'true'})
+            'role_id': mem_id,
+            'permission': 'create',
+            'allow': 'true'})
         r = self.app.get('/admin/groups/')
-        dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
-        mem_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[3]
+        dev_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
+        mem_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[3]
         # Member now has update permission
         assert mem_holder.findAll('ul')[1].findAll('li')[2]['class'] == "yes"
         # Developer has inherited update permission from Member
-        assert dev_holder.findAll('ul')[1].findAll('li')[2]['class'] == "inherit"
+        assert dev_holder.findAll('ul')[1].findAll(
+            'li')[2]['class'] == "inherit"
         # remove update permission from Member
         r = self.app.post('/admin/groups/change_perm', params={
-                'role_id': mem_id,
-                'permission': 'create',
-                'allow': 'false'})
+            'role_id': mem_id,
+            'permission': 'create',
+            'allow': 'false'})
         r = self.app.get('/admin/groups/')
-        dev_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[2]
-        mem_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[3]
+        dev_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[2]
+        mem_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[3]
         # neither group has update permission
         assert dev_holder.findAll('ul')[1].findAll('li')[2]['class'] == "no"
         assert mem_holder.findAll('ul')[1].findAll('li')[2]['class'] == "no"
 
     def test_permission_inherit(self):
         r = self.app.get('/admin/groups/')
-        admin_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[1]
+        admin_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[1]
         admin_id = admin_holder['data-group']
-        mem_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[3]
+        mem_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[3]
         mem_id = mem_holder['data-group']
-        anon_holder = r.html.find('table',{'id':'usergroup_admin'}).findAll('tr')[5]
+        anon_holder = r.html.find(
+            'table', {'id': 'usergroup_admin'}).findAll('tr')[5]
         anon_id = anon_holder['data-group']
-        #first remove create from Admin so we can see it inherit
+        # first remove create from Admin so we can see it inherit
         r = self.app.post('/admin/groups/change_perm', params={
-                'role_id': admin_id,
-                'permission': 'create',
-                'allow': 'false'})
+            'role_id': admin_id,
+            'permission': 'create',
+            'allow': 'false'})
         # updates to anon inherit up
         r = self.app.post('/admin/groups/change_perm', params={
-                'role_id': anon_id,
-                'permission': 'create',
-                'allow': 'true'})
-        assert {u'text': u'Inherited permission create from Anonymous', u'has': u'inherit', u'name': u'create'} in r.json[admin_id]
-        assert {u'text': u'Inherited permission create from Anonymous', u'has': u'inherit', u'name': u'create'} in r.json[mem_id]
-        assert {u'text': u'Has permission create', u'has': u'yes', u'name': u'create'} in r.json[anon_id]
+            'role_id': anon_id,
+            'permission': 'create',
+            'allow': 'true'})
+        assert {u'text': u'Inherited permission create from Anonymous',
+                u'has': u'inherit', u'name': u'create'} in r.json[admin_id]
+        assert {u'text': u'Inherited permission create from Anonymous',
+                u'has': u'inherit', u'name': u'create'} in r.json[mem_id]
+        assert {u'text': u'Has permission create', u'has':
+                u'yes', u'name': u'create'} in r.json[anon_id]
         r = self.app.post('/admin/groups/change_perm', params={
-                'role_id': anon_id,
-                'permission': 'create',
-                'allow': 'false'})
-        assert {u'text': u'Does not have permission create', u'has': u'no', u'name': u'create'} in r.json[admin_id]
-        assert {u'text': u'Does not have permission create', u'has': u'no', u'name': u'create'} in r.json[mem_id]
-        assert {u'text': u'Does not have permission create', u'has': u'no', u'name': u'create'} in r.json[anon_id]
+            'role_id': anon_id,
+            'permission': 'create',
+            'allow': 'false'})
+        assert {u'text': u'Does not have permission create',
+                u'has': u'no', u'name': u'create'} in r.json[admin_id]
+        assert {u'text': u'Does not have permission create',
+                u'has': u'no', u'name': u'create'} in r.json[mem_id]
+        assert {u'text': u'Does not have permission create',
+                u'has': u'no', u'name': u'create'} in r.json[anon_id]
         # updates to Member inherit up
         r = self.app.post('/admin/groups/change_perm', params={
-                'role_id': mem_id,
-                'permission': 'create',
-                'allow': 'true'})
-        assert {u'text': u'Inherited permission create from Member', u'has': u'inherit', u'name': u'create'} in r.json[admin_id]
-        assert {u'text': u'Has permission create', u'has': u'yes', u'name': u'create'} in r.json[mem_id]
-        assert {u'text': u'Does not have permission create', u'has': u'no', u'name': u'create'} in r.json[anon_id]
+            'role_id': mem_id,
+            'permission': 'create',
+            'allow': 'true'})
+        assert {u'text': u'Inherited permission create from Member',
+                u'has': u'inherit', u'name': u'create'} in r.json[admin_id]
+        assert {u'text': u'Has permission create', u'has':
+                u'yes', u'name': u'create'} in r.json[mem_id]
+        assert {u'text': u'Does not have permission create',
+                u'has': u'no', u'name': u'create'} in r.json[anon_id]
         r = self.app.post('/admin/groups/change_perm', params={
-                'role_id': mem_id,
-                'permission': 'create',
-                'allow': 'false'})
-        assert {u'text': u'Does not have permission create', u'has': u'no', u'name': u'create'} in r.json[admin_id]
-        assert {u'text': u'Does not have permission create', u'has': u'no', u'name': u'create'} in r.json[mem_id]
-        assert {u'text': u'Does not have permission create', u'has': u'no', u'name': u'create'} in r.json[anon_id]
-
+            'role_id': mem_id,
+            'permission': 'create',
+            'allow': 'false'})
+        assert {u'text': u'Does not have permission create',
+                u'has': u'no', u'name': u'create'} in r.json[admin_id]
+        assert {u'text': u'Does not have permission create',
+                u'has': u'no', u'name': u'create'} in r.json[mem_id]
+        assert {u'text': u'Does not have permission create',
+                u'has': u'no', u'name': u'create'} in r.json[anon_id]
 
     def test_admin_extension_sidebar(self):
 
         class FooSettingsController(object):
+
             @expose()
             def index(self, *a, **kw):
                 return 'here the foo settings go'
 
-
         class FooSettingsExtension(AdminExtension):
+
             def update_project_sidebar_menu(self, sidebar_links):
-                base_url = c.project.url()+'admin/ext/'
-                sidebar_links.append(SitemapEntry('Foo Settings', base_url+'foo'))
+                base_url = c.project.url() + 'admin/ext/'
+                sidebar_links.append(
+                    SitemapEntry('Foo Settings', base_url + 'foo'))
 
             @property
             def project_admin_controllers(self):
@@ -916,13 +1002,15 @@ class TestExport(TestController):
     def test_exportable_tools_for(self):
         project = M.Project.query.get(shortname='test')
         exportable_tools = AdminApp.exportable_tools_for(project)
-        exportable_mount_points = [t.options.mount_point for t in exportable_tools]
+        exportable_mount_points = [
+            t.options.mount_point for t in exportable_tools]
         assert_equals(exportable_mount_points, [u'admin', u'wiki', u'wiki2'])
 
     def test_access(self):
         r = self.app.get('/admin/export',
                          extra_environ={'username': '*anonymous'}).follow()
-        assert_equals(r.request.url, 'http://localhost/auth/?return_to=%2Fadmin%2Fexport')
+        assert_equals(r.request.url,
+                      'http://localhost/auth/?return_to=%2Fadmin%2Fexport')
         self.app.get('/admin/export',
                      extra_environ={'username': 'test-user'},
                      status=403)
@@ -945,15 +1033,19 @@ class TestExport(TestController):
     def test_export_page_contains_exportable_tools(self):
         r = self.app.get('/admin/export')
         assert_in('Wiki</label> <a href="/p/test/wiki/">/p/test/wiki/</a>', r)
-        assert_in('Wiki2</label> <a href="/p/test/wiki2/">/p/test/wiki2/</a>', r)
-        assert_not_in('Search</label> <a href="/p/test/search/">/p/test/search/</a>', r)
+        assert_in(
+            'Wiki2</label> <a href="/p/test/wiki2/">/p/test/wiki2/</a>', r)
+        assert_not_in(
+            'Search</label> <a href="/p/test/search/">/p/test/search/</a>', r)
 
     def test_export_page_contains_hidden_tools(self):
         with mock.patch('allura.ext.search.search_main.SearchApp.exportable'):
             project = M.Project.query.get(shortname='test')
             exportable_tools = AdminApp.exportable_tools_for(project)
-            exportable_mount_points = [t.options.mount_point for t in exportable_tools]
-            assert_equals(exportable_mount_points, [u'admin', u'search', u'wiki', u'wiki2'])
+            exportable_mount_points = [
+                t.options.mount_point for t in exportable_tools]
+            assert_equals(exportable_mount_points,
+                          [u'admin', u'search', u'wiki', u'wiki2'])
 
     def test_tools_not_selected(self):
         r = self.app.post('/admin/export')
@@ -967,13 +1059,15 @@ class TestExport(TestController):
     def test_selected_one_tool(self, export_tasks):
         r = self.app.post('/admin/export', {'tools': u'wiki'})
         assert_in('ok', self.webflash(r))
-        export_tasks.bulk_export.post.assert_called_once_with([u'wiki'], 'test.zip', send_email=True)
+        export_tasks.bulk_export.post.assert_called_once_with(
+            [u'wiki'], 'test.zip', send_email=True)
 
     @mock.patch('allura.ext.admin.admin_main.export_tasks')
     def test_selected_multiple_tools(self, export_tasks):
         r = self.app.post('/admin/export', {'tools': [u'wiki', u'wiki2']})
         assert_in('ok', self.webflash(r))
-        export_tasks.bulk_export.post.assert_called_once_with([u'wiki', u'wiki2'], 'test.zip', send_email=True)
+        export_tasks.bulk_export.post.assert_called_once_with(
+            [u'wiki', u'wiki2'], 'test.zip', send_email=True)
 
     def test_export_in_progress(self):
         from allura.tasks import export_tasks
@@ -984,7 +1078,8 @@ class TestExport(TestController):
     @td.with_user_project('test-user')
     def test_bulk_export_path_for_user_project(self):
         project = M.Project.query.get(shortname='u/test-user')
-        assert_equals(project.bulk_export_path(), '/tmp/bulk_export/u/test-user')
+        assert_equals(project.bulk_export_path(),
+                      '/tmp/bulk_export/u/test-user')
 
     @td.with_user_project('test-user')
     def test_bulk_export_filename_for_user_project(self):
@@ -1006,6 +1101,7 @@ class TestExport(TestController):
 
 
 class TestRestExport(TestRestApiBase):
+
     @mock.patch('allura.model.project.MonQTask')
     def test_export_status(self, MonQTask):
         MonQTask.query.get.return_value = None
@@ -1022,7 +1118,8 @@ class TestRestExport(TestRestApiBase):
     def test_export_no_exportable_tools(self, bulk_export, exportable_tools, MonQTask):
         MonQTask.query.get.return_value = None
         exportable_tools.return_value = []
-        r = self.api_post('/rest/p/test/admin/export', tools='tickets, discussion', status=400)
+        r = self.api_post('/rest/p/test/admin/export',
+                          tools='tickets, discussion', status=400)
         assert_equals(bulk_export.post.call_count, 0)
 
     @mock.patch('allura.model.project.MonQTask')
@@ -1031,9 +1128,9 @@ class TestRestExport(TestRestApiBase):
     def test_export_no_tools_specified(self, bulk_export, exportable_tools, MonQTask):
         MonQTask.query.get.return_value = None
         exportable_tools.return_value = [
-                mock.Mock(options=mock.Mock(mount_point='tickets')),
-                mock.Mock(options=mock.Mock(mount_point='discussion')),
-            ]
+            mock.Mock(options=mock.Mock(mount_point='tickets')),
+            mock.Mock(options=mock.Mock(mount_point='discussion')),
+        ]
         r = self.api_post('/rest/p/test/admin/export', status=400)
         assert_equals(bulk_export.post.call_count, 0)
 
@@ -1043,10 +1140,11 @@ class TestRestExport(TestRestApiBase):
     def test_export_busy(self, bulk_export, exportable_tools, MonQTask):
         MonQTask.query.get.return_value = 'something'
         exportable_tools.return_value = [
-                mock.Mock(options=mock.Mock(mount_point='tickets')),
-                mock.Mock(options=mock.Mock(mount_point='discussion')),
-            ]
-        r = self.api_post('/rest/p/test/admin/export', tools='tickets, discussion', status=503)
+            mock.Mock(options=mock.Mock(mount_point='tickets')),
+            mock.Mock(options=mock.Mock(mount_point='discussion')),
+        ]
+        r = self.api_post('/rest/p/test/admin/export',
+                          tools='tickets, discussion', status=503)
         assert_equals(bulk_export.post.call_count, 0)
 
     @mock.patch('allura.model.project.MonQTask')
@@ -1055,18 +1153,21 @@ class TestRestExport(TestRestApiBase):
     def test_export_ok(self, bulk_export, exportable_tools, MonQTask):
         MonQTask.query.get.return_value = None
         exportable_tools.return_value = [
-                mock.Mock(options=mock.Mock(mount_point='tickets')),
-                mock.Mock(options=mock.Mock(mount_point='discussion')),
-            ]
-        r = self.api_post('/rest/p/test/admin/export', tools='tickets, discussion', status=200)
+            mock.Mock(options=mock.Mock(mount_point='tickets')),
+            mock.Mock(options=mock.Mock(mount_point='discussion')),
+        ]
+        r = self.api_post('/rest/p/test/admin/export',
+                          tools='tickets, discussion', status=200)
         assert_equals(r.json, {
-                'filename': 'test.zip',
-                'status': 'in progress',
-            })
-        bulk_export.post.assert_called_once_with(['tickets', 'discussion'], 'test.zip', send_email=False)
+            'filename': 'test.zip',
+            'status': 'in progress',
+        })
+        bulk_export.post.assert_called_once_with(
+            ['tickets', 'discussion'], 'test.zip', send_email=False)
 
 
 class TestRestInstallTool(TestRestApiBase):
+
     def test_missing_mount_info(self):
         r = self.api_get('/rest/p/test/')
         tools_names = [t['name'] for t in r.json['tools']]
@@ -1091,7 +1192,8 @@ class TestRestInstallTool(TestRestApiBase):
         }
         r = self.api_post('/rest/p/test/admin/install_tool/', **data)
         assert_equals(r.json['success'], False)
-        assert_equals(r.json['info'], 'Incorrect tool name, or limit is reached.')
+        assert_equals(r.json['info'],
+                      'Incorrect tool name, or limit is reached.')
 
     def test_bad_mount(self):
         r = self.api_get('/rest/p/test/')
@@ -1105,7 +1207,8 @@ class TestRestInstallTool(TestRestApiBase):
         }
         r = self.api_post('/rest/p/test/admin/install_tool/', **data)
         assert_equals(r.json['success'], False)
-        assert_equals(r.json['info'], 'Mount point "tickets_mount1" is invalid')
+        assert_equals(r.json['info'],
+                      'Mount point "tickets_mount1" is invalid')
 
     def test_install_tool_ok(self):
         r = self.api_get('/rest/p/test/')
@@ -1120,12 +1223,14 @@ class TestRestInstallTool(TestRestApiBase):
         r = self.api_post('/rest/p/test/admin/install_tool/', **data)
         assert_equals(r.json['success'], True)
         assert_equals(r.json['info'],
-                     'Tool %s with mount_point %s and mount_label %s was created.'
-                     % ('tickets', 'ticketsmount1', 'tickets_label1'))
+                      'Tool %s with mount_point %s and mount_label %s was created.'
+                      % ('tickets', 'ticketsmount1', 'tickets_label1'))
 
         project = M.Project.query.get(shortname='test')
-        assert_equals(project.ordered_mounts()[-1]['ac'].options.mount_point, 'ticketsmount1')
-        audit_log = M.AuditLog.query.find({'project_id': project._id}).sort({'_id': -1}).first()
+        assert_equals(project.ordered_mounts()
+                      [-1]['ac'].options.mount_point, 'ticketsmount1')
+        audit_log = M.AuditLog.query.find(
+            {'project_id': project._id}).sort({'_id': -1}).first()
         assert_equals(audit_log.message, 'install tool ticketsmount1')
 
     def test_tool_exists(self):
@@ -1164,7 +1269,8 @@ class TestRestInstallTool(TestRestApiBase):
             data['mount_label'] = 'wiki_label1'
             r = self.api_post('/rest/p/test/admin/install_tool/', **data)
             assert_equals(r.json['success'], False)
-            assert_equals(r.json['info'], 'Incorrect tool name, or limit is reached.')
+            assert_equals(r.json['info'],
+                          'Incorrect tool name, or limit is reached.')
 
     def test_unauthorized(self):
         r = self.api_get('/rest/p/test/')
@@ -1177,9 +1283,9 @@ class TestRestInstallTool(TestRestApiBase):
             'mount_label': 'wiki_label1'
         }
         r = self.app.post('/rest/p/test/admin/install_tool/',
-                             extra_environ={'username': '*anonymous'},
-                             status=401,
-                             params=data)
+                          extra_environ={'username': '*anonymous'},
+                          status=401,
+                          params=data)
         assert_equals(r.status, '401 Unauthorized')
 
     def test_order(self):
@@ -1192,38 +1298,40 @@ class TestRestInstallTool(TestRestApiBase):
                 elif 'sub' in mount:
                     labels.append(mount['sub'].name)
             return labels
-        assert_equals(get_labels(), ['Admin', 'Search', 'Activity', 'A Subproject'])
+        assert_equals(get_labels(),
+                      ['Admin', 'Search', 'Activity', 'A Subproject'])
 
         data = [
-                {
-                    'tool': 'tickets',
-                    'mount_point': 'ticketsmount1',
-                    'mount_label': 'ta',
-                },
-                {
-                    'tool': 'tickets',
-                    'mount_point': 'ticketsmount2',
-                    'mount_label': 'tc',
-                    'order': 'last'
-                },
-                {
-                    'tool': 'tickets',
-                    'mount_point': 'ticketsmount3',
-                    'mount_label': 'tb',
-                    'order': 'alpha_tool'
-                },
-                {
-                    'tool': 'tickets',
-                    'mount_point': 'ticketsmount4',
-                    'mount_label': 't1',
-                    'order': 'first'
-                },
-            ]
+            {
+                'tool': 'tickets',
+                'mount_point': 'ticketsmount1',
+                'mount_label': 'ta',
+            },
+            {
+                'tool': 'tickets',
+                'mount_point': 'ticketsmount2',
+                'mount_label': 'tc',
+                'order': 'last'
+            },
+            {
+                'tool': 'tickets',
+                'mount_point': 'ticketsmount3',
+                'mount_label': 'tb',
+                'order': 'alpha_tool'
+            },
+            {
+                'tool': 'tickets',
+                'mount_point': 'ticketsmount4',
+                'mount_label': 't1',
+                'order': 'first'
+            },
+        ]
         for datum in data:
             r = self.api_post('/rest/p/test/admin/install_tool/', **datum)
             assert_equals(r.json['success'], True)
             assert_equals(r.json['info'],
-                         'Tool %s with mount_point %s and mount_label %s was created.'
-                         % (datum['tool'], datum['mount_point'], datum['mount_label']))
+                          'Tool %s with mount_point %s and mount_label %s was created.'
+                          % (datum['tool'], datum['mount_point'], datum['mount_label']))
 
-        assert_equals(get_labels(), ['t1', 'Admin', 'Search', 'Activity', 'A Subproject', 'ta', 'tb', 'tc'])
+        assert_equals(
+            get_labels(), ['t1', 'Admin', 'Search', 'Activity', 'A Subproject', 'ta', 'tb', 'tc'])


[10/36] PEP8 cleanup

Posted by jo...@apache.org.
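
Most of the hunks in this series re-wrap continuation lines so that wrapped
arguments are either aligned with the opening delimiter or given a uniform
hanging indent, which is what pycodestyle's E12x continuation-line checks
expect; most of the remaining changes are the blank-line and operator-spacing
fixes from the E2xx/E3xx checks. A minimal sketch of the wrapping styles
involved (illustrative only, not code from the Allura tree):

# Old style: the continuation is under-indented relative to the opening
# parenthesis, which the style checker flags as E128.
def log_tool(counter, tool_name, mount_point, shortname):
    print('%s tickets from the %s/%s tool on project "%s"' %
        (counter, tool_name, mount_point, shortname))

# Cleaned-up style 1: align the continuation with the opening delimiter,
# as most of the "+" lines in these hunks do.
def log_tool_aligned(counter, tool_name, mount_point, shortname):
    print('%s tickets from the %s/%s tool on project "%s"' %
          (counter, tool_name, mount_point, shortname))

# Cleaned-up style 2: when aligning would push the code too far right,
# break straight after the opening parenthesis and use a hanging indent.
def log_tool_hanging(counter, tool_name, mount_point, shortname):
    print(
        '%s tickets from the %s/%s tool on project "%s"' %
        (counter, tool_name, mount_point, shortname))

All three functions behave identically; only the second and third satisfy the
continuation-line checks, so the hunks that follow are formatting-only.
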
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/google/tracker.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/google/tracker.py b/ForgeImporters/forgeimporters/google/tracker.py
index 48bfbfe..ab131a8 100644
--- a/ForgeImporters/forgeimporters/google/tracker.py
+++ b/ForgeImporters/forgeimporters/google/tracker.py
@@ -26,15 +26,15 @@ from ming.orm import session, ThreadLocalORMSession
 import dateutil.parser
 
 from tg import (
-        expose,
-        flash,
-        redirect,
-        validate,
-        )
+    expose,
+    flash,
+    redirect,
+    validate,
+)
 from tg.decorators import (
-        with_trailing_slash,
-        without_trailing_slash,
-        )
+    with_trailing_slash,
+    without_trailing_slash,
+)
 
 from allura.controllers import BaseController
 from allura.lib import helpers as h
@@ -47,9 +47,9 @@ from forgetracker import model as TM
 from forgeimporters.google import GoogleCodeProjectExtractor
 from forgeimporters.google import GoogleCodeProjectNameValidator
 from forgeimporters.base import (
-        ToolImporter,
-        ToolImportForm,
-        )
+    ToolImporter,
+    ToolImportForm,
+)
 
 
 class GoogleCodeTrackerImportForm(ToolImportForm):
@@ -57,6 +57,7 @@ class GoogleCodeTrackerImportForm(ToolImportForm):
 
 
 class GoogleCodeTrackerImportController(BaseController):
+
     def __init__(self):
         self.importer = GoogleCodeTrackerImporter()
 
@@ -68,7 +69,7 @@ class GoogleCodeTrackerImportController(BaseController):
     @expose('jinja:forgeimporters.google:templates/tracker/index.html')
     def index(self, **kw):
         return dict(importer=self.importer,
-                target_app=self.target_app)
+                    target_app=self.target_app)
 
     @without_trailing_slash
     @expose()
@@ -77,14 +78,15 @@ class GoogleCodeTrackerImportController(BaseController):
     def create(self, gc_project_name, mount_point, mount_label, **kw):
         if self.importer.enforce_limit(c.project):
             self.importer.post(
-                    project_name=gc_project_name,
-                    mount_point=mount_point,
-                    mount_label=mount_label,
-                    )
+                project_name=gc_project_name,
+                mount_point=mount_point,
+                mount_label=mount_label,
+            )
             flash('Ticket import has begun. Your new tracker will be available '
-                    'when the import is complete.')
+                  'when the import is complete.')
         else:
-            flash('There are too many imports pending at this time.  Please wait and try again.', 'error')
+            flash(
+                'There are too many imports pending at this time.  Please wait and try again.', 'error')
         redirect(c.project.url() + 'admin/')
 
 
@@ -96,10 +98,10 @@ class GoogleCodeTrackerImporter(ToolImporter):
     tool_description = 'Import your public tickets from Google Code'
 
     field_types = defaultdict(lambda: 'string',
-            milestone='milestone',
-            priority='select',
-            type='select',
-        )
+                              milestone='milestone',
+                              priority='select',
+                              type='select',
+                              )
 
     def __init__(self, *args, **kwargs):
         super(GoogleCodeTrackerImporter, self).__init__(*args, **kwargs)
@@ -108,17 +110,17 @@ class GoogleCodeTrackerImporter(ToolImporter):
         self.max_ticket_num = 0
 
     def import_tool(self, project, user, project_name, mount_point=None,
-            mount_label=None, **kw):
+                    mount_label=None, **kw):
         import_id_converter = ImportIdConverter.get()
         app = project.install_app('tickets', mount_point, mount_label,
-                EnableVoting=True,
-                open_status_names='New Accepted Started',
-                closed_status_names='Fixed Verified Invalid Duplicate WontFix Done',
-                import_id={
-                        'source': self.source,
-                        'project_name': project_name,
-                    },
-            )
+                                  EnableVoting=True,
+                                  open_status_names='New Accepted Started',
+                                  closed_status_names='Fixed Verified Invalid Duplicate WontFix Done',
+                                  import_id={
+                                      'source': self.source,
+                                      'project_name': project_name,
+                                  },
+                                  )
         ThreadLocalORMSession.flush_all()
         try:
             M.session.artifact_orm_session._get().skip_mod_date = True
@@ -139,14 +141,14 @@ class GoogleCodeTrackerImporter(ToolImporter):
                 app.globals.last_ticket_num = self.max_ticket_num
                 ThreadLocalORMSession.flush_all()
             M.AuditLog.log(
-                    'import tool %s from %s on %s' % (
-                            app.config.options.mount_point,
-                            project_name, self.source,
-                        ),
-                    project=project,
-                    user=user,
-                    url=app.url,
-                )
+                'import tool %s from %s on %s' % (
+                    app.config.options.mount_point,
+                    project_name, self.source,
+                ),
+                project=project,
+                user=user,
+                url=app.url,
+            )
             g.post_event('project_updated')
             app.globals.invalidate_bin_counts()
             return app
@@ -159,17 +161,18 @@ class GoogleCodeTrackerImporter(ToolImporter):
     def custom_field(self, name):
         if name not in self.custom_fields:
             self.custom_fields[name] = {
-                    'type': self.field_types[name.lower()],
-                    'label': name,
-                    'name': u'_%s' % name.lower(),
-                    'options': set(),
-                }
+                'type': self.field_types[name.lower()],
+                'label': name,
+                'name': u'_%s' % name.lower(),
+                'options': set(),
+            }
         return self.custom_fields[name]
 
     def process_fields(self, ticket, issue):
         ticket.summary = issue.get_issue_summary()
         ticket.status = issue.get_issue_status()
-        ticket.created_date = dateutil.parser.parse(issue.get_issue_created_date())
+        ticket.created_date = dateutil.parser.parse(
+            issue.get_issue_created_date())
         ticket.mod_date = dateutil.parser.parse(issue.get_issue_mod_date())
         ticket.votes_up = issue.get_issue_stars()
         ticket.votes = issue.get_issue_stars()
@@ -179,14 +182,14 @@ class GoogleCodeTrackerImporter(ToolImporter):
         else:
             owner_line = ''
         ticket.description = (
-                u'*Originally created by:* {creator}\n'
-                u'{owner}'
-                u'\n'
-                u'{body}').format(
-                    creator=issue.get_issue_creator(),
-                    owner=owner_line,
-                    body=issue.get_issue_description(),
-                )
+            u'*Originally created by:* {creator}\n'
+            u'{owner}'
+            u'\n'
+            u'{body}').format(
+            creator=issue.get_issue_creator(),
+            owner=owner_line,
+            body=issue.get_issue_description(),
+        )
         ticket.add_multiple_attachments(issue.get_issue_attachments())
 
     def process_labels(self, ticket, issue):
@@ -203,15 +206,16 @@ class GoogleCodeTrackerImporter(ToolImporter):
             else:
                 labels.add(label)
         ticket.labels = list(labels)
-        ticket.custom_fields = {n: u', '.join(sorted(v)) for n,v in custom_fields.iteritems()}
+        ticket.custom_fields = {n: u', '.join(sorted(v))
+                                for n, v in custom_fields.iteritems()}
 
     def process_comments(self, ticket, issue):
         for comment in issue.iter_comments():
             p = ticket.discussion_thread.add_post(
-                    text = comment.annotated_text,
-                    ignore_security = True,
-                    timestamp = dateutil.parser.parse(comment.created_date),
-                )
+                text=comment.annotated_text,
+                ignore_security=True,
+                timestamp=dateutil.parser.parse(comment.created_date),
+            )
             p.add_multiple_attachments(comment.attachments)
 
     def postprocess_custom_fields(self):
@@ -219,10 +223,10 @@ class GoogleCodeTrackerImporter(ToolImporter):
         for name, field in self.custom_fields.iteritems():
             if field['name'] == '_milestone':
                 field['milestones'] = [{
-                        'name': milestone,
-                        'due_date': None,
-                        'complete': milestone not in self.open_milestones,
-                    } for milestone in sorted(field['options'])]
+                    'name': milestone,
+                    'due_date': None,
+                    'complete': milestone not in self.open_milestones,
+                } for milestone in sorted(field['options'])]
                 field['options'] = ''
             elif field['type'] == 'select':
                 field['options'] = ' '.join(field['options'])

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/forge/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/forge/__init__.py b/ForgeImporters/forgeimporters/tests/forge/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/tests/forge/__init__.py
+++ b/ForgeImporters/forgeimporters/tests/forge/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/forge/test_tracker.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/forge/test_tracker.py b/ForgeImporters/forgeimporters/tests/forge/test_tracker.py
index 85f4650..152e5bc 100644
--- a/ForgeImporters/forgeimporters/tests/forge/test_tracker.py
+++ b/ForgeImporters/forgeimporters/tests/forge/test_tracker.py
@@ -30,6 +30,7 @@ from forgeimporters.forge import tracker
 
 
 class TestTrackerImporter(TestCase):
+
     @mock.patch.object(tracker.h, 'make_app_admin_only')
     @mock.patch.object(tracker, 'g')
     @mock.patch.object(tracker, 'c')
@@ -40,156 +41,158 @@ class TestTrackerImporter(TestCase):
     def test_import_tool(self, TM, M, session, tlos, c, g, mao):
         importer = tracker.ForgeTrackerImporter()
         importer._load_json = mock.Mock(return_value={
-                'tracker_config': {
-                        '_id': 'orig_id',
-                        'options': {
-                                'foo': 'bar',
-                            },
-                    },
-                'open_status_names': 'open statuses',
-                'closed_status_names': 'closed statuses',
-                'custom_fields': 'fields',
-                'saved_bins': 'bins',
-                'tickets': [
-                        {
-                                'reported_by': 'rb1',
-                                'assigned_to': 'at1',
-                                'ticket_num': 1,
-                                'description': 'd1',
-                                'created_date': '2013-09-01',
-                                'mod_date': '2013-09-02',
-                                'summary': 's1',
-                                'custom_fields': 'cf1',
-                                'status': 'st1',
-                                'labels': 'l1',
-                                'votes_down': 1,
-                                'votes_up': 2,
-                                'private': False,
-                                'discussion_thread': {'posts': 'comments1'},
-                            },
-                        {
-                                'reported_by': 'rb2',
-                                'assigned_to': 'at2',
-                                'ticket_num': 100,
-                                'description': 'd2',
-                                'created_date': '2013-09-03',
-                                'mod_date': '2013-09-04',
-                                'summary': 's2',
-                                'custom_fields': 'cf2',
-                                'status': 'st2',
-                                'labels': 'l2',
-                                'votes_down': 3,
-                                'votes_up': 5,
-                                'private': True,
-                                'discussion_thread': {'posts': 'comments2'},
-                            },
-                    ],
-            })
-        anonymous = mock.Mock(_id=None, is_anonymous=lambda:True)
-        reporter = mock.Mock(is_anonymous=lambda:False)
-        author = mock.Mock(is_anonymous=lambda:False)
+            'tracker_config': {
+                '_id': 'orig_id',
+                'options': {
+                    'foo': 'bar',
+                },
+            },
+            'open_status_names': 'open statuses',
+            'closed_status_names': 'closed statuses',
+            'custom_fields': 'fields',
+            'saved_bins': 'bins',
+            'tickets': [
+                {
+                    'reported_by': 'rb1',
+                    'assigned_to': 'at1',
+                    'ticket_num': 1,
+                    'description': 'd1',
+                    'created_date': '2013-09-01',
+                    'mod_date': '2013-09-02',
+                    'summary': 's1',
+                    'custom_fields': 'cf1',
+                    'status': 'st1',
+                    'labels': 'l1',
+                    'votes_down': 1,
+                    'votes_up': 2,
+                    'private': False,
+                    'discussion_thread': {'posts': 'comments1'},
+                },
+                {
+                    'reported_by': 'rb2',
+                    'assigned_to': 'at2',
+                    'ticket_num': 100,
+                    'description': 'd2',
+                    'created_date': '2013-09-03',
+                    'mod_date': '2013-09-04',
+                    'summary': 's2',
+                    'custom_fields': 'cf2',
+                    'status': 'st2',
+                    'labels': 'l2',
+                    'votes_down': 3,
+                    'votes_up': 5,
+                    'private': True,
+                    'discussion_thread': {'posts': 'comments2'},
+                },
+            ],
+        })
+        anonymous = mock.Mock(_id=None, is_anonymous=lambda: True)
+        reporter = mock.Mock(is_anonymous=lambda: False)
+        author = mock.Mock(is_anonymous=lambda: False)
         importer.get_user = mock.Mock(side_effect=[
-                reporter, author,
-                anonymous, anonymous,
-            ])
-        importer.annotate = mock.Mock(side_effect=['ad1', 'aad1', 'ad2', 'aad2'])
+            reporter, author,
+            anonymous, anonymous,
+        ])
+        importer.annotate = mock.Mock(
+            side_effect=['ad1', 'aad1', 'ad2', 'aad2'])
         importer.process_comments = mock.Mock()
         importer.process_bins = mock.Mock()
         project, user = mock.Mock(), mock.Mock()
         app = project.install_app.return_value
         app.config.options.mount_point = 'mount_point'
         app.config.options.import_id = {
-                'source': 'Allura',
-                'app_config_id': 'orig_id',
-            }
+            'source': 'Allura',
+            'app_config_id': 'orig_id',
+        }
         app.config.options.get = lambda *a: getattr(app.config.options, *a)
         app.url = 'foo'
         tickets = TM.Ticket.side_effect = [mock.Mock(), mock.Mock()]
 
         importer.import_tool(project, user,
-                mount_point='mount_point', mount_label='mount_label')
+                             mount_point='mount_point', mount_label='mount_label')
 
-        project.install_app.assert_called_once_with('tickets', 'mount_point', 'mount_label',
-                open_status_names='open statuses',
-                closed_status_names='closed statuses',
-                import_id={
-                        'source': 'Allura',
-                        'app_config_id': 'orig_id',
-                    },
-                foo='bar',
-            )
+        project.install_app.assert_called_once_with(
+            'tickets', 'mount_point', 'mount_label',
+            open_status_names='open statuses',
+            closed_status_names='closed statuses',
+            import_id={
+                'source': 'Allura',
+                'app_config_id': 'orig_id',
+            },
+            foo='bar',
+        )
         self.assertEqual(importer.annotate.call_args_list, [
-                mock.call('d1', author, 'at1', label=' owned'),
-                mock.call('ad1', reporter, 'rb1', label=' created'),
-                mock.call('d2', anonymous, 'at2', label=' owned'),
-                mock.call('ad2', anonymous, 'rb2', label=' created'),
-            ])
+            mock.call('d1', author, 'at1', label=' owned'),
+            mock.call('ad1', reporter, 'rb1', label=' created'),
+            mock.call('d2', anonymous, 'at2', label=' owned'),
+            mock.call('ad2', anonymous, 'rb2', label=' created'),
+        ])
         self.assertEqual(TM.Ticket.call_args_list, [
-                mock.call(
-                        app_config_id=app.config._id,
-                        import_id={
-                                'source': 'Allura',
-                                'app_config_id': 'orig_id',
-                                'source_id': 1,
-                            },
-                        description='aad1',
-                        created_date=datetime(2013, 9, 1),
-                        mod_date=datetime(2013, 9, 2),
-                        ticket_num=1,
-                        summary='s1',
-                        custom_fields='cf1',
-                        status='st1',
-                        labels='l1',
-                        votes_down=1,
-                        votes_up=2,
-                        votes=1,
-                        assigned_to_id=author._id,
-                    ),
-                mock.call(
-                        app_config_id=app.config._id,
-                        import_id={
-                                'source': 'Allura',
-                                'app_config_id': 'orig_id',
-                                'source_id': 100,
-                            },
-                        description='aad2',
-                        created_date=datetime(2013, 9, 3),
-                        mod_date=datetime(2013, 9, 4),
-                        ticket_num=100,
-                        summary='s2',
-                        custom_fields='cf2',
-                        status='st2',
-                        labels='l2',
-                        votes_down=3,
-                        votes_up=5,
-                        votes=2,
-                        assigned_to_id=None,
-                    ),
-            ])
+            mock.call(
+                app_config_id=app.config._id,
+                import_id={
+                    'source': 'Allura',
+                    'app_config_id': 'orig_id',
+                    'source_id': 1,
+                },
+                description='aad1',
+                created_date=datetime(2013, 9, 1),
+                mod_date=datetime(2013, 9, 2),
+                ticket_num=1,
+                summary='s1',
+                custom_fields='cf1',
+                status='st1',
+                labels='l1',
+                votes_down=1,
+                votes_up=2,
+                votes=1,
+                assigned_to_id=author._id,
+            ),
+            mock.call(
+                app_config_id=app.config._id,
+                import_id={
+                    'source': 'Allura',
+                    'app_config_id': 'orig_id',
+                    'source_id': 100,
+                },
+                description='aad2',
+                created_date=datetime(2013, 9, 3),
+                mod_date=datetime(2013, 9, 4),
+                ticket_num=100,
+                summary='s2',
+                custom_fields='cf2',
+                status='st2',
+                labels='l2',
+                votes_down=3,
+                votes_up=5,
+                votes=2,
+                assigned_to_id=None,
+            ),
+        ])
         self.assertEqual(tickets[0].private, False)
         self.assertEqual(tickets[1].private, True)
         self.assertEqual(importer.process_comments.call_args_list, [
-                mock.call(tickets[0], 'comments1'),
-                mock.call(tickets[1], 'comments2'),
-            ])
+            mock.call(tickets[0], 'comments1'),
+            mock.call(tickets[1], 'comments2'),
+        ])
         self.assertEqual(tlos.flush_all.call_args_list, [
-                mock.call(),
-                mock.call(),
-            ])
+            mock.call(),
+            mock.call(),
+        ])
         self.assertEqual(session.return_value.flush.call_args_list, [
-                mock.call(tickets[0]),
-                mock.call(tickets[1]),
-            ])
+            mock.call(tickets[0]),
+            mock.call(tickets[1]),
+        ])
         self.assertEqual(session.return_value.expunge.call_args_list, [
-                mock.call(tickets[0]),
-                mock.call(tickets[1]),
-            ])
+            mock.call(tickets[0]),
+            mock.call(tickets[1]),
+        ])
         self.assertEqual(app.globals.custom_fields, 'fields')
         importer.process_bins.assert_called_once_with(app, 'bins')
         self.assertEqual(app.globals.last_ticket_num, 100)
         M.AuditLog.log.assert_called_once_with(
-                'import tool mount_point from exported Allura JSON',
-                project=project, user=user, url='foo')
+            'import tool mount_point from exported Allura JSON',
+            project=project, user=user, url='foo')
         g.post_event.assert_called_once_with('project_updated')
         app.globals.invalidate_bin_counts.assert_called_once_with()
 
@@ -201,17 +204,19 @@ class TestTrackerImporter(TestCase):
         project = mock.Mock()
         user = mock.Mock()
         tracker_json = {
-                'tracker_config': {'_id': 'orig_id', 'options': {}},
-                'open_status_names': 'os',
-                'closed_status_names': 'cs',
-            }
+            'tracker_config': {'_id': 'orig_id', 'options': {}},
+            'open_status_names': 'os',
+            'closed_status_names': 'cs',
+        }
 
         importer = tracker.ForgeTrackerImporter()
         importer._load_json = mock.Mock(return_value=tracker_json)
-        self.assertRaises(ValueError, importer.import_tool, project, user, project_name='project_name',
-                mount_point='mount_point', mount_label='mount_label')
+        self.assertRaises(
+            ValueError, importer.import_tool, project, user, project_name='project_name',
+            mount_point='mount_point', mount_label='mount_label')
 
-        h.make_app_admin_only.assert_called_once_with(project.install_app.return_value)
+        h.make_app_admin_only.assert_called_once_with(
+            project.install_app.return_value)
 
     @mock.patch.object(tracker, 'M')
     def test_get_user(self, M):
@@ -235,7 +240,8 @@ class TestTrackerImporter(TestCase):
         user.is_anonymous.return_value = False
         self.assertEqual(importer.annotate('foo', user, 'bar'), 'foo')
         user.is_anonymous.return_value = True
-        self.assertEqual(importer.annotate('foo', user, 'bar'), '*Originally by:* bar\n\nfoo')
+        self.assertEqual(importer.annotate('foo', user, 'bar'),
+                         '*Originally by:* bar\n\nfoo')
         self.assertEqual(importer.annotate('foo', user, 'nobody'), 'foo')
         self.assertEqual(importer.annotate('foo', user, None), 'foo')
 
@@ -251,41 +257,44 @@ class TestTrackerImporter(TestCase):
         ama = add_post.return_value.add_multiple_attachments
         File.side_effect = ['f1', 'f2', 'f3', 'f4']
         comments = [
-                {
-                    'author': 'a1',
-                    'text': 't1',
-                    'timestamp': '2013-09-01',
-                    'attachments': [{'url': 'u1'}, {'url': 'u2'}],
-                },
-                {
-                    'author': 'a2',
-                    'text': 't2',
-                    'timestamp': '2013-09-02',
-                    'attachments': [{'url': 'u3'}, {'url': 'u4'}],
-                },
-            ]
+            {
+                'author': 'a1',
+                'text': 't1',
+                'timestamp': '2013-09-01',
+                'attachments': [{'url': 'u1'}, {'url': 'u2'}],
+            },
+            {
+                'author': 'a2',
+                'text': 't2',
+                'timestamp': '2013-09-02',
+                'attachments': [{'url': 'u3'}, {'url': 'u4'}],
+            },
+        ]
 
         importer.process_comments(ticket, comments)
 
-        self.assertEqual(importer.get_user.call_args_list, [mock.call('a1'), mock.call('a2')])
+        self.assertEqual(importer.get_user.call_args_list,
+                         [mock.call('a1'), mock.call('a2')])
         self.assertEqual(importer.annotate.call_args_list, [
-                mock.call('t1', author, 'a1'),
-                mock.call('t2', author, 'a2'),
-            ])
+            mock.call('t1', author, 'a1'),
+            mock.call('t2', author, 'a2'),
+        ])
         self.assertEqual(add_post.call_args_list, [
-                mock.call(text='at1', ignore_security=True, timestamp=datetime(2013, 9, 1)),
-                mock.call(text='at2', ignore_security=True, timestamp=datetime(2013, 9, 2)),
-            ])
+            mock.call(text='at1', ignore_security=True,
+                      timestamp=datetime(2013, 9, 1)),
+            mock.call(text='at2', ignore_security=True,
+                      timestamp=datetime(2013, 9, 2)),
+        ])
         self.assertEqual(File.call_args_list, [
-                mock.call('u1'),
-                mock.call('u2'),
-                mock.call('u3'),
-                mock.call('u4'),
-            ])
+            mock.call('u1'),
+            mock.call('u2'),
+            mock.call('u3'),
+            mock.call('u4'),
+        ])
         self.assertEqual(ama.call_args_list, [
-                mock.call(['f1', 'f2']),
-                mock.call(['f3', 'f4']),
-            ])
+            mock.call(['f1', 'f2']),
+            mock.call(['f3', 'f4']),
+        ])
 
     @mock.patch.object(tracker, 'TM')
     def test_process_bins(self, TM):
@@ -295,17 +304,19 @@ class TestTrackerImporter(TestCase):
         importer.process_bins(app, [{'_id': 1, 'b': 1}, {'b': 2}])
         TM.Bin.query.remove.assert_called_once_with({'app_config_id': 1})
         self.assertEqual(TM.Bin.call_args_list, [
-                mock.call(app_config_id=1, b=1),
-                mock.call(app_config_id=1, b=2),
-            ])
+            mock.call(app_config_id=1, b=1),
+            mock.call(app_config_id=1, b=2),
+        ])
 
 
 class TestForgeTrackerImportController(TestController, TestCase):
+
     def setUp(self):
         """Mount Allura importer on the Tracker admin controller"""
         super(TestForgeTrackerImportController, self).setUp()
         from forgetracker.tracker_main import TrackerAdminController
-        TrackerAdminController._importer = tracker.ForgeTrackerImportController()
+        TrackerAdminController._importer = tracker.ForgeTrackerImportController(
+        )
 
     @with_tracker
     def test_index(self):
@@ -320,16 +331,18 @@ class TestForgeTrackerImportController(TestController, TestCase):
     def test_create(self, import_tool, sui):
         project = M.Project.query.get(shortname='test')
         params = {
-                'tickets_json': webtest.Upload('tickets.json', '{"key": "val"}'),
-                'mount_label': 'mylabel',
-                'mount_point': 'mymount',
-            }
+            'tickets_json': webtest.Upload('tickets.json', '{"key": "val"}'),
+            'mount_label': 'mylabel',
+            'mount_point': 'mymount',
+        }
         r = self.app.post('/p/test/admin/bugs/_importer/create', params,
-                status=302)
+                          status=302)
         self.assertEqual(r.location, 'http://localhost/p/test/admin/')
         sui.assert_called_once_with(project, 'tickets.json', '{"key": "val"}')
-        self.assertEqual(u'mymount', import_tool.post.call_args[1]['mount_point'])
-        self.assertEqual(u'mylabel', import_tool.post.call_args[1]['mount_label'])
+        self.assertEqual(
+            u'mymount', import_tool.post.call_args[1]['mount_point'])
+        self.assertEqual(
+            u'mylabel', import_tool.post.call_args[1]['mount_label'])
 
     @with_tracker
     @mock.patch('forgeimporters.forge.tracker.save_importer_upload')
@@ -339,11 +352,11 @@ class TestForgeTrackerImportController(TestController, TestCase):
         project.set_tool_data('ForgeTrackerImporter', pending=1)
         ThreadLocalORMSession.flush_all()
         params = {
-                'tickets_json': webtest.Upload('tickets.json', '{"key": "val"}'),
-                'mount_label': 'mylabel',
-                'mount_point': 'mymount',
-            }
+            'tickets_json': webtest.Upload('tickets.json', '{"key": "val"}'),
+            'mount_label': 'mylabel',
+            'mount_point': 'mymount',
+        }
         r = self.app.post('/p/test/admin/bugs/_importer/create', params,
-                status=302).follow()
+                          status=302).follow()
         self.assertIn('Please wait and try again', r)
         self.assertEqual(import_tool.post.call_count, 0)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/github/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/github/__init__.py b/ForgeImporters/forgeimporters/tests/github/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/tests/github/__init__.py
+++ b/ForgeImporters/forgeimporters/tests/github/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/github/functional/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/github/functional/__init__.py b/ForgeImporters/forgeimporters/tests/github/functional/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/tests/github/functional/__init__.py
+++ b/ForgeImporters/forgeimporters/tests/github/functional/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/github/functional/test_github.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/github/functional/test_github.py b/ForgeImporters/forgeimporters/tests/github/functional/test_github.py
index a9c5052..6577e4d 100644
--- a/ForgeImporters/forgeimporters/tests/github/functional/test_github.py
+++ b/ForgeImporters/forgeimporters/tests/github/functional/test_github.py
@@ -23,6 +23,7 @@ from unittest import TestCase
 from allura.tests import TestController
 from allura import model as M
 
+
 class TestGitHubImportController(TestController, TestCase):
 
     def test_index(self):
@@ -34,11 +35,13 @@ class TestGitHubImportController(TestController, TestCase):
         assert '<input name="tool_option" value="import_history" type="checkbox" checked="checked"/>' in r
 
     def test_login_overlay(self):
-        r = self.app.get('/p/import_project/github/', extra_environ=dict(username='*anonymous'))
+        r = self.app.get('/p/import_project/github/',
+                         extra_environ=dict(username='*anonymous'))
         self.assertIn('GitHub Project Importer', r)
         self.assertIn('Login Required', r)
 
-        r = self.app.post('/p/import_project/github/process', extra_environ=dict(username='*anonymous'), status=302)
+        r = self.app.post('/p/import_project/github/process',
+                          extra_environ=dict(username='*anonymous'), status=302)
         self.assertIn('/auth/', r.location)
 
 
@@ -65,7 +68,8 @@ class TestGitHubOAuth(TestController):
         assert_equal(r.location, redirect)
         session.__setitem__.assert_has_calls([
             call('github.oauth.state', 'state'),
-            call('github.oauth.redirect', 'http://localhost/p/import_project/github/')
+            call('github.oauth.redirect',
+                 'http://localhost/p/import_project/github/')
         ])
         session.save.assert_called_once()
 
@@ -78,4 +82,5 @@ class TestGitHubOAuth(TestController):
         assert_equal(user.get_tool_data('GitHubProjectImport', 'token'), 'abc')
 
         r = self.app.get('/p/import_project/github/')
-        assert_equal(r.status_int, 200)  # token in user data, so oauth isn't triggered
+        # token in user data, so oauth isn't triggered
+        assert_equal(r.status_int, 200)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/github/test_extractor.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/github/test_extractor.py b/ForgeImporters/forgeimporters/tests/github/test_extractor.py
index e6c96e3..a8ae0a5 100644
--- a/ForgeImporters/forgeimporters/tests/github/test_extractor.py
+++ b/ForgeImporters/forgeimporters/tests/github/test_extractor.py
@@ -93,11 +93,11 @@ class TestGitHubProjectExtractor(TestCase):
         self.assertIsNone(self.extractor.get_next_page_url(''))
         link = '<https://api.github.com/repositories/8560576/issues?state=open&page=2>; rel="next", <https://api.github.com/repositories/8560576/issues?state=open&page=10>; rel="last"'
         self.assertEqual(self.extractor.get_next_page_url(link),
-                'https://api.github.com/repositories/8560576/issues?state=open&page=2')
+                         'https://api.github.com/repositories/8560576/issues?state=open&page=2')
 
         link = '<https://api.github.com/repositories/8560576/issues?state=open&page=2>; rel="next"'
         self.assertEqual(self.extractor.get_next_page_url(link),
-                'https://api.github.com/repositories/8560576/issues?state=open&page=2')
+                         'https://api.github.com/repositories/8560576/issues?state=open&page=2')
 
         link = '<https://api.github.com/repositories/8560576/issues?state=open&page=1>; rel="prev"'
         self.assertIsNone(self.extractor.get_next_page_url(link))
@@ -110,7 +110,7 @@ class TestGitHubProjectExtractor(TestCase):
 
     def test_iter_issues(self):
         issues = list(self.extractor.iter_issues())
-        all_issues = zip((1,2), self.CLOSED_ISSUES_LIST)
+        all_issues = zip((1, 2), self.CLOSED_ISSUES_LIST)
         all_issues += zip((3, 4, 5), self.OPENED_ISSUES_LIST)
         all_issues += zip((6, 7, 8), self.OPENED_ISSUES_LIST_PAGE2)
         self.assertEqual(issues, all_issues)
@@ -118,18 +118,21 @@ class TestGitHubProjectExtractor(TestCase):
     def test_iter_comments(self):
         mock_issue = {'comments_url': '/issues/1/comments'}
         comments = list(self.extractor.iter_comments(mock_issue))
-        self.assertEqual(comments, self.ISSUE_COMMENTS + self.ISSUE_COMMENTS_PAGE2)
+        self.assertEqual(comments, self.ISSUE_COMMENTS +
+                         self.ISSUE_COMMENTS_PAGE2)
 
     def test_iter_events(self):
         mock_issue = {'events_url': '/issues/1/events'}
         events = list(self.extractor.iter_events(mock_issue))
-        self.assertEqual(events, self.ISSUE_EVENTS + self.ISSUE_EVENTS_PAGE2[:1])
+        self.assertEqual(events, self.ISSUE_EVENTS +
+                         self.ISSUE_EVENTS_PAGE2[:1])
 
     def test_has_wiki(self):
         assert self.extractor.has_wiki()
 
     def test_get_wiki_url(self):
-        self.assertEqual(self.extractor.get_page_url('wiki_url'), 'https://github.com/test_project.wiki')
+        self.assertEqual(self.extractor.get_page_url('wiki_url'),
+                         'https://github.com/test_project.wiki')
 
     @patch('forgeimporters.base.h.urlopen')
     def test_urlopen(self, urlopen):
@@ -173,7 +176,9 @@ class TestGitHubProjectExtractor(TestCase):
             'Rate limit exceeded (10 requests/hour). '
             'Sleeping until 2013-10-25 09:32:02 UTC'
         )
-        sleep.reset_mock(); urlopen.reset_mock(); log.warn.reset_mock()
+        sleep.reset_mock()
+        urlopen.reset_mock()
+        log.warn.reset_mock()
         response_ok = StringIO('{}')
         response_ok.info = lambda: {}
         urlopen.side_effect = [response_ok]
@@ -192,6 +197,7 @@ class TestGitHubProjectExtractor(TestCase):
             'X-RateLimit-Remaining': '0',
             'X-RateLimit-Reset': '1382693522',
         }
+
         def urlopen_side_effect(*a, **kw):
             mock_resp = StringIO('{}')
             mock_resp.info = lambda: {}

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/github/test_tracker.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/github/test_tracker.py b/ForgeImporters/forgeimporters/tests/github/test_tracker.py
index b32df0a..e02ac5a 100644
--- a/ForgeImporters/forgeimporters/tests/github/test_tracker.py
+++ b/ForgeImporters/forgeimporters/tests/github/test_tracker.py
@@ -25,6 +25,7 @@ from forgeimporters.github.utils import GitHubMarkdownConverter
 
 
 class TestTrackerImporter(TestCase):
+
     @mock.patch.object(tracker, 'g')
     @mock.patch.object(tracker, 'c')
     @mock.patch.object(tracker, 'ThreadLocalORMSession')
@@ -37,7 +38,7 @@ class TestTrackerImporter(TestCase):
         importer.process_fields = mock.Mock()
         importer.process_milestones = mock.Mock()
         importer.process_comments = mock.Mock()
-        importer.postprocess_milestones= mock.Mock()
+        importer.postprocess_milestones = mock.Mock()
         project, user = mock.Mock(), mock.Mock()
         app = project.install_app.return_value
         app.config.options.mount_point = 'mount_point'
@@ -45,25 +46,26 @@ class TestTrackerImporter(TestCase):
         gpe.iter_issues.return_value = [(50, mock.Mock()), (100, mock.Mock())]
 
         importer.import_tool(project, user, project_name='project_name',
-                mount_point='mount_point', mount_label='mount_label', user_name='me')
+                             mount_point='mount_point', mount_label='mount_label', user_name='me')
 
-        project.install_app.assert_called_once_with('tickets', 'mount_point', 'mount_label',
-                EnableVoting=False,
-                open_status_names='open',
-                closed_status_names='closed',
-                import_id={
-                    'source': 'GitHub',
-                    'project_name': 'me/project_name',
-                }
-            )
+        project.install_app.assert_called_once_with(
+            'tickets', 'mount_point', 'mount_label',
+            EnableVoting=False,
+            open_status_names='open',
+            closed_status_names='closed',
+            import_id={
+                'source': 'GitHub',
+                'project_name': 'me/project_name',
+            }
+        )
         gpe.iter_issues.assert_called_once()
         self.assertEqual(tlos.flush_all.call_args_list, [
-                mock.call(),
-                mock.call(),
-            ])
+            mock.call(),
+            mock.call(),
+        ])
         M.AuditLog.log.assert_called_once_with(
-                'import tool mount_point from me/project_name on GitHub',
-                project=project, user=user, url='foo')
+            'import tool mount_point from me/project_name on GitHub',
+            project=project, user=user, url='foo')
         g.post_event.assert_called_once_with('project_updated')
         app.globals.invalidate_bin_counts.assert_called_once_with()
 
@@ -80,21 +82,23 @@ class TestTrackerImporter(TestCase):
             'labels': [{'name': 'first'}, {'name': 'second'}],
         }
         importer = tracker.GitHubTrackerImporter()
-        importer.github_markdown_converter = GitHubMarkdownConverter('user', 'project')
+        importer.github_markdown_converter = GitHubMarkdownConverter(
+            'user', 'project')
         extractor = mock.Mock()
         extractor.urlopen().read.return_value = 'data'
         with mock.patch.object(tracker, 'datetime') as dt:
-            dt.strptime.side_effect = lambda s,f: s
+            dt.strptime.side_effect = lambda s, f: s
             importer.process_fields(extractor, ticket, issue)
             self.assertEqual(ticket.summary, 'title')
-            self.assertEqual(ticket.description, '*Originally created by:* [creator](https://github.com/creator)\n*Originally owned by:* [owner](https://github.com/owner)\n\nhello')
+            self.assertEqual(ticket.description,
+                             '*Originally created by:* [creator](https://github.com/creator)\n*Originally owned by:* [owner](https://github.com/owner)\n\nhello')
             self.assertEqual(ticket.status, 'New')
             self.assertEqual(ticket.created_date, 'created_at')
             self.assertEqual(ticket.mod_date, 'updated_at')
             self.assertEqual(dt.strptime.call_args_list, [
-                    mock.call('created_at', '%Y-%m-%dT%H:%M:%SZ'),
-                    mock.call('updated_at', '%Y-%m-%dT%H:%M:%SZ'),
-                ])
+                mock.call('created_at', '%Y-%m-%dT%H:%M:%SZ'),
+                mock.call('updated_at', '%Y-%m-%dT%H:%M:%SZ'),
+            ])
             self.assertEqual(ticket.labels, ['first', 'second'])
 
     @mock.patch.object(tracker, 'c')
@@ -102,40 +106,45 @@ class TestTrackerImporter(TestCase):
         importer = tracker.GitHubTrackerImporter()
         importer.open_milestones = set([
             ('first', datetime(day=23, month=4, year=2015)),
-            ('second',datetime(day=25, month=4, year=2015))
+            ('second', datetime(day=25, month=4, year=2015))
         ])
         milestones = importer.postprocess_milestones()
         self.assertItemsEqual(milestones, [
-                {
-                    'name': '_milestone',
-                    'type': 'milestone',
-                    'label': 'Milestone',
-                    'milestones': [
-                        {'name': 'first', 'due_date': u'2015-04-23', 'complete': False},
-                        {'name': 'second', 'due_date': u'2015-04-25', 'complete': False},
-                    ],
-                },
-            ])
+            {
+                'name': '_milestone',
+                'type': 'milestone',
+                'label': 'Milestone',
+                'milestones': [
+                        {'name': 'first', 'due_date':
+                            u'2015-04-23', 'complete': False},
+                    {'name': 'second', 'due_date':
+                     u'2015-04-25', 'complete': False},
+                ],
+            },
+        ])
 
     def test_get_attachments(self):
         importer = tracker.GitHubTrackerImporter()
         extractor = mock.Mock()
         extractor.urlopen().read.return_value = 'data'
         body = 'hello\n' \
-        '![cdbpzjc5ex4](https://f.cloud.github.com/assets/979771/1027411/a393ab5e-0e70-11e3-8a38-b93a3df904cf.jpg)\r\n' \
-        '![screensh0t](http://f.cl.ly/items/13453x43053r2G0d3x0v/Screen%20Shot%202012-04-28%20at%2010.48.17%20AM.png)'
+            '![cdbpzjc5ex4](https://f.cloud.github.com/assets/979771/1027411/a393ab5e-0e70-11e3-8a38-b93a3df904cf.jpg)\r\n' \
+            '![screensh0t](http://f.cl.ly/items/13453x43053r2G0d3x0v/Screen%20Shot%202012-04-28%20at%2010.48.17%20AM.png)'
         new_body, attachments = importer._get_attachments(extractor, body)
         self.assertEqual(new_body, 'hello\n')
         self.assertEqual(len(attachments), 2)
-        self.assertEqual(attachments[0].url, 'https://f.cloud.github.com/assets/979771/1027411/a393ab5e-0e70-11e3-8a38-b93a3df904cf.jpg')
-        self.assertEqual(attachments[1].url, 'http://f.cl.ly/items/13453x43053r2G0d3x0v/Screen%20Shot%202012-04-28%20at%2010.48.17%20AM.png')
+        self.assertEqual(
+            attachments[0].url, 'https://f.cloud.github.com/assets/979771/1027411/a393ab5e-0e70-11e3-8a38-b93a3df904cf.jpg')
+        self.assertEqual(
+            attachments[1].url, 'http://f.cl.ly/items/13453x43053r2G0d3x0v/Screen%20Shot%202012-04-28%20at%2010.48.17%20AM.png')
         self.assertEqual(attachments[0].file.read(), 'data')
         self.assertEqual(attachments[1].file.read(), 'data')
 
     def test_get_attachments_404(self):
         importer = tracker.GitHubTrackerImporter()
         extractor = mock.Mock()
-        extractor.urlopen.side_effect = HTTPError('url', 404, 'mock', None, None)
+        extractor.urlopen.side_effect = HTTPError(
+            'url', 404, 'mock', None, None)
         body = 'hello\n' \
             '![cdbpzjc5ex4](https://f.cloud.github.com/assets/979771/1027411/a393ab5e-0e70-11e3-8a38-b93a3df904cf.jpg)\r\n'
         new_body, attachments = importer._get_attachments(extractor, body)
@@ -147,20 +156,21 @@ class TestTrackerImporter(TestCase):
         extractor = mock.Mock()
         issue = {'comments_url': '/comments'}
         extractor.iter_comments.return_value = [
-                {
-                    'body': 'hello',
-                    'created_at': '2013-08-26T16:57:53Z',
-                    'user': {'login': 'me'},
-                }
-            ]
+            {
+                'body': 'hello',
+                'created_at': '2013-08-26T16:57:53Z',
+                'user': {'login': 'me'},
+            }
+        ]
         importer = tracker.GitHubTrackerImporter()
-        importer.github_markdown_converter = GitHubMarkdownConverter('user', 'project')
+        importer.github_markdown_converter = GitHubMarkdownConverter(
+            'user', 'project')
         importer.process_comments(extractor, ticket, issue)
         self.assertEqual(ticket.discussion_thread.add_post.call_args_list[0], mock.call(
-                text='*Originally posted by:* [me](https://github.com/me)\n\nhello',
-                timestamp=datetime(2013, 8, 26, 16, 57, 53),
-                ignore_security=True,
-            ))
+            text='*Originally posted by:* [me](https://github.com/me)\n\nhello',
+            timestamp=datetime(2013, 8, 26, 16, 57, 53),
+            ignore_security=True,
+        ))
 
     def test_process_events(self):
         ticket = mock.Mock()
@@ -229,11 +239,12 @@ Hello
             'labels': [{'name': 'first'}, {'name': 'second'}],
         }
         importer = tracker.GitHubTrackerImporter()
-        importer.github_markdown_converter = GitHubMarkdownConverter('user', 'project')
+        importer.github_markdown_converter = GitHubMarkdownConverter(
+            'user', 'project')
         extractor = mock.Mock()
         extractor.urlopen().read.return_value = 'data'
         with mock.patch.object(tracker, 'datetime') as dt:
-            dt.strptime.side_effect = lambda s,f: s
+            dt.strptime.side_effect = lambda s, f: s
             importer.process_fields(extractor, ticket, issue)
         self.assertEqual(ticket.description.strip(), body_converted.strip())
 
@@ -256,17 +267,18 @@ Hello
 
         issue = {'comments_url': '/comments'}
         extractor.iter_comments.return_value = [
-                {
-                    'body': body,
-                    'created_at': '2013-08-26T16:57:53Z',
-                    'user': {'login': 'me'},
-                }
-            ]
+            {
+                'body': body,
+                'created_at': '2013-08-26T16:57:53Z',
+                'user': {'login': 'me'},
+            }
+        ]
         importer = tracker.GitHubTrackerImporter()
-        importer.github_markdown_converter = GitHubMarkdownConverter('user', 'project')
+        importer.github_markdown_converter = GitHubMarkdownConverter(
+            'user', 'project')
         importer.process_comments(extractor, ticket, issue)
         self.assertEqual(ticket.discussion_thread.add_post.call_args_list[0], mock.call(
-                text=body_converted,
-                timestamp=datetime(2013, 8, 26, 16, 57, 53),
-                ignore_security=True,
-            ))
+            text=body_converted,
+            timestamp=datetime(2013, 8, 26, 16, 57, 53),
+            ignore_security=True,
+        ))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/google/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/google/__init__.py b/ForgeImporters/forgeimporters/tests/google/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/tests/google/__init__.py
+++ b/ForgeImporters/forgeimporters/tests/google/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/google/functional/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/google/functional/__init__.py b/ForgeImporters/forgeimporters/tests/google/functional/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/tests/google/functional/__init__.py
+++ b/ForgeImporters/forgeimporters/tests/google/functional/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/tests/google/functional/test_tracker.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/tests/google/functional/test_tracker.py b/ForgeImporters/forgeimporters/tests/google/functional/test_tracker.py
index c22f7d0..e689971 100644
--- a/ForgeImporters/forgeimporters/tests/google/functional/test_tracker.py
+++ b/ForgeImporters/forgeimporters/tests/google/functional/test_tracker.py
@@ -38,10 +38,12 @@ from forgeimporters.google import tracker
 
 
 class TestGCTrackerImporter(TestCase):
+
     def _make_extractor(self, html):
         with mock.patch.object(base.h, 'urlopen') as urlopen:
             urlopen.return_value = ''
-            extractor = google.GoogleCodeProjectExtractor('allura-google-importer', 'project_info')
+            extractor = google.GoogleCodeProjectExtractor(
+                'allura-google-importer', 'project_info')
         extractor.page = BeautifulSoup(html)
         extractor.url = "http://test/issue/?id=1"
         return extractor
@@ -49,13 +51,16 @@ class TestGCTrackerImporter(TestCase):
     def _make_ticket(self, issue, issue_id=1):
         self.assertIsNone(self.project.app_instance('test-issue'))
         with mock.patch.object(base.h, 'urlopen') as urlopen,\
-             mock.patch.object(google.tracker, 'GoogleCodeProjectExtractor') as GPE,\
-             mock.patch.object(google.tracker.M, 'AuditLog') as AL,\
-             mock.patch('forgetracker.tasks.update_bin_counts') as ubc:
-            urlopen.side_effect = lambda req, **kw: mock.Mock(read=req.get_full_url, info=lambda:{'content-type': 'text/plain'})
+                mock.patch.object(google.tracker, 'GoogleCodeProjectExtractor') as GPE,\
+                mock.patch.object(google.tracker.M, 'AuditLog') as AL,\
+                mock.patch('forgetracker.tasks.update_bin_counts') as ubc:
+            urlopen.side_effect = lambda req, **kw: mock.Mock(
+                read=req.get_full_url,
+                info=lambda: {'content-type': 'text/plain'})
             GPE.iter_issues.return_value = [(issue_id, issue)]
             gti = google.tracker.GoogleCodeTrackerImporter()
-            gti.import_tool(self.project, self.user, 'test-issue-project', mount_point='test-issue')
+            gti.import_tool(self.project, self.user,
+                            'test-issue-project', mount_point='test-issue')
         c.app = self.project.app_instance('test-issue')
         query = TM.Ticket.query.find({'app_config_id': c.app.config._id})
         self.assertEqual(query.count(), 1)
@@ -65,37 +70,42 @@ class TestGCTrackerImporter(TestCase):
     def setUp(self, *a, **kw):
         super(TestGCTrackerImporter, self).setUp(*a, **kw)
         setup_basic_test()
-        self.empty_issue = self._make_extractor(open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/empty-issue.html')).read())
-        self.test_issue = self._make_extractor(open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read())
+        self.empty_issue = self._make_extractor(
+            open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/empty-issue.html')).read())
+        self.test_issue = self._make_extractor(
+            open(pkg_resources.resource_filename('forgeimporters', 'tests/data/google/test-issue.html')).read())
         c.project = self.project = M.Project.query.get(shortname='test')
         c.user = self.user = M.User.query.get(username='test-admin')
 
     def test_empty_issue(self):
         ticket = self._make_ticket(self.empty_issue)
         self.assertEqual(ticket.summary, 'Empty Issue')
-        self.assertEqual(ticket.description, '*Originally created by:* john...@gmail.com\n\nEmpty')
+        self.assertEqual(ticket.description,
+                         '*Originally created by:* john...@gmail.com\n\nEmpty')
         self.assertEqual(ticket.status, '')
         self.assertEqual(ticket.milestone, '')
         self.assertEqual(ticket.custom_fields, {})
         assert c.app.config.options.get('EnableVoting')
-        open_bin = TM.Bin.query.get(summary='Open Tickets', app_config_id=c.app.config._id)
+        open_bin = TM.Bin.query.get(
+            summary='Open Tickets', app_config_id=c.app.config._id)
         self.assertItemsEqual(open_bin.terms.split(' && '), [
-                '!status:Fixed',
-                '!status:Verified',
-                '!status:Invalid',
-                '!status:Duplicate',
-                '!status:WontFix',
-                '!status:Done',
-            ])
-        closed_bin = TM.Bin.query.get(summary='Closed Tickets', app_config_id=c.app.config._id)
+            '!status:Fixed',
+            '!status:Verified',
+            '!status:Invalid',
+            '!status:Duplicate',
+            '!status:WontFix',
+            '!status:Done',
+        ])
+        closed_bin = TM.Bin.query.get(
+            summary='Closed Tickets', app_config_id=c.app.config._id)
         self.assertItemsEqual(closed_bin.terms.split(' or '), [
-                'status:Fixed',
-                'status:Verified',
-                'status:Invalid',
-                'status:Duplicate',
-                'status:WontFix',
-                'status:Done',
-            ])
+            'status:Fixed',
+            'status:Verified',
+            'status:Invalid',
+            'status:Duplicate',
+            'status:WontFix',
+            'status:Done',
+        ])
 
     @without_module('html2text')
     def test_issue_basic_fields(self):
@@ -105,40 +115,40 @@ class TestGCTrackerImporter(TestCase):
         self.assertIsNone(ticket.assigned_to_id)
         self.assertEqual(ticket.summary, 'Test "Issue"')
         assert_equal(ticket.description,
-                '*Originally created by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
-                '*Originally owned by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
-                '\n'
-                'Test \\*Issue\\* for testing\n'
-                '\n'
-                '&nbsp; 1\\. Test List\n'
-                '&nbsp; 2\\. Item\n'
-                '\n'
-                '\\*\\*Testing\\*\\*\n'
-                '\n'
-                ' \\* Test list 2\n'
-                ' \\* Item\n'
-                '\n'
-                '\\# Test Section\n'
-                '\n'
-                '&nbsp;&nbsp;&nbsp; p = source\\.test\\_issue\\.post\\(\\)\n'
-                '&nbsp;&nbsp;&nbsp; p\\.count = p\\.count \\*5 \\#\\* 6\n'
-                '&nbsp;&nbsp;&nbsp; if p\\.count &gt; 5:\n'
-                '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
-                '\n'
-                'References: [issue 1](#1), [r2]\n'
-                '\n'
-                'That\'s all'
-            )
+                     '*Originally created by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
+                     '*Originally owned by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
+                     '\n'
+                     'Test \\*Issue\\* for testing\n'
+                     '\n'
+                     '&nbsp; 1\\. Test List\n'
+                     '&nbsp; 2\\. Item\n'
+                     '\n'
+                     '\\*\\*Testing\\*\\*\n'
+                     '\n'
+                     ' \\* Test list 2\n'
+                     ' \\* Item\n'
+                     '\n'
+                     '\\# Test Section\n'
+                     '\n'
+                     '&nbsp;&nbsp;&nbsp; p = source\\.test\\_issue\\.post\\(\\)\n'
+                     '&nbsp;&nbsp;&nbsp; p\\.count = p\\.count \\*5 \\#\\* 6\n'
+                     '&nbsp;&nbsp;&nbsp; if p\\.count &gt; 5:\n'
+                     '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
+                     '\n'
+                     'References: [issue 1](#1), [r2]\n'
+                     '\n'
+                     'That\'s all'
+                     )
         self.assertEqual(ticket.status, 'Started')
         self.assertEqual(ticket.created_date, datetime(2013, 8, 8, 15, 33, 52))
         self.assertEqual(ticket.mod_date, datetime(2013, 8, 8, 15, 36, 57))
         self.assertEqual(ticket.custom_fields, {
-                '_priority': 'Medium',
-                '_opsys': 'All, OSX, Windows',
-                '_component': 'Logic',
-                '_type': 'Defect',
-                '_milestone': 'Release1.0'
-            })
+            '_priority': 'Medium',
+            '_opsys': 'All, OSX, Windows',
+            '_component': 'Logic',
+            '_type': 'Defect',
+            '_milestone': 'Release1.0'
+        })
         self.assertEqual(ticket.labels, ['Performance', 'Security'])
         self.assertEqual(ticket.votes_up, 1)
         self.assertEqual(ticket.votes, 1)
@@ -146,55 +156,57 @@ class TestGCTrackerImporter(TestCase):
     def test_import_id(self):
         ticket = self._make_ticket(self.test_issue, issue_id=6)
         self.assertEqual(ticket.app.config.options.import_id, {
-                'source': 'Google Code',
-                'project_name': 'test-issue-project',
-            })
+            'source': 'Google Code',
+            'project_name': 'test-issue-project',
+        })
         self.assertEqual(ticket.ticket_num, 6)
         self.assertEqual(ticket.import_id, {
-                'source': 'Google Code',
-                'project_name': 'test-issue-project',
-                'source_id': 6,
-            })
+            'source': 'Google Code',
+            'project_name': 'test-issue-project',
+            'source_id': 6,
+        })
 
     @skipif(module_not_available('html2text'))
     def test_html2text_escaping(self):
         ticket = self._make_ticket(self.test_issue)
         assert_equal(ticket.description,
-                '*Originally created by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
-                '*Originally owned by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
-                '\n'
-                'Test \\*Issue\\* for testing\n'
-                '\n'
-                '&nbsp; 1. Test List\n'
-                '&nbsp; 2. Item\n'
-                '\n'
-                '\\*\\*Testing\\*\\*\n'
-                '\n'
-                ' \\* Test list 2\n'
-                ' \\* Item\n'
-                '\n'
-                '\\# Test Section\n'
-                '\n'
-                '&nbsp;&nbsp;&nbsp; p = source.test\\_issue.post\\(\\)\n'
-                '&nbsp;&nbsp;&nbsp; p.count = p.count \\*5 \\#\\* 6\n'
-                '&nbsp;&nbsp;&nbsp; if p.count &gt; 5:\n'
-                '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
-                '\n'
-                'References: [issue 1](#1), [r2]\n'
-                '\n'
-                'That\'s all'
-            )
+                     '*Originally created by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
+                     '*Originally owned by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
+                     '\n'
+                     'Test \\*Issue\\* for testing\n'
+                     '\n'
+                     '&nbsp; 1. Test List\n'
+                     '&nbsp; 2. Item\n'
+                     '\n'
+                     '\\*\\*Testing\\*\\*\n'
+                     '\n'
+                     ' \\* Test list 2\n'
+                     ' \\* Item\n'
+                     '\n'
+                     '\\# Test Section\n'
+                     '\n'
+                     '&nbsp;&nbsp;&nbsp; p = source.test\\_issue.post\\(\\)\n'
+                     '&nbsp;&nbsp;&nbsp; p.count = p.count \\*5 \\#\\* 6\n'
+                     '&nbsp;&nbsp;&nbsp; if p.count &gt; 5:\n'
+                     '&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; print "Not &lt; 5 &amp; \\!= 5"\n'
+                     '\n'
+                     'References: [issue 1](#1), [r2]\n'
+                     '\n'
+                     'That\'s all'
+                     )
 
     def _assert_attachments(self, actual, *expected):
         self.assertEqual(len(actual), len(expected))
-        atts = set((a.filename, a.content_type, a.rfile().read()) for a in actual)
+        atts = set((a.filename, a.content_type, a.rfile().read())
+                   for a in actual)
         self.assertEqual(atts, set(expected))
 
     def test_attachements(self):
         ticket = self._make_ticket(self.test_issue)
         self._assert_attachments(ticket.attachments,
-                ('at1.txt', 'text/plain', 'http://allura-google-importer.googlecode.com/issues/attachment?aid=70000000&name=at1.txt&token=3REU1M3JUUMt0rJUg7ldcELt6LA%3A1376059941255'),
-            )
+                                 ('at1.txt', 'text/plain',
+                                  'http://allura-google-importer.googlecode.com/issues/attachment?aid=70000000&name=at1.txt&token=3REU1M3JUUMt0rJUg7ldcELt6LA%3A1376059941255'),
+                                 )
 
     @without_module('html2text')
     def test_comments(self):
@@ -202,96 +214,102 @@ class TestGCTrackerImporter(TestCase):
         ticket = self._make_ticket(self.test_issue)
         actual_comments = ticket.discussion_thread.find_posts()
         expected_comments = [
-                {
-                    'timestamp': datetime(2013, 8, 8, 15, 35, 15),
-                    'text': (
-                            '*Originally posted by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
-                            '\n'
-                            'Test \\*comment\\* is a comment\n'
-                            '\n'
-                            '**Labels:** -OpSys-Linux OpSys-Windows\n'
-                            '**Status:** Started'
-                        ),
-                    'attachments': [
-                            ('at2.txt', 'text/plain', 'http://allura-google-importer.googlecode.com/issues/attachment?aid=60001000&name=at2.txt&token=JOSo4duwaN2FCKZrwYOQ-nx9r7U%3A1376001446667'),
-                        ],
-                },
-                {
-                    'timestamp': datetime(2013, 8, 8, 15, 35, 34),
-                    'text': (
-                            '*Originally posted by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
-                            '\n'
-                            'Another comment with references: [issue 2](#2), [r1]\n\n'
-                        ),
-                },
-                {
-                    'timestamp': datetime(2013, 8, 8, 15, 36, 39),
-                    'text': (
-                            '*Originally posted by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
-                            '\n'
-                            'Last comment\n\n'
-                        ),
-                    'attachments': [
-                            ('at4.txt', 'text/plain', 'http://allura-google-importer.googlecode.com/issues/attachment?aid=60003000&name=at4.txt&token=6Ny2zYHmV6b82dqxyoiH6HUYoC4%3A1376001446667'),
-                            ('at1.txt', 'text/plain', 'http://allura-google-importer.googlecode.com/issues/attachment?aid=60003001&name=at1.txt&token=NS8aMvWsKzTAPuY2kniJG5aLzPg%3A1376001446667'),
-                        ],
-                },
-                {
-                    'timestamp': datetime(2013, 8, 8, 15, 36, 57),
-                    'text': (
-                            '*Originally posted by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
-                            '\n'
-                            'Oh, I forgot one \\(with an inter\\-project reference to [issue other\\-project:1](https://code.google.com/p/other-project/issues/detail?id=1)\\)\n'
-                            '\n'
-                            '**Labels:** OpSys-OSX'
-                        ),
-                },
-            ]
+            {
+                'timestamp': datetime(2013, 8, 8, 15, 35, 15),
+                'text': (
+                    '*Originally posted by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
+                    '\n'
+                    'Test \\*comment\\* is a comment\n'
+                    '\n'
+                    '**Labels:** -OpSys-Linux OpSys-Windows\n'
+                    '**Status:** Started'
+                ),
+                'attachments': [
+                    ('at2.txt', 'text/plain',
+                     'http://allura-google-importer.googlecode.com/issues/attachment?aid=60001000&name=at2.txt&token=JOSo4duwaN2FCKZrwYOQ-nx9r7U%3A1376001446667'),
+                ],
+            },
+            {
+                'timestamp': datetime(2013, 8, 8, 15, 35, 34),
+                'text': (
+                    '*Originally posted by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
+                    '\n'
+                    'Another comment with references: [issue 2](#2), [r1]\n\n'
+                ),
+            },
+            {
+                'timestamp': datetime(2013, 8, 8, 15, 36, 39),
+                'text': (
+                    '*Originally posted by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
+                    '\n'
+                    'Last comment\n\n'
+                ),
+                'attachments': [
+                    ('at4.txt', 'text/plain',
+                     'http://allura-google-importer.googlecode.com/issues/attachment?aid=60003000&name=at4.txt&token=6Ny2zYHmV6b82dqxyoiH6HUYoC4%3A1376001446667'),
+                    ('at1.txt', 'text/plain',
+                     'http://allura-google-importer.googlecode.com/issues/attachment?aid=60003001&name=at1.txt&token=NS8aMvWsKzTAPuY2kniJG5aLzPg%3A1376001446667'),
+                ],
+            },
+            {
+                'timestamp': datetime(2013, 8, 8, 15, 36, 57),
+                'text': (
+                    '*Originally posted by:* [john...@gmail.com](http://code.google.com/u/101557263855536553789/)\n'
+                    '\n'
+                    'Oh, I forgot one \\(with an inter\\-project reference to [issue other\\-project:1](https://code.google.com/p/other-project/issues/detail?id=1)\\)\n'
+                    '\n'
+                    '**Labels:** OpSys-OSX'
+                ),
+            },
+        ]
         self.assertEqual(len(actual_comments), len(expected_comments))
         for actual, expected in zip(actual_comments, expected_comments):
             self.assertEqual(actual.author(), anon)
             self.assertEqual(actual.timestamp, expected['timestamp'])
             self.assertEqual(actual.text, expected['text'])
             if 'attachments' in expected:
-                self._assert_attachments(actual.attachments, *expected['attachments'])
+                self._assert_attachments(
+                    actual.attachments, *expected['attachments'])
 
     def test_globals(self):
         globals = self._make_ticket(self.test_issue, issue_id=6).globals
         self.assertEqual(globals.open_status_names, 'New Accepted Started')
-        self.assertEqual(globals.closed_status_names, 'Fixed Verified Invalid Duplicate WontFix Done')
+        self.assertEqual(globals.closed_status_names,
+                         'Fixed Verified Invalid Duplicate WontFix Done')
         self.assertEqual(globals.last_ticket_num, 6)
         self.assertItemsEqual(globals.custom_fields, [
-                {
-                    'label': 'Milestone',
-                    'name': '_milestone',
-                    'type': 'milestone',
-                    'options': '',
-                    'milestones': [
-                            {'name': 'Release1.0', 'due_date': None, 'complete': False},
-                        ],
-                },
-                {
-                    'label': 'Type',
-                    'name': '_type',
-                    'type': 'select',
-                    'options': 'Defect',
-                },
-                {
-                    'label': 'Priority',
-                    'name': '_priority',
-                    'type': 'select',
-                    'options': 'Medium',
-                },
-                {
-                    'label': 'OpSys',
-                    'name': '_opsys',
-                    'type': 'string',
-                    'options': '',
-                },
-                {
-                    'label': 'Component',
-                    'name': '_component',
-                    'type': 'string',
-                    'options': '',
-                },
-            ])
+            {
+                'label': 'Milestone',
+                'name': '_milestone',
+                'type': 'milestone',
+                'options': '',
+                'milestones': [
+                    {'name': 'Release1.0', 'due_date':
+                     None, 'complete': False},
+                ],
+            },
+            {
+                'label': 'Type',
+                'name': '_type',
+                'type': 'select',
+                'options': 'Defect',
+            },
+            {
+                'label': 'Priority',
+                'name': '_priority',
+                'type': 'select',
+                'options': 'Medium',
+            },
+            {
+                'label': 'OpSys',
+                'name': '_opsys',
+                'type': 'string',
+                'options': '',
+            },
+            {
+                'label': 'Component',
+                'name': '_component',
+                'type': 'string',
+                'options': '',
+            },
+        ])


[36/36] git commit: [#6484] ticket:492 Fix tests failing because tracwikiimporter is not available

Posted by jo...@apache.org.
[#6484] ticket:492 Fix tests failing because tracwikiimporter is not available


Project: http://git-wip-us.apache.org/repos/asf/incubator-allura/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-allura/commit/6845ccde
Tree: http://git-wip-us.apache.org/repos/asf/incubator-allura/tree/6845ccde
Diff: http://git-wip-us.apache.org/repos/asf/incubator-allura/diff/6845ccde

Branch: refs/heads/cj/6484
Commit: 6845ccde9f86db2c27df3d4ec761e033b5853e20
Parents: 303512a
Author: Igor Bondarenko <je...@gmail.com>
Authored: Thu Jan 2 12:32:03 2014 +0200
Committer: Cory Johns <cj...@slashdotmedia.com>
Committed: Fri Jan 10 19:03:37 2014 +0000

----------------------------------------------------------------------
 .../forgeimporters/trac/tests/test_tickets.py   | 108 +++++++++++--------
 ForgeImporters/forgeimporters/trac/tickets.py   |   5 +-
 2 files changed, 66 insertions(+), 47 deletions(-)
----------------------------------------------------------------------
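
The fix has two parts: the test module registers MagicMock stand-ins for the
missing tracwikiimporter package in sys.modules, and the importer defers its
tracwikiimporter import until import_tool() actually runs. Below is a minimal,
hypothetical sketch of that stubbing pattern (the helper name
stub_missing_package is illustrative only; the real test uses the
mock_tracwikiimporter() method shown in the diff that follows):

    import sys
    from mock import MagicMock, patch

    def stub_missing_package(dotted_name):
        """Register MagicMock stand-ins for a dotted module path, wiring each
        child into sys.modules and as an attribute of its parent so that both
        `import a.b.c` and attribute lookups resolve to the same mock."""
        parent = None
        prefix = ''
        for part in dotted_name.split('.'):
            prefix = part if not prefix else prefix + '.' + part
            module = sys.modules.get(prefix)
            if module is None:
                module = MagicMock(name=prefix)
                sys.modules[prefix] = module
            if parent is not None:
                setattr(parent, part, module)
            parent = module

    stub_missing_package('tracwikiimporter.scripts.trac_export')
    with patch('tracwikiimporter.scripts.trac_export.export', create=True) as export:
        export.return_value = []
        # code under test that lazily does
        #   from tracwikiimporter.scripts.trac_export import export
        # now picks up the mock instead of raising ImportError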


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/6845ccde/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/tests/test_tickets.py b/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
index 18a88ea..de5506f 100644
--- a/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
+++ b/ForgeImporters/forgeimporters/trac/tests/test_tickets.py
@@ -17,7 +17,7 @@
 
 import json
 import os
-from datetime import datetime
+import sys
 
 from unittest import TestCase
 from mock import Mock, patch, MagicMock
@@ -39,14 +39,33 @@ from forgeimporters.trac.tickets import (
 )
 
 
+
 class TestTracTicketImporter(TestCase):
 
+    def mock_tracwikiimporter(self):
+        if module_not_available('tracwikiimporter'):
+            tracwikiimporter_mock = MagicMock(name='tracwikiimporter')
+            tracwikiimporter_mock.scripts = MagicMock(name='tracwikiimporter.scripts')
+            tracwikiimporter_mock.scripts.trac_export = MagicMock(name='tracwikiimporter.scripts.trac_export')
+            tracwikiimporter_mock.scripts.trac_export.DateJSONEncoder.return_value.encode = json.dumps
+            sys.modules['tracwikiimporter'] = tracwikiimporter_mock
+            sys.modules['tracwikiimporter.scripts'] = tracwikiimporter_mock.scripts
+            sys.modules['tracwikiimporter.scripts.trac_export'] = tracwikiimporter_mock.scripts.trac_export
+            self.tracwikiimporter_mocked = True
+
+    def tearDown(self):
+        if getattr(self, 'tracwikiimporter_mocked', False):
+            sys.modules.pop('tracwikiimporter', None)
+            sys.modules.pop('tracwikiimporter.scripts', None)
+            sys.modules.pop('tracwikiimporter.scripts.trac_export', None)
+            self.tracwikiimporter_mocked = False
+
     @patch('forgeimporters.trac.tickets.session')
     @patch('forgeimporters.trac.tickets.g')
     @patch('forgeimporters.trac.tickets.AuditLog')
     @patch('forgeimporters.trac.tickets.TracImportSupport')
-    @patch('forgeimporters.trac.tickets.export')
-    def test_import_tool(self, export, ImportSupport, AuditLog, g, session):
+    @patch('forgeimporters.trac.tickets.c')
+    def test_import_tool(self, c, ImportSupport, AuditLog, g, session):
         user_map = {"orig_user": "new_user"}
         importer = TracTicketImporter()
         app = Mock(name='ForgeTrackerApp')
@@ -56,54 +75,57 @@ class TestTracTicketImporter(TestCase):
         project = Mock(name='Project', shortname='myproject')
         project.install_app.return_value = app
         user = Mock(name='User', _id='id')
-        export.return_value = []
-        res = importer.import_tool(project, user,
-                                   mount_point='bugs',
-                                   mount_label='Bugs',
-                                   trac_url='http://example.com/trac/url',
-                                   user_map=json.dumps(user_map),
-                                   )
-        self.assertEqual(res, app)
-        project.install_app.assert_called_once_with(
-            'Tickets', mount_point='bugs', mount_label='Bugs',
-            open_status_names='new assigned accepted reopened',
-            closed_status_names='closed',
-            import_id={
-                'source': 'Trac',
-                'trac_url': 'http://example.com/trac/url/',
-            })
-        export.assert_called_once_with('http://example.com/trac/url/')
-        ImportSupport.return_value.perform_import.assert_called_once_with(
-            json.dumps(export.return_value),
-            json.dumps({
-                "user_map": user_map,
-                "usernames_match": False,
-            }),
-        )
-        AuditLog.log.assert_called_once_with(
-            'import tool bugs from http://example.com/trac/url/',
-            project=project, user=user, url='foo')
-        g.post_event.assert_called_once_with('project_updated')
+        self.mock_tracwikiimporter()
+        with patch('tracwikiimporter.scripts.trac_export.export', create=True) as export:
+            export.return_value = []
+            res = importer.import_tool(project, user,
+                                       mount_point='bugs',
+                                       mount_label='Bugs',
+                                       trac_url='http://example.com/trac/url',
+                                       user_map=json.dumps(user_map),
+                                       )
+            self.assertEqual(res, app)
+            project.install_app.assert_called_once_with(
+                'Tickets', mount_point='bugs', mount_label='Bugs',
+                open_status_names='new assigned accepted reopened',
+                closed_status_names='closed',
+                import_id={
+                    'source': 'Trac',
+                    'trac_url': 'http://example.com/trac/url/',
+                })
+            export.assert_called_once_with('http://example.com/trac/url/')
+            ImportSupport.return_value.perform_import.assert_called_once_with(
+                json.dumps(export.return_value),
+                json.dumps({
+                    "user_map": user_map,
+                    "usernames_match": False,
+                }),
+            )
+            AuditLog.log.assert_called_once_with(
+                'import tool bugs from http://example.com/trac/url/',
+                project=project, user=user, url='foo')
+            g.post_event.assert_called_once_with('project_updated')
 
     @patch('forgeimporters.trac.tickets.session')
     @patch('forgeimporters.trac.tickets.h')
-    @patch('forgeimporters.trac.tickets.export')
-    def test_import_tool_failure(self, export, h, session):
+    def test_import_tool_failure(self, h, session):
         importer = TracTicketImporter()
         app = Mock(name='ForgeTrackerApp')
         project = Mock(name='Project', shortname='myproject')
         project.install_app.return_value = app
         user = Mock(name='User', _id='id')
-        export.side_effect = ValueError
-
-        self.assertRaises(ValueError, importer.import_tool, project, user,
-                          mount_point='bugs',
-                          mount_label='Bugs',
-                          trac_url='http://example.com/trac/url',
-                          user_map=None,
-                          )
-
-        h.make_app_admin_only.assert_called_once_with(app)
+        self.mock_tracwikiimporter()
+        with patch('tracwikiimporter.scripts.trac_export.export', create=True) as export:
+            export.side_effect = ValueError
+
+            self.assertRaises(ValueError, importer.import_tool, project, user,
+                              mount_point='bugs',
+                              mount_label='Bugs',
+                              trac_url='http://example.com/trac/url',
+                              user_map=None,
+                              )
+
+            h.make_app_admin_only.assert_called_once_with(app)
 
 
 class TestTracTicketImportController(TestController, TestCase):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/6845ccde/ForgeImporters/forgeimporters/trac/tickets.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/trac/tickets.py b/ForgeImporters/forgeimporters/trac/tickets.py
index 231ce71..c9b42c1 100644
--- a/ForgeImporters/forgeimporters/trac/tickets.py
+++ b/ForgeImporters/forgeimporters/trac/tickets.py
@@ -39,10 +39,6 @@ from allura.lib.decorators import require_post
 from allura.lib import validators as v
 from allura.lib import helpers as h
 from allura.model import AuditLog
-from tracwikiimporter.scripts.trac_export import (
-    export,
-    DateJSONEncoder,
-)
 
 from forgeimporters.base import (
     ToolImporter,
@@ -121,6 +117,7 @@ class TracTicketImporter(ToolImporter):
         session(app.config).flush(app.config)
         session(app.globals).flush(app.globals)
         try:
+            from tracwikiimporter.scripts.trac_export import export, DateJSONEncoder
             with h.push_config(c, app=app):
                 TracImportSupport().perform_import(
                     json.dumps(export(trac_url), cls=DateJSONEncoder),
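
For reference, a hypothetical sketch of the deferred-import pattern used in
import_tool() above (the wrapper name and error message are illustrative, not
part of the actual code): importing the optional tracwikiimporter package only
inside the code path that needs it means that merely importing the module,
e.g. during test collection, no longer requires tracwikiimporter to be
installed.

    def export_trac_tickets(trac_url):
        # The optional dependency is resolved lazily, so an ImportError can
        # only occur when a Trac export is actually attempted.
        try:
            from tracwikiimporter.scripts.trac_export import export
        except ImportError:
            raise RuntimeError(
                'tracwikiimporter must be installed to import from Trac')
        return export(trac_url)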


[12/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/forgediscussion/widgets/forum_widgets.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/forgediscussion/widgets/forum_widgets.py b/ForgeDiscussion/forgediscussion/widgets/forum_widgets.py
index 1b04323..7e9debb 100644
--- a/ForgeDiscussion/forgediscussion/widgets/forum_widgets.py
+++ b/ForgeDiscussion/forgediscussion/widgets/forum_widgets.py
@@ -29,131 +29,158 @@ from allura.lib.widgets.subscriptions import SubscribeForm
 
 from forgediscussion import model as M
 
+
 class _ForumSummary(ew_core.Widget):
-    template='jinja:forgediscussion:templates/discussion_widgets/forum_summary.html'
-    defaults=dict(
+    template = 'jinja:forgediscussion:templates/discussion_widgets/forum_summary.html'
+    defaults = dict(
         ew_core.Widget.defaults,
         name=None,
         value=None,
         show_label=True,
         label=None)
 
+
 class _ForumsTable(ew.TableField):
+
     class fields(ew_core.NameList):
-        _id=ew.HiddenField(validator=V.Ming(M.ForumThread))
-        num_topics=ffw.DisplayOnlyField(show_label=True, label='Topics')
-        num_posts=ffw.DisplayOnlyField(show_label=True, label='Posts')
-        last_post=ffw.DisplayOnlyField(show_label=True)
-        subscribed=ew.Checkbox(suppress_label=True, show_label=True)
+        _id = ew.HiddenField(validator=V.Ming(M.ForumThread))
+        num_topics = ffw.DisplayOnlyField(show_label=True, label='Topics')
+        num_posts = ffw.DisplayOnlyField(show_label=True, label='Posts')
+        last_post = ffw.DisplayOnlyField(show_label=True)
+        subscribed = ew.Checkbox(suppress_label=True, show_label=True)
     fields.insert(0, _ForumSummary())
 
+
 class ForumSubscriptionForm(CsrfForm):
+
     class fields(ew_core.NameList):
-        forums=_ForumsTable()
-        page_list=ffw.PageList()
-    submit_text='Update Subscriptions'
+        forums = _ForumsTable()
+        page_list = ffw.PageList()
+    submit_text = 'Update Subscriptions'
+
 
 class _ThreadsTable(DW._ThreadsTable):
+
     class fields(ew_core.NameList):
-        _id=ew.HiddenField(validator=V.Ming(M.ForumThread))
-        subject=ffw.DisplayOnlyField(show_label=True, label='Subject')
-        url=ffw.DisplayOnlyField()
-        num_replies=ffw.DisplayOnlyField(show_label=True, label='Num Replies')
-        num_views=ffw.DisplayOnlyField(show_label=True)
-        flags=ffw.DisplayOnlyField(show_label=True)
-        last_post=ffw.DisplayOnlyField(show_label=True)
-        subscription=ew.Checkbox(suppress_label=True, show_label=True)
-    defaults=dict(DW._ThreadsTable.defaults, div_id='forum_threads')
+        _id = ew.HiddenField(validator=V.Ming(M.ForumThread))
+        subject = ffw.DisplayOnlyField(show_label=True, label='Subject')
+        url = ffw.DisplayOnlyField()
+        num_replies = ffw.DisplayOnlyField(
+            show_label=True, label='Num Replies')
+        num_views = ffw.DisplayOnlyField(show_label=True)
+        flags = ffw.DisplayOnlyField(show_label=True)
+        last_post = ffw.DisplayOnlyField(show_label=True)
+        subscription = ew.Checkbox(suppress_label=True, show_label=True)
+    defaults = dict(DW._ThreadsTable.defaults, div_id='forum_threads')
+
 
 class ThreadSubscriptionForm(DW.SubscriptionForm):
+
     class fields(ew_core.NameList):
-        threads=_ThreadsTable()
-        page_list=ffw.PageList()
-        page_size=ffw.PageSize()
+        threads = _ThreadsTable()
+        page_list = ffw.PageList()
+        page_size = ffw.PageSize()
+
 
 class AnnouncementsTable(DW._ThreadsTable):
+
     class fields(ew_core.NameList):
-        _id=ew.HiddenField(validator=V.Ming(M.ForumThread))
-        subject=ffw.DisplayOnlyField(show_label=True, label='Subject')
-        url=ffw.DisplayOnlyField()
-        num_replies=ffw.DisplayOnlyField(show_label=True, label='Num Replies')
-        num_views=ffw.DisplayOnlyField(show_label=True)
-        flags=ffw.DisplayOnlyField(show_label=True)
-        last_post=ffw.DisplayOnlyField(show_label=True)
-    defaults=dict(DW._ThreadsTable.defaults, div_id='announcements')
-    name='announcements'
+        _id = ew.HiddenField(validator=V.Ming(M.ForumThread))
+        subject = ffw.DisplayOnlyField(show_label=True, label='Subject')
+        url = ffw.DisplayOnlyField()
+        num_replies = ffw.DisplayOnlyField(
+            show_label=True, label='Num Replies')
+        num_views = ffw.DisplayOnlyField(show_label=True)
+        flags = ffw.DisplayOnlyField(show_label=True)
+        last_post = ffw.DisplayOnlyField(show_label=True)
+    defaults = dict(DW._ThreadsTable.defaults, div_id='announcements')
+    name = 'announcements'
+
 
 class _ForumSelector(ew.SingleSelectField):
+
     def options(self):
         return [
             ew.Option(label=f.name, py_value=f, html_value=f.shortname)
-            for f in c.app.forums ]
+            for f in c.app.forums]
+
     def to_python(self, value, state):
-        result = M.Forum.query.get(shortname=value, app_config_id=c.app.config._id)
+        result = M.Forum.query.get(
+            shortname=value, app_config_id=c.app.config._id)
         if not result:
-            raise fev.Invalid('Illegal forum shortname: %s' % value, value, state)
+            raise fev.Invalid('Illegal forum shortname: %s' %
+                              value, value, state)
         return result
+
     def from_python(self, value, state):
         return value.shortname
 
+
 class ModerateThread(CsrfForm):
-    submit_text='Save Changes'
+    submit_text = 'Save Changes'
+
     class fields(ew_core.NameList):
-        discussion=_ForumSelector(label='New Forum')
-        flags=ew.CheckboxSet(options=['Sticky', 'Announcement'])
+        discussion = _ForumSelector(label='New Forum')
+        flags = ew.CheckboxSet(options=['Sticky', 'Announcement'])
 
     class buttons(ew_core.NameList):
-        delete=ew.SubmitButton(label='Delete Thread')
+        delete = ew.SubmitButton(label='Delete Thread')
 
 
 class ModeratePost(CsrfForm):
-    submit_text=None
-    fields=[
+    submit_text = None
+    fields = [
         ew.FieldSet(legend='Promote post to its own thread', fields=[
-                ew.TextField(name='subject', label='Thread title'),
-                ew.SubmitButton(name='promote', label='Promote to thread')])]
+            ew.TextField(name='subject', label='Thread title'),
+            ew.SubmitButton(name='promote', label='Promote to thread')])]
+
 
 class PromoteToThread(CsrfForm):
-    submit_text=None
-    fields=[
+    submit_text = None
+    fields = [
         ew.TextField(name='subject', label='Thread title'),
         ew.SubmitButton(name='promote', label='Promote to thread')]
 
+
 class ForumHeader(DW.DiscussionHeader):
-    template='jinja:forgediscussion:templates/discussion_widgets/forum_header.html'
-    widgets=dict(DW.DiscussionHeader.widgets,
-                 announcements_table=AnnouncementsTable(),
-                 forum_subscription_form=ForumSubscriptionForm())
+    template = 'jinja:forgediscussion:templates/discussion_widgets/forum_header.html'
+    widgets = dict(DW.DiscussionHeader.widgets,
+                   announcements_table=AnnouncementsTable(),
+                   forum_subscription_form=ForumSubscriptionForm())
+
 
 class ThreadHeader(DW.ThreadHeader):
-    template='jinja:forgediscussion:templates/discussion_widgets/thread_header.html'
-    defaults=dict(DW.ThreadHeader.defaults,
-                  show_subject=True,
-                  show_moderate=True)
-    widgets=dict(DW.ThreadHeader.widgets,
-                 moderate_thread=ModerateThread(),
-                 announcements_table=AnnouncementsTable())
+    template = 'jinja:forgediscussion:templates/discussion_widgets/thread_header.html'
+    defaults = dict(DW.ThreadHeader.defaults,
+                    show_subject=True,
+                    show_moderate=True)
+    widgets = dict(DW.ThreadHeader.widgets,
+                   moderate_thread=ModerateThread(),
+                   announcements_table=AnnouncementsTable())
+
 
 class Post(DW.Post):
-    show_subject=False
-    widgets=dict(DW.Post.widgets,
-                 promote_to_thread=PromoteToThread())
+    show_subject = False
+    widgets = dict(DW.Post.widgets,
+                   promote_to_thread=PromoteToThread())
+
 
 class Thread(DW.Thread):
-    defaults=dict(
+    defaults = dict(
         DW.Thread.defaults,
         show_subject=False)
-    widgets=dict(DW.Thread.widgets,
-                 thread_header=ThreadHeader(),
-                 post=Post())
+    widgets = dict(DW.Thread.widgets,
+                   thread_header=ThreadHeader(),
+                   post=Post())
+
 
 class Forum(DW.Discussion):
-    template='jinja:forgediscussion:templates/discussion_widgets/discussion.html'
-    allow_create_thread=True
+    template = 'jinja:forgediscussion:templates/discussion_widgets/discussion.html'
+    allow_create_thread = True
     show_subject = True
-    widgets=dict(DW.Discussion.widgets,
-                 discussion_header=ForumHeader(),
-                 forum_subscription_form=ForumSubscriptionForm(),
-                 whole_forum_subscription_form=SubscribeForm(),
-                 subscription_form=ThreadSubscriptionForm()
-                 )
+    widgets = dict(DW.Discussion.widgets,
+                   discussion_header=ForumHeader(),
+                   forum_subscription_form=ForumSubscriptionForm(),
+                   whole_forum_subscription_form=SubscribeForm(),
+                   subscription_form=ThreadSubscriptionForm()
+                   )

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeDiscussion/setup.py
----------------------------------------------------------------------
diff --git a/ForgeDiscussion/setup.py b/ForgeDiscussion/setup.py
index 812c8a0..c978edd 100644
--- a/ForgeDiscussion/setup.py
+++ b/ForgeDiscussion/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgediscussion.version import __version__
 
@@ -25,7 +26,8 @@ setup(name='ForgeDiscussion',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/forgegit/git_main.py
----------------------------------------------------------------------
diff --git a/ForgeGit/forgegit/git_main.py b/ForgeGit/forgegit/git_main.py
index 1d11e41..fda6914 100644
--- a/ForgeGit/forgegit/git_main.py
+++ b/ForgeGit/forgegit/git_main.py
@@ -42,17 +42,19 @@ from .controllers import BranchBrowser
 
 log = logging.getLogger(__name__)
 
+
 class ForgeGitApp(RepositoryApp):
+
     '''This is the Git app for PyForge'''
     __version__ = version.__version__
-    tool_label='Git'
-    tool_description="""
+    tool_label = 'Git'
+    tool_description = """
         Git is a distributed version control system designed to
         handle everything from small to very large projects with speed
         and efficiency.
     """
-    ordinal=2
-    forkable=True
+    ordinal = 2
+    forkable = True
 
     def __init__(self, project, config):
         super(ForgeGitApp, self).__init__(project, config)
@@ -68,7 +70,8 @@ class ForgeGitApp(RepositoryApp):
 
     @property
     def default_branch_name(self):
-        default_branch_name = getattr(self.repo, 'default_branch_name', 'master')
+        default_branch_name = getattr(
+            self.repo, 'default_branch_name', 'master')
         if not default_branch_name:
             default_branch_name = 'master'
         return default_branch_name
@@ -76,9 +79,10 @@ class ForgeGitApp(RepositoryApp):
     def admin_menu(self):
         links = []
         links.append(SitemapEntry(
-                'Set default branch',
-                c.project.url()+'admin/'+self.config.options.mount_point+'/' + 'set_default_branch_name',
-                className='admin_modal'))
+            'Set default branch',
+            c.project.url() + 'admin/' + self.config.options.mount_point +
+            '/' + 'set_default_branch_name',
+            className='admin_modal'))
         links += super(ForgeGitApp, self).admin_menu()
         return links
 
@@ -91,7 +95,8 @@ class ForgeGitApp(RepositoryApp):
             status='initializing',
             fs_path=self.config.options.get('fs_path'))
         ThreadLocalORMSession.flush_all()
-        cloned_from_project_id = self.config.options.get('cloned_from_project_id')
+        cloned_from_project_id = self.config.options.get(
+            'cloned_from_project_id')
         cloned_from_repo_id = self.config.options.get('cloned_from_repo_id')
         init_from_url = self.config.options.get('init_from_url')
         init_from_path = self.config.options.get('init_from_path')
@@ -112,7 +117,8 @@ class ForgeGitApp(RepositoryApp):
 
 def git_timers():
     return [
-        Timer('git_lib.{method_name}', git.Repo, 'rev_parse', 'iter_commits', 'commit'),
+        Timer('git_lib.{method_name}', git.Repo,
+              'rev_parse', 'iter_commits', 'commit'),
         Timer('git_lib.{method_name}', GM.git_repo.GitLibCmdWrapper, 'log'),
     ]
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/forgegit/model/git_repo.py
----------------------------------------------------------------------
diff --git a/ForgeGit/forgegit/model/git_repo.py b/ForgeGit/forgegit/model/git_repo.py
index 961539c..e5dce43 100644
--- a/ForgeGit/forgegit/model/git_repo.py
+++ b/ForgeGit/forgegit/model/git_repo.py
@@ -50,7 +50,9 @@ log = logging.getLogger(__name__)
 gitdb.util.mman = gitdb.util.mman.__class__(
     max_open_handles=128)
 
+
 class GitLibCmdWrapper(object):
+
     def __init__(self, client):
         self.client = client
 
@@ -60,12 +62,14 @@ class GitLibCmdWrapper(object):
     def log(self, *args, **kwargs):
         return self.client.log(*args, **kwargs)
 
+
 class Repository(M.Repository):
-    tool_name='Git'
-    repo_id='git'
-    type_s='Git Repository'
+    tool_name = 'Git'
+    repo_id = 'git'
+    type_s = 'Git Repository'
+
     class __mongometa__:
-        name='git-repository'
+        name = 'git-repository'
 
     @LazyProperty
     def _impl(self):
@@ -86,11 +90,11 @@ class Repository(M.Repository):
             )
         else:
             fetch_command = (
-                    'git remote add merge_request {}\n'
-                    'git fetch merge_request'
-                ).format(
-                    merge_request.downstream_repo_url,
-                )
+                'git remote add merge_request {}\n'
+                'git fetch merge_request'
+            ).format(
+                merge_request.downstream_repo_url,
+            )
         return 'git checkout %s\n%s\ngit merge %s' % (
             merge_request.target_branch,
             fetch_command,
@@ -100,6 +104,7 @@ class Repository(M.Repository):
     def rev_to_commit_id(self, rev):
         return self._impl.rev_parse(rev).hexsha
 
+
 class GitImplementation(M.RepositoryImplementation):
     post_receive_template = string.Template(
         '#!/bin/bash\n'
@@ -157,7 +162,8 @@ class GitImplementation(M.RepositoryImplementation):
                 shutil.rmtree(fullname)
             if self.can_hotcopy(source_url):
                 shutil.copytree(source_url, fullname)
-                post_receive = os.path.join(self._repo.full_fs_path, 'hooks', 'post-receive')
+                post_receive = os.path.join(
+                    self._repo.full_fs_path, 'hooks', 'post-receive')
                 if os.path.exists(post_receive):
                     os.rename(post_receive, post_receive + '-user')
                 repo = git.Repo(fullname)
@@ -188,7 +194,8 @@ class GitImplementation(M.RepositoryImplementation):
                     url = ' at ' + request.url
                 except:
                     pass
-                log.exception('Error with rev_parse(%s)%s' % (str(rev) + '^0', url))
+                log.exception('Error with rev_parse(%s)%s' %
+                              (str(rev) + '^0', url))
         if result:
             result.set_context(self._repo)
         return result
@@ -201,21 +208,23 @@ class GitImplementation(M.RepositoryImplementation):
             return  # empty repo
         seen = set()
         for ci in self._git.iter_commits(all=True, topo_order=True):
-            if ci.binsha in seen: continue
+            if ci.binsha in seen:
+                continue
             seen.add(ci.binsha)
             yield ci.hexsha
 
     def new_commits(self, all_commits=False):
         graph = {}
 
-        to_visit = [ self._git.commit(rev=hd.object_id) for hd in self.heads ]
+        to_visit = [self._git.commit(rev=hd.object_id) for hd in self.heads]
         while to_visit:
             obj = to_visit.pop()
-            if obj.hexsha in graph: continue
+            if obj.hexsha in graph:
+                continue
             if not all_commits:
                 # Look up the object
                 if M.repo.Commit.query.find(dict(_id=obj.hexsha)).count():
-                    graph[obj.hexsha] = set() # mark as parentless
+                    graph[obj.hexsha] = set()  # mark as parentless
                     continue
             graph[obj.hexsha] = set(p.hexsha for p in obj.parents)
             to_visit += obj.parents
@@ -224,21 +233,22 @@ class GitImplementation(M.RepositoryImplementation):
     def refresh_commit_info(self, oid, seen, lazy=True):
         from allura.model.repo import CommitDoc
         ci_doc = CommitDoc.m.get(_id=oid)
-        if ci_doc and lazy: return False
+        if ci_doc and lazy:
+            return False
         ci = self._git.rev_parse(oid)
         args = dict(
             tree_id=ci.tree.hexsha,
-            committed = Object(
+            committed=Object(
                 name=h.really_unicode(ci.committer.name),
                 email=h.really_unicode(ci.committer.email),
                 date=datetime.utcfromtimestamp(ci.committed_date)),
-            authored = Object(
+            authored=Object(
                 name=h.really_unicode(ci.author.name),
                 email=h.really_unicode(ci.author.email),
                 date=datetime.utcfromtimestamp(ci.authored_date)),
             message=h.really_unicode(ci.message or ''),
             child_ids=[],
-            parent_ids = [ p.hexsha for p in ci.parents ])
+            parent_ids=[p.hexsha for p in ci.parents])
         if ci_doc:
             ci_doc.update(**args)
             ci_doc.m.save()
@@ -247,19 +257,21 @@ class GitImplementation(M.RepositoryImplementation):
             try:
                 ci_doc.m.insert(safe=True)
             except DuplicateKeyError:
-                if lazy: return False
+                if lazy:
+                    return False
         self.refresh_tree_info(ci.tree, seen, lazy)
         return True
 
     def refresh_tree_info(self, tree, seen, lazy=True):
         from allura.model.repo import TreeDoc
-        if lazy and tree.binsha in seen: return
+        if lazy and tree.binsha in seen:
+            return
         seen.add(tree.binsha)
         doc = TreeDoc(dict(
-                _id=tree.hexsha,
-                tree_ids=[],
-                blob_ids=[],
-                other_ids=[]))
+            _id=tree.hexsha,
+            tree_ids=[],
+            blob_ids=[],
+            other_ids=[]))
         for o in tree:
             if o.type == 'submodule':
                 continue
@@ -313,37 +325,38 @@ class GitImplementation(M.RepositoryImplementation):
                     if renamed and renamed['to'] == path:
                         rename_details['path'] = '/' + renamed['from']
                         # get first rev **before** rename
-                        _iter = self._git.iter_commits(revs, renamed['from'], max_count=2)
+                        _iter = self._git.iter_commits(
+                            revs, renamed['from'], max_count=2)
                         prev_rev = list(_iter)[1]
                         rename_details['commit_url'] = self._repo.url_for_commit(
                             prev_rev.hexsha
                         )
 
                     try:
-                        node = ci.tree/path
+                        node = ci.tree / path
                         size = node.size if node.type == 'blob' else None
                     except KeyError as e:
                         size = None
                     if rename_details:
                         path = rename_details['path'].strip('/')
                 yield {
-                        'id': ci.hexsha,
-                        'message': h.really_unicode(ci.message or '--none--'),
-                        'authored': {
-                                'name': h.really_unicode(ci.author.name or '--none--'),
-                                'email': h.really_unicode(ci.author.email),
-                                'date': datetime.utcfromtimestamp(ci.authored_date),
-                            },
-                        'committed': {
-                                'name': h.really_unicode(ci.committer.name or '--none--'),
-                                'email': h.really_unicode(ci.committer.email),
-                                'date': datetime.utcfromtimestamp(ci.committed_date),
-                            },
-                        'refs': refs,
-                        'parents': [pci.hexsha for pci in ci.parents],
-                        'size': size,
-                        'rename_details': rename_details,
-                    }
+                    'id': ci.hexsha,
+                    'message': h.really_unicode(ci.message or '--none--'),
+                    'authored': {
+                        'name': h.really_unicode(ci.author.name or '--none--'),
+                        'email': h.really_unicode(ci.author.email),
+                        'date': datetime.utcfromtimestamp(ci.authored_date),
+                    },
+                    'committed': {
+                        'name': h.really_unicode(ci.committer.name or '--none--'),
+                        'email': h.really_unicode(ci.committer.email),
+                        'date': datetime.utcfromtimestamp(ci.committed_date),
+                    },
+                    'refs': refs,
+                    'parents': [pci.hexsha for pci in ci.parents],
+                    'size': size,
+                    'rename_details': rename_details,
+                }
 
     def _iter_commits_with_refs(self, *args, **kwargs):
         """
@@ -376,7 +389,8 @@ class GitImplementation(M.RepositoryImplementation):
             D\t<some path> # other cases
             etc
         """
-        proc = self._git.git.log(*args, format='%H%x00%d', as_process=True, **kwargs)
+        proc = self._git.git.log(*args,
+                                 format='%H%x00%d', as_process=True, **kwargs)
         stream = proc.stdout
         commit_lines = []
         while True:
@@ -390,9 +404,11 @@ class GitImplementation(M.RepositoryImplementation):
                 ]
                 if commit_lines:
                     hexsha, decoration = commit_lines[0].split('\x00')
-                    refs = decoration.strip(' ()').split(', ') if decoration else []
+                    refs = decoration.strip(' ()').split(
+                        ', ') if decoration else []
                     renamed = {}
-                    if len(commit_lines) > 1:  # merge commits don't have any --name-status output
+                    # merge commits don't have any --name-status output
+                    if len(commit_lines) > 1:
                         name_stat_parts = commit_lines[1].split(' ')
                         if name_stat_parts[0] in ['R100', 'R096']:
                             renamed['from'] = name_stat_parts[1]
@@ -416,7 +432,8 @@ class GitImplementation(M.RepositoryImplementation):
         'Set up the git post-commit hook'
         text = self.post_receive_template.substitute(
             url=self._repo.refresh_url())
-        fn = os.path.join(self._repo.fs_path, self._repo.name, 'hooks', 'post-receive')
+        fn = os.path.join(self._repo.fs_path, self._repo.name,
+                          'hooks', 'post-receive')
         with open(fn, 'w') as fp:
             fp.write(text)
         os.chmod(fn, 0755)
@@ -425,8 +442,8 @@ class GitImplementation(M.RepositoryImplementation):
         evens = oid[::2]
         odds = oid[1::2]
         binsha = ''
-        for e,o in zip(evens, odds):
-            binsha += chr(int(e+o, 16))
+        for e, o in zip(evens, odds):
+            binsha += chr(int(e + o, 16))
         return git.Object.new_from_sha(self._git, binsha)
 
     def rev_parse(self, rev):
@@ -434,7 +451,8 @@ class GitImplementation(M.RepositoryImplementation):
 
     def symbolics_for_commit(self, commit):
         try:
-            branches = [b.name for b in self.branches if b.object_id == commit._id]
+            branches = [
+                b.name for b in self.branches if b.object_id == commit._id]
             tags = [t.name for t in self.tags if t.object_id == commit._id]
             return branches, tags
         except git.GitCommandError:
@@ -449,11 +467,14 @@ class GitImplementation(M.RepositoryImplementation):
         if not os.path.exists(self._repo.tarball_path):
             os.makedirs(self._repo.tarball_path)
         archive_name = self._repo.tarball_filename(commit)
-        filename = os.path.join(self._repo.tarball_path, '%s%s' % (archive_name, '.zip'))
-        tmpfilename = os.path.join(self._repo.tarball_path, '%s%s' % (archive_name, '.tmp'))
+        filename = os.path.join(self._repo.tarball_path, '%s%s' %
+                                (archive_name, '.zip'))
+        tmpfilename = os.path.join(self._repo.tarball_path, '%s%s' %
+                                   (archive_name, '.tmp'))
         try:
             with open(tmpfilename, 'wb') as archive_file:
-                self._git.archive(archive_file, format='zip', treeish=commit, prefix=archive_name + '/')
+                self._git.archive(archive_file,
+                                  format='zip', treeish=commit, prefix=archive_name + '/')
             os.rename(tmpfilename, filename)
         finally:
             if os.path.exists(tmpfilename):
@@ -466,7 +487,7 @@ class GitImplementation(M.RepositoryImplementation):
         path = path.strip('/')
         ci = self._git.rev_parse(rev)
         try:
-            node = ci.tree/path
+            node = ci.tree / path
             return node.type == 'blob'
         except KeyError as e:
             return False
@@ -507,11 +528,11 @@ class GitImplementation(M.RepositoryImplementation):
         skip = 0
         while commit_id and not files:
             output = self._git.git.log(
-                    commit_id, '--', *paths,
-                    pretty='format:%H',
-                    name_only=True,
-                    max_count=1,
-                    skip=skip)
+                commit_id, '--', *paths,
+                pretty='format:%H',
+                name_only=True,
+                max_count=1,
+                skip=skip)
             lines = output.split('\n')
             commit_id = lines[0]
             files = prefix_paths_union(paths, set(lines[1:]))
@@ -525,13 +546,14 @@ class GitImplementation(M.RepositoryImplementation):
 
     def get_changes(self, commit_id):
         return self._git.git.log(
-                commit_id,
-                name_only=True,
-                pretty='format:',
-                max_count=1).splitlines()[1:]
+            commit_id,
+            name_only=True,
+            pretty='format:',
+            max_count=1).splitlines()[1:]
+
 
 class _OpenedGitBlob(object):
-    CHUNK_SIZE=4096
+    CHUNK_SIZE = 4096
 
     def __init__(self, stream):
         self._stream = stream
@@ -548,16 +570,18 @@ class _OpenedGitBlob(object):
             # Replenish buffer until we have a line break
             while '\n' not in buffer:
                 chars = self._stream.read(self.CHUNK_SIZE)
-                if not chars: break
+                if not chars:
+                    break
                 buffer += chars
-            if not buffer: break
+            if not buffer:
+                break
             eol = buffer.find('\n')
             if eol == -1:
                 # end without \n
                 yield buffer
                 break
-            yield buffer[:eol+1]
-            buffer = buffer[eol+1:]
+            yield buffer[:eol + 1]
+            buffer = buffer[eol + 1:]
 
     def close(self):
         pass

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/forgegit/tests/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeGit/forgegit/tests/__init__.py b/ForgeGit/forgegit/tests/__init__.py
index fc1d735..b693039 100644
--- a/ForgeGit/forgegit/tests/__init__.py
+++ b/ForgeGit/forgegit/tests/__init__.py
@@ -18,7 +18,7 @@
 #       under the License.
 
 
-## Make our own Git tool test decorator
+# Make our own Git tool test decorator
 from allura.tests.decorators import with_tool
 
 with_git = with_tool('test', 'Git', 'src-git', 'Git', type='git')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/forgegit/tests/functional/test_auth.py
----------------------------------------------------------------------
diff --git a/ForgeGit/forgegit/tests/functional/test_auth.py b/ForgeGit/forgegit/tests/functional/test_auth.py
index 4a16aab..93a1af0 100644
--- a/ForgeGit/forgegit/tests/functional/test_auth.py
+++ b/ForgeGit/forgegit/tests/functional/test_auth.py
@@ -77,8 +77,8 @@ class TestGitUserPermissions(TestController):
     def _check_repo(self, path, username='test-admin', **kw):
         url = '/auth/repo_permissions'
         r = self.app.get(url, params=dict(
-                repo_path=path,
-                username=username), **kw)
+            repo_path=path,
+            username=username), **kw)
         try:
             return r.json
         except:
@@ -86,7 +86,8 @@ class TestGitUserPermissions(TestController):
 
     @with_git
     def test_list_repos(self):
-        r = self.app.get('/auth/repo_permissions', params=dict(username='test-admin'), status=200)
+        r = self.app.get('/auth/repo_permissions',
+                         params=dict(username='test-admin'), status=200)
         assert_equal(json.loads(r.body), {"allow_write": [
             '/git/test/src-git',
         ]})

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/forgegit/tests/functional/test_controllers.py
----------------------------------------------------------------------
diff --git a/ForgeGit/forgegit/tests/functional/test_controllers.py b/ForgeGit/forgegit/tests/functional/test_controllers.py
index f1c8a49..68d117e 100644
--- a/ForgeGit/forgegit/tests/functional/test_controllers.py
+++ b/ForgeGit/forgegit/tests/functional/test_controllers.py
@@ -35,6 +35,7 @@ from allura.tests.decorators import with_tool
 from forgegit.tests import with_git
 from forgegit import model as GM
 
+
 class _TestCase(TestController):
 
     def setUp(self):
@@ -104,45 +105,53 @@ class TestRootController(_TestCase):
 
     def test_commit_browser_data(self):
         resp = self.app.get('/src-git/commit_browser_data')
-        data = json.loads(resp.body);
+        data = json.loads(resp.body)
         assert data['max_row'] == 4
         assert data['next_column'] == 1
-        assert_equal(data['built_tree']['df30427c488aeab84b2352bdf88a3b19223f9d7a'],
-                {u'url': u'/p/test/src-git/ci/df30427c488aeab84b2352bdf88a3b19223f9d7a/',
-                 u'oid': u'df30427c488aeab84b2352bdf88a3b19223f9d7a',
-                 u'short_id': u'[df3042]',
-                 u'column': 0,
-                 u'parents': [u'6a45885ae7347f1cac5103b0050cc1be6a1496c8'],
-                 u'message': u'Add README', u'row': 2})
+        assert_equal(
+            data['built_tree']['df30427c488aeab84b2352bdf88a3b19223f9d7a'],
+            {u'url': u'/p/test/src-git/ci/df30427c488aeab84b2352bdf88a3b19223f9d7a/',
+             u'oid': u'df30427c488aeab84b2352bdf88a3b19223f9d7a',
+             u'short_id': u'[df3042]',
+             u'column': 0,
+             u'parents': [u'6a45885ae7347f1cac5103b0050cc1be6a1496c8'],
+             u'message': u'Add README', u'row': 2})
 
     def test_log(self):
-        resp = self.app.get('/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/')
+        resp = self.app.get(
+            '/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/')
         assert 'Initial commit' in resp
         assert '<div class="markdown_content"><p>Change README</p></div>' in resp
         assert 'tree/README?format=raw">Download</a>' not in resp
-        assert 'Tree' in resp.html.findAll('td')[2].text, resp.html.findAll('td')[2].text
-        resp = self.app.get('/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/?path=/README')
+        assert 'Tree' in resp.html.findAll(
+            'td')[2].text, resp.html.findAll('td')[2].text
+        resp = self.app.get(
+            '/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/?path=/README')
         assert 'View' in resp.html.findAll('td')[2].text
         assert 'Change README' in resp
         assert 'tree/README?format=raw">Download</a>' in resp
         assert 'Add README' in resp
         assert "Initial commit " not in resp
-        resp = self.app.get('/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/?path=/a/b/c/')
+        resp = self.app.get(
+            '/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/?path=/a/b/c/')
         assert 'Remove file' in resp
         assert 'Initial commit' in resp
         assert 'Add README' not in resp
         assert 'Change README' not in resp
-        resp = self.app.get('/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/?path=/not/exist')
+        resp = self.app.get(
+            '/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/?path=/not/exist')
         assert 'No (more) commits' in resp
 
     def test_diff_ui(self):
-        r = self.app.get('/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/?path=/README')
+        r = self.app.get(
+            '/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/?path=/README')
         assert '<div class="grid-19"><input type="button" value="Compare" class="compare_revision"></div>' in r
         assert '<input type="checkbox" class="revision"' in r
         assert 'revision="1e146e67985dcd71c74de79613719bef7bddca4a"' in r
         assert 'url_commit="/p/test/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/">' in r
 
-        r = self.app.get('/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/')
+        r = self.app.get(
+            '/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/log/')
         assert '<div class="grid-19"><input type="button" value="Compare" class="compare_revision"></div>' not in r
         assert '<input type="checkbox" class="revision"' not in r
         assert 'revision="1e146e67985dcd71c74de79613719bef7bddca4a"' not in r
@@ -174,7 +183,8 @@ class TestRootController(_TestCase):
             title = channel.find('title').text
             assert_equal(title, 'test Git changes')
             description = channel.find('description').text
-            assert_equal(description, 'Recent changes to Git repository in test project')
+            assert_equal(description,
+                         'Recent changes to Git repository in test project')
             link = channel.find('link').text
             assert_equal(link, 'http://localhost/p/test/src-git/')
             commit = channel.find('item')
@@ -200,7 +210,7 @@ class TestRootController(_TestCase):
         assert len(resp.html.findAll('tr')) == 2, resp.showbrowser()
         resp = self.app.get(ci + 'tree/')
         assert 'README' in resp, resp.showbrowser()
-        links = [ a.get('href') for a in resp.html.findAll('a') ]
+        links = [a.get('href') for a in resp.html.findAll('a')]
         assert 'README' in links, resp.showbrowser()
         assert 'README/' not in links, resp.showbrowser()
 
@@ -219,8 +229,10 @@ class TestRootController(_TestCase):
     def test_file(self):
         ci = self._get_ci()
         resp = self.app.get(ci + 'tree/README')
-        assert 'README' in resp.html.find('h2', {'class':'dark title'}).contents[2]
-        content = str(resp.html.find('div', {'class':'clip grid-19 codebrowser'}))
+        assert 'README' in resp.html.find(
+            'h2', {'class': 'dark title'}).contents[2]
+        content = str(
+            resp.html.find('div', {'class': 'clip grid-19 codebrowser'}))
         assert 'This is readme' in content, content
         assert '<span id="l1" class="code_block">' in resp
         assert 'var hash = window.location.hash.substring(1);' in resp
@@ -231,7 +243,8 @@ class TestRootController(_TestCase):
 
     def test_diff(self):
         ci = self._get_ci()
-        resp = self.app.get(ci + 'tree/README?diff=df30427c488aeab84b2352bdf88a3b19223f9d7a')
+        resp = self.app.get(
+            ci + 'tree/README?diff=df30427c488aeab84b2352bdf88a3b19223f9d7a')
         assert 'readme' in resp, resp.showbrowser()
         assert '+++' in resp, resp.showbrowser()
 
@@ -239,7 +252,8 @@ class TestRootController(_TestCase):
         ci = self._get_ci()
         fn = 'tree/README?diff=df30427c488aeab84b2352bdf88a3b19223f9d7a'
         r = self.app.get(ci + fn + '&diformat=regular')
-        assert fn + '&amp;diformat=sidebyside">Switch to side-by-side view</a>' in r
+        assert fn + \
+            '&amp;diformat=sidebyside">Switch to side-by-side view</a>' in r
 
         r = self.app.get(ci + fn + '&diformat=sidebyside')
         assert fn + '&amp;diformat=regular">Switch to unified view</a>' in r
@@ -249,7 +263,8 @@ class TestRootController(_TestCase):
         notification = M.Notification.query.find(
             dict(subject='[test:src-git] 5 new commits to Test Project Git')).first()
         assert notification
-        domain = '.'.join(reversed(c.app.url[1:-1].split('/'))).replace('_', '-')
+        domain = '.'.join(
+            reversed(c.app.url[1:-1].split('/'))).replace('_', '-')
         common_suffix = tg.config.get('forgemail.domain', '.sourceforge.net')
         email = 'noreply@%s%s' % (domain, common_suffix)
         assert email in notification['reply_to_address']
@@ -257,7 +272,8 @@ class TestRootController(_TestCase):
     def test_file_force_display(self):
         ci = self._get_ci()
         resp = self.app.get(ci + 'tree/README?force=True')
-        content = str(resp.html.find('div', {'class':'clip grid-19 codebrowser'}))
+        content = str(
+            resp.html.find('div', {'class': 'clip grid-19 codebrowser'}))
         assert re.search(r'<pre>.*This is readme', content), content
         assert '</pre>' in content, content
 
@@ -270,7 +286,8 @@ class TestRootController(_TestCase):
         r = self.app.get(ci + 'tree/index.html')
         header = r.html.find('h2', {'class': 'dark title'}).contents[2]
         assert 'index.html' in header, header
-        content = str(r.html.find('div', {'class': 'clip grid-19 codebrowser'}))
+        content = str(
+            r.html.find('div', {'class': 'clip grid-19 codebrowser'}))
         assert ('<span class="nt">&lt;h1&gt;</span>'
                 'index.html'
                 '<span class="nt">&lt;/h1&gt;</span>') in content, content
@@ -284,7 +301,8 @@ class TestRootController(_TestCase):
         header = r.html.find('h2', {'class': 'dark title'})
         assert 'index' in header.contents[3], header.contents[3]
         assert 'index.htm' in header.contents[4], header.contents[4]
-        content = str(r.html.find('div', {'class': 'clip grid-19 codebrowser'}))
+        content = str(
+            r.html.find('div', {'class': 'clip grid-19 codebrowser'}))
         assert ('<span class="nt">&lt;h1&gt;</span>'
                 'index/index.htm'
                 '<span class="nt">&lt;/h1&gt;</span>') in content, content
@@ -296,27 +314,27 @@ class TestRootController(_TestCase):
         # user is not subscribed
         assert not M.Mailbox.subscribed(user_id=user._id)
         r = self.app.get(ci + 'tree/',
-                extra_environ={'username': str(user.username)})
+                         extra_environ={'username': str(user.username)})
         link = r.html.find('a', 'artifact_subscribe')
         assert link is not None, r.html
 
         # subscribe
         self.app.get(ci + 'tree/subscribe?subscribe=True',
-                extra_environ={'username': str(user.username)}).follow()
+                     extra_environ={'username': str(user.username)}).follow()
         # user is subscribed
         assert M.Mailbox.subscribed(user_id=user._id)
         r = self.app.get(ci + 'tree/',
-                extra_environ={'username': str(user.username)})
+                         extra_environ={'username': str(user.username)})
         link = r.html.find('a', 'artifact_unsubscribe active')
         assert link is not None, r.html
 
         # unsubscribe
         self.app.get(ci + 'tree/subscribe?unsubscribe=True',
-                extra_environ={'username': str(user.username)}).follow()
+                     extra_environ={'username': str(user.username)}).follow()
         # user is not subscribed
         assert not M.Mailbox.subscribed(user_id=user._id)
         r = self.app.get(ci + 'tree/',
-                extra_environ={'username': str(user.username)})
+                         extra_environ={'username': str(user.username)})
         link = r.html.find('a', 'artifact_subscribe')
         assert link is not None, r.html
 
@@ -363,7 +381,8 @@ class TestRootController(_TestCase):
         self.setup_testgit_index_repo()
         r = self.app.get('/p/test/testgit-index/ci/master/tree/index/')
         form = r.html.find('form', 'tarball')
-        assert_equal(form.get('action'), '/p/test/testgit-index/ci/master/tarball')
+        assert_equal(
+            form.get('action'), '/p/test/testgit-index/ci/master/tarball')
         assert_equal(form.input.get('value'), '/index')
 
     def test_default_branch(self):
@@ -376,14 +395,17 @@ class TestRootController(_TestCase):
     def test_set_default_branch(self):
         r = self.app.get('/p/test/admin/src-git/set_default_branch_name')
         assert '<input type="text" name="branch_name" id="branch_name"  value="master"/>' in r
-        self.app.post('/p/test/admin/src-git/set_default_branch_name', params={'branch_name':'zz'})
+        self.app.post('/p/test/admin/src-git/set_default_branch_name',
+                      params={'branch_name': 'zz'})
         r = self.app.get('/p/test/admin/src-git/set_default_branch_name')
         assert '<input type="text" name="branch_name" id="branch_name"  value="zz"/>' in r
         r = self.app.get('/p/test/src-git/').follow().follow()
         assert '<span class="scm-branch-label">zz</span>' in r
-        assert_in('<span>bad</span>', r)  # 'bad' is a file name which in zz, but not in master
+        # 'bad' is a file name which in zz, but not in master
+        assert_in('<span>bad</span>', r)
 
-        self.app.post('/p/test/admin/src-git/set_default_branch_name', params={'branch_name':'master'})
+        self.app.post('/p/test/admin/src-git/set_default_branch_name',
+                      params={'branch_name': 'master'})
         r = self.app.get('/p/test/src-git/').follow().follow()
         assert_not_in('<span>bad</span>', r)
         assert_in('<span>README</span>', r)
@@ -397,6 +419,7 @@ class TestRestController(_TestCase):
     def test_commits(self):
         self.app.get('/rest/p/test/src-git/commits', status=200)
 
+
 class TestFork(_TestCase):
 
     def setUp(self):
@@ -404,18 +427,19 @@ class TestFork(_TestCase):
         to_project = M.Project.query.get(
             shortname='test2', neighborhood_id=c.project.neighborhood_id)
         r = self.app.post('/src-git/fork', params=dict(
-                project_id=str(to_project._id),
-                mount_point='code',
-                mount_label='Test forked repository'))
+            project_id=str(to_project._id),
+            mount_point='code',
+            mount_label='Test forked repository'))
         assert "{status: 'error'}" not in str(r.follow())
         cloned_from = c.app.repo
         with h.push_context('test2', 'code', neighborhood='Projects'):
             c.app.repo.init_as_clone(
-                    cloned_from.full_fs_path,
-                    cloned_from.app.config.script_name(),
-                    cloned_from.full_fs_path)
+                cloned_from.full_fs_path,
+                cloned_from.app.config.script_name(),
+                cloned_from.full_fs_path)
             # Add commit to a forked repo, thus merge requests will not be empty
-            # clone repo to tmp location first (can't add commit to bare repos directly)
+            # clone repo to tmp location first (can't add commit to bare repos
+            # directly)
             clone_path = tempfile.mkdtemp()
             cloned = c.app.repo._impl._git.clone(clone_path)
             with open(clone_path + '/README', 'w+') as f:
@@ -473,8 +497,8 @@ class TestFork(_TestCase):
 
     def test_fork_links_go_to_fork(self):
         r = self._fork_page()
-        hrefs = ( a.get('href') for a in r.html('a') )
-        hrefs = ( href for href in hrefs if href and '/ci/' in href )
+        hrefs = (a.get('href') for a in r.html('a'))
+        hrefs = (href for href in hrefs if href and '/ci/' in href)
         for href in hrefs:
             assert href.startswith('/p/test2/code/'), href
 
@@ -504,7 +528,8 @@ class TestFork(_TestCase):
         c_id = self.forked_repo.get_heads()[0]['object_id']
         assert_equal(rev_links[0].get('href'), '/p/test2/code/ci/%s/' % c_id)
         assert_equal(rev_links[0].getText(), '[%s]' % c_id[:6])
-        assert_equal(browse_links[0].get('href'), '/p/test2/code/ci/%s/tree' % c_id)
+        assert_equal(browse_links[0].get('href'),
+                     '/p/test2/code/ci/%s/tree' % c_id)
         assert_equal(browse_links[0].getText(), 'Tree')
         merge_instructions = r.html.findAll('textarea')[0].getText()
         assert 'git checkout master' in merge_instructions
@@ -523,7 +548,8 @@ class TestFork(_TestCase):
         assert 'Merge Request #%s:  (rejected)' % mr_num in r, r
 
     def test_merge_request_default_branches(self):
-        _select_val = lambda r, n: r.html.find('select', {'name': n}).find(selected=True).string
+        _select_val = lambda r, n: r.html.find(
+            'select', {'name': n}).find(selected=True).string
         r = self.app.get('/p/test2/code/request_merge')
         assert_equal(_select_val(r, 'source_branch'), 'master')
         assert_equal(_select_val(r, 'target_branch'), 'master')
@@ -539,6 +565,7 @@ class TestFork(_TestCase):
         assert_equal(_select_val(r, 'source_branch'), 'zz')
         assert_equal(_select_val(r, 'target_branch'), 'zz')
 
+
 class TestDiff(TestController):
 
     def setUp(self):
@@ -559,14 +586,17 @@ class TestDiff(TestController):
         ThreadLocalORMSession.flush_all()
 
     def test_diff(self):
-        r = self.app.get('/src-git/ci/d961abbbf10341ee18a668c975842c35cfc0bef2/tree/1.png?barediff=2ce83a24e52c21e8d2146b1a04a20717c0bb08d7')
+        r = self.app.get(
+            '/src-git/ci/d961abbbf10341ee18a668c975842c35cfc0bef2/tree/1.png?barediff=2ce83a24e52c21e8d2146b1a04a20717c0bb08d7')
         assert 'alt="2ce83a2..."' in r
         assert 'alt="d961abb..."' in r
 
-        r = self.app.get('/src-git/ci/d961abbbf10341ee18a668c975842c35cfc0bef2/tree/1.png?diff=2ce83a24e52c21e8d2146b1a04a20717c0bb08d7')
+        r = self.app.get(
+            '/src-git/ci/d961abbbf10341ee18a668c975842c35cfc0bef2/tree/1.png?diff=2ce83a24e52c21e8d2146b1a04a20717c0bb08d7')
         assert 'alt="2ce83a2..."' in r
         assert 'alt="d961abb..."' in r
 
+
 class TestGitRename(TestController):
 
     def setUp(self):
@@ -587,17 +617,18 @@ class TestGitRename(TestController):
         ThreadLocalORMSession.flush_all()
 
     def test_log(self):
-        resp = self.app.get('/src-git/ci/259c77dd6ee0e6091d11e429b56c44ccbf1e64a3/log/?path=/f2.txt')
+        resp = self.app.get(
+            '/src-git/ci/259c77dd6ee0e6091d11e429b56c44ccbf1e64a3/log/?path=/f2.txt')
         assert '<b>renamed from</b>' in resp
         assert '/f.txt' in resp
         assert '(27 Bytes)' in resp
         assert '(19 Bytes)' in resp
 
-        resp = self.app.get('/src-git/ci/fbb0644603bb6ecee3ebb62efe8c86efc9b84ee6/log/?path=/f.txt')
+        resp = self.app.get(
+            '/src-git/ci/fbb0644603bb6ecee3ebb62efe8c86efc9b84ee6/log/?path=/f.txt')
         assert '(19 Bytes)' in resp
         assert '(10 Bytes)' in resp
 
-        resp = self.app.get('/src-git/ci/7c09182e61af959e4f1fb0e354bab49f14ef810d/tree/f.txt')
+        resp = self.app.get(
+            '/src-git/ci/7c09182e61af959e4f1fb0e354bab49f14ef810d/tree/f.txt')
         assert "2 lines (1 with data), 10 Bytes" in resp
-
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/forgegit/tests/model/test_repository.py
----------------------------------------------------------------------
diff --git a/ForgeGit/forgegit/tests/model/test_repository.py b/ForgeGit/forgegit/tests/model/test_repository.py
index 33875bd..eb68a1b 100644
--- a/ForgeGit/forgegit/tests/model/test_repository.py
+++ b/ForgeGit/forgegit/tests/model/test_repository.py
@@ -45,6 +45,7 @@ from forgegit import model as GM
 from forgegit.tests import with_git
 from forgewiki import model as WM
 
+
 class TestNewGit(unittest.TestCase):
 
     def setUp(self):
@@ -61,7 +62,7 @@ class TestNewGit(unittest.TestCase):
         c.app.repo.fs_path = repo_dir
         c.app.repo.name = 'testgit.git'
         self.repo = c.app.repo
-        #self.repo = GM.Repository(
+        # self.repo = GM.Repository(
         #     name='testgit.git',
         #     fs_path=repo_dir,
         #     url_path = '/test/',
@@ -80,7 +81,8 @@ class TestNewGit(unittest.TestCase):
         assert self.rev.tree._id == self.rev.tree_id
         assert self.rev.summary == self.rev.message.splitlines()[0]
         assert self.rev.shorthand_id() == '[1e146e]'
-        assert self.rev.symbolic_ids == (['master'], ['foo']), self.rev.symbolic_ids
+        assert self.rev.symbolic_ids == (
+            ['master'], ['foo']), self.rev.symbolic_ids
         assert self.rev.url() == (
             '/p/test/src-git/ci/'
             '1e146e67985dcd71c74de79613719bef7bddca4a/')
@@ -112,6 +114,7 @@ class TestNewGit(unittest.TestCase):
                 '/p/test/src-git/ci/'
                 '1e146e67985dcd71c74de79613719bef7bddca4a/')
 
+
 class TestGitRepo(unittest.TestCase, RepoImplTestBase):
 
     def setUp(self):
@@ -127,9 +130,9 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
         self.repo = GM.Repository(
             name='testgit.git',
             fs_path=repo_dir,
-            url_path = '/test/',
-            tool = 'git',
-            status = 'creating')
+            url_path='/test/',
+            tool='git',
+            status='creating')
         self.repo.refresh()
         ThreadLocalORMSession.flush_all()
         ThreadLocalORMSession.close_all()
@@ -137,10 +140,10 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
     def test_init(self):
         repo = GM.Repository(
             name='testgit.git',
-            fs_path=g.tmpdir+'/',
-            url_path = '/test/',
-            tool = 'git',
-            status = 'creating')
+            fs_path=g.tmpdir + '/',
+            url_path='/test/',
+            tool='git',
+            status='creating')
         dirname = os.path.join(repo.fs_path, repo.name)
         if os.path.exists(dirname):
             shutil.rmtree(dirname)
@@ -151,9 +154,9 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
         repo = GM.Repository(
             name='testgit.git',
             fs_path=g.tmpdir + '/',
-            url_path = '/test/',
-            tool = 'git',
-            status = 'creating')
+            url_path='/test/',
+            tool='git',
+            status='creating')
         repo_path = pkg_resources.resource_filename(
             'forgegit', 'tests/data/testgit.git')
         dirname = os.path.join(repo.fs_path, repo.name)
@@ -161,19 +164,23 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
             shutil.rmtree(dirname)
         repo.init()
         repo._impl.clone_from(repo_path)
-        assert not os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/update'))
-        assert not os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive-user'))
-        assert os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))
-        assert os.stat(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))[0] & stat.S_IXUSR
+        assert not os.path.exists(
+            os.path.join(g.tmpdir, 'testgit.git/hooks/update'))
+        assert not os.path.exists(
+            os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive-user'))
+        assert os.path.exists(
+            os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))
+        assert os.stat(
+            os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))[0] & stat.S_IXUSR
 
     @mock.patch('forgegit.model.git_repo.g.post_event')
     def test_clone(self, post_event):
         repo = GM.Repository(
             name='testgit.git',
             fs_path=g.tmpdir + '/',
-            url_path = '/test/',
-            tool = 'git',
-            status = 'creating')
+            url_path='/test/',
+            tool='git',
+            status='creating')
         repo_path = pkg_resources.resource_filename(
             'forgegit', 'tests/data/testgit.git')
         dirname = os.path.join(repo.fs_path, repo.name)
@@ -182,12 +189,18 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
         repo.init()
         repo._impl.clone_from(repo_path)
         assert len(list(repo.log()))
-        assert not os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/update'))
-        assert not os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive-user'))
-        assert os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))
-        assert os.stat(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))[0] & stat.S_IXUSR
-        with open(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive')) as f: c = f.read()
-        self.assertIn('curl -s http://localhost/auth/refresh_repo/p/test/src-git/\n', c)
+        assert not os.path.exists(
+            os.path.join(g.tmpdir, 'testgit.git/hooks/update'))
+        assert not os.path.exists(
+            os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive-user'))
+        assert os.path.exists(
+            os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))
+        assert os.stat(
+            os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))[0] & stat.S_IXUSR
+        with open(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive')) as f:
+            c = f.read()
+        self.assertIn(
+            'curl -s http://localhost/auth/refresh_repo/p/test/src-git/\n', c)
         self.assertIn('exec $DIR/post-receive-user\n', c)
         shutil.rmtree(dirname)
 
@@ -197,10 +210,10 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
         with h.push_config(tg.config, **{'scm.git.hotcopy': 'True'}):
             repo = GM.Repository(
                 name='testgit.git',
-                fs_path=g.tmpdir+'/',
-                url_path = '/test/',
-                tool = 'git',
-                status = 'creating')
+                fs_path=g.tmpdir + '/',
+                url_path='/test/',
+                tool='git',
+                status='creating')
             repo.app.config.options['hotcopy'] = True
             repo_path = pkg_resources.resource_filename(
                 'forgegit', 'tests/data/testgit.git')
@@ -211,12 +224,18 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
             repo._impl.clone_from(repo_path)
             assert not clone_from.called
             assert len(list(repo.log()))
-            assert os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/update'))
-            assert os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive-user'))
-            assert os.path.exists(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))
-            assert os.stat(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))[0] & stat.S_IXUSR
-            with open(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive')) as f: c = f.read()
-            self.assertIn('curl -s http://localhost/auth/refresh_repo/p/test/src-git/\n', c)
+            assert os.path.exists(
+                os.path.join(g.tmpdir, 'testgit.git/hooks/update'))
+            assert os.path.exists(
+                os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive-user'))
+            assert os.path.exists(
+                os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))
+            assert os.stat(
+                os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive'))[0] & stat.S_IXUSR
+            with open(os.path.join(g.tmpdir, 'testgit.git/hooks/post-receive')) as f:
+                c = f.read()
+            self.assertIn(
+                'curl -s http://localhost/auth/refresh_repo/p/test/src-git/\n', c)
             self.assertIn('exec $DIR/post-receive-user\n', c)
             shutil.rmtree(dirname)
 
@@ -283,7 +302,7 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
              'refs': [],
              'size': None,
              'rename_details': {}},
-            ])
+        ])
 
     def test_log_unicode(self):
         entries = list(self.repo.log(path=u'völundr', id_only=False))
@@ -316,7 +335,7 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
              'refs': [],
              'size': 15,
              'rename_details': {}},
-            ])
+        ])
 
     def test_commit(self):
         entry = self.repo.commit('HEAD')
@@ -342,13 +361,15 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
         self.assertEqual(new_tree.other_ids, orig_tree.other_ids)
 
     def test_notification_email(self):
-        send_notifications(self.repo, ['1e146e67985dcd71c74de79613719bef7bddca4a', ])
+        send_notifications(
+            self.repo, ['1e146e67985dcd71c74de79613719bef7bddca4a', ])
         ThreadLocalORMSession.flush_all()
         n = M.Notification.query.find(
             dict(subject='[test:src-git] [1e146e] - Rick Copeland: Change README')).first()
         assert n
         assert 'master: ' in n.text, n.text
-        send_notifications(self.repo, ['1e146e67985dcd71c74de79613719bef7bddca4a', 'df30427c488aeab84b2352bdf88a3b19223f9d7a'])
+        send_notifications(
+            self.repo, ['1e146e67985dcd71c74de79613719bef7bddca4a', 'df30427c488aeab84b2352bdf88a3b19223f9d7a'])
         ThreadLocalORMSession.flush_all()
         assert M.Notification.query.find(
             dict(subject='[test:src-git] 2 new commits to Test Project Git')).first()
@@ -356,50 +377,59 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
     def test_tarball(self):
         tmpdir = tg.config['scm.repos.tarball.root']
         if os.path.isfile(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip")):
-            os.remove(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip"))
-        assert_equal(self.repo.tarball_path, os.path.join(tmpdir, 'git/t/te/test/testgit.git'))
-        assert_equal(self.repo.tarball_url('HEAD'), 'file:///git/t/te/test/testgit.git/test-src-git-HEAD.zip')
+            os.remove(
+                os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip"))
+        assert_equal(self.repo.tarball_path,
+                     os.path.join(tmpdir, 'git/t/te/test/testgit.git'))
+        assert_equal(self.repo.tarball_url('HEAD'),
+                     'file:///git/t/te/test/testgit.git/test-src-git-HEAD.zip')
         self.repo.tarball('HEAD')
-        assert os.path.isfile(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip"))
+        assert os.path.isfile(
+            os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip"))
 
     def test_all_commit_ids(self):
         cids = list(self.repo.all_commit_ids())
         heads = [
-                '1e146e67985dcd71c74de79613719bef7bddca4a',  # master
-                '5c47243c8e424136fd5cdd18cd94d34c66d1955c',  # zz
-            ]
+            '1e146e67985dcd71c74de79613719bef7bddca4a',  # master
+            '5c47243c8e424136fd5cdd18cd94d34c66d1955c',  # zz
+        ]
         self.assertIn(cids[0], heads)  # repo head comes first
         for head in heads:
             self.assertIn(head, cids)  # all branches included
-        self.assertEqual(cids[-1], '9a7df788cf800241e3bb5a849c8870f2f8259d98')  # repo root comes last
+        # repo root comes last
+        self.assertEqual(cids[-1], '9a7df788cf800241e3bb5a849c8870f2f8259d98')
 
     def test_ls(self):
         lcd_map = self.repo.commit('HEAD').tree.ls()
         self.assertEqual(lcd_map, [{
-                'href': u'README',
-                'kind': 'BLOB',
-                'last_commit': {
+            'href': u'README',
+            'kind': 'BLOB',
+            'last_commit': {
                     'author': u'Rick Copeland',
-                    'author_email': u'rcopeland@geek.net',
-                    'author_url': None,
-                    'date': datetime.datetime(2010, 10, 7, 18, 44, 11),
-                    'href': u'/p/test/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/',
-                    'shortlink': u'[1e146e]',
-                    'summary': u'Change README'},
-                'name': u'README'}])
+                'author_email': u'rcopeland@geek.net',
+                'author_url': None,
+                'date': datetime.datetime(2010, 10, 7, 18, 44, 11),
+                'href': u'/p/test/src-git/ci/1e146e67985dcd71c74de79613719bef7bddca4a/',
+                'shortlink': u'[1e146e]',
+                'summary': u'Change README'},
+            'name': u'README'}])
 
     def test_tarball_status(self):
         tmpdir = tg.config['scm.repos.tarball.root']
         if os.path.isfile(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip")):
-            os.remove(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip"))
+            os.remove(
+                os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip"))
         if os.path.isfile(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.tmp")):
-            os.remove(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.tmp"))
+            os.remove(
+                os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.tmp"))
         if os.path.isdir(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD/")):
-            os.removedirs(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD/"))
+            os.removedirs(
+                os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD/"))
         self.repo.tarball('HEAD')
         assert_equal(self.repo.get_tarball_status('HEAD'), 'complete')
 
-        os.remove(os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip"))
+        os.remove(
+            os.path.join(tmpdir, "git/t/te/test/testgit.git/test-src-git-HEAD.zip"))
         assert_equal(self.repo.get_tarball_status('HEAD'), None)
 
     def test_tarball_status_task(self):
@@ -415,7 +445,7 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
             'task_name': 'allura.tasks.repo_tasks.tarball',
             'args': ['HEAD', ''],
             'state': {'$in': ['busy', 'ready']},
-            })
+        })
 
         # task is running
         task.state = 'busy'
@@ -433,25 +463,29 @@ class TestGitRepo(unittest.TestCase, RepoImplTestBase):
             repo2 = GM.Repository(
                 name='test',
                 fs_path=d.path,
-                url_path = '/test/',
-                tool = 'git',
-                status = 'creating')
+                url_path='/test/',
+                tool='git',
+                status='creating')
             repo2.init()
             assert repo2.is_empty()
             repo2.refresh()
             ThreadLocalORMSession.flush_all()
             assert repo2.is_empty()
 
+
 class TestGitImplementation(unittest.TestCase):
+
     def test_branches(self):
         repo_dir = pkg_resources.resource_filename(
             'forgegit', 'tests/data/testgit.git')
         repo = mock.Mock(full_fs_path=repo_dir)
         impl = GM.git_repo.GitImplementation(repo)
         self.assertEqual(impl.branches, [
-                Object(name='master', object_id='1e146e67985dcd71c74de79613719bef7bddca4a'),
-                Object(name='zz', object_id='5c47243c8e424136fd5cdd18cd94d34c66d1955c')
-            ])
+            Object(name='master',
+                   object_id='1e146e67985dcd71c74de79613719bef7bddca4a'),
+            Object(name='zz',
+                   object_id='5c47243c8e424136fd5cdd18cd94d34c66d1955c')
+        ])
 
     def test_tags(self):
         repo_dir = pkg_resources.resource_filename(
@@ -459,8 +493,9 @@ class TestGitImplementation(unittest.TestCase):
         repo = mock.Mock(full_fs_path=repo_dir)
         impl = GM.git_repo.GitImplementation(repo)
         self.assertEqual(impl.tags, [
-                Object(name='foo', object_id='1e146e67985dcd71c74de79613719bef7bddca4a'),
-            ])
+            Object(name='foo',
+                   object_id='1e146e67985dcd71c74de79613719bef7bddca4a'),
+        ])
 
     def test_last_commit_ids(self):
         repo_dir = pkg_resources.resource_filename(
@@ -469,12 +504,12 @@ class TestGitImplementation(unittest.TestCase):
         impl = GM.git_repo.GitImplementation(repo)
         lcd = lambda c, p: impl.last_commit_ids(mock.Mock(_id=c), p)
         self.assertEqual(lcd('13951944969cf45a701bf90f83647b309815e6d5', ['f2.txt', 'f3.txt']), {
-                'f2.txt': '259c77dd6ee0e6091d11e429b56c44ccbf1e64a3',
-                'f3.txt': '653667b582ef2950c1954a0c7e1e8797b19d778a',
-            })
+            'f2.txt': '259c77dd6ee0e6091d11e429b56c44ccbf1e64a3',
+            'f3.txt': '653667b582ef2950c1954a0c7e1e8797b19d778a',
+        })
         self.assertEqual(lcd('259c77dd6ee0e6091d11e429b56c44ccbf1e64a3', ['f2.txt', 'f3.txt']), {
-                'f2.txt': '259c77dd6ee0e6091d11e429b56c44ccbf1e64a3',
-            })
+            'f2.txt': '259c77dd6ee0e6091d11e429b56c44ccbf1e64a3',
+        })
 
     def test_last_commit_ids_threaded(self):
         with h.push_config(tg.config, lcd_thread_chunk_size=1):
@@ -488,7 +523,8 @@ class TestGitImplementation(unittest.TestCase):
             repo = mock.Mock(full_fs_path=repo_dir)
             _git.side_effect = ValueError
             impl = GM.git_repo.GitImplementation(repo)
-            lcds = impl.last_commit_ids(mock.Mock(_id='13951944969cf45a701bf90f83647b309815e6d5'), ['f2.txt', 'f3.txt'])
+            lcds = impl.last_commit_ids(
+                mock.Mock(_id='13951944969cf45a701bf90f83647b309815e6d5'), ['f2.txt', 'f3.txt'])
             self.assertEqual(lcds, {})
 
 
@@ -507,9 +543,9 @@ class TestGitCommit(unittest.TestCase):
         self.repo = GM.Repository(
             name='testgit.git',
             fs_path=repo_dir,
-            url_path = '/test/',
-            tool = 'git',
-            status = 'creating')
+            url_path='/test/',
+            tool='git',
+            status='creating')
         self.repo.refresh()
         self.rev = self.repo.commit('HEAD')
         ThreadLocalORMSession.flush_all()
@@ -517,7 +553,8 @@ class TestGitCommit(unittest.TestCase):
 
     def test_url(self):
         assert self.rev.url().endswith('ca4a/')
-        assert self.repo._impl.url_for_commit('master').endswith('master/'), self.repo._impl.url_for_commit('master')
+        assert self.repo._impl.url_for_commit('master').endswith(
+            'master/'), self.repo._impl.url_for_commit('master')
 
     def test_committer_url(self):
         assert self.rev.committer_url is None
@@ -530,9 +567,9 @@ class TestGitCommit(unittest.TestCase):
 
     def test_diff(self):
         diffs = (self.rev.diffs.added
-                 +self.rev.diffs.removed
-                 +self.rev.diffs.changed
-                 +self.rev.diffs.copied)
+                 + self.rev.diffs.removed
+                 + self.rev.diffs.changed
+                 + self.rev.diffs.copied)
         for d in diffs:
             print d
 
@@ -540,30 +577,33 @@ class TestGitCommit(unittest.TestCase):
         # path only
         commits = list(self.repo.log(id_only=True))
         assert_equal(commits, [
-                "1e146e67985dcd71c74de79613719bef7bddca4a",
-                "df30427c488aeab84b2352bdf88a3b19223f9d7a",
-                "6a45885ae7347f1cac5103b0050cc1be6a1496c8",
-                "9a7df788cf800241e3bb5a849c8870f2f8259d98",
-            ])
+            "1e146e67985dcd71c74de79613719bef7bddca4a",
+            "df30427c488aeab84b2352bdf88a3b19223f9d7a",
+            "6a45885ae7347f1cac5103b0050cc1be6a1496c8",
+            "9a7df788cf800241e3bb5a849c8870f2f8259d98",
+        ])
         commits = list(self.repo.log(self.repo.head, 'README', id_only=True))
         assert_equal(commits, [
-                "1e146e67985dcd71c74de79613719bef7bddca4a",
-                "df30427c488aeab84b2352bdf88a3b19223f9d7a",
-            ])
-        commits = list(self.repo.log("df30427c488aeab84b2352bdf88a3b19223f9d7a", 'README', id_only=True))
+            "1e146e67985dcd71c74de79613719bef7bddca4a",
+            "df30427c488aeab84b2352bdf88a3b19223f9d7a",
+        ])
+        commits = list(
+            self.repo.log("df30427c488aeab84b2352bdf88a3b19223f9d7a", 'README', id_only=True))
         assert_equal(commits, [
-                "df30427c488aeab84b2352bdf88a3b19223f9d7a",
-            ])
+            "df30427c488aeab84b2352bdf88a3b19223f9d7a",
+        ])
         commits = list(self.repo.log(self.repo.head, '/a/b/c/', id_only=True))
         assert_equal(commits, [
-                "6a45885ae7347f1cac5103b0050cc1be6a1496c8",
-                "9a7df788cf800241e3bb5a849c8870f2f8259d98",
-            ])
-        commits = list(self.repo.log("9a7df788cf800241e3bb5a849c8870f2f8259d98", '/a/b/c/', id_only=True))
+            "6a45885ae7347f1cac5103b0050cc1be6a1496c8",
+            "9a7df788cf800241e3bb5a849c8870f2f8259d98",
+        ])
+        commits = list(
+            self.repo.log("9a7df788cf800241e3bb5a849c8870f2f8259d98", '/a/b/c/', id_only=True))
         assert_equal(commits, [
-                "9a7df788cf800241e3bb5a849c8870f2f8259d98",
-            ])
-        commits = list(self.repo.log(self.repo.head, '/does/not/exist/', id_only=True))
+            "9a7df788cf800241e3bb5a849c8870f2f8259d98",
+        ])
+        commits = list(
+            self.repo.log(self.repo.head, '/does/not/exist/', id_only=True))
         assert_equal(commits, [])
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/forgegit/tests/test_git_app.py
----------------------------------------------------------------------
diff --git a/ForgeGit/forgegit/tests/test_git_app.py b/ForgeGit/forgegit/tests/test_git_app.py
index 5d745b4..0b59396 100644
--- a/ForgeGit/forgegit/tests/test_git_app.py
+++ b/ForgeGit/forgegit/tests/test_git_app.py
@@ -25,6 +25,7 @@ from alluratest.controller import setup_basic_test, setup_global_objects
 from allura.lib import helpers as h
 from forgegit.tests import with_git
 
+
 class TestGitApp(unittest.TestCase):
 
     def setUp(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/forgegit/tests/test_tasks.py
----------------------------------------------------------------------
diff --git a/ForgeGit/forgegit/tests/test_tasks.py b/ForgeGit/forgegit/tests/test_tasks.py
index 0f82c08..3f003d6 100644
--- a/ForgeGit/forgegit/tests/test_tasks.py
+++ b/ForgeGit/forgegit/tests/test_tasks.py
@@ -27,6 +27,7 @@ from allura.tasks import repo_tasks
 from allura import model as M
 from forgegit.tests import with_git
 
+
 class TestGitTasks(unittest.TestCase):
 
     def setUp(self):
@@ -52,7 +53,8 @@ class TestGitTasks(unittest.TestCase):
         with mock.patch.object(c.app.repo, 'init_as_clone') as f:
             c.app.config.options['init_from_path'] = 'test_path'
             c.app.config.options['init_from_url'] = 'test_url'
-            repo_tasks.reclone_repo(prefix='p', shortname='test', mount_point='src-git')
+            repo_tasks.reclone_repo(
+                prefix='p', shortname='test', mount_point='src-git')
             M.main_orm_session.flush()
             f.assert_called_with('test_path', None, 'test_url')
             assert ns + 1 == M.Notification.query.find().count()
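
The test above relies on mock.patch.object to keep reclone_repo from touching a real repository. A minimal sketch of that pattern on a stand-in object (the FakeRepo class and its method are hypothetical, not Allura code):

    import mock

    class FakeRepo(object):
        def init_as_clone(self, path, name, url):
            raise RuntimeError('should never run under the mock')

    repo = FakeRepo()
    with mock.patch.object(repo, 'init_as_clone') as clone:
        repo.init_as_clone('test_path', None, 'test_url')   # intercepted by the mock
        clone.assert_called_with('test_path', None, 'test_url')
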

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeGit/setup.py
----------------------------------------------------------------------
diff --git a/ForgeGit/setup.py b/ForgeGit/setup.py
index 3d14ef3..ee01f0b 100644
--- a/ForgeGit/setup.py
+++ b/ForgeGit/setup.py
@@ -16,7 +16,8 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 from forgegit.version import __version__
 
@@ -25,7 +26,8 @@ setup(name='ForgeGit',
       description="",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/docs/conf.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/docs/conf.py b/ForgeImporters/docs/conf.py
index 07430a5..b19661b 100644
--- a/ForgeImporters/docs/conf.py
+++ b/ForgeImporters/docs/conf.py
@@ -28,18 +28,20 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys, os
+import sys
+import os
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.append(os.path.abspath('.'))
+# sys.path.append(os.path.abspath('.'))
 
-# -- General configuration -----------------------------------------------------
+# -- General configuration -----------------------------------------------
 
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig']
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
+              'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig']
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
@@ -104,7 +106,7 @@ pygments_style = 'sphinx'
 #modindex_common_prefix = []
 
 
-# -- Options for HTML output ---------------------------------------------------
+# -- Options for HTML output ---------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  Major themes that come with
 # Sphinx are currently 'default' and 'sphinxdoc'.
@@ -178,7 +180,7 @@ html_static_path = ['_static']
 htmlhelp_basename = 'alluradoc'
 
 
-# -- Options for LaTeX output --------------------------------------------------
+# -- Options for LaTeX output --------------------------------------------
 
 # The paper size ('letter' or 'a4').
 #latex_paper_size = 'letter'
@@ -189,8 +191,8 @@ htmlhelp_basename = 'alluradoc'
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'allura.tex', u'allura Documentation',
-   u'Cory Johns, Tim Van Steenburgh, Dave Brondsema', 'manual'),
+    ('index', 'allura.tex', u'allura Documentation',
+     u'Cory Johns, Tim Van Steenburgh, Dave Brondsema', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/base.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/base.py b/ForgeImporters/forgeimporters/base.py
index 922367d..9be397b 100644
--- a/ForgeImporters/forgeimporters/base.py
+++ b/ForgeImporters/forgeimporters/base.py
@@ -57,6 +57,7 @@ log = logging.getLogger(__name__)
 
 
 class ProjectImportForm(schema.Schema):
+
     def __init__(self, source):
         super(ProjectImportForm, self).__init__()
         provider = ProjectRegistrationProvider.get()
@@ -69,6 +70,7 @@ class ProjectImportForm(schema.Schema):
 
 
 class ToolImportForm(schema.Schema):
+
     def __init__(self, tool_class):
         super(ToolImportForm, self).__init__()
         self.add_field('mount_point', v.MountPointValidator(tool_class))
@@ -76,6 +78,7 @@ class ToolImportForm(schema.Schema):
 
 
 class ImportErrorHandler(object):
+
     def __init__(self, importer, project_name, project):
         self.importer = importer
         self.project_name = project_name
@@ -89,19 +92,19 @@ class ImportErrorHandler(object):
             self.importer.clear_pending(self.project)
         if exc_type:
             g.post_event('import_tool_task_failed',
-                error=str(exc_val),
-                traceback=traceback.format_exc(),
-                importer_source=self.importer.source,
-                importer_tool_label=self.importer.tool_label,
-                project_name=self.project_name,
-                )
+                         error=str(exc_val),
+                         traceback=traceback.format_exc(),
+                         importer_source=self.importer.source,
+                         importer_tool_label=self.importer.tool_label,
+                         project_name=self.project_name,
+                         )
 
     def success(self, app):
         with h.push_config(c, project=self.project, app=app):
             g.post_event('import_tool_task_succeeded',
-                    self.importer.source,
-                    self.importer.tool_label,
-                    )
+                         self.importer.source,
+                         self.importer.tool_label,
+                         )
 
 
 def object_from_path(path):
@@ -117,9 +120,10 @@ def object_from_path(path):
 def import_tool(importer_path, project_name=None, mount_point=None, mount_label=None, **kw):
     importer = object_from_path(importer_path)()
     with ImportErrorHandler(importer, project_name, c.project) as handler,\
-         M.session.substitute_extensions(M.artifact_orm_session, [M.session.BatchIndexer]):
-        app = importer.import_tool(c.project, c.user, project_name=project_name,
-                mount_point=mount_point, mount_label=mount_label, **kw)
+            M.session.substitute_extensions(M.artifact_orm_session, [M.session.BatchIndexer]):
+        app = importer.import_tool(
+            c.project, c.user, project_name=project_name,
+            mount_point=mount_point, mount_label=mount_label, **kw)
         M.artifact_orm_session.flush()
         M.session.BatchIndexer.flush()
         if app:
@@ -127,6 +131,7 @@ def import_tool(importer_path, project_name=None, mount_point=None, mount_label=
 
 
 class ProjectExtractor(object):
+
     """Base class for project extractors.
 
     Subclasses should use :meth:`urlopen` to make HTTP requests, as it provides
@@ -147,7 +152,8 @@ class ProjectExtractor(object):
     @staticmethod
     def urlopen(url, retries=3, codes=(408,), **kw):
         req = urllib2.Request(url, **kw)
-        req.add_header('User-Agent', 'Allura Data Importer (https://forge-allura.apache.org/p/allura/)')
+        req.add_header(
+            'User-Agent', 'Allura Data Importer (https://forge-allura.apache.org/p/allura/)')
         return h.urlopen(req, retries=retries, codes=codes)
 
     def get_page(self, page_name_or_url, parser=None, **kw):
@@ -175,7 +181,7 @@ class ProjectExtractor(object):
             if parser is None:
                 parser = self.parse_page
             self.page = self._page_cache[self.url] = \
-                    parser(self.urlopen(self.url))
+                parser(self.urlopen(self.url))
         return self.page
 
     def get_page_url(self, page_name, **kw):
@@ -185,7 +191,7 @@ class ProjectExtractor(object):
 
         """
         return self.PAGE_MAP[page_name].format(
-            project_name = urllib.quote(self.project_name), **kw)
+            project_name=urllib.quote(self.project_name), **kw)
 
     def parse_page(self, page):
         """Transforms the result of a `urlopen` call before returning it from
@@ -205,6 +211,7 @@ class ProjectExtractor(object):
 
 
 class ProjectImporter(BaseController):
+
     """
     Base class for project importers.
 
@@ -264,16 +271,20 @@ class ProjectImporter(BaseController):
         message indicating that some data will not be available immediately.
         """
         try:
-            c.project = self.neighborhood.register_project(kw['project_shortname'],
-                    project_name=kw['project_name'])
+            c.project = self.neighborhood.register_project(
+                kw['project_shortname'],
+                project_name=kw['project_name'])
         except exceptions.ProjectOverlimitError:
-            flash("You have exceeded the maximum number of projects you are allowed to create", 'error')
+            flash(
+                "You have exceeded the maximum number of projects you are allowed to create", 'error')
             redirect('.')
         except exceptions.ProjectRatelimitError:
-            flash("Project creation rate limit exceeded.  Please try again later.", 'error')
+            flash(
+                "Project creation rate limit exceeded.  Please try again later.", 'error')
             redirect('.')
         except Exception:
-            log.error('error registering project: %s', kw['project_shortname'], exc_info=True)
+            log.error('error registering project: %s',
+                      kw['project_shortname'], exc_info=True)
             flash('Internal Error. Please try again later.', 'error')
             redirect('.')
 
@@ -314,6 +325,7 @@ class ProjectImporter(BaseController):
 
 
 class ToolImporter(object):
+
     """
     Base class for tool importers.
 
@@ -377,16 +389,16 @@ class ToolImporter(object):
         limit = config.get('tool_import.rate_limit', 1)
         pending_key = 'tool_data.%s.pending' % self.classname
         modified_project = M.Project.query.find_and_modify(
-                query={
-                        '_id': project._id,
-                        '$or': [
-                                {pending_key: None},
-                                {pending_key: {'$lt': limit}},
-                            ],
-                    },
-                update={'$inc': {pending_key: 1}},
-                new=True,
-            )
+            query={
+                '_id': project._id,
+                '$or': [
+                    {pending_key: None},
+                    {pending_key: {'$lt': limit}},
+                ],
+            },
+            update={'$inc': {pending_key: 1}},
+            new=True,
+        )
         return modified_project is not None
 
     def clear_pending(self, project):
@@ -396,13 +408,13 @@ class ToolImporter(object):
         """
         pending_key = 'tool_data.%s.pending' % self.classname
         M.Project.query.find_and_modify(
-                query={'_id': project._id},
-                update={'$inc': {pending_key: -1}},
-                new=True,
-            )
+            query={'_id': project._id},
+            update={'$inc': {pending_key: -1}},
+            new=True,
+        )
 
     def import_tool(self, project, user, project_name=None,
-            mount_point=None, mount_label=None, **kw):
+                    mount_point=None, mount_label=None, **kw):
         """
         Override this method to perform the tool import.
 
@@ -450,12 +462,14 @@ class ToolImporter(object):
 
 
 class ToolsValidator(fev.Set):
+
     """
     Validates the list of tool importers during a project import.
 
     This verifies that the tools selected are available and valid
     for this source.
     """
+
     def __init__(self, source, *a, **kw):
         super(ToolsValidator, self).__init__(*a, **kw)
         self.source = source
@@ -472,10 +486,13 @@ class ToolsValidator(fev.Set):
                 invalid.append(name)
         if invalid:
             pl = 's' if len(invalid) > 1 else ''
-            raise fev.Invalid('Invalid tool%s selected: %s' % (pl, ', '.join(invalid)), value, state)
+            raise fev.Invalid('Invalid tool%s selected: %s' %
+                              (pl, ', '.join(invalid)), value, state)
         return valid
 
+
 class ProjectToolsImportController(object):
+
     '''List all importers available'''
 
     @with_trailing_slash
@@ -503,24 +520,28 @@ class ProjectToolsImportController(object):
         else:
             raise exc.HTTPNotFound
 
+
 class ImportAdminExtension(AdminExtension):
+
     '''Add import link to project admin sidebar'''
 
     project_admin_controllers = {'import': ProjectToolsImportController}
 
     def update_project_sidebar_menu(self, sidebar_links):
         base_url = c.project.url() + 'admin/ext/'
-        link = SitemapEntry('Import', base_url+'import/')
+        link = SitemapEntry('Import', base_url + 'import/')
         sidebar_links.append(link)
 
 
 def stringio_parser(page):
     return {
-            'content-type': page.info()['content-type'],
-            'data': StringIO(page.read()),
-        }
+        'content-type': page.info()['content-type'],
+        'data': StringIO(page.read()),
+    }
+
 
 class File(object):
+
     def __init__(self, url, filename=None):
         extractor = ProjectExtractor(None, url, parser=stringio_parser)
         self.url = url
@@ -546,12 +567,13 @@ def get_importer_upload_path(project):
     elif not project.is_root:
         shortname = project.shortname.split('/')[0]
     upload_path = config['importer_upload_path'].format(
-            nbhd=project.neighborhood.url_prefix.strip('/'),
-            project=shortname,
-            c=c,
-        )
+        nbhd=project.neighborhood.url_prefix.strip('/'),
+        project=shortname,
+        c=c,
+    )
     return upload_path
 
+
 def save_importer_upload(project, filename, data):
     dest_path = get_importer_upload_path(project)
     dest_file = os.path.join(dest_path, filename)
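
For context on the ToolImporter.check_pending / clear_pending changes above: they implement a per-project import rate limit with an atomic find_and_modify. A minimal sketch of the same pattern against a raw pymongo 2.x collection follows; the collection, limit, and key names are assumptions for illustration, not Allura's Ming models.

    def try_reserve_import_slot(projects, project_id, limit=1,
                                pending_key='tool_data.Importer.pending'):
        # Atomically bump the pending counter, but only while it is still
        # below `limit`; a None result means the project is already at its limit.
        doc = projects.find_and_modify(
            query={'_id': project_id,
                   '$or': [{pending_key: None},
                           {pending_key: {'$lt': limit}}]},
            update={'$inc': {pending_key: 1}},
            new=True,
        )
        return doc is not None
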

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeImporters/forgeimporters/forge/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeImporters/forgeimporters/forge/__init__.py b/ForgeImporters/forgeimporters/forge/__init__.py
index 77505f1..144e298 100644
--- a/ForgeImporters/forgeimporters/forge/__init__.py
+++ b/ForgeImporters/forgeimporters/forge/__init__.py
@@ -14,4 +14,3 @@
 #       KIND, either express or implied.  See the License for the
 #       specific language governing permissions and limitations
 #       under the License.
-


[05/36] PEP8 cleanup

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/test_app.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/test_app.py b/ForgeTracker/forgetracker/tests/test_app.py
index e6e74b8..94d593b 100644
--- a/ForgeTracker/forgetracker/tests/test_app.py
+++ b/ForgeTracker/forgetracker/tests/test_app.py
@@ -29,6 +29,7 @@ from forgetracker.tests.functional.test_root import TrackerTestController
 
 
 class TestBulkExport(TrackerTestController):
+
     @td.with_tracker
     def setup_with_tools(self):
         super(TestBulkExport, self).setup_with_tools()
@@ -49,7 +50,8 @@ class TestBulkExport(TrackerTestController):
         f.seek(0)
         tracker = json.loads(f.read())
 
-        tickets = sorted(tracker['tickets'], key=operator.itemgetter('summary'))
+        tickets = sorted(tracker['tickets'],
+                         key=operator.itemgetter('summary'))
         assert_equal(len(tickets), 2)
         ticket_foo = tickets[1]
         assert_equal(ticket_foo['summary'], 'foo')
@@ -62,9 +64,11 @@ class TestBulkExport(TrackerTestController):
         assert_true('options' in tracker_config.keys())
         assert_equal(tracker_config['options']['mount_point'], 'bugs')
 
-        milestones = sorted(tracker['milestones'], key=operator.itemgetter('name'))
+        milestones = sorted(tracker['milestones'],
+                            key=operator.itemgetter('name'))
         assert_equal(milestones[0]['name'], '1.0')
         assert_equal(milestones[1]['name'], '2.0')
 
-        saved_bins_summaries = [bin['summary'] for bin in tracker['saved_bins']]
+        saved_bins_summaries = [bin['summary']
+                                for bin in tracker['saved_bins']]
         assert_true('Closed Tickets' in saved_bins_summaries)
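
The bulk-export test above sorts the exported ticket and milestone dicts with operator.itemgetter before asserting on them; the same idiom in isolation, with throwaway sample data:

    import operator

    tickets = [{'summary': 'foo'}, {'summary': 'bar'}]
    # Sort dicts by a key, as the test does with tracker['tickets'].
    tickets = sorted(tickets, key=operator.itemgetter('summary'))
    assert [t['summary'] for t in tickets] == ['bar', 'foo']
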

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/test_tracker_roles.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/test_tracker_roles.py b/ForgeTracker/forgetracker/tests/test_tracker_roles.py
index 791268d..92bed3f 100644
--- a/ForgeTracker/forgetracker/tests/test_tracker_roles.py
+++ b/ForgeTracker/forgetracker/tests/test_tracker_roles.py
@@ -22,19 +22,23 @@ from allura import model as M
 from allura.lib import security
 from allura.tests import decorators as td
 
+
 def setUp():
     setup_basic_test()
     setup_with_tools()
 
+
 @td.with_tracker
 def setup_with_tools():
     setup_global_objects()
     g.set_app('bugs')
 
+
 def test_role_assignments():
     admin = M.User.by_username('test-admin')
     user = M.User.by_username('test-user')
     anon = M.User.anonymous()
+
     def check_access(perm):
         pred = security.has_access(c.app, perm)
         return pred(user=admin), pred(user=user), pred(user=anon)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/unit/__init__.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/unit/__init__.py b/ForgeTracker/forgetracker/tests/unit/__init__.py
index c1b7ab1..0a2189a 100644
--- a/ForgeTracker/forgetracker/tests/unit/__init__.py
+++ b/ForgeTracker/forgetracker/tests/unit/__init__.py
@@ -30,15 +30,16 @@ def setUp():
 
 
 class TrackerTestWithModel(object):
+
     def setUp(self):
         bootstrap.wipe_database()
         project_reg = plugin.ProjectRegistrationProvider.get()
         c.user = bootstrap.create_user('Test User')
         neighborhood = M.Neighborhood(name='Projects', url_prefix='/p/',
-            features=dict(private_projects = False,
-                          max_projects = None,
-                          css = 'none',
-                          google_analytics = False))
+                                      features=dict(private_projects=False,
+                                                    max_projects=None,
+                                                    css='none',
+                                                    google_analytics=False))
         project_reg.register_neighborhood_project(neighborhood, [c.user])
         c.project = neighborhood.register_project('test', c.user)
         c.project.install_app('Tickets', 'bugs')
@@ -47,4 +48,3 @@ class TrackerTestWithModel(object):
 
     def tearDown(self):
         ThreadLocalORMSession.close_all()
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/unit/test_globals_model.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/unit/test_globals_model.py b/ForgeTracker/forgetracker/tests/unit/test_globals_model.py
index ada2120..ac0c2a9 100644
--- a/ForgeTracker/forgetracker/tests/unit/test_globals_model.py
+++ b/ForgeTracker/forgetracker/tests/unit/test_globals_model.py
@@ -30,6 +30,7 @@ from allura.lib import helpers as h
 
 
 class TestGlobalsModel(TrackerTestWithModel):
+
     def setUp(self):
         super(TestGlobalsModel, self).setUp()
         c.project.install_app('Tickets', 'doc-bugs')
@@ -57,7 +58,8 @@ class TestGlobalsModel(TrackerTestWithModel):
         now = datetime.utcnow()
         mock_dt.utcnow.return_value = now
         gbl = Globals()
-        gbl._bin_counts_data = [{'summary': 'foo', 'hits': 1}, {'summary': 'bar', 'hits': 2}]
+        gbl._bin_counts_data = [{'summary': 'foo', 'hits': 1},
+                                {'summary': 'bar', 'hits': 2}]
         gbl.invalidate_bin_counts = mock.Mock()
 
         # not expired, finds bin
@@ -105,13 +107,15 @@ class TestGlobalsModel(TrackerTestWithModel):
         mock_dt.utcnow.return_value = now
         gbl = Globals()
         gbl._bin_counts_invalidated = now - timedelta(minutes=1)
-        mock_bin.query.find.return_value = [mock.Mock(summary='foo', terms='bar')]
+        mock_bin.query.find.return_value = [
+            mock.Mock(summary='foo', terms='bar')]
         mock_search().hits = 5
 
         assert_equal(gbl._bin_counts_data, [])  # sanity pre-check
         gbl.update_bin_counts()
         assert mock_bin.query.find.called
-        mock_search.assert_called_with(forgetracker.model.Ticket, 'bar', rows=0, short_timeout=False)
+        mock_search.assert_called_with(
+            forgetracker.model.Ticket, 'bar', rows=0, short_timeout=False)
         assert_equal(gbl._bin_counts_data, [{'summary': 'foo', 'hits': 5}])
         assert_equal(gbl._bin_counts_expire, now + timedelta(minutes=60))
         assert_equal(gbl._bin_counts_invalidated, None)
@@ -119,12 +123,16 @@ class TestGlobalsModel(TrackerTestWithModel):
     def test_append_new_labels(self):
         gbl = Globals()
         assert_equal(gbl.append_new_labels([], ['tag1']), ['tag1'])
-        assert_equal(gbl.append_new_labels(['tag1', 'tag2'], ['tag2']), ['tag1', 'tag2'])
-        assert_equal(gbl.append_new_labels(['tag1', 'tag2'], ['tag3']), ['tag1', 'tag2', 'tag3'])
-        assert_equal(gbl.append_new_labels(['tag1', 'tag2', 'tag3'], ['tag2']), ['tag1', 'tag2', 'tag3'])
+        assert_equal(
+            gbl.append_new_labels(['tag1', 'tag2'], ['tag2']), ['tag1', 'tag2'])
+        assert_equal(gbl.append_new_labels(
+            ['tag1', 'tag2'], ['tag3']), ['tag1', 'tag2', 'tag3'])
+        assert_equal(gbl.append_new_labels(
+            ['tag1', 'tag2', 'tag3'], ['tag2']), ['tag1', 'tag2', 'tag3'])
 
 
 class TestCustomFields(TrackerTestWithModel):
+
     def test_it_has_sortable_custom_fields(self):
         tracker_globals = globals_with_custom_fields(
             [dict(label='Iteration Number',
@@ -136,7 +144,8 @@ class TestCustomFields(TrackerTestWithModel):
         expected = [dict(sortable_name='_point_estimate_s',
                          name='_point_estimate',
                          label='Point Estimate')]
-        assert tracker_globals.sortable_custom_fields_shown_in_search() == expected
+        assert tracker_globals.sortable_custom_fields_shown_in_search(
+        ) == expected
 
 
 def globals_with_custom_fields(custom_fields):
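
The bin-count tests above exercise a time-based cache: counts are recomputed only once _bin_counts_expire has passed or the data has been invalidated. A simplified, standalone sketch of that expiry logic, with class and attribute names chosen purely for illustration:

    from datetime import datetime, timedelta

    class ExpiringCounts(object):
        def __init__(self, ttl=timedelta(minutes=60)):
            self.ttl = ttl
            self.data = None
            self.expire = None

        def get(self, recompute):
            # Recompute only when nothing is cached yet or the TTL has elapsed.
            now = datetime.utcnow()
            if self.data is None or self.expire is None or now > self.expire:
                self.data = recompute()
                self.expire = now + self.ttl
            return self.data
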

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/unit/test_milestone_controller.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/unit/test_milestone_controller.py b/ForgeTracker/forgetracker/tests/unit/test_milestone_controller.py
index 357b6ce..e35d9ec 100644
--- a/ForgeTracker/forgetracker/tests/unit/test_milestone_controller.py
+++ b/ForgeTracker/forgetracker/tests/unit/test_milestone_controller.py
@@ -42,7 +42,8 @@ def test_unicode_lookup():
     with h.push_config(c, app=app):
         root = None
         field = 'milestone'
-        milestone_urlparam = '%D0%9F%D0%B5%D1%80%D1%81%D0%BF%D0%B5%D0%BA%D1%82%D0%B8%D0%B2%D0%B0' # u'Перспектива'
+        # u'Перспектива'
+        milestone_urlparam = '%D0%9F%D0%B5%D1%80%D1%81%D0%BF%D0%B5%D0%BA%D1%82%D0%B8%D0%B2%D0%B0'
         mc = MilestoneController(root, field, milestone_urlparam)
 
     assert mc.milestone  # check that it is found
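
The lookup above resolves a percent-encoded Cyrillic milestone name back to u'Перспектива'. The round trip under Python 2 urllib semantics (a quick illustration, not part of the patch):

    # -*- coding: utf-8 -*-
    import urllib

    name = u'Перспектива'
    quoted = urllib.quote(name.encode('utf-8'))      # '%D0%9F%D0%B5...'
    assert urllib.unquote(quoted).decode('utf-8') == name
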

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/unit/test_root_controller.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/unit/test_root_controller.py b/ForgeTracker/forgetracker/tests/unit/test_root_controller.py
index f952aba..cbe926c 100644
--- a/ForgeTracker/forgetracker/tests/unit/test_root_controller.py
+++ b/ForgeTracker/forgetracker/tests/unit/test_root_controller.py
@@ -28,6 +28,7 @@ import unittest
 
 
 class WithUserAndBugsApp(TrackerTestWithModel):
+
     def setUp(self):
         super(WithUserAndBugsApp, self).setUp()
         c.user = User(username='test-user')
@@ -35,6 +36,7 @@ class WithUserAndBugsApp(TrackerTestWithModel):
 
 
 class TestWhenSearchingWithCustomFields(WithUserAndBugsApp):
+
     def setUp(self):
         super(TestWhenSearchingWithCustomFields, self).setUp()
         with solr_search_returning_colors_are_wrong_ticket():
@@ -51,6 +53,7 @@ class TestWhenSearchingWithCustomFields(WithUserAndBugsApp):
 
 
 class TestWhenLoadingFrontPage(WithUserAndBugsApp):
+
     def setUp(self):
         super(TestWhenLoadingFrontPage, self).setUp()
         with mongo_search_returning_colors_are_wrong_ticket():
@@ -69,9 +72,10 @@ def solr_search_returning_colors_are_wrong_ticket():
     search_artifact.return_value = matches
     return patch('forgetracker.model.ticket.search_artifact', search_artifact)
 
+
 def mongo_search_returning_colors_are_wrong_ticket():
     ticket = create_colors_are_wrong_ticket()
-    tickets = [ ticket ]
+    tickets = [ticket]
     paged_query = Mock()
     paged_query.return_value = dict(tickets=tickets)
     return patch('forgetracker.tracker_main.TM.Ticket.paged_query', paged_query)
@@ -104,6 +108,7 @@ def create_ticket(summary, custom_fields):
 
 
 class test_change_text(unittest.TestCase):
+
     def test_get_label(self):
         self.assertEqual('Milestone', tracker_main.get_label('_milestone'))
         self.assertEqual('Ticket Number', tracker_main.get_label('ticket_num'))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/unit/test_ticket_custom_fields_form.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/unit/test_ticket_custom_fields_form.py b/ForgeTracker/forgetracker/tests/unit/test_ticket_custom_fields_form.py
index a4f01b2..7e393b6 100644
--- a/ForgeTracker/forgetracker/tests/unit/test_ticket_custom_fields_form.py
+++ b/ForgeTracker/forgetracker/tests/unit/test_ticket_custom_fields_form.py
@@ -26,14 +26,14 @@ from forgetracker.model import Globals
 
 
 class TestTicketCustomFields(TrackerTestWithModel):
+
     def test_it_creates_string_fields(self):
         globals_ = c.app.globals
         globals_.custom_fields = [Object(name='_iteration_number',
-                                       label='Iteration Number',
-                                       type='string')]
+                                         label='Iteration Number',
+                                         type='string')]
         ThreadLocalORMSession.flush_all()
         fields = ticket_form.TicketCustomFields().fields
         iteration_field = fields[0]
         assert iteration_field.label == 'Iteration Number'
         assert iteration_field.name == '_iteration_number'
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/unit/test_ticket_form.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/unit/test_ticket_form.py b/ForgeTracker/forgetracker/tests/unit/test_ticket_form.py
index a2f0a9b..bc5bbf2 100644
--- a/ForgeTracker/forgetracker/tests/unit/test_ticket_form.py
+++ b/ForgeTracker/forgetracker/tests/unit/test_ticket_form.py
@@ -26,6 +26,7 @@ from forgetracker.model import Globals
 
 
 class TestTicketForm(TrackerTestWithModel):
+
     def test_it_creates_status_field(self):
         g = c.app.globals
         g.open_status_names = 'open'
@@ -39,4 +40,3 @@ class TestTicketForm(TrackerTestWithModel):
                            for field in fields
                            if field.name == field_name]
         return matching_fields[0].options
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tests/unit/test_ticket_model.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tests/unit/test_ticket_model.py b/ForgeTracker/forgetracker/tests/unit/test_ticket_model.py
index e7ccb2d..2dd2515 100644
--- a/ForgeTracker/forgetracker/tests/unit/test_ticket_model.py
+++ b/ForgeTracker/forgetracker/tests/unit/test_ticket_model.py
@@ -33,12 +33,14 @@ from allura.tests import decorators as td
 
 
 class TestTicketModel(TrackerTestWithModel):
+
     def test_that_label_counts_are_local_to_tool(self):
         """Test that label queries return only artifacts from the specified
         tool.
         """
         # create a ticket in two different tools, with the same label
         from allura.tests import decorators as td
+
         @td.with_tool('test', 'Tickets', 'bugs', username='test-user')
         def _test_ticket():
             return Ticket(ticket_num=1, summary="ticket1", labels=["mylabel"])
@@ -53,8 +55,10 @@ class TestTicketModel(TrackerTestWithModel):
         ThreadLocalORMSession.flush_all()
 
         # test label query results
-        label_count1 = t1.artifacts_labeled_with("mylabel", t1.app_config).count()
-        label_count2 = t2.artifacts_labeled_with("mylabel", t2.app_config).count()
+        label_count1 = t1.artifacts_labeled_with(
+            "mylabel", t1.app_config).count()
+        label_count2 = t2.artifacts_labeled_with(
+            "mylabel", t2.app_config).count()
         assert 1 == label_count1 == label_count2
 
     def test_that_it_has_ordered_custom_fields(self):
@@ -95,31 +99,33 @@ class TestTicketModel(TrackerTestWithModel):
         observer = bootstrap.create_user('Random Non-Project User')
         anon = User(_id=None, username='*anonymous',
                     display_name='Anonymous')
-        t = Ticket(summary='my ticket', ticket_num=3, reported_by_id=creator._id)
+        t = Ticket(summary='my ticket', ticket_num=3,
+                   reported_by_id=creator._id)
 
         assert creator == t.reported_by
         role_admin = ProjectRole.by_name('Admin')._id
         role_developer = ProjectRole.by_name('Developer')._id
         role_creator = ProjectRole.by_user(t.reported_by, upsert=True)._id
-        ProjectRole.by_user(developer, upsert=True).roles.append(role_developer)
+        ProjectRole.by_user(
+            developer, upsert=True).roles.append(role_developer)
         ThreadLocalORMSession.flush_all()
         cred = Credentials.get().clear()
 
         t.private = True
         assert_equal(t.acl, [
-                        ACE.allow(role_developer, 'save_searches'),
-                        ACE.allow(role_developer, 'read'),
-                        ACE.allow(role_developer, 'create'),
-                        ACE.allow(role_developer, 'update'),
-                        ACE.allow(role_developer, 'unmoderated_post'),
-                        ACE.allow(role_developer, 'post'),
-                        ACE.allow(role_developer, 'moderate'),
-                        ACE.allow(role_developer, 'delete'),
-                        ACE.allow(role_creator, 'read'),
-                        ACE.allow(role_creator, 'post'),
-                        ACE.allow(role_creator, 'create'),
-                        ACE.allow(role_creator, 'unmoderated_post'),
-                        DENY_ALL])
+            ACE.allow(role_developer, 'save_searches'),
+            ACE.allow(role_developer, 'read'),
+            ACE.allow(role_developer, 'create'),
+            ACE.allow(role_developer, 'update'),
+            ACE.allow(role_developer, 'unmoderated_post'),
+            ACE.allow(role_developer, 'post'),
+            ACE.allow(role_developer, 'moderate'),
+            ACE.allow(role_developer, 'delete'),
+            ACE.allow(role_creator, 'read'),
+            ACE.allow(role_creator, 'post'),
+            ACE.allow(role_creator, 'create'),
+            ACE.allow(role_creator, 'unmoderated_post'),
+            DENY_ALL])
         assert has_access(t, 'read', user=admin)()
         assert has_access(t, 'create', user=admin)()
         assert has_access(t, 'update', user=admin)()
@@ -155,11 +161,11 @@ class TestTicketModel(TrackerTestWithModel):
 
     def test_feed(self):
         t = Ticket(
-        app_config_id=c.app.config._id,
-        ticket_num=1,
-        summary='test ticket',
-        description='test description',
-        created_date=datetime(2012, 10, 29, 9, 57, 21, 465000))
+            app_config_id=c.app.config._id,
+            ticket_num=1,
+            summary='test ticket',
+            description='test description',
+            created_date=datetime(2012, 10, 29, 9, 57, 21, 465000))
         assert_equal(t.created_date, datetime(2012, 10, 29, 9, 57, 21, 465000))
         f = Feed.post(
             t,
@@ -168,7 +174,8 @@ class TestTicketModel(TrackerTestWithModel):
             pubdate=t.created_date)
         assert_equal(f.pubdate, datetime(2012, 10, 29, 9, 57, 21, 465000))
         assert_equal(f.title, 'test ticket')
-        assert_equal(f.description, '<div class="markdown_content"><p>test description</p></div>')
+        assert_equal(f.description,
+                     '<div class="markdown_content"><p>test description</p></div>')
 
     @td.with_tool('test', 'Tickets', 'bugs', username='test-user')
     @td.with_tool('test', 'Tickets', 'bugs2', username='test-user')
@@ -182,13 +189,18 @@ class TestTicketModel(TrackerTestWithModel):
             ticket.assigned_to_id = User.by_username('test-user')._id
             ticket.discussion_thread.add_post(text='test comment')
 
-        assert_equal(Ticket.query.find({'app_config_id': app1.config._id}).count(), 1)
-        assert_equal(Ticket.query.find({'app_config_id': app2.config._id}).count(), 0)
-        assert_equal(Post.query.find(dict(thread_id=ticket.discussion_thread._id)).count(), 1)
+        assert_equal(
+            Ticket.query.find({'app_config_id': app1.config._id}).count(), 1)
+        assert_equal(
+            Ticket.query.find({'app_config_id': app2.config._id}).count(), 0)
+        assert_equal(
+            Post.query.find(dict(thread_id=ticket.discussion_thread._id)).count(), 1)
 
         t = ticket.move(app2.config)
-        assert_equal(Ticket.query.find({'app_config_id': app1.config._id}).count(), 0)
-        assert_equal(Ticket.query.find({'app_config_id': app2.config._id}).count(), 1)
+        assert_equal(
+            Ticket.query.find({'app_config_id': app1.config._id}).count(), 0)
+        assert_equal(
+            Ticket.query.find({'app_config_id': app2.config._id}).count(), 1)
         assert_equal(t.summary, 'test ticket')
         assert_equal(t.description, 'test description')
         assert_equal(t.assigned_to.username, 'test-user')
@@ -228,7 +240,8 @@ class TestTicketModel(TrackerTestWithModel):
         assert_equal(t.summary, 'test ticket')
         assert_equal(t.description, 'test description')
         assert_equal(t.custom_fields['_test'], 'test val')
-        post = Post.query.find(dict(thread_id=ticket.discussion_thread._id)).first()
+        post = Post.query.find(
+            dict(thread_id=ticket.discussion_thread._id)).first()
         assert post is not None, 'No comment about ticket moving'
         message = 'Ticket moved from /p/test/bugs/1/'
         message += '\n\nCan\'t be converted:\n'
@@ -255,14 +268,17 @@ class TestTicketModel(TrackerTestWithModel):
             ticket.summary = 'test ticket'
             ticket.description = 'test description'
             ticket.custom_fields['_user_field'] = 'test-user'  # in project
-            ticket.custom_fields['_user_field_2'] = 'test-user-0'  # not in project
-            ticket.assigned_to_id = User.by_username('test-user-0')._id  # not in project
+            # not in project
+            ticket.custom_fields['_user_field_2'] = 'test-user-0'
+            # not in project
+            ticket.assigned_to_id = User.by_username('test-user-0')._id
 
         t = ticket.move(app2.config)
         assert_equal(t.assigned_to_id, None)
         assert_equal(t.custom_fields['_user_field'], 'test-user')
         assert_equal(t.custom_fields['_user_field_2'], '')
-        post = Post.query.find(dict(thread_id=ticket.discussion_thread._id)).first()
+        post = Post.query.find(
+            dict(thread_id=ticket.discussion_thread._id)).first()
         assert post is not None, 'No comment about ticket moving'
         message = 'Ticket moved from /p/test/bugs/1/'
         message += '\n\nCan\'t be converted:\n'
@@ -278,8 +294,9 @@ class TestTicketModel(TrackerTestWithModel):
             ticket.description = 'test description'
         assert_equal(len(ticket.attachments), 0)
         f = urllib2.urlopen('file://%s' % __file__)
-        TicketAttachment.save_attachment('test_ticket_model.py', ResettableStream(f),
-                                            artifact_id=ticket._id)
+        TicketAttachment.save_attachment(
+            'test_ticket_model.py', ResettableStream(f),
+            artifact_id=ticket._id)
         ThreadLocalORMSession.flush_all()
         # need to refetch since attachments are cached
         session(ticket).expunge(ticket)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/tracker_main.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/tracker_main.py b/ForgeTracker/forgetracker/tracker_main.py
index 34267cf..1ad73de 100644
--- a/ForgeTracker/forgetracker/tracker_main.py
+++ b/ForgeTracker/forgetracker/tracker_main.py
@@ -87,6 +87,7 @@ search_validators = dict(
     sort=validators.UnicodeString(if_empty=None),
     deleted=validators.StringBool(if_empty=False))
 
+
 def _mongo_col_to_solr_col(name):
     if name == 'ticket_num':
         return 'ticket_num_i'
@@ -164,7 +165,7 @@ def _my_trackers(user, current_tracker_app_config):
 
 
 class W:
-    thread=w.Thread(
+    thread = w.Thread(
         page=None, limit=None, page_size=None, count=None,
         style='linear')
     date_field = ffw.DateField()
@@ -188,10 +189,11 @@ class W:
     move_ticket_form = w.forms.MoveTicketForm
     mass_move_form = MassMoveForm
 
+
 class ForgeTrackerApp(Application):
     __version__ = version.__version__
     permissions = ['configure', 'read', 'update', 'create', 'save_searches',
-                    'unmoderated_post', 'post', 'moderate', 'admin', 'delete']
+                   'unmoderated_post', 'post', 'moderate', 'admin', 'delete']
     permissions_desc = {
         'configure': 'Edit milestones.',
         'read': 'View tickets.',
@@ -199,31 +201,31 @@ class ForgeTrackerApp(Application):
         'create': 'Create tickets.',
         'save_searches': 'Not used.',
         'admin': 'Set permissions. Configure options, saved searches, custom fields, '
-            'and default list view columns. Move tickets to or from this '
-            'tracker. Import tickets.',
+        'and default list view columns. Move tickets to or from this '
+        'tracker. Import tickets.',
         'delete': 'Delete and undelete tickets. View deleted tickets.',
     }
     config_options = Application.config_options + [
         ConfigOption('EnableVoting', bool, False),
         ConfigOption('TicketMonitoringEmail', str, ''),
         ConfigOption('TicketMonitoringType',
-            schema.OneOf('NewTicketsOnly', 'AllTicketChanges',
-                'NewPublicTicketsOnly', 'AllPublicTicketChanges'), None)
-        ]
+                     schema.OneOf('NewTicketsOnly', 'AllTicketChanges',
+                                  'NewPublicTicketsOnly', 'AllPublicTicketChanges'), None)
+    ]
     exportable = True
-    searchable=True
-    tool_label='Tickets'
-    tool_description="""
+    searchable = True
+    tool_label = 'Tickets'
+    tool_description = """
         Bugs, enhancements, tasks, etc., will help you plan and
         manage your development.
     """
-    default_mount_label='Tickets'
-    default_mount_point='tickets'
-    ordinal=6
-    icons={
-        24:'images/tickets_24.png',
-        32:'images/tickets_32.png',
-        48:'images/tickets_48.png'
+    default_mount_label = 'Tickets'
+    default_mount_point = 'tickets'
+    ordinal = 6
+    icons = {
+        24: 'images/tickets_24.png',
+        32: 'images/tickets_32.png',
+        48: 'images/tickets_48.png'
     }
 
     def __init__(self, project, config):
@@ -255,9 +257,9 @@ class ForgeTrackerApp(Application):
         '''Apps should provide their entries to be added to the main nav
         :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
         '''
-        return [ SitemapEntry(
-                self.config.options.mount_label,
-                '.')]
+        return [SitemapEntry(
+            self.config.options.mount_label,
+            '.')]
 
     @property
     @h.exceptionless([], log)
@@ -265,10 +267,11 @@ class ForgeTrackerApp(Application):
         menu_id = self.config.options.mount_label
         with h.push_config(c, app=self):
             return [
-                SitemapEntry(menu_id, '.')[self.sidebar_menu()] ]
+                SitemapEntry(menu_id, '.')[self.sidebar_menu()]]
 
     def admin_menu(self):
-        admin_url = c.project.url() + 'admin/' + self.config.options.mount_point + '/'
+        admin_url = c.project.url() + 'admin/' + \
+            self.config.options.mount_point + '/'
         links = [SitemapEntry('Field Management', admin_url + 'fields'),
                  SitemapEntry('Edit Searches', admin_url + 'bins/')]
         links += super(ForgeTrackerApp, self).admin_menu()
@@ -286,30 +289,38 @@ class ForgeTrackerApp(Application):
             label = bin.shorthand_id()
             cls = '' if bin.terms and '$USER' in bin.terms else 'search_bin'
             search_bins.append(SitemapEntry(
-                    h.text.truncate(label, 72), bin.url(), className=cls))
+                h.text.truncate(label, 72), bin.url(), className=cls))
         for fld in c.app.globals.milestone_fields:
             milestones.append(SitemapEntry(h.text.truncate(fld.label, 72)))
             for m in getattr(fld, "milestones", []):
-                if m.complete: continue
+                if m.complete:
+                    continue
                 milestones.append(
                     SitemapEntry(
                         h.text.truncate(m.name, 72),
-                        self.url + fld.name[1:] + '/' + h.urlquote(m.name) + '/',
+                        self.url + fld.name[1:] + '/' +
+                        h.urlquote(m.name) + '/',
                         className='milestones'))
 
         links = []
         if has_access(self, 'create')():
             links.append(SitemapEntry('Create Ticket',
-                self.config.url() + 'new/', ui_icon=g.icons['plus']))
+                                      self.config.url() + 'new/', ui_icon=g.icons['plus']))
         if has_access(self, 'configure')():
-            links.append(SitemapEntry('Edit Milestones', self.config.url() + 'milestones', ui_icon=g.icons['table']))
-            links.append(SitemapEntry('Edit Searches', c.project.url() + 'admin/' + c.app.config.options.mount_point + '/bins/', ui_icon=g.icons['search']))
-        links.append(SitemapEntry('View Stats', self.config.url() + 'stats', ui_icon=g.icons['stats']))
+            links.append(SitemapEntry('Edit Milestones', self.config.url()
+                         + 'milestones', ui_icon=g.icons['table']))
+            links.append(SitemapEntry('Edit Searches', c.project.url() + 'admin/' +
+                         c.app.config.options.mount_point + '/bins/', ui_icon=g.icons['search']))
+        links.append(SitemapEntry('View Stats', self.config.url()
+                     + 'stats', ui_icon=g.icons['stats']))
         discussion = c.app.config.discussion
-        pending_mod_count = M.Post.query.find({'discussion_id':discussion._id, 'status':'pending'}).count()
+        pending_mod_count = M.Post.query.find(
+            {'discussion_id': discussion._id, 'status': 'pending'}).count()
         if pending_mod_count and has_access(discussion, 'moderate')():
-            links.append(SitemapEntry('Moderate', discussion.url() + 'moderate', ui_icon=g.icons['pencil'],
-                small = pending_mod_count))
+            links.append(
+                SitemapEntry(
+                    'Moderate', discussion.url() + 'moderate', ui_icon=g.icons['pencil'],
+                    small=pending_mod_count))
 
         links += milestones
 
@@ -317,7 +328,8 @@ class ForgeTrackerApp(Application):
             links.append(SitemapEntry('Searches'))
             links = links + search_bins
         links.append(SitemapEntry('Help'))
-        links.append(SitemapEntry('Formatting Help', self.config.url() + 'markdown_syntax'))
+        links.append(
+            SitemapEntry('Formatting Help', self.config.url() + 'markdown_syntax'))
         return links
 
     def sidebar_menu_js(self):
@@ -381,31 +393,34 @@ class ForgeTrackerApp(Application):
             M.ACE.allow(role_developer, 'delete'),
             M.ACE.allow(role_admin, 'configure'),
             M.ACE.allow(role_admin, 'admin'),
-            ]
+        ]
         self.globals = TM.Globals(app_config_id=c.app.config._id,
-            last_ticket_num=0,
-            open_status_names=self.config.options.pop('open_status_names', 'open unread accepted pending'),
-            closed_status_names=self.config.options.pop('closed_status_names', 'closed wont-fix'),
-            custom_fields=[dict(
-                    name='_milestone',
-                    label='Milestone',
-                    type='milestone',
-                    milestones=[
-                        dict(name='1.0', complete=False, due_date=None, default=True),
-                        dict(name='2.0', complete=False, due_date=None, default=False)]) ])
+                                  last_ticket_num=0,
+                                  open_status_names=self.config.options.pop(
+                                      'open_status_names', 'open unread accepted pending'),
+                                  closed_status_names=self.config.options.pop(
+                                      'closed_status_names', 'closed wont-fix'),
+                                  custom_fields=[dict(
+                                      name='_milestone',
+                                      label='Milestone',
+                                      type='milestone',
+                                      milestones=[
+                                          dict(name='1.0', complete=False,
+                                               due_date=None, default=True),
+                                          dict(name='2.0', complete=False, due_date=None, default=False)])])
         self.globals.update_bin_counts()
         # create default search bins
         TM.Bin(summary='Open Tickets', terms=self.globals.not_closed_query,
-                app_config_id = self.config._id, custom_fields = dict())
+               app_config_id=self.config._id, custom_fields=dict())
         TM.Bin(summary='Closed Tickets', terms=self.globals.closed_query,
-                app_config_id=self.config._id, custom_fields=dict())
+               app_config_id=self.config._id, custom_fields=dict())
         TM.Bin(summary='Changes', terms=self.globals.not_closed_query,
-                sort='mod_date_dt desc', app_config_id = self.config._id,
-                custom_fields = dict())
+               sort='mod_date_dt desc', app_config_id=self.config._id,
+               custom_fields=dict())
 
     def uninstall(self, project):
         "Remove all the tool's artifacts from the database"
-        app_config_id = {'app_config_id':c.app.config._id}
+        app_config_id = {'app_config_id': c.app.config._id}
         TM.TicketAttachment.query.remove(app_config_id)
         TM.Ticket.query.remove(app_config_id)
         TM.Bin.query.remove(app_config_id)
@@ -416,7 +431,8 @@ class ForgeTrackerApp(Application):
         f.write('{"tickets": [')
         tickets = TM.Ticket.query.find(dict(
             app_config_id=self.config._id,
-            deleted={'$ne': True},  # backwards compat for old tickets that don't have it set
+            # backwards compat for old tickets that don't have it set
+            deleted={'$ne': True},
         ))
         for i, ticket in enumerate(tickets):
             if i > 0:
@@ -428,11 +444,14 @@ class ForgeTrackerApp(Application):
         milestones = self.milestones
         json.dump(milestones, f, cls=jsonify.GenericJSON, indent=2)
         f.write(',\n"custom_fields":')
-        json.dump(self.globals.custom_fields, f, cls=jsonify.GenericJSON, indent=2)
+        json.dump(self.globals.custom_fields, f,
+                  cls=jsonify.GenericJSON, indent=2)
         f.write(',\n"open_status_names":')
-        json.dump(self.globals.open_status_names, f, cls=jsonify.GenericJSON, indent=2)
+        json.dump(self.globals.open_status_names, f,
+                  cls=jsonify.GenericJSON, indent=2)
         f.write(',\n"closed_status_names":')
-        json.dump(self.globals.closed_status_names, f, cls=jsonify.GenericJSON, indent=2)
+        json.dump(self.globals.closed_status_names, f,
+                  cls=jsonify.GenericJSON, indent=2)
         f.write(',\n"saved_bins":')
         bins = self.bins
         json.dump(bins, f, cls=jsonify.GenericJSON, indent=2)
@@ -448,7 +467,8 @@ class ForgeTrackerApp(Application):
         for fld in self.globals.milestone_fields:
             if fld.name == '_milestone':
                 for m in fld.milestones:
-                    d =  self.globals.milestone_count('%s:%s' % (fld.name, m.name))
+                    d = self.globals.milestone_count(
+                        '%s:%s' % (fld.name, m.name))
                     milestones.append(dict(
                         name=m.name,
                         due_date=m.get('due_date'),
@@ -496,17 +516,19 @@ def mongo_columns():
                     label='Updated',
                     active=c.app.globals.show_in_search['mod_date']),
                dict(name='labels',
-                   sort_name='labels',
-                   label='Labels',
-                   active=c.app.globals.show_in_search['labels']),
+                    sort_name='labels',
+                    label='Labels',
+                    active=c.app.globals.show_in_search['labels']),
                ]
     for field in c.app.globals.sortable_custom_fields_shown_in_search():
         columns.append(
             dict(name=field['name'], sort_name=field['name'], label=field['label'], active=True))
     if c.app.config.options.get('EnableVoting'):
-        columns.append(dict(name='votes', sort_name='votes', label='Votes', active=True))
+        columns.append(
+            dict(name='votes', sort_name='votes', label='Votes', active=True))
     return columns
 
+
 def solr_columns():
     columns = [dict(name='ticket_num',
                     sort_name='ticket_num_i',
@@ -541,16 +563,19 @@ def solr_columns():
                     label='Updated',
                     active=c.app.globals.show_in_search['mod_date']),
                dict(name='labels',
-                   sort_name='labels_t',
-                   label='Labels',
-                   active=c.app.globals.show_in_search['labels']),
+                    sort_name='labels_t',
+                    label='Labels',
+                    active=c.app.globals.show_in_search['labels']),
                ]
     for field in c.app.globals.sortable_custom_fields_shown_in_search():
-        columns.append(dict(name=field['name'], sort_name=field['sortable_name'], label=field['label'], active=True))
+        columns.append(
+            dict(name=field['name'], sort_name=field['sortable_name'], label=field['label'], active=True))
     if c.app.config.options.get('EnableVoting'):
-        columns.append(dict(name='votes', sort_name='votes_total_i', label='Votes', active=True))
+        columns.append(
+            dict(name='votes', sort_name='votes_total_i', label='Votes', active=True))
     return columns
 
+
 class RootController(BaseController, FeedController):
 
     def __init__(self):
@@ -581,8 +606,10 @@ class RootController(BaseController, FeedController):
         milestone_counts = []
         for fld in c.app.globals.milestone_fields:
             for m in getattr(fld, "milestones", []):
-                if m.complete: continue
-                count = c.app.globals.milestone_count('%s:%s' % (fld.name, m.name))['hits']
+                if m.complete:
+                    continue
+                count = c.app.globals.milestone_count(
+                    '%s:%s' % (fld.name, m.name))['hits']
                 name = h.text.truncate(m.name, 72)
                 milestone_counts.append({'name': name, 'count': count})
         return {'milestone_counts': milestone_counts}
@@ -606,7 +633,7 @@ class RootController(BaseController, FeedController):
             {'$project': {'labels': 1}},
             {'$unwind': '$labels'},
             {'$match': {'labels': {'$regex': '^%s' % term, '$options': 'i'}}},
-            {'$group': { '_id': '$labels', 'count': {'$sum': 1}}},
+            {'$group': {'_id': '$labels', 'count': {'$sum': 1}}},
             {'$sort': SON([('count', -1), ('_id', 1)])}
         ])
         return json.dumps([tag['_id'] for tag in tags.get('result', [])])
@@ -618,19 +645,22 @@ class RootController(BaseController, FeedController):
     def index(self, limit=25, columns=None, page=0, sort='ticket_num desc', deleted=False, **kw):
         show_deleted = [False]
         if deleted and has_access(c.app, 'delete'):
-            show_deleted = [False,True]
+            show_deleted = [False, True]
 
-        kw.pop('q', None) # it's just our original query mangled and sent back to us
+        # it's just our original query mangled and sent back to us
+        kw.pop('q', None)
         result = TM.Ticket.paged_query(c.app.config, c.user,
-                                        c.app.globals.not_closed_mongo_query,
-                                        sort=sort, limit=int(limit),
-                                        page=page, deleted={'$in':show_deleted}, **kw)
+                                       c.app.globals.not_closed_mongo_query,
+                                       sort=sort, limit=int(limit),
+                                       page=page, deleted={'$in': show_deleted}, **kw)
         result['columns'] = columns or mongo_columns()
-        result['sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
+        result[
+            'sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
         result['subscribed'] = M.Mailbox.subscribed()
         result['allow_edit'] = has_access(c.app, 'update')()
         result['allow_move'] = has_access(c.app, 'admin')()
-        result['help_msg'] = c.app.config.options.get('TicketHelpSearch','').strip()
+        result['help_msg'] = c.app.config.options.get(
+            'TicketHelpSearch', '').strip()
         result['url_q'] = c.app.globals.not_closed_query
         result['url_sort'] = ''
         result['deleted'] = deleted
@@ -662,21 +692,22 @@ class RootController(BaseController, FeedController):
         # the Edit Milestones page capable of editing any/all milestone fields
         # instead of just the default "_milestone" field.
         if field_name == '_milestone' and \
-            field_name not in [m.name for m in c.app.globals.milestone_fields]:
+                field_name not in [m.name for m in c.app.globals.milestone_fields]:
             c.app.globals.custom_fields.append(dict(name='_milestone',
-                label='Milestone', type='milestone', milestones=[]))
+                                                    label='Milestone', type='milestone', milestones=[]))
         for fld in c.app.globals.milestone_fields:
             if fld.name == field_name:
                 for new in milestones:
                     exists_milestones = [m.name for m in fld.milestones]
                     new['new_name'] = new['new_name'].replace("/", "-")
                     if (new['new_name'] in exists_milestones) and (new['new_name'] != new['old_name']):
-                        flash('The milestone "%s" already exists.' % new['new_name'], 'error')
+                        flash('The milestone "%s" already exists.' %
+                              new['new_name'], 'error')
                         redirect('milestones')
                     for m in fld.milestones:
                         if m.name == new['old_name']:
                             if new['new_name'] == '':
-                                flash('You must name the milestone.','error')
+                                flash('You must name the milestone.', 'error')
                             else:
                                 m.name = new['new_name']
                                 m.description = new['description']
@@ -689,21 +720,23 @@ class RootController(BaseController, FeedController):
                                     # rows by default, so give it a high upper
                                     # bound to make sure we get all tickets
                                     # for this milestone
-                                    r = search_artifact(TM.Ticket, q, rows=10000, short_timeout=False)
-                                    ticket_numbers = [match['ticket_num_i'] for match in r.docs]
+                                    r = search_artifact(
+                                        TM.Ticket, q, rows=10000, short_timeout=False)
+                                    ticket_numbers = [match['ticket_num_i']
+                                                      for match in r.docs]
                                     tickets = TM.Ticket.query.find(dict(
                                         app_config_id=c.app.config._id,
-                                        ticket_num={'$in':ticket_numbers})).all()
+                                        ticket_num={'$in': ticket_numbers})).all()
                                     for t in tickets:
                                         t.custom_fields[field_name] = m.name
                                     update_counts = True
                     if new['old_name'] == '' and new['new_name'] != '':
                         fld.milestones.append(dict(
                             name=new['new_name'],
-                            description = new['description'],
-                            due_date = new['due_date'],
-                            complete = new['complete'] == 'Closed',
-                            default = new.get('default', False),
+                            description=new['description'],
+                            due_date=new['due_date'],
+                            complete=new['complete'] == 'Closed',
+                            default=new.get('default', False),
                         ))
                         update_counts = True
         if update_counts:
@@ -722,16 +755,21 @@ class RootController(BaseController, FeedController):
         c.bin_form = W.bin_form
         bin = None
         if q:
-            bin = TM.Bin.query.find(dict(app_config_id=c.app.config._id,terms=q)).first()
+            bin = TM.Bin.query.find(
+                dict(app_config_id=c.app.config._id, terms=q)).first()
         if project:
-            redirect(c.project.url() + 'search?' + urlencode(dict(q=q, history=kw.get('history'))))
-        result = TM.Ticket.paged_search(c.app.config, c.user, q, page=page, sort=sort, show_deleted=deleted, **kw)
+            redirect(c.project.url() + 'search?' +
+                     urlencode(dict(q=q, history=kw.get('history'))))
+        result = TM.Ticket.paged_search(
+            c.app.config, c.user, q, page=page, sort=sort, show_deleted=deleted, **kw)
         result['columns'] = columns or solr_columns()
-        result['sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
+        result[
+            'sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
         result['allow_edit'] = has_access(c.app, 'update')()
         result['allow_move'] = has_access(c.app, 'admin')()
         result['bin'] = bin
-        result['help_msg'] = c.app.config.options.get('TicketHelpSearch', '').strip()
+        result['help_msg'] = c.app.config.options.get(
+            'TicketHelpSearch', '').strip()
         result['deleted'] = deleted
         c.ticket_search_results = W.ticket_search_results
         return result
@@ -743,10 +781,12 @@ class RootController(BaseController, FeedController):
     def search_feed(self, q=None, query=None, project=None, page=0, sort=None, deleted=False, **kw):
         if query and not q:
             q = query
-        result = TM.Ticket.paged_search(c.app.config, c.user, q, page=page, sort=sort, show_deleted=deleted, **kw)
+        result = TM.Ticket.paged_search(
+            c.app.config, c.user, q, page=page, sort=sort, show_deleted=deleted, **kw)
         response.headers['Content-Type'] = ''
         response.content_type = 'application/xml'
-        d = dict(title='Ticket search results', link=h.absurl(c.app.url), description='You searched for %s' % q, language=u'en')
+        d = dict(title='Ticket search results', link=h.absurl(c.app.url),
+                 description='You searched for %s' % q, language=u'en')
         if request.environ['PATH_INFO'].endswith('.atom'):
             feed = FG.Atom1Feed(**d)
         else:
@@ -771,21 +811,19 @@ class RootController(BaseController, FeedController):
         else:
             raise exc.HTTPNotFound
 
-
     @with_trailing_slash
     @expose('jinja:forgetracker:templates/tracker/search_help.html')
     def search_help(self):
         'Static page with search help'
         return dict()
 
-
     @with_trailing_slash
     @expose('jinja:forgetracker:templates/tracker/new_ticket.html')
     def new(self, description=None, summary=None, labels=None, **kw):
         require_access(c.app, 'create')
         c.ticket_form = W.ticket_form
-        help_msg = c.app.config.options.get('TicketHelpNew','').strip()
-        return dict(action=c.app.config.url()+'save_ticket',
+        help_msg = c.app.config.options.get('TicketHelpNew', '').strip()
+        return dict(action=c.app.config.url() + 'save_ticket',
                     help_msg=help_msg,
                     description=description, summary=summary, labels=labels)
 
@@ -807,7 +845,9 @@ class RootController(BaseController, FeedController):
         # if c.app.globals.milestone_names is None:
         #     c.app.globals.milestone_names = ''
         ticket_num = ticket_form.pop('ticket_num', None)
-        ticket_form.pop('comment', None) # W.ticket_form gives us this, but we don't set any comment during ticket creation
+        # W.ticket_form gives us this, but we don't set any comment during
+        # ticket creation
+        ticket_form.pop('comment', None)
         if ticket_num:
             ticket = TM.Ticket.query.get(
                 app_config_id=c.app.config._id,
@@ -821,8 +861,8 @@ class RootController(BaseController, FeedController):
         ticket.update(ticket_form)
         c.app.globals.invalidate_bin_counts()
         g.director.create_activity(c.user, 'created', ticket,
-                related_nodes=[c.project])
-        redirect(str(ticket.ticket_num)+'/')
+                                   related_nodes=[c.project])
+        redirect(str(ticket.ticket_num) + '/')
 
     @with_trailing_slash
     @expose('jinja:forgetracker:templates/tracker/mass_edit.html')
@@ -832,11 +872,13 @@ class RootController(BaseController, FeedController):
                    sort=validators.UnicodeString(if_empty='ticket_num_i asc')))
     def edit(self, q=None, limit=None, page=None, sort=None, **kw):
         require_access(c.app, 'update')
-        result = TM.Ticket.paged_search(c.app.config, c.user, q, sort=sort, limit=limit, page=page, show_deleted=False, **kw)
+        result = TM.Ticket.paged_search(
+            c.app.config, c.user, q, sort=sort, limit=limit, page=page, show_deleted=False, **kw)
         # if c.app.globals.milestone_names is None:
         #     c.app.globals.milestone_names = ''
         result['columns'] = solr_columns()
-        result['sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
+        result[
+            'sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
         result['globals'] = c.app.globals
         result['cancel_href'] = url(
             c.app.url + 'search/',
@@ -855,11 +897,14 @@ class RootController(BaseController, FeedController):
                    sort=validators.UnicodeString(if_empty='ticket_num_i asc')))
     def move(self, q=None, limit=None, page=None, sort=None, **kw):
         require_access(c.app, 'admin')
-        result = TM.Ticket.paged_search(c.app.config, c.user, q, sort=sort, limit=limit, page=page, show_deleted=False, **kw)
+        result = TM.Ticket.paged_search(
+            c.app.config, c.user, q, sort=sort, limit=limit, page=page, show_deleted=False, **kw)
         result['columns'] = solr_columns()
-        result['sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
+        result[
+            'sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
         result['globals'] = c.app.globals
-        result['cancel_href'] = url(c.app.url + 'search/', dict(q=q, limit=limit, sort=sort))
+        result['cancel_href'] = url(
+            c.app.url + 'search/', dict(q=q, limit=limit, sort=sort))
         c.mass_move = W.mass_edit
         trackers = _my_trackers(c.user, c.app.config)
         c.mass_move_form = W.mass_move_form(
@@ -885,7 +930,8 @@ class RootController(BaseController, FeedController):
             flash('Ticket already in a selected tracker', 'info')
             redirect('move/' + search)
         if not has_access(tracker, 'admin')():
-            flash('You should have admin access to destination tracker', 'error')
+            flash('You should have admin access to destination tracker',
+                  'error')
             redirect('move/' + search)
         tickets = TM.Ticket.query.find(dict(
             _id={'$in': [ObjectId(id) for id in ticket_ids]},
@@ -896,29 +942,34 @@ class RootController(BaseController, FeedController):
         c.app.globals.invalidate_bin_counts()
         ThreadLocalORMSession.flush_all()
         count = len(tickets)
-        flash('Move scheduled ({} ticket{})'.format(count, 's' if count != 1 else ''), 'ok')
+        flash('Move scheduled ({} ticket{})'.format(
+            count, 's' if count != 1 else ''), 'ok')
         redirect('move/' + search)
 
     @expose()
     @require_post()
     def update_tickets(self, **post_data):
         tickets = TM.Ticket.query.find(dict(
-                _id={'$in':[ObjectId(id) for id in aslist(post_data['__ticket_ids'])]},
-                app_config_id=c.app.config._id)).all()
+            _id={'$in': [ObjectId(id)
+                         for id in aslist(
+                             post_data['__ticket_ids'])]},
+            app_config_id=c.app.config._id)).all()
         for ticket in tickets:
             require_access(ticket, 'update')
         tasks.bulk_edit.post(**post_data)
         count = len(tickets)
-        flash('Update scheduled ({} ticket{})'.format(count, 's' if count != 1 else ''), 'ok')
+        flash('Update scheduled ({} ticket{})'.format(
+            count, 's' if count != 1 else ''), 'ok')
         redirect('edit/' + post_data['__search'])
 
     def tickets_since(self, when=None):
         count = 0
         if when:
             count = TM.Ticket.query.find(dict(app_config_id=c.app.config._id,
-                created_date={'$gte':when})).count()
+                                              created_date={'$gte': when})).count()
         else:
-            count = TM.Ticket.query.find(dict(app_config_id=c.app.config._id)).count()
+            count = TM.Ticket.query.find(
+                dict(app_config_id=c.app.config._id)).count()
         return count
 
     def ticket_comments_since(self, when=None):
@@ -927,16 +978,19 @@ class RootController(BaseController, FeedController):
             status='ok',
         )
         if when is not None:
-            q['timestamp'] = {'$gte':when}
+            q['timestamp'] = {'$gte': when}
         return M.Post.query.find(q).count()
 
     @with_trailing_slash
     @expose('jinja:forgetracker:templates/tracker/stats.html')
     def stats(self, dates=None, **kw):
         globals = c.app.globals
-        total = TM.Ticket.query.find(dict(app_config_id=c.app.config._id, deleted = False)).count()
-        open = TM.Ticket.query.find(dict(app_config_id=c.app.config._id, deleted = False, status={'$in': list(globals.set_of_open_status_names)})).count()
-        closed = TM.Ticket.query.find(dict(app_config_id=c.app.config._id, deleted = False, status={'$in': list(globals.set_of_closed_status_names)})).count()
+        total = TM.Ticket.query.find(
+            dict(app_config_id=c.app.config._id, deleted=False)).count()
+        open = TM.Ticket.query.find(dict(app_config_id=c.app.config._id, deleted=False, status={
+                                    '$in': list(globals.set_of_open_status_names)})).count()
+        closed = TM.Ticket.query.find(dict(app_config_id=c.app.config._id, deleted=False, status={
+                                      '$in': list(globals.set_of_closed_status_names)})).count()
         now = datetime.utcnow()
         week = timedelta(weeks=1)
         fortnight = timedelta(weeks=2)
@@ -947,34 +1001,34 @@ class RootController(BaseController, FeedController):
         week_tickets = self.tickets_since(week_ago)
         fortnight_tickets = self.tickets_since(fortnight_ago)
         month_tickets = self.tickets_since(month_ago)
-        comments=self.ticket_comments_since()
-        week_comments=self.ticket_comments_since(week_ago)
-        fortnight_comments=self.ticket_comments_since(fortnight_ago)
-        month_comments=self.ticket_comments_since(month_ago)
+        comments = self.ticket_comments_since()
+        week_comments = self.ticket_comments_since(week_ago)
+        fortnight_comments = self.ticket_comments_since(fortnight_ago)
+        month_comments = self.ticket_comments_since(month_ago)
         c.user_select = ffw.ProjectUserCombo()
         if dates is None:
             today = datetime.utcnow()
-            dates = "%s to %s" % ((today - timedelta(days=61)).strftime('%Y-%m-%d'), today.strftime('%Y-%m-%d'))
+            dates = "%s to %s" % ((today - timedelta(days=61))
+                                  .strftime('%Y-%m-%d'), today.strftime('%Y-%m-%d'))
         return dict(
-                now=str(now),
-                week_ago=str(week_ago),
-                fortnight_ago=str(fortnight_ago),
-                month_ago=str(month_ago),
-                week_tickets=week_tickets,
-                fortnight_tickets=fortnight_tickets,
-                month_tickets=month_tickets,
-                comments=comments,
-                week_comments=week_comments,
-                fortnight_comments=fortnight_comments,
-                month_comments=month_comments,
-                total=total,
-                open=open,
-                closed=closed,
-                globals=globals,
-                dates=dates,
+            now=str(now),
+            week_ago=str(week_ago),
+            fortnight_ago=str(fortnight_ago),
+            month_ago=str(month_ago),
+            week_tickets=week_tickets,
+            fortnight_tickets=fortnight_tickets,
+            month_tickets=month_tickets,
+            comments=comments,
+            week_comments=week_comments,
+            fortnight_comments=fortnight_comments,
+            month_comments=month_comments,
+            total=total,
+            open=open,
+            closed=closed,
+            globals=globals,
+            dates=dates,
         )
 
-
     @expose()
     @validate(W.subscribe_form)
     def subscribe(self, subscribe=None, unsubscribe=None):
@@ -984,6 +1038,7 @@ class RootController(BaseController, FeedController):
             M.Mailbox.unsubscribe()
         redirect(request.referer)
 
+
 class BinController(BaseController):
 
     def __init__(self, summary=None, app=None):
@@ -1034,13 +1089,14 @@ class BinController(BaseController):
         if bin is None:
             bin = TM.Bin(app_config_id=self.app.config._id, summary='')
             new_bin = bin
-        require(lambda:bin.app_config_id==self.app.config._id)
-        bin.summary=bin_form['summary']
-        bin.terms=bin_form['terms']
+        require(lambda: bin.app_config_id == self.app.config._id)
+        bin.summary = bin_form['summary']
+        bin.terms = bin_form['terms']
         try:
             # Test the search by running it
             with h.push_config(c, app=self.app):
-                search_artifact(TM.Ticket, bin.terms, rows=0, short_timeout=True)
+                search_artifact(TM.Ticket, bin.terms,
+                                rows=0, short_timeout=True)
         except SearchError as e:
             # Search threw an error.
             # Save the error on the bin object for displaying
@@ -1051,7 +1107,7 @@ class BinController(BaseController):
             M.session.artifact_orm_session.expunge(bin)
             # Render edit page with error messages
             return dict(bins=self.app.bins, count=len(self.app.bins),
-                    app=self.app, new_bin=new_bin, errors=True)
+                        app=self.app, new_bin=new_bin, errors=True)
         self.app.globals.invalidate_bin_counts()
         redirect('.')
 
@@ -1060,7 +1116,7 @@ class BinController(BaseController):
     @require_post()
     @validate(validators=dict(bin=V.Ming(TM.Bin)))
     def delbin(self, bin=None):
-        require(lambda:bin.app_config_id==self.app.config._id)
+        require(lambda: bin.app_config_id == self.app.config._id)
         bin.delete()
         redirect(request.referer)
 
@@ -1112,7 +1168,8 @@ class BinController(BaseController):
                         bin.terms = bin_form['terms']
                         try:
                             with h.push_config(c, app=self.app):
-                                search_artifact(TM.Ticket, bin.terms, rows=0, short_timeout=True)
+                                search_artifact(
+                                    TM.Ticket, bin.terms, rows=0, short_timeout=True)
                         except SearchError as e:
                             # Search threw an error.
                             # Save the error on the bin object for displaying
@@ -1134,12 +1191,14 @@ class BinController(BaseController):
             # There were errors in some of the search terms. Render the edit
             # page so the user can fix the errors.
             return dict(bins=saved_bins, count=len(bins), app=self.app,
-                    new_bin=new_bin, errors=errors)
+                        new_bin=new_bin, errors=errors)
         self.app.globals.invalidate_bin_counts()
         # No errors, redirect to search bin list page.
         redirect('.')
 
+
 class changelog(object):
+
     """
     A dict-like object which keeps log about what keys have been changed.
 
@@ -1168,7 +1227,7 @@ class changelog(object):
     """
 
     def __init__(self):
-        self.keys = [] # to track insertion order
+        self.keys = []  # to track insertion order
         self.originals = {}
         self.data = {}
 
@@ -1189,18 +1248,20 @@ class changelog(object):
                     t.append((key, (orig_value, curr_value)))
         return t
 
+
 class TicketController(BaseController, FeedController):
 
     def __init__(self, ticket_num=None):
         if ticket_num is not None:
             self.ticket_num = int(ticket_num)
             self.ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
-                                                    ticket_num=self.ticket_num)
+                                              ticket_num=self.ticket_num)
             if self.ticket is None:
                 self.ticket = TM.Ticket.query.get(
-                        app_config_id = c.app.config._id,
-                        import_id = ImportIdConverter.get().expand(ticket_num, c.app),
-                    )
+                    app_config_id=c.app.config._id,
+                    import_id=ImportIdConverter.get().expand(
+                        ticket_num, c.app),
+                )
                 if self.ticket is not None:
                     utils.permanent_redirect(self.ticket.url())
                 else:
@@ -1221,8 +1282,8 @@ class TicketController(BaseController, FeedController):
     @with_trailing_slash
     @expose('jinja:forgetracker:templates/tracker/ticket.html')
     @validate(dict(
-            page=validators.Int(if_empty=0, if_invalid=0),
-            limit=validators.Int(if_empty=10, if_invalid=10)))
+        page=validators.Int(if_empty=0, if_invalid=0),
+        limit=validators.Int(if_empty=10, if_invalid=10)))
     def index(self, page=0, limit=10, deleted=False, **kw):
         ticket_visible = self.ticket and not self.ticket.deleted
         if ticket_visible or has_access(self.ticket, 'delete'):
@@ -1267,7 +1328,7 @@ class TicketController(BaseController, FeedController):
     @h.vardec
     def update_ticket(self, **post_data):
         if not post_data.get('summary'):
-            flash('You must provide a Name','error')
+            flash('You must provide a Name', 'error')
             redirect('.')
         if 'labels' in post_data:
             post_data['labels'] = post_data['labels'].split(',')
@@ -1284,7 +1345,7 @@ class TicketController(BaseController, FeedController):
         # icky: handle custom fields like the non-widget form does
         if 'custom_fields' in data:
             for k in data['custom_fields']:
-                data['custom_fields.'+k] = data['custom_fields'][k]
+                data['custom_fields.' + k] = data['custom_fields'][k]
         self._update_ticket(data)
 
     @without_trailing_slash
@@ -1294,10 +1355,11 @@ class TicketController(BaseController, FeedController):
         require_access(self.ticket, 'delete')
         M.Shortlink.query.remove(dict(ref_id=self.ticket.index_id()))
         self.ticket.deleted = True
-        suffix = " {dt.hour}:{dt.minute}:{dt.second} {dt.day}-{dt.month}-{dt.year}".format(dt=datetime.utcnow())
+        suffix = " {dt.hour}:{dt.minute}:{dt.second} {dt.day}-{dt.month}-{dt.year}".format(
+            dt=datetime.utcnow())
         self.ticket.summary += suffix
         flash('Ticket successfully deleted')
-        return dict(location='../'+str(self.ticket.ticket_num))
+        return dict(location='../' + str(self.ticket.ticket_num))
 
     @without_trailing_slash
     @expose('json:')
@@ -1305,10 +1367,11 @@ class TicketController(BaseController, FeedController):
     def undelete(self):
         require_access(self.ticket, 'delete')
         self.ticket.deleted = False
-        self.ticket.summary = re.sub(' \d+:\d+:\d+ \d+-\d+-\d+$','',self.ticket.summary)
+        self.ticket.summary = re.sub(
+            ' \d+:\d+:\d+ \d+-\d+-\d+$', '', self.ticket.summary)
         M.Shortlink.from_artifact(self.ticket)
         flash('Ticket successfully restored')
-        return dict(location='../'+str(self.ticket.ticket_num))
+        return dict(location='../' + str(self.ticket.ticket_num))
 
     @require_post()
     def _update_ticket(self, post_data):
@@ -1347,7 +1410,7 @@ class TicketController(BaseController, FeedController):
                     # restrict custom user field values to project members
                     user = c.project.user_in_project(value)
                     value = user.username \
-                            if user and user != M.User.anonymous() else ''
+                        if user and user != M.User.anonymous() else ''
             elif cf.name == '_milestone' and cf.name in post_data:
                 value = post_data[cf.name]
             # unchecked boolean won't be passed in, so make it False here
@@ -1361,8 +1424,8 @@ class TicketController(BaseController, FeedController):
             if value is not None:
                 def cf_val(cf):
                     return self.ticket.get_custom_user(cf.name) \
-                           if cf.type == 'user' \
-                           else self.ticket.custom_fields.get(cf.name)
+                        if cf.type == 'user' \
+                        else self.ticket.custom_fields.get(cf.name)
                 changes[cf.label] = cf_val(cf)
                 self.ticket.custom_fields[cf.name] = value
                 changes[cf.label] = cf_val(cf)
@@ -1377,7 +1440,7 @@ class TicketController(BaseController, FeedController):
         if comment:
             self.ticket.discussion_thread.post(text=comment)
         g.director.create_activity(c.user, 'modified', self.ticket,
-                related_nodes=[c.project])
+                                   related_nodes=[c.project])
         c.app.globals.invalidate_bin_counts()
         redirect('.')
 
@@ -1428,7 +1491,8 @@ class TicketController(BaseController, FeedController):
                 redirect(request.referer)
 
             if not has_access(tracker, 'admin')():
-                flash('You should have admin access to destination tracker', 'error')
+                flash('You should have admin access to destination tracker',
+                      'error')
                 redirect(request.referer)
 
             new_ticket = self.ticket.move(tracker)
@@ -1447,11 +1511,13 @@ class AttachmentController(ac.AttachmentController):
     AttachmentClass = TM.TicketAttachment
     edit_perm = 'update'
 
+
 class AttachmentsController(ac.AttachmentsController):
     AttachmentControllerClass = AttachmentController
 
 NONALNUM_RE = re.compile(r'\W+')
 
+
 class TrackerAdminController(DefaultAdminController):
 
     def __init__(self, app):
@@ -1472,7 +1538,8 @@ class TrackerAdminController(DefaultAdminController):
     def fields(self, **kw):
         c.form = W.field_admin
         c.app = self.app
-        columns = dict((column, get_label(column)) for column in self.app.globals['show_in_search'].keys())
+        columns = dict((column, get_label(column))
+                       for column in self.app.globals['show_in_search'].keys())
         return dict(app=self.app, globals=self.app.globals, columns=columns)
 
     @expose('jinja:forgetracker:templates/tracker/admin_options.html')
@@ -1480,8 +1547,10 @@ class TrackerAdminController(DefaultAdminController):
         c.options_admin = W.options_admin
         return dict(app=self.app, form_value=dict(
             EnableVoting=self.app.config.options.get('EnableVoting'),
-            TicketMonitoringType=self.app.config.options.get('TicketMonitoringType'),
-            TicketMonitoringEmail=self.app.config.options.get('TicketMonitoringEmail'),
+            TicketMonitoringType=self.app.config.options.get(
+                'TicketMonitoringType'),
+            TicketMonitoringEmail=self.app.config.options.get(
+                'TicketMonitoringEmail'),
             TicketHelpNew=self.app.config.options.get('TicketHelpNew'),
             TicketHelpSearch=self.app.config.options.get('TicketHelpSearch'),
         ))
@@ -1491,7 +1560,7 @@ class TrackerAdminController(DefaultAdminController):
     @validate(W.options_admin, error_handler=options)
     def set_options(self, **kw):
         require_access(self.app, 'configure')
-        for k,v in kw.iteritems():
+        for k, v in kw.iteritems():
             self.app.config.options[k] = v
         flash('Options updated')
         redirect(c.project.url() + 'admin/tools')
@@ -1515,8 +1584,8 @@ class TrackerAdminController(DefaultAdminController):
     @require_post()
     @h.vardec
     def set_custom_fields(self, **post_data):
-        self.app.globals.open_status_names=post_data['open_status_names']
-        self.app.globals.closed_status_names=post_data['closed_status_names']
+        self.app.globals.open_status_names = post_data['open_status_names']
+        self.app.globals.closed_status_names = post_data['closed_status_names']
         custom_fields = post_data.get('custom_fields', [])
         for field in custom_fields:
             if 'name' not in field or not field['name']:
@@ -1530,54 +1599,54 @@ class TrackerAdminController(DefaultAdminController):
         posted_milestone_fld_names = set(
             cf['name'] for cf in custom_fields if cf['type'] == 'milestone')
         deleted_milestone_fld_names = existing_milestone_fld_names -\
-                                      posted_milestone_fld_names
+            posted_milestone_fld_names
         added_milestone_fld_names = posted_milestone_fld_names -\
-                                    existing_milestone_fld_names
+            existing_milestone_fld_names
 
         # TODO: make milestone custom fields renameable
         for milestone_fld_name in existing_milestone_fld_names |\
-                                  posted_milestone_fld_names:
+                posted_milestone_fld_names:
             if milestone_fld_name in deleted_milestone_fld_names:
                 # Milestone field deleted, remove it from tickets
                 tickets = TM.Ticket.query.find({
                     'app_config_id': self.app.config._id,
                     'custom_fields.%s' % milestone_fld_name:
-                        {'$exists': True}}).all()
+                    {'$exists': True}}).all()
                 for t in tickets:
                     del t.custom_fields[milestone_fld_name]
             elif milestone_fld_name in added_milestone_fld_names:
                 # Milestone field added, sanitize milestone names
                 milestone_fld = [
-                        cf for cf in custom_fields
-                        if cf['type'] == 'milestone'
-                        and cf['name'] == milestone_fld_name][0]
+                    cf for cf in custom_fields
+                    if cf['type'] == 'milestone'
+                    and cf['name'] == milestone_fld_name][0]
                 for milestone in milestone_fld.get('milestones', []):
                     milestone['name'] = milestone['name'].replace("/", "-")
             else:
                 # Milestone field updated, sanitize milestone names and update
                 # tickets if milestone names have changed
                 existing_milestone_fld = [
-                        mf for mf in self.app.globals.milestone_fields
-                        if mf.name == milestone_fld_name][0]
+                    mf for mf in self.app.globals.milestone_fields
+                    if mf.name == milestone_fld_name][0]
                 posted_milestone_fld = [
-                        cf for cf in custom_fields
-                        if cf['type'] == 'milestone'
-                        and cf['name'] == milestone_fld_name][0]
+                    cf for cf in custom_fields
+                    if cf['type'] == 'milestone'
+                    and cf['name'] == milestone_fld_name][0]
                 existing_milestone_names = set(
-                        m.name for m in
-                        existing_milestone_fld.get('milestones', []))
+                    m.name for m in
+                    existing_milestone_fld.get('milestones', []))
                 old_posted_milestone_names = set(
-                        m['old_name']
-                        for m in posted_milestone_fld.get('milestones', [])
-                        if m.get('old_name', None))
+                    m['old_name']
+                    for m in posted_milestone_fld.get('milestones', [])
+                    if m.get('old_name', None))
                 deleted_milestone_names = existing_milestone_names -\
-                                          old_posted_milestone_names
+                    old_posted_milestone_names
 
                 # Milestone deleted, remove it from tickets
                 tickets = TM.Ticket.query.find({
                     'app_config_id': self.app.config._id,
                     'custom_fields.%s' % milestone_fld_name:
-                        {'$in': list(deleted_milestone_names)}}).all()
+                    {'$in': list(deleted_milestone_names)}}).all()
                 for t in tickets:
                     t.custom_fields[milestone_fld_name] = ''
 
@@ -1585,7 +1654,7 @@ class TrackerAdminController(DefaultAdminController):
                     milestone['name'] = milestone['name'].replace("/", "-")
                     old_name = milestone.pop('old_name', None)
                     if old_name and old_name in existing_milestone_names \
-                                and old_name != milestone['name']:
+                            and old_name != milestone['name']:
                         # Milestone name updated, need to update tickets
                         tickets = TM.Ticket.query.find({
                             'app_config_id': self.app.config._id,
@@ -1593,12 +1662,13 @@ class TrackerAdminController(DefaultAdminController):
                             old_name}).all()
                         for t in tickets:
                             t.custom_fields[milestone_fld_name] = \
-                                    milestone['name']
+                                milestone['name']
 
-        self.app.globals.custom_fields=custom_fields
+        self.app.globals.custom_fields = custom_fields
         flash('Fields updated')
         redirect(request.referer)
 
+
 class RootRestController(BaseController):
 
     def __init__(self):
@@ -1616,7 +1686,8 @@ class RootRestController(BaseController):
         results['tracker_config'] = c.app.config.__json__()
         if not has_access(c.app, 'admin', c.user):
             try:
-                del results['tracker_config']['options']['TicketMonitoringEmail']
+                del results['tracker_config'][
+                    'options']['TicketMonitoringEmail']
             except KeyError:
                 pass
         results['milestones'] = c.app.milestones
@@ -1636,7 +1707,7 @@ class RootRestController(BaseController):
         ticket = TM.Ticket.new()
         ticket.update(ticket_form)
         c.app.globals.invalidate_bin_counts()
-        redirect(str(ticket.ticket_num)+'/')
+        redirect(str(ticket.ticket_num) + '/')
 
     @expose('json:')
     def validate_import(self, doc=None, options=None, **post_data):
@@ -1654,7 +1725,8 @@ class RootRestController(BaseController):
         with h.notifications_disabled(c.project):
             require_access(c.project, 'admin')
             if c.api_token.get_capability('import') != [c.project.neighborhood.name, c.project.shortname]:
-                log.error('Import capability is not enabled for %s', c.project.shortname)
+                log.error('Import capability is not enabled for %s',
+                          c.project.shortname)
                 raise exc.HTTPForbidden(detail='Import is not allowed')
 
             migrator = ImportSupport()
@@ -1667,7 +1739,8 @@ class RootRestController(BaseController):
 
     @expose('json:')
     def search(self, q=None, limit=100, page=0, sort=None, **kw):
-        results = TM.Ticket.paged_search(c.app.config, c.user, q, limit, page, sort, show_deleted=False)
+        results = TM.Ticket.paged_search(
+            c.app.config, c.user, q, limit, page, sort, show_deleted=False)
         results['tickets'] = [dict(ticket_num=t.ticket_num, summary=t.summary)
                               for t in results['tickets']]
         return results
@@ -1676,19 +1749,21 @@ class RootRestController(BaseController):
     def _lookup(self, ticket_num, *remainder):
         return TicketRestController(ticket_num), remainder
 
+
 class TicketRestController(BaseController):
 
     def __init__(self, ticket_num):
         if ticket_num is not None:
             self.ticket_num = int(ticket_num)
             self.ticket = TM.Ticket.query.get(app_config_id=c.app.config._id,
-                                                    ticket_num=self.ticket_num)
+                                              ticket_num=self.ticket_num)
             if self.ticket is None:
                 moved_ticket = TM.MovedTicket.query.get(
                     app_config_id=c.app.config._id,
                     ticket_num=self.ticket_num)
                 if moved_ticket:
-                    utils.permanent_redirect('/rest' + moved_ticket.moved_to_url)
+                    utils.permanent_redirect(
+                        '/rest' + moved_ticket.moved_to_url)
 
                 raise exc.HTTPNotFound()
 
@@ -1711,6 +1786,7 @@ class TicketRestController(BaseController):
         c.app.globals.invalidate_bin_counts()
         redirect('.')
 
+
 class MilestoneController(BaseController):
 
     def __init__(self, root, field, milestone):
@@ -1730,29 +1806,31 @@ class MilestoneController(BaseController):
         self.milestone = m
         self.progress_key = '%s:%s' % (fld.name, m.name.replace(':', '\:'))
         self.mongo_query = {
-            'custom_fields.%s' % fld.name: m.name }
+            'custom_fields.%s' % fld.name: m.name}
 
     @with_trailing_slash
     @h.vardec
     @expose('jinja:forgetracker:templates/tracker/milestone.html')
     @validate(validators=dict(
-            limit=validators.Int(if_invalid=None),
-            page=validators.Int(if_empty=0, if_invalid=0),
-            sort=validators.UnicodeString(if_empty=None),
-            deleted=validators.StringBool(if_empty=False)))
+        limit=validators.Int(if_invalid=None),
+        page=validators.Int(if_empty=0, if_invalid=0),
+        sort=validators.UnicodeString(if_empty=None),
+        deleted=validators.StringBool(if_empty=False)))
     def index(self, q=None, columns=None, page=0, query=None, sort=None, deleted=False, **kw):
         require(has_access(c.app, 'read'))
         show_deleted = [False]
         if deleted and has_access(c.app, 'delete'):
-            show_deleted = [False,True]
+            show_deleted = [False, True]
 
         result = TM.Ticket.paged_query(c.app.config, c.user,
-            self.mongo_query, page=page, sort=sort, deleted={'$in':show_deleted}, **kw)
+                                       self.mongo_query, page=page, sort=sort, deleted={'$in': show_deleted}, **kw)
         result['columns'] = columns or mongo_columns()
-        result['sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
+        result[
+            'sortable_custom_fields'] = c.app.globals.sortable_custom_fields_shown_in_search()
         result['allow_edit'] = has_access(c.app, 'update')()
         result['allow_move'] = has_access(c.app, 'admin')()
-        result['help_msg'] = c.app.config.options.get('TicketHelpSearch','').strip()
+        result['help_msg'] = c.app.config.options.get(
+            'TicketHelpSearch', '').strip()
         result['deleted'] = deleted
         progress = c.app.globals.milestone_count(self.progress_key)
         result.pop('q')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeTracker/forgetracker/widgets/admin.py
----------------------------------------------------------------------
diff --git a/ForgeTracker/forgetracker/widgets/admin.py b/ForgeTracker/forgetracker/widgets/admin.py
index 09bc443..8ad8d1c 100644
--- a/ForgeTracker/forgetracker/widgets/admin.py
+++ b/ForgeTracker/forgetracker/widgets/admin.py
@@ -21,11 +21,12 @@ from formencode import validators as fev
 from allura.lib.widgets import forms as ff
 from allura.lib.widgets import form_fields as ffw
 
+
 class OptionsAdmin(ff.AdminForm):
-    template='jinja:forgetracker:templates/tracker_widgets/options_admin.html'
-    defaults=dict(
+    template = 'jinja:forgetracker:templates/tracker_widgets/options_admin.html'
+    defaults = dict(
         ff.ForgeForm.defaults,
-        submit_text = 'Save')
+        submit_text='Save')
 
     @property
     def fields(self):
@@ -43,11 +44,15 @@ class OptionsAdmin(ff.AdminForm):
                 label='Send notifications for',
                 grid_width='7',
                 options=[
-                    ew.Option(py_value='NewTicketsOnly', label='New tickets only'),
-                    ew.Option(py_value='NewPublicTicketsOnly', label='New public tickets only'),
-                    ew.Option(py_value='AllTicketChanges', label='All ticket changes'),
-                    ew.Option(py_value='AllPublicTicketChanges', label='All public ticket changes'),
-                    ]),
+                    ew.Option(py_value='NewTicketsOnly',
+                              label='New tickets only'),
+                    ew.Option(py_value='NewPublicTicketsOnly',
+                              label='New public tickets only'),
+                    ew.Option(py_value='AllTicketChanges',
+                              label='All ticket changes'),
+                    ew.Option(py_value='AllPublicTicketChanges',
+                              label='All public ticket changes'),
+                ]),
             ffw.MarkdownEdit(
                 name='TicketHelpNew',
                 label='Help text to display on new ticket page',