Posted to commits@allura.apache.org by tv...@apache.org on 2014/01/10 19:19:45 UTC

[21/32] PEP8 cleanup
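
This patch in the series is mechanical restyling: continuation lines are re-aligned to PEP8's visual-indent convention (the pep8 tool's E12x checks), long calls are rewrapped after the opening parenthesis, comments get a space after the '#', one-statement-per-line is enforced, stray double spaces around '=' are removed, and blank lines around definitions are normalized. A representative before/after pair from the first hunk below:

    # before: continuation line under-indented relative to the opening paren
    log.info("Skipping %r: wrong type (%s)", c.app.repo,
            c.app.repo.tool.lower())

    # after: continuation aligned under the first argument
    log.info("Skipping %r: wrong type (%s)", c.app.repo,
             c.app.repo.tool.lower())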

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/refreshrepo.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/refreshrepo.py b/Allura/allura/scripts/refreshrepo.py
index f55a054..1e0c3ee 100644
--- a/Allura/allura/scripts/refreshrepo.py
+++ b/Allura/allura/scripts/refreshrepo.py
@@ -31,6 +31,7 @@ log = logging.getLogger(__name__)
 
 
 class RefreshRepo(ScriptTask):
+
     @classmethod
     def execute(cls, options):
         q_project = {}
@@ -62,73 +63,92 @@ class RefreshRepo(ScriptTask):
                         continue
                     if c.app.repo.tool.lower() not in options.repo_types:
                         log.info("Skipping %r: wrong type (%s)", c.app.repo,
-                                c.app.repo.tool.lower())
+                                 c.app.repo.tool.lower())
                         continue
 
                     if options.clean:
                         ci_ids = list(c.app.repo.all_commit_ids())
-                        log.info("Deleting mongo data for %i commits...", len(ci_ids))
+                        log.info("Deleting mongo data for %i commits...",
+                                 len(ci_ids))
                         # like the tree_ids themselves below, we need to process these in
                         # chunks to avoid hitting the BSON max size limit
                         tree_ids = []
                         for ci_ids_chunk in chunked_list(ci_ids, 3000):
                             tree_ids.extend([
-                                    tree_id for doc in
-                                    M.repo.TreesDoc.m.find({"_id": {"$in": ci_ids_chunk}},
-                                                           {"tree_ids": 1})
-                                    for tree_id in doc.get("tree_ids", [])])
-
-                            i = M.repo.CommitDoc.m.find({"_id": {"$in": ci_ids_chunk}}).count()
+                                tree_id for doc in
+                                M.repo.TreesDoc.m.find(
+                                    {"_id": {"$in": ci_ids_chunk}},
+                                    {"tree_ids": 1})
+                                for tree_id in doc.get("tree_ids", [])])
+
+                            i = M.repo.CommitDoc.m.find(
+                                {"_id": {"$in": ci_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i CommitDoc docs...", i)
-                                M.repo.CommitDoc.m.remove({"_id": {"$in": ci_ids_chunk}})
+                                M.repo.CommitDoc.m.remove(
+                                    {"_id": {"$in": ci_ids_chunk}})
 
                         # delete these in chunks, otherwise the query doc can
                         # exceed the max BSON size limit (16MB at the moment)
                         for tree_ids_chunk in chunked_list(tree_ids, 300000):
-                            i = M.repo.TreeDoc.m.find({"_id": {"$in": tree_ids_chunk}}).count()
+                            i = M.repo.TreeDoc.m.find(
+                                {"_id": {"$in": tree_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i TreeDoc docs...", i)
-                                M.repo.TreeDoc.m.remove({"_id": {"$in": tree_ids_chunk}})
+                                M.repo.TreeDoc.m.remove(
+                                    {"_id": {"$in": tree_ids_chunk}})
                         del tree_ids
 
                         # delete these after TreeDoc and LastCommitDoc so that if
                         # we crash, we don't lose the ability to delete those
                         for ci_ids_chunk in chunked_list(ci_ids, 3000):
                             # delete TreesDocs
-                            i = M.repo.TreesDoc.m.find({"_id": {"$in": ci_ids_chunk}}).count()
+                            i = M.repo.TreesDoc.m.find(
+                                {"_id": {"$in": ci_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i TreesDoc docs...", i)
-                                M.repo.TreesDoc.m.remove({"_id": {"$in": ci_ids_chunk}})
+                                M.repo.TreesDoc.m.remove(
+                                    {"_id": {"$in": ci_ids_chunk}})
 
                             # delete LastCommitDocs
-                            i = M.repo.LastCommitDoc.m.find(dict(commit_ids={'$in': ci_ids_chunk})).count()
+                            i = M.repo.LastCommitDoc.m.find(
+                                dict(commit_ids={'$in': ci_ids_chunk})).count()
                             if i:
-                                log.info("Deleting %i remaining LastCommitDoc docs, by repo id...", i)
-                                M.repo.LastCommitDoc.m.remove(dict(commit_ids={'$in': ci_ids_chunk}))
+                                log.info(
+                                    "Deleting %i remaining LastCommitDoc docs, by repo id...", i)
+                                M.repo.LastCommitDoc.m.remove(
+                                    dict(commit_ids={'$in': ci_ids_chunk}))
 
-                            i = M.repo.DiffInfoDoc.m.find({"_id": {"$in": ci_ids_chunk}}).count()
+                            i = M.repo.DiffInfoDoc.m.find(
+                                {"_id": {"$in": ci_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i DiffInfoDoc docs...", i)
-                                M.repo.DiffInfoDoc.m.remove({"_id": {"$in": ci_ids_chunk}})
+                                M.repo.DiffInfoDoc.m.remove(
+                                    {"_id": {"$in": ci_ids_chunk}})
 
-                            i = M.repo.CommitRunDoc.m.find({"commit_ids": {"$in": ci_ids_chunk}}).count()
+                            i = M.repo.CommitRunDoc.m.find(
+                                {"commit_ids": {"$in": ci_ids_chunk}}).count()
                             if i:
                                 log.info("Deleting %i CommitRunDoc docs...", i)
-                                M.repo.CommitRunDoc.m.remove({"commit_ids": {"$in": ci_ids_chunk}})
+                                M.repo.CommitRunDoc.m.remove(
+                                    {"commit_ids": {"$in": ci_ids_chunk}})
                         del ci_ids
 
                     try:
                         if options.all:
-                            log.info('Refreshing ALL commits in %r', c.app.repo)
+                            log.info('Refreshing ALL commits in %r',
+                                     c.app.repo)
                         else:
-                            log.info('Refreshing NEW commits in %r', c.app.repo)
+                            log.info('Refreshing NEW commits in %r',
+                                     c.app.repo)
                         if options.profile:
                             import cProfile
-                            cProfile.runctx('c.app.repo.refresh(options.all, notify=options.notify)',
-                                    globals(), locals(), 'refresh.profile')
+                            cProfile.runctx(
+                                'c.app.repo.refresh(options.all, notify=options.notify)',
+                                globals(), locals(), 'refresh.profile')
                         else:
-                            c.app.repo.refresh(options.all, notify=options.notify)
+                            c.app.repo.refresh(
+                                options.all, notify=options.notify)
                     except:
                         log.exception('Error refreshing %r', c.app.repo)
             ThreadLocalORMSession.flush_all()
@@ -141,41 +161,44 @@ class RefreshRepo(ScriptTask):
                 repo_type = repo_type.strip()
                 if repo_type not in ['svn', 'git', 'hg']:
                     raise argparse.ArgumentTypeError(
-                            '{} is not a valid repo type.'.format(repo_type))
+                        '{} is not a valid repo type.'.format(repo_type))
                 repo_types.append(repo_type)
             return repo_types
 
         parser = argparse.ArgumentParser(description='Scan repos on filesytem and '
-                'update repo metadata in MongoDB. Run for all repos (no args), '
-                'or restrict by neighborhood, project, or code tool mount point.')
+                                         'update repo metadata in MongoDB. Run for all repos (no args), '
+                                         'or restrict by neighborhood, project, or code tool mount point.')
         parser.add_argument('--nbhd', action='store', default='', dest='nbhd',
-                help='Restrict update to a particular neighborhood, e.g. /p/.')
-        parser.add_argument('--project', action='store', default='', dest='project',
-                help='Restrict update to a particular project. To specify a '
-                'subproject, use a slash: project/subproject.')
+                            help='Restrict update to a particular neighborhood, e.g. /p/.')
+        parser.add_argument(
+            '--project', action='store', default='', dest='project',
+            help='Restrict update to a particular project. To specify a '
+            'subproject, use a slash: project/subproject.')
         parser.add_argument('--project-regex', action='store', default='',
-                dest='project_regex',
-                help='Restrict update to projects for which the shortname matches '
-                'the provided regex.')
-        parser.add_argument('--repo-types', action='store', type=repo_type_list,
-                default=['svn', 'git', 'hg'], dest='repo_types',
-                help='Only refresh repos of the given type(s). Defaults to: '
-                'svn,git,hg. Example: --repo-types=git,hg')
+                            dest='project_regex',
+                            help='Restrict update to projects for which the shortname matches '
+                            'the provided regex.')
+        parser.add_argument(
+            '--repo-types', action='store', type=repo_type_list,
+            default=['svn', 'git', 'hg'], dest='repo_types',
+            help='Only refresh repos of the given type(s). Defaults to: '
+            'svn,git,hg. Example: --repo-types=git,hg')
         parser.add_argument('--mount-point', default='', dest='mount_point',
-                help='Restrict update to repos at the given tool mount point. ')
+                            help='Restrict update to repos at the given tool mount point. ')
         parser.add_argument('--clean', action='store_true', dest='clean',
-                default=False, help='Remove repo-related mongo docs (for '
-                'project(s) being refreshed only) before doing the refresh.')
-        parser.add_argument('--all', action='store_true', dest='all', default=False,
-                help='Refresh all commits (not just the ones that are new).')
+                            default=False, help='Remove repo-related mongo docs (for '
+                            'project(s) being refreshed only) before doing the refresh.')
+        parser.add_argument(
+            '--all', action='store_true', dest='all', default=False,
+            help='Refresh all commits (not just the ones that are new).')
         parser.add_argument('--notify', action='store_true', dest='notify',
-                default=False, help='Send email notifications of new commits.')
+                            default=False, help='Send email notifications of new commits.')
         parser.add_argument('--dry-run', action='store_true', dest='dry_run',
-                default=False, help='Log names of projects that would have their '
-                'repos refreshed, but do not perform the actual refresh.')
+                            default=False, help='Log names of projects that would have their '
+                            'repos refreshed, but do not perform the actual refresh.')
         parser.add_argument('--profile', action='store_true', dest='profile',
-                default=False, help='Enable the profiler (slow). Will log '
-                'profiling output to ./refresh.profile')
+                            default=False, help='Enable the profiler (slow). Will log '
+                            'profiling output to ./refresh.profile')
         return parser
 
 

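A note on the idiom preserved above: Mongo query documents must stay under the BSON size cap (16MB at the time, per the in-code comments), so the script slices big id lists with chunked_list from allura.lib before issuing $in queries. A minimal sketch of the idiom; the helper body here is an assumed equivalent, not Allura's exact implementation:

    def chunked_list(l, n):
        # yield successive n-item slices of l (assumed stand-in for
        # Allura's chunked_list helper)
        for i in range(0, len(l), n):
            yield l[i:i + n]

    ci_ids = ['deadbeef%04d' % i for i in range(10000)]  # stand-in ids
    for chunk in chunked_list(ci_ids, 3000):
        # the real code issues e.g.
        # M.repo.CommitDoc.m.remove({'_id': {'$in': chunk}}) per slice,
        # so no single query document approaches the BSON limit
        print len(chunk)  # 3000, 3000, 3000, 1000
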
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/scripttask.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/scripttask.py b/Allura/allura/scripts/scripttask.py
index 229bbe6..0b9d2be 100644
--- a/Allura/allura/scripts/scripttask.py
+++ b/Allura/allura/scripts/scripttask.py
@@ -55,14 +55,16 @@ from allura.lib.decorators import task
 log = logging.getLogger(__name__)
 
 
-
 class ScriptTask(object):
+
     """Base class for a command-line script that is also executable as a task."""
 
     class __metaclass__(type):
+
         @property
         def __doc__(cls):
             return cls.parser().format_help()
+
         def __new__(meta, classname, bases, classDict):
             return task(type.__new__(meta, classname, bases, classDict))
 

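The added blank lines are spacing fixes, but ScriptTask itself deserves a gloss: its Python 2 __metaclass__ wraps every subclass with the @task decorator at class-creation time, and computes __doc__ on demand from the subclass's argparse parser, so the task's help text always matches its CLI. A stripped-down sketch of the __doc__ half of that pattern (class names here are illustrative, and the task-wrapping __new__ is omitted):

    import argparse

    class _Meta(type):
        @property
        def __doc__(cls):
            # accessing SomeScript.__doc__ returns the argparse usage text
            return cls.parser().format_help()

    class ExampleScript(object):
        __metaclass__ = _Meta  # Python 2 metaclass hook, as in the diff

        @classmethod
        def parser(cls):
            return argparse.ArgumentParser(description='Example script.')

    print ExampleScript.__doc__  # usage: ... Example script.
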
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/trac_export.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/trac_export.py b/Allura/allura/scripts/trac_export.py
index 8be9fe0..f5cd7c3 100644
--- a/Allura/allura/scripts/trac_export.py
+++ b/Allura/allura/scripts/trac_export.py
@@ -49,12 +49,18 @@ def parse_options():
     optparser = OptionParser(usage=''' %prog <Trac URL>
 
 Export ticket data from a Trac instance''')
-    optparser.add_option('-o', '--out-file', dest='out_filename', help='Write to file (default stdout)')
-    optparser.add_option('--no-attachments', dest='do_attachments', action='store_false', default=True, help='Export attachment info')
-    optparser.add_option('--only-tickets', dest='only_tickets', action='store_true', help='Export only ticket list')
-    optparser.add_option('--start', dest='start_id', type='int', default=1, help='Start with given ticket numer (or next accessible)')
-    optparser.add_option('--limit', dest='limit', type='int', default=None, help='Limit number of tickets')
-    optparser.add_option('-v', '--verbose', dest='verbose', action='store_true', help='Verbose operation')
+    optparser.add_option('-o', '--out-file', dest='out_filename',
+                         help='Write to file (default stdout)')
+    optparser.add_option('--no-attachments', dest='do_attachments',
+                         action='store_false', default=True, help='Export attachment info')
+    optparser.add_option('--only-tickets', dest='only_tickets',
+                         action='store_true', help='Export only ticket list')
+    optparser.add_option('--start', dest='start_id', type='int', default=1,
+                         help='Start with given ticket numer (or next accessible)')
+    optparser.add_option('--limit', dest='limit', type='int',
+                         default=None, help='Limit number of tickets')
+    optparser.add_option('-v', '--verbose', dest='verbose',
+                         action='store_true', help='Verbose operation')
     options, args = optparser.parse_args()
     if len(args) != 1:
         optparser.error("Wrong number of arguments.")
@@ -65,8 +71,9 @@ class TracExport(object):
 
     PAGE_SIZE = 100
     TICKET_URL = 'ticket/%d'
-    QUERY_MAX_ID_URL  = 'query?col=id&order=id&desc=1&max=2'
-    QUERY_BY_PAGE_URL = 'query?col=id&col=time&col=changetime&order=id&max=' + str(PAGE_SIZE)+ '&page=%d'
+    QUERY_MAX_ID_URL = 'query?col=id&order=id&desc=1&max=2'
+    QUERY_BY_PAGE_URL = 'query?col=id&col=time&col=changetime&order=id&max=' + \
+        str(PAGE_SIZE) + '&page=%d'
     ATTACHMENT_LIST_URL = 'attachment/ticket/%d/'
     ATTACHMENT_URL = 'raw-attachment/ticket/%d/%s'
 
@@ -108,7 +115,7 @@ class TracExport(object):
         if type is None:
             return url
         glue = '&' if '?' in suburl else '?'
-        return  url + glue + 'format=' + type
+        return url + glue + 'format=' + type
 
     def log_url(self, url):
         log.info(url)
@@ -134,7 +141,8 @@ class TracExport(object):
         # telling that access denied. So, we'll emulate 403 ourselves.
         # TODO: currently, any non-csv result treated as 403.
         if not f.info()['Content-Type'].startswith('text/csv'):
-            raise urllib2.HTTPError(url, 403, 'Forbidden - emulated', f.info(), f)
+            raise urllib2.HTTPError(
+                url, 403, 'Forbidden - emulated', f.info(), f)
         return f
 
     def parse_ticket(self, id):
@@ -154,12 +162,15 @@ class TracExport(object):
         d = BeautifulSoup(urlopen(url))
         self.clean_missing_wiki_links(d)
         desc = d.find('div', 'description').find('div', 'searchable')
-        ticket['description'] = html2text.html2text(desc.renderContents('utf8').decode('utf8')) if desc else ''
+        ticket['description'] = html2text.html2text(
+            desc.renderContents('utf8').decode('utf8')) if desc else ''
         comments = []
         for comment in d.findAll('form', action='#comment'):
             c = {}
-            c['submitter'] = re.sub(r'.* by ', '', comment.find('h3', 'change').text).strip()
-            c['date'] = self.trac2z_date(comment.find('a', 'timeline')['title'].replace(' in Timeline', ''))
+            c['submitter'] = re.sub(
+                r'.* by ', '', comment.find('h3', 'change').text).strip()
+            c['date'] = self.trac2z_date(
+                comment.find('a', 'timeline')['title'].replace(' in Timeline', ''))
             changes = unicode(comment.find('ul', 'changes') or '')
             body = comment.find('div', 'comment')
             body = body.renderContents('utf8').decode('utf8') if body else ''
@@ -189,18 +200,22 @@ class TracExport(object):
             size_s = attach.span['title']
             d['size'] = int(self.match_pattern(SIZE_PATTERN, size_s))
             timestamp_s = attach.find('a', {'class': 'timeline'})['title']
-            d['date'] = self.trac2z_date(self.match_pattern(TIMESTAMP_PATTERN, timestamp_s))
-            d['by'] = attach.find(text=re.compile('added by')).nextSibling.renderContents()
+            d['date'] = self.trac2z_date(
+                self.match_pattern(TIMESTAMP_PATTERN, timestamp_s))
+            d['by'] = attach.find(
+                text=re.compile('added by')).nextSibling.renderContents()
             d['description'] = ''
             # Skip whitespace
             while attach.nextSibling and type(attach.nextSibling) is NavigableString:
                 attach = attach.nextSibling
-            # if there's a description, there will be a <dd> element, other immediately next <dt>
+            # if there's a description, there will be a <dd> element, other
+            # immediately next <dt>
             if attach.nextSibling and attach.nextSibling.name == 'dd':
                 desc_el = attach.nextSibling
                 if desc_el:
                     # TODO: Convert to Allura link syntax as needed
-                    d['description'] = ''.join(desc_el.findAll(text=True)).strip()
+                    d['description'] = ''.join(
+                        desc_el.findAll(text=True)).strip()
             list.append(d)
         return list
 
@@ -245,7 +260,8 @@ class TracExport(object):
         for r in reader:
             if r and r[0].isdigit():
                 id = int(r[0])
-                extra = {'date': self.trac2z_date(r[1]), 'date_updated': self.trac2z_date(r[2])}
+                extra = {'date': self.trac2z_date(
+                    r[1]), 'date_updated': self.trac2z_date(r[2])}
                 res.append((id, extra))
         self.page += 1
 
@@ -276,6 +292,7 @@ class TracExport(object):
 
 
 class DateJSONEncoder(json.JSONEncoder):
+
     def default(self, obj):
         if isinstance(obj, time.struct_time):
             return time.strftime('%Y-%m-%dT%H:%M:%SZ', obj)
@@ -283,9 +300,9 @@ class DateJSONEncoder(json.JSONEncoder):
 
 
 def export(url, start_id=1, verbose=False, do_attachments=True,
-        only_tickets=False, limit=None):
+           only_tickets=False, limit=None):
     ex = TracExport(url, start_id=start_id,
-            verbose=verbose, do_attachments=do_attachments)
+                    verbose=verbose, do_attachments=do_attachments)
 
     doc = [t for t in islice(ex, limit)]
 
@@ -304,7 +321,8 @@ def main():
     out_file = sys.stdout
     if options.out_filename:
         out_file = open(options.out_filename, 'w')
-    out_file.write(json.dumps(doc, cls=DateJSONEncoder, indent=2, sort_keys=True))
+    out_file.write(
+        json.dumps(doc, cls=DateJSONEncoder, indent=2, sort_keys=True))
     # It's bad habit not to terminate lines
     out_file.write('\n')
 

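Alongside the reflow, DateJSONEncoder is the standard json extension hook: default() converts time.struct_time values to ISO-8601 Zulu strings, with everything else falling through to the base class (the fallback line is elided by the hunk and assumed here from the usual idiom). A self-contained sketch matching the cls= usage in main():

    import json
    import time

    class DateJSONEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, time.struct_time):
                return time.strftime('%Y-%m-%dT%H:%M:%SZ', obj)
            # assumed fallback, standard for JSONEncoder subclasses
            return json.JSONEncoder.default(self, obj)

    print json.dumps({'date': time.gmtime()}, cls=DateJSONEncoder,
                     indent=2, sort_keys=True)
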
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/update_checkout_url.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/update_checkout_url.py b/Allura/allura/scripts/update_checkout_url.py
index 7420dab..a1fc982 100644
--- a/Allura/allura/scripts/update_checkout_url.py
+++ b/Allura/allura/scripts/update_checkout_url.py
@@ -34,6 +34,7 @@ log = logging.getLogger(__name__)
 
 
 class UpdateCheckoutUrl(ScriptTask):
+
     @classmethod
     def execute(cls, options):
         query = {'tool_name': {'$regex': '^svn$', '$options': 'i'},
@@ -41,7 +42,8 @@ class UpdateCheckoutUrl(ScriptTask):
         for chunk in utils.chunked_find(M.AppConfig, query):
             for config in chunk:
                 repo = Repository.query.get(app_config_id=config._id)
-                trunk_path = "file://{0}{1}/trunk".format(repo.fs_path, repo.name)
+                trunk_path = "file://{0}{1}/trunk".format(repo.fs_path,
+                                                          repo.name)
                 if svn_path_exists(trunk_path):
                     config.options['checkout_url'] = "trunk"
                     log.info("Update checkout_url for: %s", trunk_path)
@@ -49,4 +51,3 @@ class UpdateCheckoutUrl(ScriptTask):
 
 if __name__ == '__main__':
     UpdateCheckoutUrl.main()
-

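The query visible in the context above matches the tool name case-insensitively via $regex with the 'i' option rather than a literal comparison, and results are consumed through utils.chunked_find so large result sets arrive in batches. The idiom in isolation (the find call is sketched, not Allura's actual invocation):

    # matches 'svn', 'SVN', 'Svn', ... but nothing longer
    query = {'tool_name': {'$regex': '^svn$', '$options': 'i'}}
    # for chunk in utils.chunked_find(M.AppConfig, query):
    #     for config in chunk:
    #         ...
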
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/admin_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/admin_tasks.py b/Allura/allura/tasks/admin_tasks.py
index 44ed421..65f2775 100644
--- a/Allura/allura/tasks/admin_tasks.py
+++ b/Allura/allura/tasks/admin_tasks.py
@@ -35,5 +35,5 @@ install_app.__doc__ += '''
     Arguments::
 
         ''' + inspect.formatargspec(*inspect.getargspec(
-        M.Project.install_app
-    )).replace('self, ','')
+    M.Project.install_app
+)).replace('self, ', '')

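The re-indent above is cosmetic, but the construct is easy to misread: at import time the module appends install_app's signature to the task's docstring using the Python 2-era inspect.getargspec/formatargspec pair, stripping the bound 'self, ' prefix. A sketch with a stand-in function (the signature shown is illustrative, not M.Project.install_app's real one):

    import inspect

    def install_app(self, ep_name, mount_point=None, **kw):  # stand-in
        pass

    sig = inspect.formatargspec(
        *inspect.getargspec(install_app)).replace('self, ', '')
    print sig  # (ep_name, mount_point=None, **kw)
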
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/event_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/event_tasks.py b/Allura/allura/tasks/event_tasks.py
index 0f27266..8a2de35 100644
--- a/Allura/allura/tasks/event_tasks.py
+++ b/Allura/allura/tasks/event_tasks.py
@@ -20,6 +20,7 @@ import sys
 from allura.lib.decorators import task, event_handler
 from allura.lib.exceptions import CompoundError
 
+
 @task
 def event(event_type, *args, **kwargs):
     exceptions = []
@@ -33,4 +34,3 @@ def event(event_type, *args, **kwargs):
             raise exceptions[0][0], exceptions[0][1], exceptions[0][2]
         else:
             raise CompoundError(*exceptions)
-

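Only spacing changed here, but event() illustrates a fan-out pattern: call every registered handler, collect failures via sys.exc_info(), re-raise a single failure with its original traceback (Python 2's three-argument raise, visible above), and bundle multiple failures into a CompoundError. A self-contained sketch; the handler plumbing and the CompoundError stub are stand-ins:

    import sys

    class CompoundError(Exception):  # stand-in for allura.lib.exceptions
        pass

    def fire(handlers):
        exceptions = []
        for handler in handlers:
            try:
                handler()
            except:
                exceptions.append(sys.exc_info())
        if exceptions:
            if len(exceptions) == 1:
                # three-arg raise keeps the handler's original traceback
                raise exceptions[0][0], exceptions[0][1], exceptions[0][2]
            raise CompoundError(*exceptions)
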
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/export_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/export_tasks.py b/Allura/allura/tasks/export_tasks.py
index 3fd9499..cafb288 100644
--- a/Allura/allura/tasks/export_tasks.py
+++ b/Allura/allura/tasks/export_tasks.py
@@ -48,9 +48,11 @@ def bulk_export(tools, filename=None, send_email=True):
 
 
 class BulkExport(object):
+
     def process(self, project, tools, user, filename=None, send_email=True):
         export_filename = filename or project.bulk_export_filename()
-        export_path = self.get_export_path(project.bulk_export_path(), export_filename)
+        export_path = self.get_export_path(
+            project.bulk_export_path(), export_filename)
         if not os.path.exists(export_path):
             os.makedirs(export_path)
         apps = [project.app_instance(tool) for tool in tools]
@@ -58,7 +60,8 @@ class BulkExport(object):
         results = [self.export(export_path, app) for app in exportable]
         exported = self.filter_successful(results)
         if exported:
-            zipdir(export_path, os.path.join(os.path.dirname(export_path), export_filename))
+            zipdir(export_path,
+                   os.path.join(os.path.dirname(export_path), export_filename))
         shutil.rmtree(export_path)
 
         if not user:
@@ -67,13 +70,14 @@ class BulkExport(object):
         if not send_email:
             return
 
-        tmpl = g.jinja2_env.get_template('allura:templates/mail/bulk_export.html')
+        tmpl = g.jinja2_env.get_template(
+            'allura:templates/mail/bulk_export.html')
         instructions = tg.config.get('bulk_export_download_instructions', '')
         instructions = instructions.format(
-                project=project.shortname,
-                filename=export_filename,
-                c=c,
-            )
+            project=project.shortname,
+            filename=export_filename,
+            c=c,
+        )
         exported_names = [a.config.options.mount_point for a in exported]
         tmpl_context = {
             'instructions': instructions,
@@ -110,7 +114,8 @@ class BulkExport(object):
             with open(json_file, 'w') as f:
                 app.bulk_export(f)
         except Exception as e:
-            log.error('Error exporting: %s on %s', tool, app.project.shortname, exc_info=True)
+            log.error('Error exporting: %s on %s', tool,
+                      app.project.shortname, exc_info=True)
             return None
         else:
             return app

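The rewrapped zipdir(...) call packs the export directory into an archive beside it before the tree is removed. The real helper lives elsewhere in Allura; purely for orientation, a hedged sketch of what such a directory-zipping helper can look like (an assumption, not Allura's implementation):

    import os
    import zipfile

    def zipdir(dirname, zipname):
        # walk the tree, storing entries relative to the exported dir
        zf = zipfile.ZipFile(zipname, 'w', zipfile.ZIP_DEFLATED)
        try:
            for root, dirs, files in os.walk(dirname):
                for name in files:
                    path = os.path.join(root, name)
                    zf.write(path, os.path.relpath(path, dirname))
        finally:
            zf.close()
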
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/index_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/index_tasks.py b/Allura/allura/tasks/index_tasks.py
index 0833bbb..7b6b4c6 100644
--- a/Allura/allura/tasks/index_tasks.py
+++ b/Allura/allura/tasks/index_tasks.py
@@ -27,6 +27,7 @@ from allura.lib.solr import make_solr_from_config
 
 log = logging.getLogger(__name__)
 
+
 @task
 def add_artifacts(ref_ids, update_solr=True, update_refs=True, solr_hosts=None):
     '''
@@ -69,19 +70,23 @@ def add_artifacts(ref_ids, update_solr=True, update_refs=True, solr_hosts=None):
     if exceptions:
         raise CompoundError(*exceptions)
 
+
 @task
 def del_artifacts(ref_ids):
     from allura import model as M
-    if not ref_ids: return
+    if not ref_ids:
+        return
     solr_query = 'id:({0})'.format(' || '.join(ref_ids))
     g.solr.delete(q=solr_query)
-    M.ArtifactReference.query.remove(dict(_id={'$in':ref_ids}))
-    M.Shortlink.query.remove(dict(ref_id={'$in':ref_ids}))
+    M.ArtifactReference.query.remove(dict(_id={'$in': ref_ids}))
+    M.Shortlink.query.remove(dict(ref_id={'$in': ref_ids}))
+
 
 @task
 def commit():
     g.solr.commit()
 
+
 @contextmanager
 def _indexing_disabled(session):
     session.disable_artifact_index = session.skip_mod_date = True

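Besides spacing, del_artifacts shows the bulk-delete idiom: all ref ids are joined into one boolean OR query so Solr takes a single delete call, and the matching ArtifactReference and Shortlink docs go in single $in removes. The query construction in isolation (ids are stand-ins):

    ref_ids = ['ref-1', 'ref-2', 'ref-3']  # stand-in ids
    solr_query = 'id:({0})'.format(' || '.join(ref_ids))
    print solr_query  # id:(ref-1 || ref-2 || ref-3)
    # g.solr.delete(q=solr_query)  # one round-trip instead of three
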
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/mail_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/mail_tasks.py b/Allura/allura/tasks/mail_tasks.py
index 0b87024..31c16f0 100644
--- a/Allura/allura/tasks/mail_tasks.py
+++ b/Allura/allura/tasks/mail_tasks.py
@@ -30,9 +30,10 @@ log = logging.getLogger(__name__)
 
 smtp_client = mail_util.SMTPClient()
 
+
 @task
 def route_email(
-    peer, mailfrom, rcpttos, data):
+        peer, mailfrom, rcpttos, data):
     '''Route messages according to their destination:
 
     <topic>@<mount_point>.<subproj2>.<subproj1>.<project>.projects.sourceforge.net
@@ -40,7 +41,7 @@ def route_email(
     '''
     try:
         msg = mail_util.parse_message(data)
-    except: # pragma no cover
+    except:  # pragma no cover
         log.exception('Parse Error: (%r,%r,%r)', peer, mailfrom, rcpttos)
         return
     if mail_util.is_autoreply(msg):
@@ -49,18 +50,21 @@ def route_email(
     mail_user = mail_util.identify_sender(peer, mailfrom, msg['headers'], msg)
     with h.push_config(c, user=mail_user):
         log.info('Received email from %s', c.user.username)
-        # For each of the addrs, determine the project/app and route appropriately
+        # For each of the addrs, determine the project/app and route
+        # appropriately
         for addr in rcpttos:
             try:
                 userpart, project, app = mail_util.parse_address(addr)
                 with h.push_config(c, project=project, app=app):
                     if not app.has_access(c.user, userpart):
-                        log.info('Access denied for %s to mailbox %s', c.user, userpart)
+                        log.info('Access denied for %s to mailbox %s',
+                                 c.user, userpart)
                     else:
                         if msg['multipart']:
                             msg_hdrs = msg['headers']
                             for part in msg['parts']:
-                                if part.get('content_type', '').startswith('multipart/'): continue
+                                if part.get('content_type', '').startswith('multipart/'):
+                                    continue
                                 msg = dict(
                                     headers=dict(msg_hdrs, **part['headers']),
                                     message_id=part['message_id'],
@@ -77,6 +81,7 @@ def route_email(
             except:
                 log.exception('Error routing mail to %s', addr)
 
+
 @task
 def sendmail(fromaddr, destinations, text, reply_to, subject,
              message_id, in_reply_to=None, sender=None, references=None):
@@ -110,11 +115,13 @@ def sendmail(fromaddr, destinations, text, reply_to, subject,
             addr = user.email_address_header()
             if not addr and user.email_addresses:
                 addr = user.email_addresses[0]
-                log.warning('User %s has not set primary email address, using %s',
-                            user._id, addr)
+                log.warning(
+                    'User %s has not set primary email address, using %s',
+                    user._id, addr)
             if not addr:
-                log.error("User %s (%s) has not set any email address, can't deliver",
-                          user._id, user.username)
+                log.error(
+                    "User %s (%s) has not set any email address, can't deliver",
+                    user._id, user.username)
                 continue
             if user.get_pref('email_format') == 'plain':
                 addrs_plain.append(addr)
@@ -137,18 +144,19 @@ def sendmail(fromaddr, destinations, text, reply_to, subject,
         addrs_html, fromaddr, reply_to, subject, message_id,
         in_reply_to, html_msg, sender=sender, references=references)
 
+
 @task
 def sendsimplemail(
-    fromaddr,
-    toaddr,
-    text,
-    reply_to,
-    subject,
-    message_id,
-    in_reply_to=None,
-    sender=None,
-    references=None,
-    cc=None):
+        fromaddr,
+        toaddr,
+        text,
+        reply_to,
+        subject,
+        message_id,
+        in_reply_to=None,
+        sender=None,
+        references=None,
+        cc=None):
     from allura import model as M
     if fromaddr is None:
         fromaddr = u'noreply@in.sf.net'

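One behavioral detail worth noting amid the reflow: when a message is multipart, route_email() walks the parts and skips the multipart/* containers themselves, routing each leaf part with its headers merged over the envelope's. The filtering step in isolation (the part dicts are stand-ins for mail_util's parsed structure):

    parts = [
        {'content_type': 'multipart/alternative'},  # container: skipped
        {'content_type': 'text/plain'},
        {'content_type': 'text/html'},
    ]
    for part in parts:
        if part.get('content_type', '').startswith('multipart/'):
            continue
        print 'routing %s part' % part['content_type']
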
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/notification_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/notification_tasks.py b/Allura/allura/tasks/notification_tasks.py
index 888e0f2..a6b7564 100644
--- a/Allura/allura/tasks/notification_tasks.py
+++ b/Allura/allura/tasks/notification_tasks.py
@@ -17,6 +17,7 @@
 
 from allura.lib.decorators import task
 
+
 @task
 def notify(n_id, ref_id, topic):
     from allura import model as M

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tasks/repo_tasks.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tasks/repo_tasks.py b/Allura/allura/tasks/repo_tasks.py
index 978f027..df84337 100644
--- a/Allura/allura/tasks/repo_tasks.py
+++ b/Allura/allura/tasks/repo_tasks.py
@@ -24,6 +24,7 @@ from pylons import tmpl_context as c, app_globals as g
 from allura.lib.decorators import task
 from allura.lib.repository import RepositoryApp
 
+
 @task
 def init(**kwargs):
     from allura import model as M
@@ -33,6 +34,7 @@ def init(**kwargs):
         text='Repository %s/%s created' % (
             c.project.shortname, c.app.config.options.mount_point))
 
+
 @task
 def clone(cloned_from_path, cloned_from_name, cloned_from_url):
     from allura import model as M
@@ -46,7 +48,9 @@ def clone(cloned_from_path, cloned_from_name, cloned_from_url):
             text='Repository %s/%s created' % (
                 c.project.shortname, c.app.config.options.mount_point))
     except Exception, e:
-        g.post_event('repo_clone_task_failed', cloned_from_url, cloned_from_path, traceback.format_exc())
+        g.post_event('repo_clone_task_failed', cloned_from_url,
+                     cloned_from_path, traceback.format_exc())
+
 
 @task
 def reclone(*args, **kwargs):
@@ -58,14 +62,15 @@ def reclone(*args, **kwargs):
         repo.delete()
     ThreadLocalORMSession.flush_all()
     M.MergeRequest.query.remove(dict(
-            app_config_id=c.app.config._id))
+        app_config_id=c.app.config._id))
     clone(*args, **kwargs)
 
+
 @task
 def refresh(**kwargs):
     from allura import model as M
     log = logging.getLogger(__name__)
-    #don't create multiple refresh tasks
+    # don't create multiple refresh tasks
     q = {
         'task_name': 'allura.tasks.repo_tasks.refresh',
         'state': {'$in': ['busy', 'ready']},
@@ -73,16 +78,18 @@ def refresh(**kwargs):
         'context.project_id': c.project._id,
     }
     refresh_tasks_count = M.MonQTask.query.find(q).count()
-    if refresh_tasks_count <= 1: #only this task
+    if refresh_tasks_count <= 1:  # only this task
         c.app.repo.refresh()
-        #checking if we have new commits arrived
-        #during refresh and re-queue task if so
+        # checking if we have new commits arrived
+        # during refresh and re-queue task if so
         new_commit_ids = c.app.repo.unknown_commit_ids()
         if len(new_commit_ids) > 0:
             refresh.post()
             log.info('New refresh task is queued due to new commit(s).')
     else:
-        log.info('Refresh task for %s:%s skipped due to backlog', c.project.shortname, c.app.config.options.mount_point)
+        log.info('Refresh task for %s:%s skipped due to backlog',
+                 c.project.shortname, c.app.config.options.mount_point)
+
 
 @task
 def uninstall(**kwargs):
@@ -92,22 +99,25 @@ def uninstall(**kwargs):
         shutil.rmtree(repo.full_fs_path, ignore_errors=True)
         repo.delete()
     M.MergeRequest.query.remove(dict(
-            app_config_id=c.app.config._id))
+        app_config_id=c.app.config._id))
     super(RepositoryApp, c.app).uninstall(c.project)
     from ming.orm import ThreadLocalORMSession
     ThreadLocalORMSession.flush_all()
 
+
 @task
 def nop():
     log = logging.getLogger(__name__)
     log.info('nop')
 
+
 @task
 def reclone_repo(*args, **kwargs):
     from allura import model as M
     try:
         nbhd = M.Neighborhood.query.get(url_prefix='/%s/' % kwargs['prefix'])
-        c.project = M.Project.query.get(shortname=kwargs['shortname'], neighborhood_id=nbhd._id)
+        c.project = M.Project.query.get(
+            shortname=kwargs['shortname'], neighborhood_id=nbhd._id)
         c.app = c.project.app_instance(kwargs['mount_point'])
         source_url = c.app.config.options.get('init_from_url')
         source_path = c.app.config.options.get('init_from_path')
@@ -117,7 +127,9 @@ def reclone_repo(*args, **kwargs):
             text='Repository %s/%s created' % (
                 c.project.shortname, c.app.config.options.mount_point))
     except Exception, e:
-        g.post_event('repo_clone_task_failed', source_url, source_path, traceback.format_exc())
+        g.post_event('repo_clone_task_failed', source_url,
+                     source_path, traceback.format_exc())
+
 
 @task
 def tarball(revision, path):
@@ -126,13 +138,18 @@ def tarball(revision, path):
         repo = c.app.repo
         status = repo.get_tarball_status(revision, path)
         if status == 'complete':
-            log.info('Skipping snapshot for repository: %s:%s rev %s because it is already %s' %
-                     (c.project.shortname, c.app.config.options.mount_point, revision, status))
+            log.info(
+                'Skipping snapshot for repository: %s:%s rev %s because it is already %s' %
+                (c.project.shortname, c.app.config.options.mount_point, revision, status))
         else:
             try:
                 repo.tarball(revision, path)
             except:
-                log.error('Could not create snapshot for repository: %s:%s revision %s path %s' % (c.project.shortname, c.app.config.options.mount_point, revision, path), exc_info=True)
+                log.error(
+                    'Could not create snapshot for repository: %s:%s revision %s path %s' %
+                    (c.project.shortname, c.app.config.options.mount_point, revision, path), exc_info=True)
                 raise
     else:
-        log.warn('Skipped creation of snapshot: %s:%s because revision is not specified' % (c.project.shortname, c.app.config.options.mount_point))
+        log.warn(
+            'Skipped creation of snapshot: %s:%s because revision is not specified' %
+            (c.project.shortname, c.app.config.options.mount_point))

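Worth pausing on refresh() above: it is self-de-duplicating. Before doing work it counts 'busy'/'ready' tasks of the same name in the same project context and skips if another is already pending; after refreshing, it re-posts itself only if new commits arrived mid-run. The guard query in isolation, showing only the keys visible in the hunk (values are stand-ins):

    project_id = 'stand-in'  # c.project._id in the real task
    q = {
        'task_name': 'allura.tasks.repo_tasks.refresh',
        'state': {'$in': ['busy', 'ready']},
        'context.project_id': project_id,
    }
    # refresh_tasks_count = M.MonQTask.query.find(q).count()
    # if refresh_tasks_count <= 1:  # only this task itself matched
    #     c.app.repo.refresh()
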
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/__init__.py b/Allura/allura/tests/__init__.py
index c715a48..8047c6b 100644
--- a/Allura/allura/tests/__init__.py
+++ b/Allura/allura/tests/__init__.py
@@ -26,7 +26,9 @@ import alluratest.controller
 import socket
 socket.setdefaulttimeout(None)
 
+
 class TestController(alluratest.controller.TestController):
+
     """
     Base functional test case for the controllers.
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/decorators.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/decorators.py b/Allura/allura/tests/decorators.py
index 3e1c43d..521c76c 100644
--- a/Allura/allura/tests/decorators.py
+++ b/Allura/allura/tests/decorators.py
@@ -53,8 +53,8 @@ def NullContextManager():
 
 
 def with_tool(project_shortname, ep_name, mount_point=None, mount_label=None,
-        ordinal=None, post_install_hook=None, username='test-admin',
-        **override_options):
+              ordinal=None, post_install_hook=None, username='test-admin',
+              **override_options):
     def _with_tool(func):
         @wraps(func)
         def wrapped(*args, **kw):
@@ -62,7 +62,8 @@ def with_tool(project_shortname, ep_name, mount_point=None, mount_label=None,
             p = M.Project.query.get(shortname=project_shortname)
             c.project = p
             if mount_point and not p.app_instance(mount_point):
-                c.app = p.install_app(ep_name, mount_point, mount_label, ordinal, **override_options)
+                c.app = p.install_app(
+                    ep_name, mount_point, mount_label, ordinal, **override_options)
                 if post_install_hook:
                     post_install_hook(c.app)
 
@@ -87,7 +88,9 @@ with_tracker = with_tool('test', 'Tickets', 'bugs')
 with_wiki = with_tool('test', 'Wiki', 'wiki')
 with_url = with_tool('test', 'ShortUrl', 'url')
 
+
 class raises(object):
+
     '''
     Test helper in the form of a context manager, to assert that something raises an exception.
     After completion, the 'exc' attribute can be used to do further inspection of the exception
@@ -124,6 +127,7 @@ def without_module(*module_names):
 
 
 class patch_middleware_config(object):
+
     '''
     Context manager that patches the configuration used during middleware
     setup for Allura

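with_tool (rewrapped above) is a three-layer decorator factory: the outer call captures the tool configuration, _with_tool receives the test function, and functools.wraps keeps the test's name visible to the runner; with_tracker, with_wiki and with_url are then one-line specializations. A minimal sketch of the shape with a hypothetical fixture decorator:

    from functools import wraps

    def with_fixture(name, **options):
        def _with_fixture(func):
            @wraps(func)  # preserve func's name for test discovery
            def wrapped(*args, **kw):
                # stand-in for the real setup (installing a tool, etc.)
                print 'installing fixture %s with %r' % (name, options)
                return func(*args, **kw)
            return wrapped
        return _with_fixture

    with_widget = with_fixture('widget')  # cf. with_wiki, with_tracker
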
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/functional/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/functional/__init__.py b/Allura/allura/tests/functional/__init__.py
index fde2aa9..40a4394 100644
--- a/Allura/allura/tests/functional/__init__.py
+++ b/Allura/allura/tests/functional/__init__.py
@@ -17,4 +17,4 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-"""Functional test suite for the controllers of the application."""
\ No newline at end of file
+"""Functional test suite for the controllers of the application."""