Posted to commits@allura.apache.org by jo...@apache.org on 2014/01/10 22:23:18 UTC

[22/36] PEP8 cleanup
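
The hunks below are mechanical style fixes with no behavior changes. They are
consistent with autopep8 output (an assumption; the commit message does not
name a tool): whitespace around operators (pycodestyle E225), two blank lines
between top-level definitions (E302), statements moved off of if/else lines
(E701), two spaces before inline comments (E261), and re-indented continuation
lines (E12x). A minimal before/after sketch of the pattern, using a
hypothetical helper rather than code from the patch:

    # Before: the styles this commit removes
    QSIZE=100
    def log_progress(i, oid):
        if (i+1) % QSIZE == 0: print('Refresh %d: %s' % (i+1, oid))

    # After: the same code as reformatted by this commit's rules
    QSIZE = 100


    def log_progress(i, oid):
        if (i + 1) % QSIZE == 0:
            print('Refresh %d: %s' % (i + 1, oid))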

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/repo_refresh.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/repo_refresh.py b/Allura/allura/model/repo_refresh.py
index 2a8af51..732d6a5 100644
--- a/Allura/allura/model/repo_refresh.py
+++ b/Allura/allura/model/repo_refresh.py
@@ -39,7 +39,8 @@ from allura.model.auth import User
 
 log = logging.getLogger(__name__)
 
-QSIZE=100
+QSIZE = 100
+
 
 def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
     all_commit_ids = commit_ids = list(repo.all_commit_ids())
@@ -63,8 +64,8 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
     seen = set()
     for i, oid in enumerate(commit_ids):
         repo.refresh_commit_info(oid, seen, not all_commits)
-        if (i+1) % 100 == 0:
-            log.info('Refresh commit info %d: %s', (i+1), oid)
+        if (i + 1) % 100 == 0:
+            log.info('Refresh commit info %d: %s', (i + 1), oid)
 
     refresh_commit_repos(all_commit_ids, repo)
 
@@ -72,15 +73,17 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
     for i, oid in enumerate(commit_ids):
         ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
         refresh_children(ci)
-        if (i+1) % 100 == 0:
-            log.info('Refresh child info %d for parents of %s', (i+1), ci._id)
+        if (i + 1) % 100 == 0:
+            log.info('Refresh child info %d for parents of %s',
+                     (i + 1), ci._id)
 
     if repo._refresh_precompute:
         # Refresh commit runs
         commit_run_ids = commit_ids
         # Check if the CommitRuns for the repo are in a good state by checking for
         # a CommitRunDoc that contains the last known commit. If there isn't one,
-        # the CommitRuns for this repo are in a bad state - rebuild them entirely.
+        # the CommitRuns for this repo are in a bad state - rebuild them
+        # entirely.
         if commit_run_ids != all_commit_ids:
             last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
             log.info('Last known commit id: %s', last_commit)
@@ -101,8 +104,8 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
         for i, oid in enumerate(commit_ids):
             ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
             cache = refresh_commit_trees(ci, cache)
-            if (i+1) % 100 == 0:
-                log.info('Refresh commit trees %d: %s', (i+1), ci._id)
+            if (i + 1) % 100 == 0:
+                log.info('Refresh commit trees %d: %s', (i + 1), ci._id)
 
     # Compute diffs
     cache = {}
@@ -115,8 +118,8 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
             ci = mapper(Commit).create(cid, dict(instrument=False))
             ci.set_context(repo)
             compute_diffs(repo._id, cache, ci)
-            if (i+1) % 100 == 0:
-                log.info('Compute diffs %d: %s', (i+1), ci._id)
+            if (i + 1) % 100 == 0:
+                log.info('Compute diffs %d: %s', (i + 1), ci._id)
 
     if repo._refresh_precompute:
         model_cache = ModelCache()
@@ -126,8 +129,8 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
             ci.set_context(repo)
             compute_lcds(ci, model_cache, lcid_cache)
             ThreadLocalORMSession.flush_all()
-            if (i+1) % 100 == 0:
-                log.info('Compute last commit info %d: %s', (i+1), ci._id)
+            if (i + 1) % 100 == 0:
+                log.info('Compute last commit info %d: %s', (i + 1), ci._id)
 
     if not all_commits and not new_clone:
         for commit in commit_ids:
@@ -138,7 +141,7 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
             if user is not None:
                 g.statsUpdater.newCommit(new, repo.app_config.project, user)
                 g.director.create_activity(user, 'committed', new,
-                        related_nodes=[repo.app_config.project])
+                                           related_nodes=[repo.app_config.project])
 
     log.info('Refresh complete for %s', repo.full_fs_path)
     g.post_event('repo_refreshed', len(commit_ids), all_commits, new_clone)
@@ -147,56 +150,60 @@ def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
     if notify:
         send_notifications(repo, commit_ids)
 
+
 def refresh_commit_trees(ci, cache):
     '''Refresh the list of trees included within a commit'''
-    if ci.tree_id is None: return cache
+    if ci.tree_id is None:
+        return cache
     trees_doc = TreesDoc(dict(
-            _id=ci._id,
-            tree_ids = list(trees(ci.tree_id, cache))))
+        _id=ci._id,
+        tree_ids=list(trees(ci.tree_id, cache))))
     trees_doc.m.save(safe=False)
     new_cache = dict(
         (oid, cache[oid])
         for oid in trees_doc.tree_ids)
     return new_cache
 
+
 def refresh_commit_repos(all_commit_ids, repo):
     '''Refresh the list of repositories within which a set of commits are
     contained'''
     for oids in utils.chunked_iter(all_commit_ids, QSIZE):
         for ci in CommitDoc.m.find(dict(
-                _id={'$in':list(oids)},
+                _id={'$in': list(oids)},
                 repo_ids={'$ne': repo._id})):
             oid = ci._id
             ci.repo_ids.append(repo._id)
             index_id = 'allura.model.repo.Commit#' + oid
             ref = ArtifactReferenceDoc(dict(
-                    _id=index_id,
-                    artifact_reference=dict(
-                        cls=bson.Binary(dumps(Commit)),
-                        project_id=repo.app.config.project_id,
-                    app_config_id=repo.app.config._id,
-                        artifact_id=oid),
-                    references=[]))
-            link0 = ShortlinkDoc(dict(
-                    _id=bson.ObjectId(),
-                    ref_id=index_id,
+                _id=index_id,
+                artifact_reference=dict(
+                    cls=bson.Binary(dumps(Commit)),
                     project_id=repo.app.config.project_id,
                     app_config_id=repo.app.config._id,
-                    link=repo.shorthand_for_commit(oid)[1:-1],
-                    url=repo.url_for_commit(oid)))
+                    artifact_id=oid),
+                references=[]))
+            link0 = ShortlinkDoc(dict(
+                _id=bson.ObjectId(),
+                ref_id=index_id,
+                project_id=repo.app.config.project_id,
+                app_config_id=repo.app.config._id,
+                link=repo.shorthand_for_commit(oid)[1:-1],
+                url=repo.url_for_commit(oid)))
             # Always create a link for the full commit ID
             link1 = ShortlinkDoc(dict(
-                    _id=bson.ObjectId(),
-                    ref_id=index_id,
-                    project_id=repo.app.config.project_id,
-                    app_config_id=repo.app.config._id,
-                    link=oid,
-                    url=repo.url_for_commit(oid)))
+                _id=bson.ObjectId(),
+                ref_id=index_id,
+                project_id=repo.app.config.project_id,
+                app_config_id=repo.app.config._id,
+                link=oid,
+                url=repo.url_for_commit(oid)))
             ci.m.save(safe=False, validate=False)
             ref.m.save(safe=False, validate=False)
             link0.m.save(safe=False, validate=False)
             link1.m.save(safe=False, validate=False)
 
+
 def refresh_children(ci):
     '''Refresh the list of children of the given commit'''
     CommitDoc.m.update_partial(
@@ -204,12 +211,14 @@ def refresh_children(ci):
         {'$addToSet': dict(child_ids=ci._id)},
         multi=True)
 
+
 class CommitRunBuilder(object):
+
     '''Class used to build up linear runs of single-parent commits'''
 
     def __init__(self, commit_ids):
         self.commit_ids = commit_ids
-        self.run_index = {} # by commit ID
+        self.run_index = {}  # by commit ID
         self.runs = {}          # by run ID
         self.reasons = {}    # reasons to stop merging runs
 
@@ -217,14 +226,15 @@ class CommitRunBuilder(object):
         '''Build up the runs'''
         for oids in utils.chunked_iter(self.commit_ids, QSIZE):
             oids = list(oids)
-            for ci in CommitDoc.m.find(dict(_id={'$in':oids})):
-                if ci._id in self.run_index: continue
+            for ci in CommitDoc.m.find(dict(_id={'$in': oids})):
+                if ci._id in self.run_index:
+                    continue
                 self.run_index[ci._id] = ci._id
                 self.runs[ci._id] = CommitRunDoc(dict(
-                        _id=ci._id,
-                        parent_commit_ids=ci.parent_ids,
-                        commit_ids=[ci._id],
-                        commit_times=[ci.authored['date']]))
+                    _id=ci._id,
+                    parent_commit_ids=ci.parent_ids,
+                    commit_ids=[ci._id],
+                    commit_times=[ci.authored['date']]))
             self.merge_runs()
         log.info('%d runs', len(self.runs))
         for rid, run in sorted(self.runs.items()):
@@ -246,11 +256,12 @@ class CommitRunBuilder(object):
         runs = runs.values()
         while runs:
             run = runs.pop()
-            if run._id in seen_run_ids: continue
+            if run._id in seen_run_ids:
+                continue
             seen_run_ids.add(run._id)
             yield run
             for run in CommitRunDoc.m.find(
-                dict(commit_ids={'$in':run.parent_commit_ids})):
+                    dict(commit_ids={'$in': run.parent_commit_ids})):
                 runs.append(run)
 
     def cleanup(self):
@@ -260,9 +271,11 @@ class CommitRunBuilder(object):
             for run in self._all_runs())
         for rid, run in runs.items():
             p_cis = run['parent_commit_ids']
-            if len(p_cis) != 1: continue
+            if len(p_cis) != 1:
+                continue
             parent_run = runs.get(p_cis[0], None)
-            if parent_run is None: continue
+            if parent_run is None:
+                continue
             run['commit_ids'] += parent_run['commit_ids']
             run['commit_times'] += parent_run['commit_times']
             run['parent_commit_ids'] = parent_run['parent_commit_ids']
@@ -272,13 +285,14 @@ class CommitRunBuilder(object):
         for run1 in runs.values():
             # if run1 is a subset of another run, delete it
             if CommitRunDoc.m.find(dict(commit_ids={'$all': run1.commit_ids},
-                    _id={'$ne': run1._id})).count():
+                                        _id={'$ne': run1._id})).count():
                 log.info('... delete %r (subset of another run)', run1)
                 run1.m.delete()
                 continue
             for run2 in CommitRunDoc.m.find(dict(
                     commit_ids=run1.commit_ids[0])):
-                if run1._id == run2._id: continue
+                if run1._id == run2._id:
+                    continue
                 log.info('... delete %r (part of %r)', run2, run1)
                 run2.m.delete()
 
@@ -287,7 +301,8 @@ class CommitRunBuilder(object):
         while True:
             for run_id, run in self.runs.iteritems():
                 if len(run.parent_commit_ids) != 1:
-                    self.reasons[run_id] = '%d parents' % len(run.parent_commit_ids)
+                    self.reasons[run_id] = '%d parents' % len(
+                        run.parent_commit_ids)
                     continue
                 p_oid = run.parent_commit_ids[0]
                 p_run_id = self.run_index.get(p_oid)
@@ -299,7 +314,8 @@ class CommitRunBuilder(object):
                     self.reasons[run_id] = 'parent run not found'
                     continue
                 if p_run.commit_ids[0] != p_oid:
-                    self.reasons[run_id] = 'parent does not start with parent commit'
+                    self.reasons[
+                        run_id] = 'parent does not start with parent commit'
                     continue
                 run.commit_ids += p_run.commit_ids
                 run.commit_times += p_run.commit_times
@@ -311,44 +327,48 @@ class CommitRunBuilder(object):
                 break
             del self.runs[p_run_id]
 
+
 def trees(id, cache):
     '''Recursively generate the list of trees contained within a given tree ID'''
     yield id
     entries = cache.get(id, None)
     if entries is None:
         t = TreeDoc.m.get(_id=id)
-        entries = [ o.id for o in t.tree_ids ]
+        entries = [o.id for o in t.tree_ids]
         cache[id] = entries
     for i in entries:
         for x in trees(i, cache):
             yield x
 
+
 def unknown_commit_ids(all_commit_ids):
     '''filter out all commit ids that have already been cached'''
     result = []
     for chunk in utils.chunked_iter(all_commit_ids, QSIZE):
         chunk = list(chunk)
-        q = CommitDoc.m.find(dict(_id={'$in':chunk}))
+        q = CommitDoc.m.find(dict(_id={'$in': chunk}))
         known_commit_ids = set(ci._id for ci in q)
-        result += [ oid for oid in chunk if oid not in known_commit_ids ]
+        result += [oid for oid in chunk if oid not in known_commit_ids]
     return result
 
+
 def compute_diffs(repo_id, tree_cache, rhs_ci):
     '''compute simple differences between a commit and its first parent'''
-    if rhs_ci.tree_id is None: return tree_cache
+    if rhs_ci.tree_id is None:
+        return tree_cache
 
     def _update_cache(lhs_tree_ids, rhs_tree_ids):
         # crazy cache logic that I'm not certain I understand
         new_tree_ids = [
             tid for tid in chain(lhs_tree_ids, rhs_tree_ids)
-            if tid not in tree_cache ]
+            if tid not in tree_cache]
         tree_index = dict(
-            (t._id, t) for t in TreeDoc.m.find(dict(_id={'$in': new_tree_ids}),validate=False))
+            (t._id, t) for t in TreeDoc.m.find(dict(_id={'$in': new_tree_ids}), validate=False))
         tree_index.update(tree_cache)
         rhs_tree_ids_set = set(rhs_tree_ids)
         tree_cache.clear()
         tree_cache.update(
-            (id, t) for id,t in tree_index.iteritems() if id in rhs_tree_ids_set)
+            (id, t) for id, t in tree_index.iteritems() if id in rhs_tree_ids_set)
         return tree_index
 
     empty_tree = Object(_id=None, tree_ids=[], blob_ids=[], other_ids=[])
@@ -356,20 +376,24 @@ def compute_diffs(repo_id, tree_cache, rhs_ci):
     differences = []
     rhs_treesdoc = TreesDoc.m.get(_id=rhs_ci._id)
     if not rhs_treesdoc:
-        # FIXME: These sometimes don't exist for unknown reasons; they should be auto-gen'ed
+        # FIXME: These sometimes don't exist for unknown reasons; they should
+        # be auto-gen'ed
         log.error('Missing TreesDoc: %s', rhs_ci)
         return tree_cache
     for lhs_cid in rhs_ci.parent_ids:
         lhs_ci = CommitDoc.m.get(_id=lhs_cid)
         if lhs_ci is None:
-            log.error('Commit ID referenced as parent but not found: %s parent of %s', lhs_cid, rhs_ci)
+            log.error(
+                'Commit ID referenced as parent but not found: %s parent of %s', lhs_cid, rhs_ci)
             continue
         lhs_treesdoc = TreesDoc.m.get(_id=lhs_cid)
         if not lhs_treesdoc:
-            # FIXME: These sometimes don't exist for unknown reasons; they should be auto-gen'ed
+            # FIXME: These sometimes don't exist for unknown reasons; they
+            # should be auto-gen'ed
             log.error('Missing TreesDoc: %s', rhs_ci)
             continue
-        tree_index = _update_cache(lhs_treesdoc.tree_ids, rhs_treesdoc.tree_ids)
+        tree_index = _update_cache(
+            lhs_treesdoc.tree_ids, rhs_treesdoc.tree_ids)
         rhs_tree = tree_index[rhs_ci.tree_id]
         lhs_tree = tree_index.get(lhs_ci.tree_id, empty_tree)
         for name, lhs_id, rhs_id in _diff_trees(lhs_tree, rhs_tree, tree_index):
@@ -384,11 +408,12 @@ def compute_diffs(repo_id, tree_cache, rhs_ci):
                 dict(name=name, lhs_id=lhs_id, rhs_id=rhs_id))
     # Build the diffinfo
     di = DiffInfoDoc(dict(
-            _id=rhs_ci._id,
-            differences=differences))
+        _id=rhs_ci._id,
+        differences=differences))
     di.m.save()
     return tree_cache
 
+
 def send_notifications(repo, commit_ids):
     '''Create appropriate notification and feed objects for a refresh'''
     from allura.model import Feed, Notification
@@ -398,7 +423,7 @@ def send_notifications(repo, commit_ids):
         chunk = list(oids)
         index = dict(
             (doc._id, doc)
-            for doc in Commit.query.find(dict(_id={'$in':chunk})))
+            for doc in Commit.query.find(dict(_id={'$in': chunk})))
         for oid in chunk:
             ci = index[oid]
             href = repo.url_for_commit(oid)
@@ -414,13 +439,13 @@ def send_notifications(repo, commit_ids):
                 unique_id=href)
             branches = repo.symbolics_for_commit(ci)[0]
             commit_msgs.append('%s: %s by %s %s%s' % (
-                    ",".join(b for b in branches),
-                    summary, ci.authored.name, base_url, ci.url()))
+                ",".join(b for b in branches),
+                summary, ci.authored.name, base_url, ci.url()))
     if commit_msgs:
         if len(commit_msgs) > 1:
             subject = '%d new commits to %s %s' % (
                 len(commit_msgs), repo.app.project.name, repo.app.config.options.mount_label)
-            text='\n\n'.join(commit_msgs)
+            text = '\n\n'.join(commit_msgs)
         else:
             subject = '{0} - {1}: {2}'.format(
                 repo.shorthand_for_commit(ci._id),
@@ -428,10 +453,10 @@ def send_notifications(repo, commit_ids):
                 summary)
             branches = repo.symbolics_for_commit(ci)[0]
             text_branches = ('%s: ' % ",".join(b for b in branches)
-                    if branches else '')
+                             if branches else '')
             text = "%s%s %s%s" % (text_branches,
-                               ci.message,
-                               base_url, ci.url())
+                                  ci.message,
+                                  base_url, ci.url())
 
         Notification.post(
             artifact=repo,
@@ -448,24 +473,29 @@ def _title(message):
 
 
 def _summarize(message):
-    if not message: return ''
+    if not message:
+        return ''
     summary = []
     for line in message.splitlines():
         line = line.rstrip()
-        if line: summary.append(line)
-        else: break
+        if line:
+            summary.append(line)
+        else:
+            break
     return ' '.join(summary)
 
+
 def _diff_trees(lhs, rhs, index, *path):
     def _fq(name):
         return '/'.join(reversed(
-                (name,) + path))
+            (name,) + path))
     # Diff the trees (and keep deterministic order)
     rhs_tree_ids = OrderedDict(
         (o.name, o.id)
         for o in rhs.tree_ids)
     for o in lhs.tree_ids:
-        rhs_id = rhs_tree_ids.pop(o.name, None)  # remove so won't be picked up as added, below
+        # remove so won't be picked up as added, below
+        rhs_id = rhs_tree_ids.pop(o.name, None)
         if rhs_id == o.id:  # no change
             continue
         elif rhs_id is None:  # removed
@@ -487,7 +517,7 @@ def _diff_trees(lhs, rhs, index, *path):
     for o in lhs.blob_ids:
         rhs_id = rhs_blob_ids.pop(o.name, None)
         if rhs_id == o.id:
-            continue # no change
+            continue  # no change
         elif rhs_id is None:
             yield (_fq(o.name), o.id, None)
         else:
@@ -495,11 +525,13 @@ def _diff_trees(lhs, rhs, index, *path):
     for name, id in rhs_blob_ids.items():
         yield (_fq(name), None, id)
 
+
 def get_commit_info(commit):
     if not isinstance(commit, Commit):
         commit = mapper(Commit).create(commit, dict(instrument=False))
     sess = session(commit)
-    if sess: sess.expunge(commit)
+    if sess:
+        sess.expunge(commit)
     return dict(
         id=commit._id,
         author=commit.authored.name,
@@ -508,7 +540,8 @@ def get_commit_info(commit):
         author_url=commit.author_url,
         shortlink=commit.shorthand_id(),
         summary=commit.summary
-        )
+    )
+
 
 def last_known_commit_id(all_commit_ids, new_commit_ids):
     """
@@ -520,8 +553,10 @@ def last_known_commit_id(all_commit_ids, new_commit_ids):
         new_commit_ids: Commit ids that are not yet cached in mongo, sorted
                         oldest to newest.
     """
-    if not all_commit_ids: return None
-    if not new_commit_ids: return all_commit_ids[-1]
+    if not all_commit_ids:
+        return None
+    if not new_commit_ids:
+        return all_commit_ids[-1]
     return all_commit_ids[all_commit_ids.index(new_commit_ids[0]) - 1]
 
 
@@ -540,6 +575,7 @@ def compute_lcds(commit, model_cache, lcid_cache):
         for changed_path in tree.commit.changed_paths:
             lcid_cache[changed_path] = tree.commit._id
 
+
 def _compute_lcds(tree, cache):
     path = tree.path().strip('/')
     if path not in tree.commit.changed_paths:
@@ -550,6 +586,7 @@ def _compute_lcds(tree, cache):
         sub_tree = _pull_tree(cache, x.id, tree, x.name)
         _compute_lcds(sub_tree, cache)
 
+
 def _pull_tree(cache, tree_id, *context):
     '''
     Since the Tree instances stick around in our cache,
@@ -559,15 +596,16 @@ def _pull_tree(cache, tree_id, *context):
     '''
     cache_tree = cache.get(Tree, dict(_id=tree_id))
     new_tree = Tree(
-            _id=cache_tree._id,
-            tree_ids=cache_tree.tree_ids,
-            blob_ids=cache_tree.blob_ids,
-            other_ids=cache_tree.other_ids,
-        )
+        _id=cache_tree._id,
+        tree_ids=cache_tree.tree_ids,
+        blob_ids=cache_tree.blob_ids,
+        other_ids=cache_tree.other_ids,
+    )
     session(new_tree).expunge(new_tree)
     new_tree.set_context(*context)
     return new_tree
 
+
 def _update_tree_cache(tree_ids, cache):
     current_ids = set(tree_ids)
     cached_ids = set(cache.instance_ids(Tree))
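
Most of the repo_refresh.py churn above is continuation-line re-indentation:
multi-line dict(...) literals move from an extra-deep indent to a standard
four-space hanging indent, and wrapped log.info() arguments line up under the
opening parenthesis. A short self-contained sketch of the two PEP8-accepted
forms (illustrative names, not from the patch):

    import logging

    log = logging.getLogger(__name__)


    def save_trees_doc(ci_id, tree_ids):
        # Hanging indent: nothing after the opening "(", continuation
        # lines indented one level -- the form this commit normalizes to.
        trees_doc = dict(
            _id=ci_id,
            tree_ids=list(tree_ids))
        # Aligned continuation: wrapped arguments sit under the opening
        # delimiter, as in the reflowed log.info() calls above.
        log.info('Saved trees doc %s with %d trees',
                 ci_id, len(trees_doc['tree_ids']))
        return trees_doc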

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/repository.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/repository.py b/Allura/allura/model/repository.py
index 950d3bc..aaa97cc 100644
--- a/Allura/allura/model/repository.py
+++ b/Allura/allura/model/repository.py
@@ -65,26 +65,28 @@ config = utils.ConfigProxy(
     common_prefix='forgemail.url')
 
 README_RE = re.compile('^README(\.[^.]*)?$', re.IGNORECASE)
-VIEWABLE_EXTENSIONS = ['.php','.py','.js','.java','.html','.htm','.yaml','.sh',
-    '.rb','.phtml','.txt','.bat','.ps1','.xhtml','.css','.cfm','.jsp','.jspx',
-    '.pl','.php4','.php3','.rhtml','.svg','.markdown','.json','.ini','.tcl','.vbs','.xsl']
+VIEWABLE_EXTENSIONS = [
+    '.php', '.py', '.js', '.java', '.html', '.htm', '.yaml', '.sh',
+    '.rb', '.phtml', '.txt', '.bat', '.ps1', '.xhtml', '.css', '.cfm', '.jsp', '.jspx',
+    '.pl', '.php4', '.php3', '.rhtml', '.svg', '.markdown', '.json', '.ini', '.tcl', '.vbs', '.xsl']
+
 
 class RepositoryImplementation(object):
 
     # Repository-specific code
-    def init(self): # pragma no cover
+    def init(self):  # pragma no cover
         raise NotImplementedError, 'init'
 
-    def clone_from(self, source_url): # pragma no cover
+    def clone_from(self, source_url):  # pragma no cover
         raise NotImplementedError, 'clone_from'
 
-    def commit(self, revision): # pragma no cover
+    def commit(self, revision):  # pragma no cover
         raise NotImplementedError, 'commit'
 
-    def all_commit_ids(self): # pragma no cover
+    def all_commit_ids(self):  # pragma no cover
         raise NotImplementedError, 'all_commit_ids'
 
-    def new_commits(self, all_commits=False): # pragma no cover
+    def new_commits(self, all_commits=False):  # pragma no cover
         '''Return a list of native commits in topological order (heads first).
 
         "commit" is a repo-native object, NOT a Commit object.
@@ -92,21 +94,22 @@ class RepositoryImplementation(object):
         '''
         raise NotImplementedError, 'new_commits'
 
-    def commit_parents(self, commit): # pragma no cover
+    def commit_parents(self, commit):  # pragma no cover
         '''Return a list of native commits for the parents of the given (native)
         commit'''
         raise NotImplementedError, 'commit_parents'
 
-    def refresh_commit_info(self, oid, lazy=True): # pragma no cover
+    def refresh_commit_info(self, oid, lazy=True):  # pragma no cover
         '''Refresh the data in the commit with id oid'''
         raise NotImplementedError, 'refresh_commit_info'
 
-    def _setup_hooks(self, source_path=None): # pragma no cover
+    def _setup_hooks(self, source_path=None):  # pragma no cover
         '''Install a hook in the repository that will ping the refresh url for
         the repo.  Optionally provide a path from which to copy existing hooks.'''
         raise NotImplementedError, '_setup_hooks'
 
-    def log(self, revs=None, path=None, exclude=None, id_only=True, **kw): # pragma no cover
+    # pragma no cover
+    def log(self, revs=None, path=None, exclude=None, id_only=True, **kw):
         """
         Returns a generator that returns information about commits reachable
         by revs.
@@ -128,11 +131,11 @@ class RepositoryImplementation(object):
         """
         raise NotImplementedError, 'log'
 
-    def compute_tree_new(self, commit, path='/'): # pragma no cover
+    def compute_tree_new(self, commit, path='/'):  # pragma no cover
         '''Used in hg and svn to compute a git-like-tree lazily with the new models'''
         raise NotImplementedError, 'compute_tree'
 
-    def open_blob(self, blob): # pragma no cover
+    def open_blob(self, blob):  # pragma no cover
         '''Return a file-like object that contains the contents of the blob'''
         raise NotImplementedError, 'open_blob'
 
@@ -168,7 +171,8 @@ class RepositoryImplementation(object):
             object_id = commit._id
 
         if '/' in object_id:
-            object_id = os.path.join(object_id, self._repo.app.END_OF_REF_ESCAPE)
+            object_id = os.path.join(
+                object_id, self._repo.app.END_OF_REF_ESCAPE)
 
         return os.path.join(self._repo.url(), url_type, object_id) + '/'
 
@@ -178,7 +182,8 @@ class RepositoryImplementation(object):
         If create_repo_dir is True, also ensure that the directory
         of the repo itself exists.
         '''
-        if not self._repo.fs_path.endswith('/'): self._repo.fs_path += '/'
+        if not self._repo.fs_path.endswith('/'):
+            self._repo.fs_path += '/'
         fullname = self._repo.fs_path + self._repo.name
         # make the base dir for repo, regardless
         if not os.path.exists(self._repo.fs_path):
@@ -188,10 +193,11 @@ class RepositoryImplementation(object):
         return fullname
 
     def _setup_special_files(self, source_path=None):
-        magic_file = os.path.join(self._repo.fs_path, self._repo.name, '.SOURCEFORGE-REPOSITORY')
+        magic_file = os.path.join(
+            self._repo.fs_path, self._repo.name, '.SOURCEFORGE-REPOSITORY')
         with open(magic_file, 'w') as f:
             f.write(self._repo.repo_id)
-        os.chmod(magic_file, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
+        os.chmod(magic_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
         self._setup_hooks(source_path)
 
     @property
@@ -232,17 +238,20 @@ class RepositoryImplementation(object):
         lcd_chunk_size = asint(tg.config.get('lcd_thread_chunk_size', 10))
         num_threads = 0
         for s in range(0, len(paths), lcd_chunk_size):
-            chunks.put(paths[s:s+lcd_chunk_size])
+            chunks.put(paths[s:s + lcd_chunk_size])
             num_threads += 1
+
         def get_ids():
             paths = set(chunks.get())
             try:
                 commit_id = commit._id
                 while paths and commit_id:
                     if time() - start_time >= timeout:
-                        log.error('last_commit_ids timeout for %s on %s', commit._id, ', '.join(paths))
+                        log.error('last_commit_ids timeout for %s on %s',
+                                  commit._id, ', '.join(paths))
                         break
-                    commit_id, changes = self._get_last_commit(commit._id, paths)
+                    commit_id, changes = self._get_last_commit(
+                        commit._id, paths)
                     if commit_id is None:
                         break
                     changed = prefix_paths_union(paths, changes)
@@ -288,27 +297,29 @@ class RepositoryImplementation(object):
         """
         raise NotImplemented('get_changes')
 
+
 class Repository(Artifact, ActivityObject):
-    BATCH_SIZE=100
+    BATCH_SIZE = 100
+
     class __mongometa__:
-        name='generic-repository'
+        name = 'generic-repository'
         indexes = ['upstream_repo.name']
     _impl = None
-    repo_id='repo'
-    type_s='Repository'
+    repo_id = 'repo'
+    type_s = 'Repository'
     _refresh_precompute = True
 
-    name=FieldProperty(str)
-    tool=FieldProperty(str)
-    fs_path=FieldProperty(str)
-    url_path=FieldProperty(str)
-    status=FieldProperty(str)
-    email_address=''
-    additional_viewable_extensions=FieldProperty(str)
+    name = FieldProperty(str)
+    tool = FieldProperty(str)
+    fs_path = FieldProperty(str)
+    url_path = FieldProperty(str)
+    status = FieldProperty(str)
+    email_address = ''
+    additional_viewable_extensions = FieldProperty(str)
     heads = FieldProperty(S.Deprecated)
     branches = FieldProperty(S.Deprecated)
     repo_tags = FieldProperty(S.Deprecated)
-    upstream_repo = FieldProperty(dict(name=str,url=str))
+    upstream_repo = FieldProperty(dict(name=str, url=str))
     default_branch_name = FieldProperty(str)
 
     def __init__(self, **kw):
@@ -358,7 +369,8 @@ class Repository(Artifact, ActivityObject):
         return urljoin(tg.config.get('scm.repos.tarball.url_prefix', '/'), r)
 
     def get_tarball_status(self, revision, path=None):
-        pathname = os.path.join(self.tarball_path, self.tarball_filename(revision, path))
+        pathname = os.path.join(
+            self.tarball_path, self.tarball_filename(revision, path))
         filename = '%s%s' % (pathname, '.zip')
         if os.path.isfile(filename):
             return 'complete'
@@ -368,12 +380,11 @@ class Repository(Artifact, ActivityObject):
             'task_name': 'allura.tasks.repo_tasks.tarball',
             'args': [revision, path or ''],
             'state': {'$in': ['busy', 'ready']},
-            })
+        })
 
         return task.state if task else None
 
-
-    def __repr__(self): # pragma no cover
+    def __repr__(self):  # pragma no cover
         return '<%s %s>' % (
             self.__class__.__name__,
             self.full_fs_path)
@@ -381,32 +392,46 @@ class Repository(Artifact, ActivityObject):
     # Proxy to _impl
     def init(self):
         return self._impl.init()
+
     def commit(self, rev):
         return self._impl.commit(rev)
+
     def all_commit_ids(self):
         return self._impl.all_commit_ids()
+
     def refresh_commit_info(self, oid, seen, lazy=True):
         return self._impl.refresh_commit_info(oid, seen, lazy)
+
     def open_blob(self, blob):
         return self._impl.open_blob(blob)
+
     def blob_size(self, blob):
         return self._impl.blob_size(blob)
+
     def shorthand_for_commit(self, oid):
         return self._impl.shorthand_for_commit(oid)
+
     def symbolics_for_commit(self, commit):
         return self._impl.symbolics_for_commit(commit)
+
     def url_for_commit(self, commit, url_type='ci'):
         return self._impl.url_for_commit(commit, url_type)
+
     def compute_tree_new(self, commit, path='/'):
         return self._impl.compute_tree_new(commit, path)
+
     def last_commit_ids(self, commit, paths):
         return self._impl.last_commit_ids(commit, paths)
+
     def get_changes(self, commit_id):
         return self._impl.get_changes(commit_id)
+
     def is_empty(self):
         return self._impl.is_empty()
+
     def is_file(self, path, rev=None):
         return self._impl.is_file(path, rev)
+
     def get_heads(self):
         """
         Return list of heads for the repo.
@@ -416,6 +441,7 @@ class Repository(Artifact, ActivityObject):
         try to remove the deprecated fields and clean this up.
         """
         return self._impl.heads
+
     def get_branches(self):
         """
         Return list of branches for the repo.
@@ -425,6 +451,7 @@ class Repository(Artifact, ActivityObject):
         should try to remove the deprecated fields and clean this up.
         """
         return self._impl.branches
+
     def get_tags(self):
         """
         Return list of tags for the repo.
@@ -434,15 +461,18 @@ class Repository(Artifact, ActivityObject):
         should try to remove the deprecated fields and clean this up.
         """
         return self._impl.tags
+
     @property
     def head(self):
         return self._impl.head
+
     def set_default_branch(self, name):
         return self._impl.set_default_branch(name)
 
     def _log(self, rev, skip, limit):
         head = self.commit(rev)
-        if head is None: return
+        if head is None:
+            return
         for _id in self.commitlog([head._id], skip, limit):
             ci = head.query.get(_id=_id)
             ci.set_context(self)
@@ -491,7 +521,7 @@ class Repository(Artifact, ActivityObject):
             branch = self.app.default_branch_name
         try:
             return self.commit(branch)
-        except: # pragma no cover
+        except:  # pragma no cover
             log.exception('Cannot get latest commit for a branch', branch)
             return None
 
@@ -500,17 +530,18 @@ class Repository(Artifact, ActivityObject):
 
     def refresh_url(self):
         return '/'.join([
-                tg.config.get('base_url', 'http://localhost:8080').rstrip('/'),
-                'auth/refresh_repo',
-                self.url().lstrip('/'),
-            ])
+            tg.config.get('base_url', 'http://localhost:8080').rstrip('/'),
+            'auth/refresh_repo',
+            self.url().lstrip('/'),
+        ])
 
     def shorthand_id(self):
         return self.name
 
     @property
     def email_address(self):
-        domain = '.'.join(reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
+        domain = '.'.join(
+            reversed(self.app.url[1:-1].split('/'))).replace('_', '-')
         return u'noreply@%s%s' % (domain, config.common_suffix)
 
     def index(self):
@@ -532,8 +563,9 @@ class Repository(Artifact, ActivityObject):
         '''Return a URL string suitable for copy/paste that describes _this_ repo,
            e.g., for use in a clone/checkout command
         '''
-        tpl = string.Template(tg.config.get('scm.host.%s.%s' % (category, self.tool)))
-        return tpl.substitute(dict(username=username, path=self.url_path+self.name))
+        tpl = string.Template(
+            tg.config.get('scm.host.%s.%s' % (category, self.tool)))
+        return tpl.substitute(dict(username=username, path=self.url_path + self.name))
 
     def clone_command(self, category, username=''):
         '''Return a string suitable for copy/paste that would clone this repo locally
@@ -544,20 +576,21 @@ class Repository(Artifact, ActivityObject):
         tpl = string.Template(tg.config.get('scm.clone.%s.%s' % (category, self.tool)) or
                               tg.config.get('scm.clone.%s' % self.tool))
         return tpl.substitute(dict(username=username,
-                                   source_url=self.clone_url(category, username),
+                                   source_url=self.clone_url(
+                                       category, username),
                                    dest_path=self.suggested_clone_dest_path()))
 
     def merge_requests_by_statuses(self, *statuses):
         return MergeRequest.query.find(dict(
-                app_config_id=self.app.config._id,
-                status={'$in':statuses})).sort(
+            app_config_id=self.app.config._id,
+            status={'$in': statuses})).sort(
             'request_number')
 
     @LazyProperty
     def _additional_viewable_extensions(self):
         ext_list = self.additional_viewable_extensions or ''
         ext_list = [ext.strip() for ext in ext_list.split(',') if ext]
-        ext_list += [ '.ini', '.gitignore', '.svnignore', 'README' ]
+        ext_list += ['.ini', '.gitignore', '.svnignore', 'README']
         return ext_list
 
     def guess_type(self, name):
@@ -586,16 +619,16 @@ class Repository(Artifact, ActivityObject):
             self.set_status('ready')
 
     def push_upstream_context(self):
-        project, rest=h.find_project(self.upstream_repo.name)
+        project, rest = h.find_project(self.upstream_repo.name)
         with h.push_context(project._id):
             app = project.app_instance(rest[0])
         return h.push_context(project._id, app_config_id=app.config._id)
 
     def pending_upstream_merges(self):
         q = {
-            'downstream.project_id':self.project_id,
-            'downstream.mount_point':self.app.config.options.mount_point,
-            'status':'open'}
+            'downstream.project_id': self.project_id,
+            'downstream.mount_point': self.app.config.options.mount_point,
+            'status': 'open'}
         with self.push_upstream_context():
             return MergeRequest.query.find(q).count()
 
@@ -634,26 +667,28 @@ class Repository(Artifact, ActivityObject):
         self.status = status
         session(self).flush(self)
 
+
 class MergeRequest(VersionedArtifact, ActivityObject):
-    statuses=['open', 'merged', 'rejected']
+    statuses = ['open', 'merged', 'rejected']
+
     class __mongometa__:
-        name='merge-request'
-        indexes=['commit_id']
-        unique_indexes=[('app_config_id', 'request_number')]
-    type_s='MergeRequest'
-
-    request_number=FieldProperty(int)
-    status=FieldProperty(str, if_missing='open')
-    downstream=FieldProperty(dict(
-            project_id=S.ObjectId,
-            mount_point=str,
-            commit_id=str))
-    source_branch=FieldProperty(str,if_missing='')
-    target_branch=FieldProperty(str)
-    creator_id=FieldProperty(S.ObjectId, if_missing=lambda:c.user._id)
-    created=FieldProperty(datetime, if_missing=datetime.utcnow)
-    summary=FieldProperty(str)
-    description=FieldProperty(str)
+        name = 'merge-request'
+        indexes = ['commit_id']
+        unique_indexes = [('app_config_id', 'request_number')]
+    type_s = 'MergeRequest'
+
+    request_number = FieldProperty(int)
+    status = FieldProperty(str, if_missing='open')
+    downstream = FieldProperty(dict(
+        project_id=S.ObjectId,
+        mount_point=str,
+        commit_id=str))
+    source_branch = FieldProperty(str, if_missing='')
+    target_branch = FieldProperty(str)
+    creator_id = FieldProperty(S.ObjectId, if_missing=lambda: c.user._id)
+    created = FieldProperty(datetime, if_missing=datetime.utcnow)
+    summary = FieldProperty(str)
+    description = FieldProperty(str)
 
     @property
     def activity_name(self):
@@ -701,13 +736,13 @@ class MergeRequest(VersionedArtifact, ActivityObject):
     @classmethod
     def upsert(cls, **kw):
         num = cls.query.find(dict(
-                app_config_id=c.app.config._id)).count()+1
+            app_config_id=c.app.config._id)).count() + 1
         while True:
             try:
                 r = cls(request_number=num, **kw)
                 session(r).flush(r)
                 return r
-            except pymongo.errors.DuplicateKeyError: # pragma no cover
+            except pymongo.errors.DuplicateKeyError:  # pragma no cover
                 session(r).expunge(r)
                 num += 1
 
@@ -725,6 +760,7 @@ class MergeRequest(VersionedArtifact, ActivityObject):
 
 
 class GitLikeTree(object):
+
     '''
     A tree node similar to that which is used in git
 
@@ -734,19 +770,22 @@ class GitLikeTree(object):
 
     def __init__(self):
         self.blobs = {}  # blobs[name] = oid
-        self.trees = defaultdict(GitLikeTree) #trees[name] = GitLikeTree()
+        self.trees = defaultdict(GitLikeTree)  # trees[name] = GitLikeTree()
         self._hex = None
 
     def get_tree(self, path):
-        if path.startswith('/'): path = path[1:]
-        if not path: return self
+        if path.startswith('/'):
+            path = path[1:]
+        if not path:
+            return self
         cur = self
         for part in path.split('/'):
             cur = cur.trees[part]
         return cur
 
     def get_blob(self, path):
-        if path.startswith('/'): path = path[1:]
+        if path.startswith('/'):
+            path = path[1:]
         path_parts = path.split('/')
         dirpath, last = path_parts[:-1], path_parts[-1]
         cur = self
@@ -755,7 +794,8 @@ class GitLikeTree(object):
         return cur.blobs[last]
 
     def set_blob(self, path, oid):
-        if path.startswith('/'): path = path[1:]
+        if path.startswith('/'):
+            path = path[1:]
         path_parts = path.split('/')
         dirpath, filename = path_parts[:-1], path_parts[-1]
         cur = self
@@ -774,9 +814,9 @@ class GitLikeTree(object):
     def __repr__(self):
         # this can't change, is used in hex() above
         lines = ['t %s %s' % (t.hex(), name)
-                  for name, t in self.trees.iteritems() ]
+                 for name, t in self.trees.iteritems()]
         lines += ['b %s %s' % (oid, name)
-                  for name, oid in self.blobs.iteritems() ]
+                  for name, oid in self.blobs.iteritems()]
         return h.really_unicode('\n'.join(sorted(lines))).encode('utf-8')
 
     def __unicode__(self):
@@ -784,14 +824,16 @@ class GitLikeTree(object):
 
     def pretty_tree(self, indent=0, recurse=True, show_id=True):
         '''For debugging, show a nice tree representation'''
-        lines = [' '*indent + 't %s %s' %
-                 (name, '\n'+t.unicode_full_tree(indent+2, show_id=show_id) if recurse else t.hex())
-                  for name, t in sorted(self.trees.iteritems()) ]
-        lines += [' '*indent + 'b %s %s' % (name, oid if show_id else '')
-                  for name, oid in sorted(self.blobs.iteritems()) ]
+        lines = [' ' * indent + 't %s %s' %
+                 (name, '\n' + t.unicode_full_tree(indent + 2, show_id=show_id)
+                  if recurse else t.hex())
+                 for name, t in sorted(self.trees.iteritems())]
+        lines += [' ' * indent + 'b %s %s' % (name, oid if show_id else '')
+                  for name, oid in sorted(self.blobs.iteritems())]
         output = h.really_unicode('\n'.join(lines)).encode('utf-8')
         return output
 
+
 def topological_sort(graph):
     '''Return the topological sort of a graph.
 
@@ -810,7 +852,8 @@ def topological_sort(graph):
         if not parents:
             graph.pop(nid)
             roots.append(nid)
-        for p_nid in parents: children[p_nid].append(nid)
+        for p_nid in parents:
+            children[p_nid].append(nid)
     # Topo sort
     while roots:
         n = roots.pop()

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/session.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/session.py b/Allura/allura/model/session.py
index 9a2c061..92f339d 100644
--- a/Allura/allura/model/session.py
+++ b/Allura/allura/model/session.py
@@ -28,6 +28,7 @@ from allura.tasks import index_tasks
 
 log = logging.getLogger(__name__)
 
+
 class ArtifactSessionExtension(SessionExtension):
 
     def __init__(self, session):
@@ -41,14 +42,14 @@ class ArtifactSessionExtension(SessionExtension):
             self.objects_added = list(self.session.uow.new)
             self.objects_modified = list(self.session.uow.dirty)
             self.objects_deleted = list(self.session.uow.deleted)
-        else: # pragma no cover
+        else:  # pragma no cover
             st = state(obj)
             if st.status == st.new:
-                self.objects_added = [ obj ]
+                self.objects_added = [obj]
             elif st.status == st.dirty:
-                self.objects_modified = [ obj ]
+                self.objects_modified = [obj]
             elif st.status == st.deleted:
-                self.objects_deleted = [ obj ]
+                self.objects_deleted = [obj]
 
     def after_flush(self, obj=None):
         "Update artifact references, and add/update this artifact to solr"
@@ -61,13 +62,14 @@ class ArtifactSessionExtension(SessionExtension):
             try:
                 arefs = [
                     ArtifactReference.from_artifact(obj)
-                    for obj in self.objects_added + self.objects_modified ]
+                    for obj in self.objects_added + self.objects_modified]
                 for obj in self.objects_added + self.objects_modified:
                     Shortlink.from_artifact(obj)
                 # Flush shortlinks
                 main_orm_session.flush()
             except Exception:
-                log.exception("Failed to update artifact references. Is this a borked project migration?")
+                log.exception(
+                    "Failed to update artifact references. Is this a borked project migration?")
             self.update_index(self.objects_deleted, arefs)
             for obj in self.objects_added:
                 g.zarkov_event('create', extra=obj.index_id())
@@ -88,7 +90,9 @@ class ArtifactSessionExtension(SessionExtension):
         if arefs:
             index_tasks.add_artifacts.post([aref._id for aref in arefs])
 
+
 class BatchIndexer(ArtifactSessionExtension):
+
     """
     Tracks needed search index operations over the life of a
     :class:`ming.odm.session.ThreadLocalODMSession` session, and performs them
@@ -116,7 +120,7 @@ class BatchIndexer(ArtifactSessionExtension):
         from .index import ArtifactReference
         del_index_ids = [obj.index_id() for obj in objects_deleted]
         deleted_aref_ids = [aref._id for aref in
-            ArtifactReference.query.find(dict(_id={'$in': del_index_ids}))]
+                            ArtifactReference.query.find(dict(_id={'$in': del_index_ids}))]
         cls = self.__class__
         cls.to_add -= set(deleted_aref_ids)
         cls.to_delete |= set(del_index_ids)
@@ -157,7 +161,8 @@ class BatchIndexer(ArtifactSessionExtension):
         try:
             task_func.post(chunk)
         except pymongo.errors.InvalidDocument as e:
-            # there are many types of InvalidDocument, only recurse if its expected to help
+            # there are many types of InvalidDocument, only recurse if its
+            # expected to help
             if str(e).startswith('BSON document too large'):
                 cls._post(task_func, chunk[:len(chunk) // 2])
                 cls._post(task_func, chunk[len(chunk) // 2:])
@@ -172,6 +177,7 @@ def substitute_extensions(session, extensions=None):
     :class:`ming.odm.session.ThreadLocalODMSession` session.
     """
     original_exts = session._kwargs.get('extensions', [])
+
     def _set_exts(exts):
         session.flush()
         session.close()
@@ -181,7 +187,6 @@ def substitute_extensions(session, extensions=None):
     _set_exts(original_exts)
 
 
-
 main_doc_session = Session.by_name('main')
 project_doc_session = Session.by_name('project')
 task_doc_session = Session.by_name('task')
@@ -190,7 +195,7 @@ project_orm_session = ThreadLocalORMSession(project_doc_session)
 task_orm_session = ThreadLocalORMSession(task_doc_session)
 artifact_orm_session = ThreadLocalORMSession(
     doc_session=project_doc_session,
-    extensions = [ ArtifactSessionExtension ])
+    extensions=[ArtifactSessionExtension])
 repository_orm_session = ThreadLocalORMSession(
     doc_session=main_doc_session,
-    extensions = [  ])
+    extensions=[])

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/stats.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/stats.py b/Allura/allura/model/stats.py
index 3946557..03423e5 100644
--- a/Allura/allura/model/stats.py
+++ b/Allura/allura/model/stats.py
@@ -34,33 +34,35 @@ import difflib
 from allura.model.session import main_orm_session
 from allura.lib import helpers as h
 
+
 class Stats(MappedClass):
+
     class __mongometa__:
-        name='basestats'
+        name = 'basestats'
         session = main_orm_session
-        unique_indexes = [ '_id']
+        unique_indexes = ['_id']
 
-    _id=FieldProperty(S.ObjectId)
+    _id = FieldProperty(S.ObjectId)
 
-    visible = FieldProperty(bool, if_missing = True)
+    visible = FieldProperty(bool, if_missing=True)
     registration_date = FieldProperty(datetime)
     general = FieldProperty([dict(
-        category = S.ObjectId,
-        messages = [dict(
-            messagetype = str,
-            created = int,
-            modified = int)],
-        tickets = dict(
-            solved = int,
-            assigned = int,
-            revoked = int,
-            totsolvingtime = int),
-        commits = [dict(
-            lines = int,
-            number = int,
-            language = S.ObjectId)])])
-
-    lastmonth=FieldProperty(dict(
+        category=S.ObjectId,
+        messages=[dict(
+            messagetype=str,
+            created=int,
+            modified=int)],
+        tickets=dict(
+            solved=int,
+            assigned=int,
+            revoked=int,
+            totsolvingtime=int),
+        commits=[dict(
+            lines=int,
+            number=int,
+            language=S.ObjectId)])])
+
+    lastmonth = FieldProperty(dict(
         messages=[dict(
             datetime=datetime,
             created=bool,
@@ -89,33 +91,33 @@ class Stats(MappedClass):
         The user may have registered before stats were collected,
         making calculations based on registration date unfair."""
         min_date = config.get('userstats.start_date', '0001-1-1')
-        return max(datetime.strptime(min_date,'%Y-%m-%d'), self.registration_date)
+        return max(datetime.strptime(min_date, '%Y-%m-%d'), self.registration_date)
 
     def getCodeContribution(self):
-        days=(datetime.today() - self.start_date).days
+        days = (datetime.today() - self.start_date).days
         if not days:
-            days=1
+            days = 1
         for val in self['general']:
             if val['category'] is None:
                 for commits in val['commits']:
                     if commits['language'] is None:
                         if days > 30:
-                            return round(float(commits.lines)/days*30, 2)
+                            return round(float(commits.lines) / days * 30, 2)
                         else:
                             return float(commits.lines)
         return 0
 
     def getDiscussionContribution(self):
-        days=(datetime.today() - self.start_date).days
+        days = (datetime.today() - self.start_date).days
         if not days:
-            days=1
+            days = 1
         for val in self['general']:
             if val['category'] is None:
                 for artifact in val['messages']:
                     if artifact['messagetype'] is None:
-                        tot = artifact.created+artifact.modified
+                        tot = artifact.created + artifact.modified
                         if days > 30:
-                            return round(float(tot)/days*30,2)
+                            return round(float(tot) / days * 30, 2)
                         else:
                             return float(tot)
         return 0
@@ -129,30 +131,30 @@ class Stats(MappedClass):
                 return round(float(tickets.solved) / tickets.assigned, 2)
         return 0
 
-    def getCommits(self, category = None):
-        i = getElementIndex(self.general, category = category)
+    def getCommits(self, category=None):
+        i = getElementIndex(self.general, category=category)
         if i is None:
             return dict(number=0, lines=0)
         cat = self.general[i]
-        j = getElementIndex(cat.commits, language = None)
+        j = getElementIndex(cat.commits, language=None)
         if j is None:
             return dict(number=0, lines=0)
         return dict(
             number=cat.commits[j]['number'],
             lines=cat.commits[j]['lines'])
 
-    def getArtifacts(self, category = None, art_type = None):
-        i = getElementIndex(self.general, category = category)
+    def getArtifacts(self, category=None, art_type=None):
+        i = getElementIndex(self.general, category=category)
         if i is None:
             return dict(created=0, modified=0)
         cat = self.general[i]
-        j = getElementIndex(cat.messages, messagetype = art_type)
+        j = getElementIndex(cat.messages, messagetype=art_type)
         if j is None:
             return dict(created=0, modified=0)
         return dict(created=cat.messages[j].created, modified=cat.messages[j].modified)
 
-    def getTickets(self, category = None):
-        i = getElementIndex(self.general, category = category)
+    def getTickets(self, category=None):
+        i = getElementIndex(self.general, category=category)
         if i is None:
             return dict(
                 assigned=0,
@@ -177,20 +179,20 @@ class Stats(MappedClass):
         by_cat = {}
         for entry in self.general:
             cat = entry.category
-            i = getElementIndex(entry.commits, language = None)
+            i = getElementIndex(entry.commits, language=None)
             if i is None:
                 n, lines = 0, 0
             else:
                 n, lines = entry.commits[i].number, entry.commits[i].lines
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             by_cat[cat] = dict(number=n, lines=lines)
         return by_cat
 
-    #For the moment, commit stats by language are not used, since each project
-    #can be linked to more than one programming language and we don't know how
-    #to which programming language should be credited a line of code modified
-    #within a project including two or more languages.
+    # For the moment, commit stats by language are not used, since each project
+    # can be linked to more than one programming language and we don't know how
+    # to which programming language should be credited a line of code modified
+    # within a project including two or more languages.
     def getCommitsByLanguage(self):
         langlist = []
         by_lang = {}
@@ -207,7 +209,7 @@ class Stats(MappedClass):
         for entry in self.general:
             cat = entry.category
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             if detailed:
                 by_cat[cat] = entry.messages
             else:
@@ -219,13 +221,13 @@ class Stats(MappedClass):
         return by_cat
 
     def getArtifactsByType(self, category=None):
-        i = getElementIndex(self.general, category = category)
+        i = getElementIndex(self.general, category=category)
         if i is None:
             return {}
         entry = self.general[i].messages
         by_type = dict([(el.messagetype, dict(created=el.created,
                                               modified=el.modified))
-                         for el in entry])
+                        for el in entry])
         return by_type
 
     def getTicketsByCategory(self):
@@ -235,7 +237,7 @@ class Stats(MappedClass):
         for entry in self.general:
             cat = entry.category
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             a, s = entry.tickets.assigned, entry.tickets.solved
             r, time = entry.tickets.solved, entry.tickets.totsolvingtime
             if s:
@@ -249,7 +251,7 @@ class Stats(MappedClass):
                 averagesolvingtime=_convertTimeDiff(average))
         return by_cat
 
-    def getLastMonthCommits(self, category = None):
+    def getLastMonthCommits(self, category=None):
         self.checkOldArtifacts()
         lineslist = [el.lines for el in self.lastmonth.commits
                      if category in el.categories + [None]]
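
A note on the filter being reformatted here: appending [None] to each
element's category list makes a None category argument act as "no filter".
A tiny self-contained illustration (the category values are made up):

    categories = ['c1', 'c2']               # hypothetical el.categories
    assert None in categories + [None]      # category=None matches everything
    assert 'c1' in categories + [None]      # otherwise plain membership
    assert 'c3' not in categories + [None]
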
@@ -260,8 +262,8 @@ class Stats(MappedClass):
 
         self.checkOldArtifacts()
         seen = set()
-        catlist=[el.category for el in self.general
-                 if el.category not in seen and not seen.add(el.category)]
+        catlist = [el.category for el in self.general
+                   if el.category not in seen and not seen.add(el.category)]
 
         by_cat = {}
         for cat in catlist:
@@ -270,7 +272,7 @@ class Stats(MappedClass):
             n = len(lineslist)
             lines = sum(lineslist)
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             by_cat[cat] = dict(number=n, lines=lines)
         return by_cat
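
The comprehension re-indented above relies on set.add() returning None:
"not seen.add(x)" is always true and merely records x as seen, so the list
keeps only first occurrences, in their original order. A minimal sketch:

    seen = set()
    cats = ['py', 'c', 'py', 'go', 'c']     # hypothetical category ids
    uniq = [x for x in cats if x not in seen and not seen.add(x)]
    assert uniq == ['py', 'c', 'go']        # order-preserving dedup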
 
@@ -279,8 +281,8 @@ class Stats(MappedClass):
 
         self.checkOldArtifacts()
         seen = set()
-        langlist=[el.language for el in self.general
-                  if el.language not in seen and not seen.add(el.language)]
+        langlist = [el.language for el in self.general
+                    if el.language not in seen and not seen.add(el.language)]
 
         by_lang = {}
         for lang in langlist:
@@ -289,36 +291,36 @@ class Stats(MappedClass):
             n = len(lineslist)
             lines = sum(lineslist)
             if lang != None:
-                lang = TroveCategory.query.get(_id = lang)
+                lang = TroveCategory.query.get(_id=lang)
             by_lang[lang] = dict(number=n, lines=lines)
         return by_lang
 
-    def getLastMonthArtifacts(self, category = None, art_type = None):
+    def getLastMonthArtifacts(self, category=None, art_type=None):
         self.checkOldArtifacts()
         cre, mod = reduce(
             addtuple,
-            [(int(el.created),1-int(el.created))
+            [(int(el.created), 1 - int(el.created))
                 for el in self.lastmonth.messages
                 if (category is None or category in el.categories) and
                 (el.messagetype == art_type or art_type is None)],
-            (0,0))
+            (0, 0))
         return dict(created=cre, modified=mod)
 
-    def getLastMonthArtifactsByType(self, category = None):
+    def getLastMonthArtifactsByType(self, category=None):
         self.checkOldArtifacts()
         seen = set()
-        types=[el.messagetype for el in self.lastmonth.messages
-               if el.messagetype not in seen and not seen.add(el.messagetype)]
+        types = [el.messagetype for el in self.lastmonth.messages
+                 if el.messagetype not in seen and not seen.add(el.messagetype)]
 
         by_type = {}
         for t in types:
             cre, mod = reduce(
                 addtuple,
-                [(int(el.created),1-int(el.created))
+                [(int(el.created), 1 - int(el.created))
                  for el in self.lastmonth.messages
                  if el.messagetype == t and
-                 category in [None]+el.categories],
-                (0,0))
+                 category in [None] + el.categories],
+                (0, 0))
             by_type[t] = dict(created=cre, modified=mod)
         return by_type
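
The reduce(addtuple, ...) pattern in this and the previous hunk folds one
pair per message into a single (created, modified) tally: a message
contributes (1, 0) if it was created in the period and (0, 1) if it was only
modified. A self-contained sketch (the flag values are made up; reduce is a
builtin on the Python 2 this codebase targets):

    def addtuple(l1, l2):
        a, b = l1
        x, y = l2
        return (a + x, b + y)

    flags = [True, False, True]             # hypothetical el.created values
    cre, mod = reduce(addtuple,
                      [(int(f), 1 - int(f)) for f in flags], (0, 0))
    assert (cre, mod) == (2, 1)
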
 
@@ -327,22 +329,22 @@ class Stats(MappedClass):
 
         self.checkOldArtifacts()
         seen = set()
-        catlist=[el.category for el in self.general
-                 if el.category not in seen and not seen.add(el.category)]
+        catlist = [el.category for el in self.general
+                   if el.category not in seen and not seen.add(el.category)]
 
         by_cat = {}
         for cat in catlist:
             cre, mod = reduce(
                 addtuple,
-                [(int(el.created),1-int(el.created))
+                [(int(el.created), 1 - int(el.created))
                  for el in self.lastmonth.messages
-                 if cat in el.categories + [None]], (0,0))
+                 if cat in el.categories + [None]], (0, 0))
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             by_cat[cat] = dict(created=cre, modified=mod)
         return by_cat
 
-    def getLastMonthTickets(self, category = None):
+    def getLastMonthTickets(self, category=None):
         from allura.model.project import TroveCategory
 
         self.checkOldArtifacts()
@@ -355,8 +357,8 @@ class Stats(MappedClass):
             [(1, el.solvingtime)
              for el in self.lastmonth.solvedtickets
              if category in el.categories + [None]],
-            (0,0))
-        if category!=None:
+            (0, 0))
+        if category != None:
             category = TroveCategory.query.get(_id=category)
         if s > 0:
             time = time / s
@@ -373,8 +375,8 @@ class Stats(MappedClass):
 
         self.checkOldArtifacts()
         seen = set()
-        catlist=[el.category for el in self.general
-                 if el.category not in seen and not seen.add(el.category)]
+        catlist = [el.category for el in self.general
+                   if el.category not in seen and not seen.add(el.category)]
         by_cat = {}
         for cat in catlist:
             a = len([el for el in self.lastmonth.assignedtickets
@@ -383,9 +385,9 @@ class Stats(MappedClass):
                      if cat in el.categories + [None]])
             s, time = reduce(addtuple, [(1, el.solvingtime)
                                         for el in self.lastmonth.solvedtickets
-                                        if cat in el.categories+[None]],(0,0))
+                                        if cat in el.categories + [None]], (0, 0))
             if cat != None:
-                cat = TroveCategory.query.get(_id = cat)
+                cat = TroveCategory.query.get(_id=cat)
             if s > 0:
                 time = time / s
             else:
@@ -436,8 +438,8 @@ class Stats(MappedClass):
 
     def addClosedTicket(self, open_datetime, close_datetime, project):
         topics = [t for t in project.trove_topic if t]
-        s_time=int((close_datetime-open_datetime).total_seconds())
-        self._updateTicketsStats(topics, 'solved', s_time = s_time)
+        s_time = int((close_datetime - open_datetime).total_seconds())
+        self._updateTicketsStats(topics, 'solved', s_time=s_time)
         self.lastmonth.solvedtickets.append(dict(
             datetime=close_datetime,
             categories=topics,
@@ -445,7 +447,7 @@ class Stats(MappedClass):
         self.checkOldArtifacts()
 
     def addCommit(self, newcommit, commit_datetime, project):
-        def _computeLines(newblob, oldblob = None):
+        def _computeLines(newblob, oldblob=None):
             if oldblob:
                 listold = list(oldblob)
             else:
@@ -462,7 +464,8 @@ class Stats(MappedClass):
                     listold, listnew,
                     ('old' + oldblob.path()).encode('utf-8'),
                     ('new' + newblob.path()).encode('utf-8'))
-                lines = len([l for l in diff if len(l) > 0 and l[0] == '+'])-1
+                lines = len(
+                    [l for l in diff if len(l) > 0 and l[0] == '+']) - 1
             else:
                 lines = 0
             return lines
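
The "- 1" in the line wrapped above compensates for difflib's unified-diff
header: the "+++ new/..." file line also starts with '+' but is not an added
line. A runnable check of the same counting logic:

    import difflib

    old = ['a\n', 'b\n']
    new = ['a\n', 'b\n', 'c\n', 'd\n']
    diff = difflib.unified_diff(old, new, 'old/f', 'new/f')
    added = len([l for l in diff if len(l) > 0 and l[0] == '+']) - 1
    assert added == 2                       # the '+++ new/f' header excluded
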
@@ -506,16 +509,16 @@ class Stats(MappedClass):
             for changed in d.changed:
                 newblob = newcommit.tree.get_blob_by_path(changed)
                 oldblob = oldcommit.tree.get_blob_by_path(changed)
-                totlines+=_computeLines(newblob, oldblob)
+                totlines += _computeLines(newblob, oldblob)
 
             for copied in d.copied:
                 newblob = newcommit.tree.get_blob_by_path(copied['new'])
                 oldblob = oldcommit.tree.get_blob_by_path(copied['old'])
-                totlines+=_computeLines(newblob, oldblob)
+                totlines += _computeLines(newblob, oldblob)
 
             for added in d.added:
                 newblob = newcommit.tree.get_blob_by_path(added)
-                totlines+=_computeLines(newblob)
+                totlines += _computeLines(newblob)
 
         _addCommitData(self, topics, languages, totlines)
 
@@ -533,7 +536,7 @@ class Stats(MappedClass):
         lt = [None] + topics
         for mtype in [None, art_type]:
             for t in lt:
-                i = getElementIndex(self.general, category = t)
+                i = getElementIndex(self.general, category=t)
                 if i is None:
                     msg = dict(
                         category=t,
@@ -545,7 +548,7 @@ class Stats(MappedClass):
                             totsolvingtime=0),
                         messages=[])
                     self.general.append(msg)
-                    i = getElementIndex(self.general, category = t)
+                    i = getElementIndex(self.general, category=t)
                 j = getElementIndex(
                     self.general[i]['messages'], messagetype=mtype)
                 if j is None:
@@ -562,12 +565,12 @@ class Stats(MappedClass):
             messagetype=art_type))
         self.checkOldArtifacts()
 
-    def _updateTicketsStats(self, topics, action, s_time = None):
+    def _updateTicketsStats(self, topics, action, s_time=None):
         if action not in ['solved', 'assigned', 'revoked']:
             return
         lt = topics + [None]
         for t in lt:
-            i = getElementIndex(self.general, category = t)
+            i = getElementIndex(self.general, category=t)
             if i is None:
                 stats = dict(
                     category=t,
@@ -579,10 +582,11 @@ class Stats(MappedClass):
                         totsolvingtime=0),
                     messages=[])
                 self.general.append(stats)
-                i = getElementIndex(self.general, category = t)
+                i = getElementIndex(self.general, category=t)
             self.general[i]['tickets'][action] += 1
             if action == 'solved':
-                self.general[i]['tickets']['totsolvingtime']+=s_time
+                self.general[i]['tickets']['totsolvingtime'] += s_time
+
 
 def getElementIndex(el_list, **kw):
     for i in range(len(el_list)):
@@ -593,15 +597,17 @@ def getElementIndex(el_list, **kw):
             return i
     return None
 
+
 def addtuple(l1, l2):
     a, b = l1
     x, y = l2
-    return (a+x, b+y)
+    return (a + x, b + y)
+
 
 def _convertTimeDiff(int_seconds):
     if int_seconds is None:
         return None
-    diff = timedelta(seconds = int_seconds)
+    diff = timedelta(seconds=int_seconds)
     days, seconds = diff.days, diff.seconds
     hours = seconds / 3600
     seconds = seconds % 3600
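
The tail of _convertTimeDiff falls outside this hunk. As a hedged
reconstruction of the arithmetic it begins (the return format below is
illustrative, not necessarily the function's actual one), divmod expresses
the same days/hours/minutes split:

    from datetime import timedelta

    def convert_time_diff(int_seconds):     # standalone approximation
        if int_seconds is None:
            return None
        diff = timedelta(seconds=int_seconds)
        days, seconds = diff.days, diff.seconds
        hours, seconds = divmod(seconds, 3600)
        minutes, seconds = divmod(seconds, 60)
        return '%dd %02dh %02dm %02ds' % (days, hours, minutes, seconds)

    assert convert_time_diff(90061) == '1d 01h 01m 01s'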

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/timeline.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/timeline.py b/Allura/allura/model/timeline.py
index 8f8ac41..3487b9c 100644
--- a/Allura/allura/model/timeline.py
+++ b/Allura/allura/model/timeline.py
@@ -30,15 +30,17 @@ log = logging.getLogger(__name__)
 
 
 class Director(ActivityDirector):
+
     """Overrides the default ActivityDirector to kick off background
     timeline aggregations after an activity is created.
 
     """
+
     def create_activity(self, actor, verb, obj, target=None,
-            related_nodes=None):
+                        related_nodes=None):
         from allura.model.project import Project
         super(Director, self).create_activity(actor, verb, obj,
-                target=target, related_nodes=related_nodes)
+                                              target=target, related_nodes=related_nodes)
         # aggregate actor and follower's timelines
         create_timelines.post(actor.node_id)
         # aggregate project and follower's timelines
@@ -52,12 +54,14 @@ class Aggregator(BaseAggregator):
 
 
 class ActivityNode(NodeBase):
+
     @property
     def node_id(self):
         return "%s:%s" % (self.__class__.__name__, self._id)
 
 
 class ActivityObject(ActivityObjectBase):
+
     @property
     def activity_name(self):
         """Override this for each Artifact type."""
@@ -94,9 +98,11 @@ def perm_check(user):
         otherwise return False.
         """
         extras_dict = activity.obj.activity_extras
-        if not extras_dict: return True
+        if not extras_dict:
+            return True
         allura_id = extras_dict.get('allura_id')
-        if not allura_id: return True
+        if not allura_id:
+            return True
         classname, _id = allura_id.split(':', 1)
         cls = Mapper.by_classname(classname).mapped_class
         try:
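
For context on the perm_check hunk above: allura_id is a "ClassName:id"
string (the same shape ActivityNode.node_id produces), split once so the id
part may itself contain colons. A sketch with a made-up id:

    allura_id = 'Ticket:5419c55d350e04b5e412a537'   # hypothetical value
    classname, _id = allura_id.split(':', 1)
    assert (classname, _id) == ('Ticket', '5419c55d350e04b5e412a537')
    # Mapper.by_classname(classname).mapped_class then recovers the model
    # class, and querying it by _id recovers the artifact.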

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/model/types.py
----------------------------------------------------------------------
diff --git a/Allura/allura/model/types.py b/Allura/allura/model/types.py
index ab7d341..12df733 100644
--- a/Allura/allura/model/types.py
+++ b/Allura/allura/model/types.py
@@ -20,7 +20,9 @@ from ming import schema as S
 
 EVERYONE, ALL_PERMISSIONS = None, '*'
 
+
 class MarkdownCache(S.Object):
+
     def __init__(self, **kw):
         super(MarkdownCache, self).__init__(
             fields=dict(
@@ -29,14 +31,17 @@ class MarkdownCache(S.Object):
                 render_time=S.Float()),
             **kw)
 
+
 class ACE(S.Object):
+
     '''ACE - access control entry'''
     ALLOW, DENY = 'ALLOW', 'DENY'
+
     def __init__(self, permissions, **kwargs):
         if permissions is None:
-            permission=S.String()
+            permission = S.String()
         else:
-            permission=S.OneOf('*', *permissions)
+            permission = S.OneOf('*', *permissions)
         super(ACE, self).__init__(
             fields=dict(
                 access=S.OneOf(self.ALLOW, self.DENY),
@@ -68,6 +73,7 @@ class ACE(S.Object):
             ace.role_id in (role_id, EVERYONE)
             and ace.permission in (permission, ALL_PERMISSIONS))
 
+
 class ACL(S.Array):
 
     def __init__(self, permissions=None, **kwargs):
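
A small illustration of the matching predicate shown in the ACE hunk above;
a plain attribute holder stands in for the real Ming document, and the two
sentinels are the ones defined at the top of types.py:

    EVERYONE, ALL_PERMISSIONS = None, '*'

    class FakeACE(object):                  # stand-in for an ACE document
        def __init__(self, **kw):
            self.__dict__.update(kw)

    ace = FakeACE(access='ALLOW', role_id=EVERYONE,
                  permission=ALL_PERMISSIONS)
    role_id, permission = 'some-role-id', 'read'    # hypothetical query
    assert (ace.role_id in (role_id, EVERYONE)
            and ace.permission in (permission, ALL_PERMISSIONS))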

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/scripts/refresh_last_commits.py
----------------------------------------------------------------------
diff --git a/Allura/allura/scripts/refresh_last_commits.py b/Allura/allura/scripts/refresh_last_commits.py
index 3721795..b5cda5c 100644
--- a/Allura/allura/scripts/refresh_last_commits.py
+++ b/Allura/allura/scripts/refresh_last_commits.py
@@ -37,6 +37,7 @@ log = logging.getLogger(__name__)
 
 
 class RefreshLastCommits(ScriptTask):
+
     @classmethod
     def parser(cls):
         def _repo_type_list(s):
@@ -45,36 +46,38 @@ class RefreshLastCommits(ScriptTask):
                 repo_type = repo_type.strip()
                 if repo_type not in ['git', 'hg']:
                     raise argparse.ArgumentTypeError(
-                            '{0} is not a valid repo type.'.format(repo_type))
+                        '{0} is not a valid repo type.'.format(repo_type))
                 repo_types.append(repo_type)
             return repo_types
         parser = argparse.ArgumentParser(description='Using existing commit data, '
-                'refresh the last commit metadata in MongoDB. Run for all repos (no args), '
-                'or restrict by neighborhood, project, or code tool mount point.')
+                                         'refresh the last commit metadata in MongoDB. Run for all repos (no args), '
+                                         'or restrict by neighborhood, project, or code tool mount point.')
         parser.add_argument('--nbhd', action='store', default='', dest='nbhd',
-                help='Restrict update to a particular neighborhood, e.g. /p/.')
-        parser.add_argument('--project', action='store', default='', dest='project',
-                help='Restrict update to a particular project. To specify a '
-                'subproject, use a slash: project/subproject.')
+                            help='Restrict update to a particular neighborhood, e.g. /p/.')
+        parser.add_argument(
+            '--project', action='store', default='', dest='project',
+            help='Restrict update to a particular project. To specify a '
+            'subproject, use a slash: project/subproject.')
         parser.add_argument('--project-regex', action='store', default='',
-                dest='project_regex',
-                help='Restrict update to projects for which the shortname matches '
-                'the provided regex.')
-        parser.add_argument('--repo-types', action='store', type=_repo_type_list,
-                default=['git', 'hg'], dest='repo_types',
-                help='Only refresh last commits for repos of the given type(s). Defaults to: '
-                'git,hg. Example: --repo-types=git')
+                            dest='project_regex',
+                            help='Restrict update to projects for which the shortname matches '
+                            'the provided regex.')
+        parser.add_argument(
+            '--repo-types', action='store', type=_repo_type_list,
+            default=['git', 'hg'], dest='repo_types',
+            help='Only refresh last commits for repos of the given type(s). Defaults to: '
+            'git,hg. Example: --repo-types=git')
         parser.add_argument('--mount-point', default='', dest='mount_point',
-                help='Restrict update to repos at the given tool mount point. ')
+                            help='Restrict update to repos at the given tool mount point. ')
         parser.add_argument('--clean', action='store_true', dest='clean',
-                default=False, help='Remove last commit mongo docs for '
-                'project(s) being refreshed before doing the refresh.')
+                            default=False, help='Remove last commit mongo docs for '
+                            'project(s) being refreshed before doing the refresh.')
         parser.add_argument('--dry-run', action='store_true', dest='dry_run',
-                default=False, help='Log names of projects that would have their ')
+                            default=False, help='Log names of projects that would have their last commit data refreshed, without actually refreshing.')
         parser.add_argument('--diffs', action='store_true', dest='diffs',
-                default=False, help='Refresh / clean diffs as well as LCDs')
+                            default=False, help='Refresh / clean diffs as well as LCDs')
         parser.add_argument('--limit', action='store', type=int, dest='limit',
-                default=False, help='Limit of how many commits to process')
+                            default=False, help='Limit of how many commits to process')
         return parser
 
     @classmethod
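
The _repo_type_list callable re-indented above is an argparse "type": it
turns a comma-separated string into a validated list, raising
ArgumentTypeError so that argparse itself reports bad values. Extracted as a
standalone check:

    import argparse

    def _repo_type_list(s):
        repo_types = []
        for repo_type in s.split(','):
            repo_type = repo_type.strip()
            if repo_type not in ['git', 'hg']:
                raise argparse.ArgumentTypeError(
                    '{0} is not a valid repo type.'.format(repo_type))
            repo_types.append(repo_type)
        return repo_types

    assert _repo_type_list('git, hg') == ['git', 'hg']
    # _repo_type_list('svn') raises ArgumentTypeError
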
@@ -94,7 +97,8 @@ class RefreshLastCommits(ScriptTask):
 
         for chunk in chunked_find(M.Project, q_project):
             for p in chunk:
-                log.info("Refreshing last commit data for project '%s'." % p.shortname)
+                log.info("Refreshing last commit data for project '%s'." %
+                         p.shortname)
                 if options.dry_run:
                     continue
                 c.project = p
@@ -109,17 +113,19 @@ class RefreshLastCommits(ScriptTask):
                         continue
                     if c.app.repo.tool.lower() not in options.repo_types:
                         log.info("Skipping %r: wrong type (%s)", c.app.repo,
-                                c.app.repo.tool.lower())
+                                 c.app.repo.tool.lower())
                         continue
 
                     c.app.repo.status = 'analyzing'
                     session(c.app.repo).flush(c.app.repo)
                     try:
-                        ci_ids = list(reversed(list(c.app.repo.all_commit_ids())))
+                        ci_ids = list(
+                            reversed(list(c.app.repo.all_commit_ids())))
                         if options.clean:
                             cls._clean(ci_ids, options.diffs)
 
-                        log.info('Refreshing all last commits in %r', c.app.repo)
+                        log.info('Refreshing all last commits in %r',
+                                 c.app.repo)
                         cls.refresh_repo_lcds(ci_ids, options)
                         new_commit_ids = app.repo.unknown_commit_ids()
                         if len(new_commit_ids) > 0:
@@ -141,14 +147,15 @@ class RefreshLastCommits(ScriptTask):
             for i, commit_id in enumerate(commit_ids):
                 commit = M.repo.Commit.query.get(_id=commit_id)
                 with time(timings):
-                    M.repo_refresh.compute_diffs(c.app.repo._id, tree_cache, commit)
+                    M.repo_refresh.compute_diffs(
+                        c.app.repo._id, tree_cache, commit)
                 if i % 1000 == 0:
                     cls._print_stats(i, timings, 1000)
 
         model_cache = M.repo.ModelCache(
-                max_instances={M.repo.LastCommit: 4000},
-                max_queries={M.repo.LastCommit: 4000},
-            )
+            max_instances={M.repo.LastCommit: 4000},
+            max_queries={M.repo.LastCommit: 4000},
+        )
         lcid_cache = {}
         timings = []
         print 'Processing last commits'
@@ -171,13 +178,17 @@ class RefreshLastCommits(ScriptTask):
     def _clean(cls, commit_ids, clean_diffs):
         if clean_diffs:
             # delete DiffInfoDocs
-            i = M.repo.DiffInfoDoc.m.find(dict(_id={'$in': commit_ids})).count()
-            log.info("Deleting %i DiffInfoDoc docs for %i commits...", i, len(commit_ids))
+            i = M.repo.DiffInfoDoc.m.find(
+                dict(_id={'$in': commit_ids})).count()
+            log.info("Deleting %i DiffInfoDoc docs for %i commits...",
+                     i, len(commit_ids))
             M.repo.DiffInfoDoc.m.remove(dict(_id={'$in': commit_ids}))
 
         # delete LastCommitDocs
-        i = M.repo.LastCommitDoc.m.find(dict(commit_id={'$in': commit_ids})).count()
-        log.info("Deleting %i LastCommitDoc docs for %i commits...", i, len(commit_ids))
+        i = M.repo.LastCommitDoc.m.find(
+            dict(commit_id={'$in': commit_ids})).count()
+        log.info("Deleting %i LastCommitDoc docs for %i commits...",
+                 i, len(commit_ids))
         M.repo.LastCommitDoc.m.remove(dict(commit_id={'$in': commit_ids}))
 
     @classmethod
@@ -187,7 +198,7 @@ class RefreshLastCommits(ScriptTask):
         at = tt / len(timings)
         mat = sum(timings[-debug_step:]) / debug_step
         print '  Processed %d commits (max: %f, avg: %f, mavg: %f, tot: %f)' % (
-                processed, mt, at, mat, tt)
+            processed, mt, at, mat, tt)
 
 
 @contextmanager
@@ -197,7 +208,6 @@ def time(timings):
     timings.append((datetime.utcnow() - s).total_seconds())
 
 
-
 if __name__ == '__main__':
     faulthandler.enable()
     RefreshLastCommits.main()
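
Finally, the time() context manager whose stray blank line the last hunk
removes: each with-block appends its wall-clock duration, in seconds, to the
given list. A self-contained sketch (the body is reconstructed from the
lines visible above; only the workload is made up):

    from contextlib import contextmanager
    from datetime import datetime

    @contextmanager
    def time(timings):
        s = datetime.utcnow()
        yield
        timings.append((datetime.utcnow() - s).total_seconds())

    timings = []
    with time(timings):
        sum(xrange(1000000))                # hypothetical unit of work
    print 'took %f seconds' % timings[-1]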