Posted to commits@allura.apache.org by jo...@apache.org on 2014/01/10 22:23:11 UTC

[15/36] PEP8 cleanup
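
(The hunks below apply two PEP8 conventions throughout: continuation lines re-aligned with the opening delimiter (pycodestyle E127/E128) and a blank line inserted after each class statement before the first method (E301), plus minor whitespace fixes. The commit message does not name a tool; the style matches what autopep8's default fixes produce. A minimal sketch, assuming autopep8 is installed -- the snippet is a hypothetical fragment that mirrors the test_mollom.py hunk, not code taken from the patch:

    import autopep8

    # Pre-cleanup style: continuation line under-indented, no blank line
    # after the class statement.  autopep8's default fixes realign the
    # continuation argument and insert the missing blank line.
    messy = (
        "class TestMollom(unittest.TestCase):\n"
        "    def setUp(self):\n"
        "        self.fake_user = mock.Mock(display_name=u'Some User',\n"
        "                email_addresses=['user@domain'])\n"
    )
    print(autopep8.fix_code(messy))

Running the sketch prints the source with the keyword argument aligned under the opening parenthesis and a blank line between the class line and setUp, i.e. the same shape as the "+" lines in the diffs that follow.)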

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/spam/test_mollom.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/spam/test_mollom.py b/Allura/allura/tests/unit/spam/test_mollom.py
index ac7441d..931f8a3 100644
--- a/Allura/allura/tests/unit/spam/test_mollom.py
+++ b/Allura/allura/tests/unit/spam/test_mollom.py
@@ -28,19 +28,21 @@ from allura.lib.spam.mollomfilter import MOLLOM_AVAILABLE, MollomSpamFilter
 
 @unittest.skipIf(not MOLLOM_AVAILABLE, "Mollom not available")
 class TestMollom(unittest.TestCase):
+
     @mock.patch('allura.lib.spam.mollomfilter.Mollom')
     def setUp(self, mollom_lib):
         self.mollom = MollomSpamFilter({})
+
         def side_effect(*args, **kw):
             # side effect to test that data being sent to
             # mollom can be successfully urlencoded
             urllib.urlencode(kw.get('data', {}))
             return dict(spam=2)
         self.mollom.service.checkContent = mock.Mock(side_effect=side_effect,
-                return_value=dict(spam=2))
+                                                     return_value=dict(spam=2))
         self.fake_artifact = mock.Mock(**{'url.return_value': 'artifact url'})
         self.fake_user = mock.Mock(display_name=u'Søme User',
-                email_addresses=['user@domain'])
+                                   email_addresses=['user@domain'])
         self.fake_headers = dict(
             REMOTE_ADDR='fallback ip',
             X_FORWARDED_FOR='some ip',
@@ -58,19 +60,22 @@ class TestMollom(unittest.TestCase):
     def test_check(self, request, c):
         request.headers = self.fake_headers
         c.user = None
-        self.mollom.check(self.content, artifact = self.artifact)
-        self.mollom.service.checkContent.assert_called_once_with(**self.expected_data)
+        self.mollom.check(self.content, artifact=self.artifact)
+        self.mollom.service.checkContent.assert_called_once_with(
+            **self.expected_data)
 
     @mock.patch('allura.lib.spam.mollomfilter.c')
     @mock.patch('allura.lib.spam.mollomfilter.request')
     def test_check_with_user(self, request, c):
         request.headers = self.fake_headers
         c.user = None
-        self.mollom.check(self.content, user=self.fake_user, artifact=self.artifact)
+        self.mollom.check(self.content, user=self.fake_user,
+                          artifact=self.artifact)
         expected_data = self.expected_data
         expected_data.update(authorName=u'Søme User'.encode('utf8'),
-                authorMail='user@domain')
-        self.mollom.service.checkContent.assert_called_once_with(**self.expected_data)
+                             authorMail='user@domain')
+        self.mollom.service.checkContent.assert_called_once_with(
+            **self.expected_data)
 
     @mock.patch('allura.lib.spam.mollomfilter.c')
     @mock.patch('allura.lib.spam.mollomfilter.request')
@@ -80,8 +85,9 @@ class TestMollom(unittest.TestCase):
         self.mollom.check(self.content, artifact=self.artifact)
         expected_data = self.expected_data
         expected_data.update(authorName=u'Søme User'.encode('utf8'),
-                authorMail='user@domain')
-        self.mollom.service.checkContent.assert_called_once_with(**self.expected_data)
+                             authorMail='user@domain')
+        self.mollom.service.checkContent.assert_called_once_with(
+            **self.expected_data)
 
     @mock.patch('allura.lib.spam.mollomfilter.c')
     @mock.patch('allura.lib.spam.mollomfilter.request')
@@ -92,8 +98,10 @@ class TestMollom(unittest.TestCase):
         request.remote_addr = self.fake_headers['REMOTE_ADDR']
         c.user = None
         self.mollom.check(self.content, artifact=self.artifact)
-        self.mollom.service.checkContent.assert_called_once_with(**self.expected_data)
+        self.mollom.service.checkContent.assert_called_once_with(
+            **self.expected_data)
 
     def test_submit_spam(self):
         self.mollom.submit_spam('test', artifact=self.artifact)
-        assert self.mollom.service.sendFeedback.call_args[0] == ('test_id', 'spam'), self.mollom.service.sendFeedback.call_args[0]
+        assert self.mollom.service.sendFeedback.call_args[0] == (
+            'test_id', 'spam'), self.mollom.service.sendFeedback.call_args[0]

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/spam/test_spam_filter.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/spam/test_spam_filter.py b/Allura/allura/tests/unit/spam/test_spam_filter.py
index 5255503..1d96c7f 100644
--- a/Allura/allura/tests/unit/spam/test_spam_filter.py
+++ b/Allura/allura/tests/unit/spam/test_spam_filter.py
@@ -24,12 +24,14 @@ from allura.lib.spam import SpamFilter
 
 
 class MockFilter(SpamFilter):
+
     def check(*args, **kw):
         raise Exception("test exception")
         return True
 
 
 class TestSpamFilter(unittest.TestCase):
+
     def test_check(self):
         # default no-op impl always returns False
         self.assertFalse(SpamFilter({}).check('foo'))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_app.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_app.py b/Allura/allura/tests/unit/test_app.py
index da36170..adbfc40 100644
--- a/Allura/allura/tests/unit/test_app.py
+++ b/Allura/allura/tests/unit/test_app.py
@@ -111,4 +111,3 @@ def install_app():
     app = Application(project, app_config)
     app.install(project)
     return app
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_helpers/test_ago.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_helpers/test_ago.py b/Allura/allura/tests/unit/test_helpers/test_ago.py
index 240469f..5e09a2c 100644
--- a/Allura/allura/tests/unit/test_helpers/test_ago.py
+++ b/Allura/allura/tests/unit/test_helpers/test_ago.py
@@ -22,6 +22,7 @@ from allura.lib import helpers
 
 
 class TestAgo:
+
     def setUp(self):
         self.start_time = datetime(2010, 1, 1, 0, 0, 0)
 
@@ -45,9 +46,9 @@ class TestAgo:
         self.assertTimeSince('2 days ago', 2010, 1, 2, 13, 0, 0)
 
     def test_that_months_are_rounded(self):
-        self.assertTimeSince('2010-01-01', 2010,2,8,0,0,0)
-        self.assertTimeSince('2010-01-01', 2010,2,9,0,0,0)
-        self.assertTimeSince('2010-01-01', 2010,2,20,0,0,0)
+        self.assertTimeSince('2010-01-01', 2010, 2, 8, 0, 0, 0)
+        self.assertTimeSince('2010-01-01', 2010, 2, 9, 0, 0, 0)
+        self.assertTimeSince('2010-01-01', 2010, 2, 20, 0, 0, 0)
 
     def test_that_years_are_rounded(self):
         self.assertTimeSince('2010-01-01', 2011, 6, 1, 0, 0, 0)
@@ -61,4 +62,3 @@ class TestAgo:
         with patch('allura.lib.helpers.datetime') as datetime_class:
             datetime_class.utcnow.return_value = end_time
             return helpers.ago(self.start_time)
-

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_helpers/test_set_context.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_helpers/test_set_context.py b/Allura/allura/tests/unit/test_helpers/test_set_context.py
index 84df78e..7784f74 100644
--- a/Allura/allura/tests/unit/test_helpers/test_set_context.py
+++ b/Allura/allura/tests/unit/test_helpers/test_set_context.py
@@ -24,12 +24,13 @@ from allura.lib.exceptions import NoSuchProjectError, NoSuchNeighborhoodError
 from allura.tests.unit import WithDatabase
 from allura.tests.unit import patches
 from allura.tests.unit.factories import (create_project,
-                                          create_app_config,
-                                          create_neighborhood)
+                                         create_app_config,
+                                         create_neighborhood)
 from allura.model.project import Neighborhood
 
 
 class TestWhenProjectIsFoundAndAppIsNot(WithDatabase):
+
     def setUp(self):
         super(TestWhenProjectIsFoundAndAppIsNot, self).setUp()
         self.myproject = create_project('myproject')
@@ -43,6 +44,7 @@ class TestWhenProjectIsFoundAndAppIsNot(WithDatabase):
 
 
 class TestWhenProjectIsFoundInNeighborhood(WithDatabase):
+
     def setUp(self):
         super(TestWhenProjectIsFoundInNeighborhood, self).setUp()
         self.myproject = create_project('myproject')
@@ -62,7 +64,8 @@ class TestWhenAppIsFoundByID(WithDatabase):
         super(TestWhenAppIsFoundByID, self).setUp()
         self.myproject = create_project('myproject')
         self.app_config = create_app_config(self.myproject, 'my_mounted_app')
-        set_context('myproject', app_config_id=self.app_config._id, neighborhood=self.myproject.neighborhood)
+        set_context('myproject', app_config_id=self.app_config._id,
+                    neighborhood=self.myproject.neighborhood)
 
     def test_that_it_sets_the_app(self):
         assert c.app is self.fake_app
@@ -78,7 +81,8 @@ class TestWhenAppIsFoundByMountPoint(WithDatabase):
         super(TestWhenAppIsFoundByMountPoint, self).setUp()
         self.myproject = create_project('myproject')
         self.app_config = create_app_config(self.myproject, 'my_mounted_app')
-        set_context('myproject', mount_point='my_mounted_app', neighborhood=self.myproject.neighborhood)
+        set_context('myproject', mount_point='my_mounted_app',
+                    neighborhood=self.myproject.neighborhood)
 
     def test_that_it_sets_the_app(self):
         assert c.app is self.fake_app
@@ -104,6 +108,7 @@ class TestWhenProjectIsNotFound(WithDatabase):
                       ObjectId(),
                       neighborhood=None)
 
+
 class TestWhenNeighborhoodIsNotFound(WithDatabase):
 
     def test_that_it_raises_an_exception(self):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_mixins.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_mixins.py b/Allura/allura/tests/unit/test_mixins.py
index a6c508f..cdc3aaa 100644
--- a/Allura/allura/tests/unit/test_mixins.py
+++ b/Allura/allura/tests/unit/test_mixins.py
@@ -56,7 +56,7 @@ class TestVotableArtifact(object):
         vote.vote_down(self.user2)
         assert vote.votes_down == 2
         assert vote.votes_down_users == [self.user1.username,
-                                        self.user2.username]
+                                         self.user2.username]
 
         vote.vote_down(self.user1)  # unvote user1
         assert vote.votes_down == 1
@@ -84,4 +84,4 @@ class TestVotableArtifact(object):
         assert vote.__json__() == {'votes_up': 0, 'votes_down': 1}
 
         vote.vote_up(self.user2)
-        assert vote.__json__() == {'votes_up': 1, 'votes_down': 1}
\ No newline at end of file
+        assert vote.__json__() == {'votes_up': 1, 'votes_down': 1}

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_package_path_loader.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_package_path_loader.py b/Allura/allura/tests/unit/test_package_path_loader.py
index a5055ac..61bd887 100644
--- a/Allura/allura/tests/unit/test_package_path_loader.py
+++ b/Allura/allura/tests/unit/test_package_path_loader.py
@@ -37,31 +37,31 @@ class TestPackagePathLoader(TestCase):
         ]
         for ep in eps:
             ep.name = ep.ep_name
-        resource_filename.side_effect = lambda m, r: 'path:'+m
+        resource_filename.side_effect = lambda m, r: 'path:' + m
 
         paths = PackagePathLoader()._load_paths()
 
         assert_equal(paths, [
-                ['site-theme', None],
-                ['ep0', 'path:eps.ep0'],
-                ['ep1', 'path:eps.ep1'],
-                ['ep2', 'path:eps.ep2'],
-                ['allura', '/'],
-            ])
+            ['site-theme', None],
+            ['ep0', 'path:eps.ep0'],
+            ['ep1', 'path:eps.ep1'],
+            ['ep2', 'path:eps.ep2'],
+            ['allura', '/'],
+        ])
         assert_equal(type(paths[0]), list)
         assert_equal(resource_filename.call_args_list, [
-                mock.call('eps.ep0', ''),
-                mock.call('eps.ep1', ''),
-                mock.call('eps.ep2', ''),
-            ])
+            mock.call('eps.ep0', ''),
+            mock.call('eps.ep1', ''),
+            mock.call('eps.ep2', ''),
+        ])
 
     @mock.patch('pkg_resources.iter_entry_points')
     def test_load_rules(self, iter_entry_points):
         eps = iter_entry_points.return_value.__iter__.return_value = [
-                mock.Mock(ep_name='ep0', rules=[('>', 'allura')]),
-                mock.Mock(ep_name='ep1', rules=[('=', 'allura')]),
-                mock.Mock(ep_name='ep2', rules=[('<', 'allura')]),
-            ]
+            mock.Mock(ep_name='ep0', rules=[('>', 'allura')]),
+            mock.Mock(ep_name='ep1', rules=[('=', 'allura')]),
+            mock.Mock(ep_name='ep2', rules=[('<', 'allura')]),
+        ]
         for ep in eps:
             ep.name = ep.ep_name
             ep.load.return_value.template_path_rules = ep.rules
@@ -72,8 +72,8 @@ class TestPackagePathLoader(TestCase):
         assert_equal(replacement_rules, {'allura': 'ep1'})
 
         eps = iter_entry_points.return_value.__iter__.return_value = [
-                mock.Mock(ep_name='ep0', rules=[('?', 'allura')]),
-            ]
+            mock.Mock(ep_name='ep0', rules=[('?', 'allura')]),
+        ]
         for ep in eps:
             ep.name = ep.ep_name
             ep.load.return_value.template_path_rules = ep.rules
@@ -84,63 +84,63 @@ class TestPackagePathLoader(TestCase):
         ppl._replace_signpost = mock.Mock()
         paths = [
                 ['site-theme', None],
-                ['ep0', '/ep0'],
-                ['ep1', '/ep1'],
-                ['ep2', '/ep2'],
-                ['allura', '/'],
-            ]
+            ['ep0', '/ep0'],
+            ['ep1', '/ep1'],
+            ['ep2', '/ep2'],
+            ['allura', '/'],
+        ]
         rules = {
-                'allura': 'ep2',
-                'site-theme': 'ep1',
-                'foo': 'ep1',
-                'ep0': 'bar',
-            }
+            'allura': 'ep2',
+            'site-theme': 'ep1',
+            'foo': 'ep1',
+            'ep0': 'bar',
+        }
 
         ppl._replace_signposts(paths, rules)
 
         assert_equal(paths, [
-                ['site-theme', '/ep1'],
-                ['ep0', '/ep0'],
-                ['allura', '/ep2'],
-            ]);
+            ['site-theme', '/ep1'],
+            ['ep0', '/ep0'],
+            ['allura', '/ep2'],
+        ])
 
     def test_sort_paths(self):
         paths = [
                 ['site-theme', None],
-                ['ep0', '/ep0'],
-                ['ep1', '/ep1'],
-                ['ep2', '/ep2'],
-                ['ep3', '/ep3'],
-                ['allura', '/'],
-            ]
+            ['ep0', '/ep0'],
+            ['ep1', '/ep1'],
+            ['ep2', '/ep2'],
+            ['ep3', '/ep3'],
+            ['allura', '/'],
+        ]
         rules = [
-                ('allura', 'ep0'),
-                ('ep3', 'ep1'),
-                ('ep2', 'ep1'),
-                ('ep4', 'ep1'),  # rules referencing missing paths
-                ('ep2', 'ep5'),
-            ]
+            ('allura', 'ep0'),
+            ('ep3', 'ep1'),
+            ('ep2', 'ep1'),
+            ('ep4', 'ep1'),  # rules referencing missing paths
+            ('ep2', 'ep5'),
+        ]
 
         PackagePathLoader()._sort_paths(paths, rules)
 
         assert_equal(paths, [
-                ['site-theme', None],
-                ['ep2', '/ep2'],
-                ['ep3', '/ep3'],
-                ['ep1', '/ep1'],
-                ['allura', '/'],
-                ['ep0', '/ep0'],
-            ])
+            ['site-theme', None],
+            ['ep2', '/ep2'],
+            ['ep3', '/ep3'],
+            ['ep1', '/ep1'],
+            ['allura', '/'],
+            ['ep0', '/ep0'],
+        ])
 
     def test_init_paths(self):
-        paths =  [
-                ['root', '/'],
-                ['none', None],
-                ['tail', '/tail'],
-            ]
+        paths = [
+            ['root', '/'],
+            ['none', None],
+            ['tail', '/tail'],
+        ]
         ppl = PackagePathLoader()
         ppl._load_paths = mock.Mock(return_value=paths)
-        ppl._load_rules = mock.Mock(return_value=('order_rules','repl_rules'))
+        ppl._load_rules = mock.Mock(return_value=('order_rules', 'repl_rules'))
         ppl._replace_signposts = mock.Mock()
         ppl._sort_paths = mock.Mock()
 
@@ -177,27 +177,33 @@ class TestPackagePathLoader(TestCase):
         output = ppl.get_source('env', 'allura.ext.admin:templates/audit.html')
 
         assert_equal(output, 'fs_load')
-        fs_loader().get_source.assert_called_once_with('env', 'override/allura/ext/admin/templates/audit.html')
+        fs_loader().get_source.assert_called_once_with(
+            'env', 'override/allura/ext/admin/templates/audit.html')
 
         fs_loader().get_source.reset_mock()
-        fs_loader().get_source.side_effect = [jinja2.TemplateNotFound('test'), 'fs_load']
+        fs_loader().get_source.side_effect = [
+            jinja2.TemplateNotFound('test'), 'fs_load']
 
         with mock.patch('pkg_resources.resource_filename') as rf:
             rf.return_value = 'resource'
             # no override, ':' in template
-            output = ppl.get_source('env', 'allura.ext.admin:templates/audit.html')
-            rf.assert_called_once_with('allura.ext.admin', 'templates/audit.html')
+            output = ppl.get_source(
+                'env', 'allura.ext.admin:templates/audit.html')
+            rf.assert_called_once_with(
+                'allura.ext.admin', 'templates/audit.html')
 
         assert_equal(output, 'fs_load')
         assert_equal(fs_loader().get_source.call_count, 2)
         fs_loader().get_source.assert_called_with('env', 'resource')
 
         fs_loader().get_source.reset_mock()
-        fs_loader().get_source.side_effect = [jinja2.TemplateNotFound('test'), 'fs_load']
+        fs_loader().get_source.side_effect = [
+            jinja2.TemplateNotFound('test'), 'fs_load']
 
         # no override, ':' not in template
         output = ppl.get_source('env', 'templates/audit.html')
 
         assert_equal(output, 'fs_load')
         assert_equal(fs_loader().get_source.call_count, 2)
-        fs_loader().get_source.assert_called_with('env', 'templates/audit.html')
+        fs_loader().get_source.assert_called_with(
+            'env', 'templates/audit.html')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_project.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_project.py b/Allura/allura/tests/unit/test_project.py
index 396fdae..b4108e0 100644
--- a/Allura/allura/tests/unit/test_project.py
+++ b/Allura/allura/tests/unit/test_project.py
@@ -23,6 +23,7 @@ from allura.app import SitemapEntry
 
 
 class TestProject(unittest.TestCase):
+
     def test_grouped_navbar_entries(self):
         p = M.Project()
         sitemap_entries = [
@@ -32,7 +33,8 @@ class TestProject(unittest.TestCase):
             SitemapEntry('subproject', url='subproject url'),
             SitemapEntry('features', url='features url', tool_name='Tickets'),
             SitemapEntry('help', url='help url', tool_name='Discussion'),
-            SitemapEntry('support reqs', url='support url', tool_name='Tickets'),
+            SitemapEntry('support reqs', url='support url',
+                         tool_name='Tickets'),
         ]
         p.url = Mock(return_value='proj_url/')
         p.sitemap = Mock(return_value=sitemap_entries)
@@ -57,7 +59,8 @@ class TestProject(unittest.TestCase):
             SitemapEntry('subproject', url='subproject url'),
             SitemapEntry('features', url='features url', tool_name='Tickets'),
             SitemapEntry('help', url='help url', tool_name='Discussion'),
-            SitemapEntry('support reqs', url='support url', tool_name='Tickets'),
+            SitemapEntry('support reqs', url='support url',
+                         tool_name='Tickets'),
         ]
         p.url = Mock(return_value='proj_url/')
         p.sitemap = Mock(return_value=sitemap_entries)
@@ -80,5 +83,6 @@ class TestProject(unittest.TestCase):
         self.assertIsNone(p.social_account('Twitter'))
 
         p.set_social_account('Twitter', 'http://twitter.com/allura')
-        self.assertEqual(p.social_account('Twitter').accounturl, 'http://twitter.com/allura')
-        self.assertEqual(p.twitter_handle, 'http://twitter.com/allura')
\ No newline at end of file
+        self.assertEqual(p.social_account('Twitter')
+                         .accounturl, 'http://twitter.com/allura')
+        self.assertEqual(p.twitter_handle, 'http://twitter.com/allura')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_repo.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_repo.py b/Allura/allura/tests/unit/test_repo.py
index c730d59..567b6cc 100644
--- a/Allura/allura/tests/unit/test_repo.py
+++ b/Allura/allura/tests/unit/test_repo.py
@@ -27,6 +27,7 @@ from allura.controllers.repository import topo_sort
 from allura.model.repository import zipdir, prefix_paths_union
 from alluratest.controller import setup_unit_test
 
+
 class TestCommitRunBuilder(unittest.TestCase):
 
     def setUp(self):
@@ -34,10 +35,10 @@ class TestCommitRunBuilder(unittest.TestCase):
         commits = [
             M.repo.CommitDoc.make(dict(
                 _id=str(i)))
-            for i in range(10) ]
-        for p,c in zip(commits, commits[1:]):
-            p.child_ids = [ c._id ]
-            c.parent_ids = [ p._id ]
+            for i in range(10)]
+        for p, c in zip(commits, commits[1:]):
+            p.child_ids = [c._id]
+            c.parent_ids = [p._id]
         for ci in commits:
             ci.m.save()
         self.commits = commits
@@ -73,7 +74,9 @@ class TestCommitRunBuilder(unittest.TestCase):
             crb.cleanup()
         self.assertEqual(M.repo.CommitRunDoc.m.count(), 1)
 
+
 class TestTopoSort(unittest.TestCase):
+
     def test_commit_dates_out_of_order(self):
         """Commits should be sorted by their parent/child relationships,
         regardless of the date on the commit.
@@ -102,7 +105,7 @@ class TestTopoSort(unittest.TestCase):
             'dev':        datetime.datetime(2012, 6, 1)}
         result = topo_sort(children, parents, dates, head_ids)
         self.assertEqual(list(result), ['dev', 'dev@{1}', 'master',
-            'master@{1}', 'master@{2}', 'master@{3}'])
+                                        'master@{1}', 'master@{2}', 'master@{3}'])
 
 
 def tree(name, id, trees=None, blobs=None):
@@ -142,6 +145,7 @@ class TestTree(unittest.TestCase):
 
 
 class TestBlob(unittest.TestCase):
+
     def test_context_no_create(self):
         blob = M.repo.Blob(Mock(), Mock(), Mock())
         blob.path = Mock(return_value='path')
@@ -208,6 +212,7 @@ class TestBlob(unittest.TestCase):
 
 
 class TestCommit(unittest.TestCase):
+
     def test_activity_extras(self):
         commit = M.repo.Commit()
         commit.shorthand_id = MagicMock(return_value='abcdef')
@@ -269,7 +274,8 @@ class TestCommit(unittest.TestCase):
         tree = commit.get_tree()
         commit.repo.compute_tree_new.assert_called_once_with(commit)
         assert not tree_get.called
-        c.model_cache.get.assert_called_once_with(M.repo.Tree, dict(_id='tree'))
+        c.model_cache.get.assert_called_once_with(
+            M.repo.Tree, dict(_id='tree'))
         _tree.set_context.assert_called_once_with(commit)
         assert_equal(tree, _tree)
 
@@ -279,7 +285,8 @@ class TestCommit(unittest.TestCase):
         tree = commit.get_tree()
         assert not commit.repo.compute_tree_new.called
         assert not tree_get.called
-        c.model_cache.get.assert_called_once_with(M.repo.Tree, dict(_id='tree2'))
+        c.model_cache.get.assert_called_once_with(
+            M.repo.Tree, dict(_id='tree2'))
         _tree.set_context.assert_called_once_with(commit)
         assert_equal(tree, _tree)
 
@@ -288,11 +295,13 @@ class TestCommit(unittest.TestCase):
         c.model_cache.get.return_value = None
         tree_get.return_value = _tree
         tree = commit.get_tree()
-        c.model_cache.get.assert_called_once_with(M.repo.Tree, dict(_id='tree2'))
+        c.model_cache.get.assert_called_once_with(
+            M.repo.Tree, dict(_id='tree2'))
         commit.repo.compute_tree_new.assert_called_once_with(commit)
         assert_equal(commit.tree_id, 'tree')
         tree_get.assert_called_once_with(_id='tree')
-        c.model_cache.set.assert_called_once_with(M.repo.Tree, dict(_id='tree'), _tree)
+        c.model_cache.set.assert_called_once_with(
+            M.repo.Tree, dict(_id='tree'), _tree)
         _tree.set_context.assert_called_once_with(commit)
         assert_equal(tree, _tree)
 
@@ -304,6 +313,7 @@ class TestCommit(unittest.TestCase):
 
 
 class TestZipDir(unittest.TestCase):
+
     @patch('allura.model.repository.Popen')
     @patch('allura.model.repository.tg')
     def test_popen_called(self, tg, popen):
@@ -314,14 +324,16 @@ class TestZipDir(unittest.TestCase):
         src = '/fake/path/to/repo'
         zipfile = '/fake/zip/file.tmp'
         zipdir(src, zipfile)
-        popen.assert_called_once_with(['/bin/zip', '-y', '-q', '-r', zipfile, 'repo'],
-                cwd='/fake/path/to', stdout=PIPE, stderr=PIPE)
+        popen.assert_called_once_with(
+            ['/bin/zip', '-y', '-q', '-r', zipfile, 'repo'],
+            cwd='/fake/path/to', stdout=PIPE, stderr=PIPE)
         popen.reset_mock()
         src = '/fake/path/to/repo/'
         zipdir(src, zipfile, exclude='file.txt')
         popen.assert_called_once_with(
-                ['/bin/zip', '-y', '-q', '-r', zipfile, 'repo', '-x', 'file.txt'],
-                cwd='/fake/path/to', stdout=PIPE, stderr=PIPE)
+            ['/bin/zip', '-y', '-q', '-r',
+             zipfile, 'repo', '-x', 'file.txt'],
+            cwd='/fake/path/to', stdout=PIPE, stderr=PIPE)
 
     @patch('allura.model.repository.Popen')
     @patch('allura.model.repository.tg')
@@ -335,14 +347,15 @@ class TestZipDir(unittest.TestCase):
             zipdir(src, zipfile)
         emsg = str(cm.exception)
         self.assertTrue(
-                "Command: "
-                "['/bin/zip', '-y', '-q', '-r', '/fake/zip/file.tmp', 'repo'] "
-                "returned non-zero exit code 1" in emsg)
+            "Command: "
+            "['/bin/zip', '-y', '-q', '-r', '/fake/zip/file.tmp', 'repo'] "
+            "returned non-zero exit code 1" in emsg)
         self.assertTrue("STDOUT: 1" in emsg)
         self.assertTrue("STDERR: 2" in emsg)
 
 
 class TestPrefixPathsUnion(unittest.TestCase):
+
     def test_disjoint(self):
         a = set(['a1', 'a2', 'a3'])
         b = set(['b1', 'b1/foo', 'b2'])

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_session.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_session.py b/Allura/allura/tests/unit/test_session.py
index 5b5449e..b079004 100644
--- a/Allura/allura/tests/unit/test_session.py
+++ b/Allura/allura/tests/unit/test_session.py
@@ -38,6 +38,7 @@ def test_extensions_cm():
 
 
 class TestBatchIndexer(TestCase):
+
     def setUp(self):
         session = mock.Mock()
         self.extcls = BatchIndexer
@@ -74,7 +75,8 @@ class TestBatchIndexer(TestCase):
         self.extcls.to_delete = del_index_ids
         self.extcls.to_add = set([4, 5, 6])
         self.ext.flush()
-        index_tasks.del_artifacts.post.assert_called_once_with(list(del_index_ids))
+        index_tasks.del_artifacts.post.assert_called_once_with(
+            list(del_index_ids))
         index_tasks.add_artifacts.post.assert_called_once_with([4, 5, 6])
         self.assertEqual(self.ext.to_delete, set())
         self.assertEqual(self.ext.to_add, set())
@@ -110,7 +112,7 @@ class TestBatchIndexer(TestCase):
         def on_post(chunk):
             if len(chunk) > 1:
                 raise pymongo.errors.InvalidDocument(
-                        "BSON document too large (16906035 bytes) - the connected server supports BSON document sizes up to 16777216 bytes.")
+                    "BSON document too large (16906035 bytes) - the connected server supports BSON document sizes up to 16777216 bytes.")
         index_tasks.add_artifacts.post.side_effect = on_post
         self.ext._post(index_tasks.add_artifacts, range(5))
         expected = [
@@ -124,7 +126,8 @@ class TestBatchIndexer(TestCase):
             mock.call([3]),
             mock.call([4])
         ]
-        self.assertEqual(expected, index_tasks.add_artifacts.post.call_args_list)
+        self.assertEqual(
+            expected, index_tasks.add_artifacts.post.call_args_list)
 
     @mock.patch('allura.tasks.index_tasks')
     def test__post_other_error(self, index_tasks):

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_sitemapentry.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_sitemapentry.py b/Allura/allura/tests/unit/test_sitemapentry.py
index f6e9317..bdd6399 100644
--- a/Allura/allura/tests/unit/test_sitemapentry.py
+++ b/Allura/allura/tests/unit/test_sitemapentry.py
@@ -22,6 +22,7 @@ from allura.app import SitemapEntry
 
 
 class TestSitemapEntry(unittest.TestCase):
+
     def test_matches_url(self):
         request = Mock(upath_info='/p/project/tool/artifact')
         s1 = SitemapEntry('tool', url='/p/project/tool')

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/tests/unit/test_solr.py
----------------------------------------------------------------------
diff --git a/Allura/allura/tests/unit/test_solr.py b/Allura/allura/tests/unit/test_solr.py
index 2bc5cdb..02da952 100644
--- a/Allura/allura/tests/unit/test_solr.py
+++ b/Allura/allura/tests/unit/test_solr.py
@@ -27,6 +27,7 @@ from alluratest.controller import setup_basic_test
 from allura.lib.solr import Solr
 from allura.lib.search import solarize, search_app
 
+
 class TestSolr(unittest.TestCase):
 
     @mock.patch('allura.lib.solr.pysolr')
@@ -39,7 +40,8 @@ class TestSolr(unittest.TestCase):
 
         pysolr.reset_mock()
         solr = Solr(servers, 'server3', commit=False, commitWithin='10000')
-        calls = [mock.call('server1'), mock.call('server2'), mock.call('server3')]
+        calls = [mock.call('server1'), mock.call('server2'),
+                 mock.call('server3')]
         pysolr.Solr.assert_has_calls(calls)
         assert_equal(len(solr.push_pool), 2)
 
@@ -53,7 +55,7 @@ class TestSolr(unittest.TestCase):
         pysolr.reset_mock()
         solr.add('bar', somekw='value')
         calls = [mock.call('bar', commit=False,
-            commitWithin='10000', somekw='value')] * 2
+                           commitWithin='10000', somekw='value')] * 2
         pysolr.Solr().add.assert_has_calls(calls)
 
     @mock.patch('allura.lib.solr.pysolr')
@@ -115,7 +117,8 @@ class TestSolarize(unittest.TestCase):
         obj.index.return_value = {'text': '<script>alert(1)</script>'}
         assert_equal(solarize(obj), {'text': ''})
 
-        obj.index.return_value = {'text': '&lt;script&gt;alert(1)&lt;/script&gt;'}
+        obj.index.return_value = {'text':
+                                  '&lt;script&gt;alert(1)&lt;/script&gt;'}
         assert_equal(solarize(obj), {'text': '<script>alert(1)</script>'})
 
 
@@ -156,15 +159,17 @@ class TestSearch_app(unittest.TestCase):
         req.path = '/test/wiki/search'
         url_fn.side_effect = ['the-score-url', 'the-date-url']
         results = mock.Mock(hits=2, docs=[
-                {'id': 123, 'type_s':'WikiPage Snapshot', 'url_s':'/test/wiki/Foo', 'version_i':2},
-                {'id': 321, 'type_s':'Post'},
-            ], highlighting={
-                123: dict(title='some #ALLURA-HIGHLIGHT-START#Foo#ALLURA-HIGHLIGHT-END# stuff',
-                         text='scary <script>alert(1)</script> bar'),
-                321: dict(title='blah blah',
-                         text='less scary but still dangerous &lt;script&gt;alert(1)&lt;/script&gt; '
-                              'blah #ALLURA-HIGHLIGHT-START#bar#ALLURA-HIGHLIGHT-END# foo foo'),
-            },
+            {'id': 123, 'type_s': 'WikiPage Snapshot',
+             'url_s': '/test/wiki/Foo', 'version_i': 2},
+            {'id': 321, 'type_s': 'Post'},
+        ], highlighting={
+            123: dict(
+                title='some #ALLURA-HIGHLIGHT-START#Foo#ALLURA-HIGHLIGHT-END# stuff',
+                text='scary <script>alert(1)</script> bar'),
+            321: dict(title='blah blah',
+                      text='less scary but still dangerous &lt;script&gt;alert(1)&lt;/script&gt; '
+                      'blah #ALLURA-HIGHLIGHT-START#bar#ALLURA-HIGHLIGHT-END# foo foo'),
+        },
         )
         results.__iter__ = lambda self: iter(results.docs)
         solr_search.return_value = results
@@ -190,11 +195,11 @@ class TestSearch_app(unittest.TestCase):
                 'title_match': Markup('some <strong>Foo</strong> stuff'),
                 # HTML in the solr plaintext results get escaped
                 'text_match': Markup('scary &lt;script&gt;alert(1)&lt;/script&gt; bar'),
-                }, {
+            }, {
                 'id': 321,
                 'type_s': 'Post',
                 'title_match': Markup('blah blah'),
                 # highlighting in text
                 'text_match': Markup('less scary but still dangerous &amp;lt;script&amp;gt;alert(1)&amp;lt;/script&amp;gt; blah <strong>bar</strong> foo foo'),
-                }]
+            }]
         ))

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/websetup/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/allura/websetup/__init__.py b/Allura/allura/websetup/__init__.py
index e05e639..37b1586 100644
--- a/Allura/allura/websetup/__init__.py
+++ b/Allura/allura/websetup/__init__.py
@@ -30,6 +30,7 @@ log = logging.getLogger(__name__)
 from schema import setup_schema
 import bootstrap
 
+
 def setup_app(command, conf, vars):
     """Place any commands to setup allura here"""
     load_environment(conf.global_conf, conf.local_conf)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/websetup/bootstrap.py
----------------------------------------------------------------------
diff --git a/Allura/allura/websetup/bootstrap.py b/Allura/allura/websetup/bootstrap.py
index 9d09927..252bfc9 100644
--- a/Allura/allura/websetup/bootstrap.py
+++ b/Allura/allura/websetup/bootstrap.py
@@ -45,20 +45,25 @@ from forgewiki import model as WM
 
 log = logging.getLogger(__name__)
 
+
 def cache_test_data():
     log.info('Saving data to cache in .test-data')
     if os.path.exists('.test-data'):
         shutil.rmtree('.test-data')
-    os.system('mongodump -h 127.0.0.1:27018 -o .test-data > mongodump.log 2>&1')
+    os.system(
+        'mongodump -h 127.0.0.1:27018 -o .test-data > mongodump.log 2>&1')
+
 
 def restore_test_data():
     if os.path.exists('.test-data'):
         log.info('Restoring data from cache in .test-data')
-        rc = os.system('mongorestore -h 127.0.0.1:27018 --dir .test-data > mongorestore.log 2>&1')
+        rc = os.system(
+            'mongorestore -h 127.0.0.1:27018 --dir .test-data > mongorestore.log 2>&1')
         return rc == 0
     else:
         return False
 
+
 def bootstrap(command, conf, vars):
     """Place any commands to setup allura here"""
     # are we being called by the test suite?
@@ -66,6 +71,7 @@ def bootstrap(command, conf, vars):
 
     # if this is a test_run, skip user project creation to save time
     make_user_projects = not test_run
+
     def make_user(*args, **kw):
         kw.update(make_project=make_user_projects)
         return create_user(*args, **kw)
@@ -79,11 +85,11 @@ def bootstrap(command, conf, vars):
     ThreadLocalORMSession.close_all()
     c.queued_messages = defaultdict(list)
     c.user = c.project = c.app = None
-    database=conf.get('db_prefix', '') + 'project:test'
+    database = conf.get('db_prefix', '') + 'project:test'
     wipe_database()
     try:
         g.solr.delete(q='*:*')
-    except: # pragma no cover
+    except:  # pragma no cover
         log.error('SOLR server is %s', g.solr_server)
         log.error('Error clearing solr index')
     if asbool(conf.get('cache_test_data')):
@@ -102,31 +108,34 @@ def bootstrap(command, conf, vars):
     root = create_user('Root', make_project=False)
 
     n_projects = M.Neighborhood(name='Projects', url_prefix='/p/',
-                                features=dict(private_projects = True,
-                                              max_projects = None,
-                                              css = 'none',
-                                              google_analytics = False))
+                                features=dict(private_projects=True,
+                                              max_projects=None,
+                                              css='none',
+                                              google_analytics=False))
     n_users = M.Neighborhood(name='Users', url_prefix='/u/',
                              shortname_prefix='u/',
                              anchored_tools='profile:Profile,userstats:Statistics',
-                             features=dict(private_projects = True,
-                                           max_projects = None,
-                                           css = 'none',
-                                           google_analytics = False))
-    n_adobe = M.Neighborhood(name='Adobe', url_prefix='/adobe/', project_list_url='/adobe/',
-                             features=dict(private_projects = True,
-                                           max_projects = None,
-                                           css = 'custom',
-                                           google_analytics = True))
+                             features=dict(private_projects=True,
+                                           max_projects=None,
+                                           css='none',
+                                           google_analytics=False))
+    n_adobe = M.Neighborhood(
+        name='Adobe', url_prefix='/adobe/', project_list_url='/adobe/',
+        features=dict(private_projects=True,
+                      max_projects=None,
+                      css='custom',
+                      google_analytics=True))
     assert tg.config['auth.method'] == 'local'
     project_reg = plugin.ProjectRegistrationProvider.get()
-    p_projects = project_reg.register_neighborhood_project(n_projects, [root], allow_register=True)
+    p_projects = project_reg.register_neighborhood_project(
+        n_projects, [root], allow_register=True)
     p_users = project_reg.register_neighborhood_project(n_users, [root])
     p_adobe = project_reg.register_neighborhood_project(n_adobe, [root])
 
     def set_nbhd_wiki_content(nbhd_proj, content):
         wiki = nbhd_proj.app_instance('wiki')
-        page = WM.Page.query.get(app_config_id=wiki.config._id, title=wiki.root_page_name)
+        page = WM.Page.query.get(
+            app_config_id=wiki.config._id, title=wiki.root_page_name)
         page.text = content
 
     set_nbhd_wiki_content(p_projects, dedent('''
@@ -159,7 +168,8 @@ def bootstrap(command, conf, vars):
 
     # add the adobe icon
     file_name = 'adobe_icon.png'
-    file_path = os.path.join(allura.__path__[0],'public','nf','images',file_name)
+    file_path = os.path.join(
+        allura.__path__[0], 'public', 'nf', 'images', file_name)
     M.NeighborhoodFile.from_path(file_path, neighborhood_id=n_adobe._id)
 
     # Add some test users
@@ -170,14 +180,18 @@ def bootstrap(command, conf, vars):
     cat1 = M.ProjectCategory(name='clustering', label='Clustering')
 
     cat2 = M.ProjectCategory(name='communications', label='Communications')
-    cat2_1 = M.ProjectCategory(name='synchronization', label='Synchronization', parent_id=cat2._id)
-    cat2_2 = M.ProjectCategory(name='streaming', label='Streaming', parent_id=cat2._id)
+    cat2_1 = M.ProjectCategory(
+        name='synchronization', label='Synchronization', parent_id=cat2._id)
+    cat2_2 = M.ProjectCategory(
+        name='streaming', label='Streaming', parent_id=cat2._id)
     cat2_3 = M.ProjectCategory(name='fax', label='Fax', parent_id=cat2._id)
     cat2_4 = M.ProjectCategory(name='bbs', label='BBS', parent_id=cat2._id)
 
     cat3 = M.ProjectCategory(name='database', label='Database')
-    cat3_1 = M.ProjectCategory(name='front_ends', label='Front-Ends', parent_id=cat3._id)
-    cat3_2 = M.ProjectCategory(name='engines_servers', label='Engines/Servers', parent_id=cat3._id)
+    cat3_1 = M.ProjectCategory(
+        name='front_ends', label='Front-Ends', parent_id=cat3._id)
+    cat3_2 = M.ProjectCategory(
+        name='engines_servers', label='Engines/Servers', parent_id=cat3._id)
 
     log.info('Registering "regular users" (non-root) and default projects')
     # since this runs a lot for tests, separate test and default users and
@@ -190,7 +204,8 @@ def bootstrap(command, conf, vars):
         u_admin.claim_address('test-admin@users.localhost')
     else:
         u_admin = make_user('Admin 1', username='admin1')
-        # Admin1 is almost root, with admin access for Users and Projects neighborhoods
+        # Admin1 is almost root, with admin access for Users and Projects
+        # neighborhoods
         p_projects.add_user(u_admin, ['Admin'])
         p_users.add_user(u_admin, ['Admin'])
 
@@ -200,11 +215,12 @@ def bootstrap(command, conf, vars):
     p_adobe.add_user(u_admin, ['Admin'])
     p0 = n_projects.register_project('test', u_admin, 'Test Project')
     p1 = n_projects.register_project('test2', u_admin, 'Test 2')
-    p0._extra_tool_status = [ 'alpha', 'beta' ]
+    p0._extra_tool_status = ['alpha', 'beta']
 
-    sess = session(M.Neighborhood) # all the sessions are the same
+    sess = session(M.Neighborhood)  # all the sessions are the same
     for x in (n_adobe, n_projects, n_users, p_projects, p_users, p_adobe):
-        # Ming doesn't detect substructural changes in newly created objects (vs loaded from DB)
+        # Ming doesn't detect substructural changes in newly created objects
+        # (vs loaded from DB)
         state(x).status = 'dirty'
         # TODO: Hope that Ming can be improved to at least avoid stuff below
         sess.flush(x)
@@ -214,7 +230,7 @@ def bootstrap(command, conf, vars):
     if asbool(conf.get('load_test_data')):
         if asbool(conf.get('cache_test_data')):
             cache_test_data()
-    else: # pragma no cover
+    else:  # pragma no cover
         # regular first-time setup
         p0.add_user(u_admin, ['Admin'])
         log.info('Registering initial apps')
@@ -224,7 +240,8 @@ def bootstrap(command, conf, vars):
                     continue
                 p0.install_app(ep_name)
 
-    # reload our p0 project so that p0.app_configs is accurate with all the newly installed apps
+    # reload our p0 project so that p0.app_configs is accurate with all the
+    # newly installed apps
     ThreadLocalORMSession.flush_all()
     ThreadLocalORMSession.close_all()
     p0 = M.Project.query.get(_id=p0._id)
@@ -232,13 +249,14 @@ def bootstrap(command, conf, vars):
     with h.push_config(c, user=u_admin):
         sub.install_app('wiki')
 
-
     ThreadLocalORMSession.flush_all()
     ThreadLocalORMSession.close_all()
 
+
 def wipe_database():
     conn = M.main_doc_session.bind.conn
-    create_trove_categories = CreateTroveCategoriesCommand('create_trove_categories')
+    create_trove_categories = CreateTroveCategoriesCommand(
+        'create_trove_categories')
     index = EnsureIndexCommand('ensure_index')
     if isinstance(conn, mim.Connection):
         clear_all_database_tables()
@@ -246,11 +264,13 @@ def wipe_database():
             db = conn[db]
     else:
         for database in conn.database_names():
-            if database not in ( 'allura', 'pyforge', 'project-data'): continue
+            if database not in ('allura', 'pyforge', 'project-data'):
+                continue
             log.info('Wiping database %s', database)
             db = conn[database]
             for coll in db.collection_names():
-                if coll.startswith('system.'): continue
+                if coll.startswith('system.'):
+                    continue
                 log.info('Dropping collection %s:%s', database, coll)
                 try:
                     db.drop_collection(coll)
@@ -260,7 +280,6 @@ def wipe_database():
     index.run([''])
 
 
-
 def clear_all_database_tables():
     conn = M.main_doc_session.bind.conn
     for db in conn.database_names():
@@ -282,6 +301,7 @@ def create_user(display_name, username=None, password='foo', make_project=False)
 
 
 class DBSession(Session):
+
     '''Simple session that takes a pymongo connection and a database name'''
 
     def __init__(self, db):
@@ -294,17 +314,20 @@ class DBSession(Session):
     def _impl(self, cls):
         return self.db[cls.__mongometa__.name]
 
-def pm(etype, value, tb): # pragma no cover
-    import pdb, traceback
+
+def pm(etype, value, tb):  # pragma no cover
+    import pdb
+    import traceback
     try:
-        from IPython.ipapi import make_session; make_session()
+        from IPython.ipapi import make_session
+        make_session()
         from IPython.Debugger import Pdb
         sys.stderr.write('Entering post-mortem IPDB shell\n')
         p = Pdb(color_scheme='Linux')
         p.reset()
         p.setup(None, tb)
         p.print_stack_trace()
-        sys.stderr.write('%s: %s\n' % ( etype, value))
+        sys.stderr.write('%s: %s\n' % (etype, value))
         p.cmdloop()
         p.forget()
         # p.interaction(None, tb)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/websetup/schema.py
----------------------------------------------------------------------
diff --git a/Allura/allura/websetup/schema.py b/Allura/allura/websetup/schema.py
index cdfdf42..d25128e 100644
--- a/Allura/allura/websetup/schema.py
+++ b/Allura/allura/websetup/schema.py
@@ -30,6 +30,7 @@ from paste.registry import Registry
 log = logging.getLogger(__name__)
 REGISTRY = Registry()
 
+
 def setup_schema(command, conf, vars):
     """Place any commands to setup allura here"""
     import ming
@@ -45,4 +46,6 @@ def setup_schema(command, conf, vars):
     # Nothing to do
     log.info('setup_schema called')
 
-class EmptyClass(object): pass
+
+class EmptyClass(object):
+    pass

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/docs/conf.py
----------------------------------------------------------------------
diff --git a/Allura/docs/conf.py b/Allura/docs/conf.py
index 7b20c15..9b8e40a 100644
--- a/Allura/docs/conf.py
+++ b/Allura/docs/conf.py
@@ -28,18 +28,20 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys, os
+import sys
+import os
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.append(os.path.abspath('.'))
+# sys.path.append(os.path.abspath('.'))
 
-# -- General configuration -----------------------------------------------------
+# -- General configuration -----------------------------------------------
 
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig']
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
+              'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig']
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
@@ -104,7 +106,7 @@ pygments_style = 'sphinx'
 #modindex_common_prefix = []
 
 
-# -- Options for HTML output ---------------------------------------------------
+# -- Options for HTML output ---------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  Major themes that come with
 # Sphinx are currently 'default' and 'sphinxdoc'.
@@ -180,7 +182,7 @@ html_show_sourcelink = False
 htmlhelp_basename = 'alluradoc'
 
 
-# -- Options for LaTeX output --------------------------------------------------
+# -- Options for LaTeX output --------------------------------------------
 
 # The paper size ('letter' or 'a4').
 #latex_paper_size = 'letter'
@@ -191,8 +193,8 @@ htmlhelp_basename = 'alluradoc'
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'allura.tex', u'allura Documentation',
-   u'Mark Ramm, Wolf, Rick Copeland, Jonathan Beard', 'manual'),
+    ('index', 'allura.tex', u'allura Documentation',
+     u'Mark Ramm, Wolf, Rick Copeland, Jonathan Beard', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/ez_setup/__init__.py
----------------------------------------------------------------------
diff --git a/Allura/ez_setup/__init__.py b/Allura/ez_setup/__init__.py
index f036aae..b8e23b3 100644
--- a/Allura/ez_setup/__init__.py
+++ b/Allura/ez_setup/__init__.py
@@ -33,7 +33,7 @@ This file can also be run as a script to install or upgrade setuptools.
 """
 import sys
 DEFAULT_VERSION = "0.6c7"
-DEFAULT_URL     = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
+DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
 
 md5_data = {
     'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
@@ -65,7 +65,9 @@ md5_data = {
     'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
 }
 
-import sys, os
+import sys
+import os
+
 
 def _validate_md5(egg_name, data):
     if egg_name in md5_data:
@@ -99,18 +101,20 @@ def use_setuptools(
         import setuptools
         if setuptools.__version__ == '0.0.1':
             print >>sys.stderr, (
-            "You have an obsolete version of setuptools installed.  Please\n"
-            "remove it from your system entirely before rerunning this script."
+                "You have an obsolete version of setuptools installed.  Please\n"
+                "remove it from your system entirely before rerunning this script."
             )
             sys.exit(2)
     except ImportError:
-        egg = download_setuptools(version, download_base, to_dir, download_delay)
+        egg = download_setuptools(
+            version, download_base, to_dir, download_delay)
         sys.path.insert(0, egg)
-        import setuptools; setuptools.bootstrap_install_from = egg
+        import setuptools
+        setuptools.bootstrap_install_from = egg
 
     import pkg_resources
     try:
-        pkg_resources.require("setuptools>="+version)
+        pkg_resources.require("setuptools>=" + version)
 
     except pkg_resources.VersionConflict, e:
         # XXX could we install in a subprocess here?
@@ -121,9 +125,10 @@ def use_setuptools(
         ) % (version, e.args[0])
         sys.exit(2)
 
+
 def download_setuptools(
     version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
-    delay = 15
+    delay=15
 ):
     """Download setuptools from a specified location and return its filename
 
@@ -132,8 +137,9 @@ def download_setuptools(
     with a '/'). `to_dir` is the directory where the egg will be downloaded.
     `delay` is the number of seconds to pause before an actual download attempt.
     """
-    import urllib2, shutil
-    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
+    import urllib2
+    import shutil
+    egg_name = "setuptools-%s-py%s.egg" % (version, sys.version[:3])
     url = download_base + egg_name
     saveto = os.path.join(to_dir, egg_name)
     src = dst = None
@@ -155,19 +161,25 @@ I will start the download in %d seconds.
 
 and place it in this directory before rerunning this script.)
 ---------------------------------------------------------------------------""",
-                    version, download_base, delay, url
-                ); from time import sleep; sleep(delay)
+                         version, download_base, delay, url
+                         )
+                from time import sleep
+                sleep(delay)
             log.warn("Downloading %s", url)
             src = urllib2.urlopen(url)
             # Read/write all in one block, so we don't create a corrupt file
             # if the download is interrupted.
             data = _validate_md5(egg_name, src.read())
-            dst = open(saveto,"wb"); dst.write(data)
+            dst = open(saveto, "wb")
+            dst.write(data)
         finally:
-            if src: src.close()
-            if dst: dst.close()
+            if src:
+                src.close()
+            if dst:
+                dst.close()
     return os.path.realpath(saveto)
 
+
 def main(argv, version=DEFAULT_VERSION):
     """Install or upgrade setuptools and EasyInstall"""
 
@@ -177,9 +189,9 @@ def main(argv, version=DEFAULT_VERSION):
         egg = None
         try:
             egg = download_setuptools(version, delay=0)
-            sys.path.insert(0,egg)
+            sys.path.insert(0, egg)
             from setuptools.command.easy_install import main
-            return main(list(argv)+[egg])   # we're done here
+            return main(list(argv) + [egg])   # we're done here
         finally:
             if egg and os.path.exists(egg):
                 os.unlink(egg)
@@ -188,7 +200,7 @@ def main(argv, version=DEFAULT_VERSION):
             # tell the user to uninstall obsolete version
             use_setuptools(version)
 
-    req = "setuptools>="+version
+    req = "setuptools>=" + version
     import pkg_resources
     try:
         pkg_resources.require(req)
@@ -197,18 +209,17 @@ def main(argv, version=DEFAULT_VERSION):
             from setuptools.command.easy_install import main
         except ImportError:
             from easy_install import main
-        main(list(argv)+[download_setuptools(delay=0)])
-        sys.exit(0) # try to force an exit
+        main(list(argv) + [download_setuptools(delay=0)])
+        sys.exit(0)  # try to force an exit
     else:
         if argv:
             from setuptools.command.easy_install import main
             main(argv)
         else:
-            print "Setuptools version",version,"or greater has been installed."
+            print "Setuptools version", version, "or greater has been installed."
             print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
 
 
-
 def update_md5(filenames):
     """Update our built-in md5 registry"""
 
@@ -217,7 +228,7 @@ def update_md5(filenames):
 
     for name in filenames:
         base = os.path.basename(name)
-        f = open(name,'rb')
+        f = open(name, 'rb')
         md5_data[base] = md5(f.read()).hexdigest()
         f.close()
 
@@ -227,7 +238,9 @@ def update_md5(filenames):
 
     import inspect
     srcfile = inspect.getsourcefile(sys.modules[__name__])
-    f = open(srcfile, 'rb'); src = f.read(); f.close()
+    f = open(srcfile, 'rb')
+    src = f.read()
+    f.close()
 
     match = re.search("\nmd5_data = {\n([^}]+)}", src)
     if not match:
@@ -235,13 +248,13 @@ def update_md5(filenames):
         sys.exit(2)
 
     src = src[:match.start(1)] + repl + src[match.end(1):]
-    f = open(srcfile,'w')
+    f = open(srcfile, 'w')
     f.write(src)
     f.close()
 
 
-if __name__=='__main__':
-    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
+if __name__ == '__main__':
+    if len(sys.argv) > 2 and sys.argv[1] == '--md5update':
         update_md5(sys.argv[2:])
     else:
         main(sys.argv[1:])

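For context, the read/write cleanup above keeps ez_setup.py's explicit try/finally close calls, now one statement per line. A minimal sketch of the same download-and-save step written with context managers instead, so both handles are closed even if the copy is interrupted (illustrative only, not part of this commit; Python 2, matching ez_setup.py):

    import contextlib
    import urllib2

    def save_url(url, saveto):
        # urllib2 handles predate the with-statement protocol, so wrap in closing()
        with contextlib.closing(urllib2.urlopen(url)) as src:
            data = src.read()
        # file objects support with directly and are closed on exit
        with open(saveto, 'wb') as dst:
            dst.write(data)
        return saveto
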
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/ldap-setup.py
----------------------------------------------------------------------
diff --git a/Allura/ldap-setup.py b/Allura/ldap-setup.py
index 3f1c090..4f58342 100644
--- a/Allura/ldap-setup.py
+++ b/Allura/ldap-setup.py
@@ -30,6 +30,7 @@ log = logging.getLogger('ldap-setup')
 
 config = ConfigParser()
 
+
 def main():
     config.read('.setup-scm-cache')
     if not config.has_section('scm'):
@@ -55,10 +56,12 @@ def main():
     os.chmod('/etc/ldap.secret', 0400)
     if get_value('add frontend ldif', 'y') == 'y':
         with tempfile(frontend_ldif, locals()) as name:
-            run('ldapadd -c -x -D cn=admin,%s -W -f %s -y /etc/ldap.secret' % (suffix, name))
+            run('ldapadd -c -x -D cn=admin,%s -W -f %s -y /etc/ldap.secret' %
+                (suffix, name))
     if get_value('add initial user/group', 'y') == 'y':
         with tempfile(initial_user_ldif, locals()) as name:
-            run('ldapadd -c -x -D cn=admin,%s -W -f %s -y /etc/ldap.secret' % (suffix, name))
+            run('ldapadd -c -x -D cn=admin,%s -W -f %s -y /etc/ldap.secret' %
+                (suffix, name))
     if get_value('setup ldap auth', 'y') == 'y':
         run('apt-get install libnss-ldap')
         run('dpkg-reconfigure ldap-auth-config')
@@ -76,18 +79,21 @@ def main():
         with open('/usr/share/ldapscripts/runtime.debian', 'w') as fp:
             fp.write(ldapscripts_debian)
 
+
 def get_value(key, default):
     try:
         default = config.get('scm', key)
     except NoOptionError:
         pass
     value = raw_input('%s? [%s]' % (key, default))
-    if not value: value = default
+    if not value:
+        value = default
     config.set('scm', key, value)
     with open('.setup-scm-cache', 'w') as fp:
         config.write(fp)
     return value
 
+
 def run(command):
     rc = os.system(command)
     if rc != 0:
@@ -95,6 +101,7 @@ def run(command):
     assert rc == 0
     return rc
 
+
 @contextmanager
 def tempfile(template, values):
     fd, name = mkstemp()
@@ -103,7 +110,7 @@ def tempfile(template, values):
     yield name
     os.remove(name)
 
-backend_ldif=string.Template('''
+backend_ldif = string.Template('''
 # Load dynamic backend modules
 dn: cn=module,cn=config
 objectClass: olcModuleList
@@ -134,7 +141,7 @@ olcAccess: to * by dn="cn=admin,$suffix" write by * read
 
 ''')
 
-frontend_ldif=string.Template('''
+frontend_ldif = string.Template('''
 # Create top-level object in domain
 dn: $suffix
 objectClass: top
@@ -167,7 +174,7 @@ objectClass: organizationalUnit
 ou: groups
 ''')
 
-initial_user_ldif=string.Template('''
+initial_user_ldif = string.Template('''
 dn: uid=john,ou=people,$suffix
 objectClass: inetOrgPerson
 objectClass: posixAccount
@@ -205,7 +212,7 @@ cn: example
 gidNumber: 10000
 ''')
 
-open_ldap_config=string.Template('''
+open_ldap_config = string.Template('''
 [open_ldap]
 nss_passwd=passwd: files ldap
 nss_group=group: files ldap
@@ -230,7 +237,7 @@ pam_session=session    required     pam_limits.so
         session    optional     pam_ldap.so
 ''')
 
-ldapscripts_conf=string.Template('''
+ldapscripts_conf = string.Template('''
 SERVER=127.0.0.1
 BINDDN='cn=admin,$suffix'
 BINDPWDFILE="/etc/ldapscripts/ldapscripts.passwd"
@@ -244,7 +251,7 @@ MIDSTART=10000
 ''')
 
 
-ldapscripts_debian='''
+ldapscripts_debian = '''
 ### Allura-customized
 ### This file predefine some ldapscripts variables for Debian boxes.
 #

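The ldap-setup.py payloads above are string.Template objects whose $suffix placeholders are filled from the local variables passed to the tempfile() helper. A minimal sketch of that substitution pattern (the LDIF text and suffix value here are illustrative, not taken from the script):

    import string

    ldif_example = string.Template('dn: $suffix\nolcRootDN: cn=admin,$suffix\n')
    suffix = 'dc=example,dc=com'
    print ldif_example.substitute(locals())
    # dn: dc=example,dc=com
    # olcRootDN: cn=admin,dc=example,dc=com
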
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/ldap-userconfig.py
----------------------------------------------------------------------
diff --git a/Allura/ldap-userconfig.py b/Allura/ldap-userconfig.py
index ae796f8..bc36c1f 100644
--- a/Allura/ldap-userconfig.py
+++ b/Allura/ldap-userconfig.py
@@ -22,11 +22,13 @@ import sys
 import pwd
 import grp
 
+
 def main():
     command = sys.argv[1]
     uname = sys.argv[2]
     eval(command)(uname, *sys.argv[3:])
 
+
 def init(uname):
     home = os.path.join('/home', uname)
     ssh = os.path.join(home, '.ssh')
@@ -39,6 +41,7 @@ def init(uname):
     os.chown(home, u.pw_uid, g.gr_gid)
     os.chown(ssh, u.pw_uid, g.gr_gid)
 
+
 def upload(uname, pubkey):
     keyfile = os.path.join('/home', uname, '.ssh', 'authorized_keys')
     u = pwd.getpwnam(uname)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/setup.py
----------------------------------------------------------------------
diff --git a/Allura/setup.py b/Allura/setup.py
index 4f3cf84..f4ea03a 100644
--- a/Allura/setup.py
+++ b/Allura/setup.py
@@ -26,7 +26,7 @@ except ImportError:
 
 exec open('allura/version.py').read()
 
-PROJECT_DESCRIPTION='''
+PROJECT_DESCRIPTION = '''
 Allura is an open source implementation of a software "forge", a web site
 that manages source code repositories, bug reports, discussions, mailing
 lists, wiki pages, blogs and more for any number of individual projects.
@@ -43,7 +43,7 @@ setup(
     platforms=[
         'Linux',
         'MacOS X',
-        ],
+    ],
     classifiers=[
         'Development Status :: 4 - Beta',
         'Environment :: Web Environment',
@@ -53,7 +53,7 @@ setup(
         'Programming Language :: Python :: 2.6',
         'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
         'License :: OSI Approved :: Apache Software License',
-        ],
+    ],
     install_requires=[
         "TurboGears2",
         "pypeline",
@@ -72,7 +72,7 @@ setup(
         "feedparser >= 5.0.1",
         "oauth2 >= 1.2.0",
         "Ming >= 0.2.2dev-20110930",
-        ],
+    ],
     setup_requires=["PasteScript >= 1.7"],
     paster_plugins=['PasteScript', 'Pylons', 'TurboGears2', 'Ming'],
     packages=find_packages(exclude=['ez_setup']),
@@ -85,11 +85,11 @@ setup(
                              'templates/**.xml',
                              'templates/**.txt',
                              'public/*/*/*/*/*',
-                            ]},
+                             ]},
     message_extractors={'allura': [
-            ('**.py', 'python', None),
-            ('templates/**.html', 'genshi', None),
-            ('public/**', 'ignore', None)]},
+        ('**.py', 'python', None),
+        ('templates/**.html', 'genshi', None),
+        ('public/**', 'ignore', None)]},
 
     entry_points="""
     [paste.app_factory]

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/test-light.py
----------------------------------------------------------------------
diff --git a/Allura/test-light.py b/Allura/test-light.py
index f28eb3d..95e9f92 100644
--- a/Allura/test-light.py
+++ b/Allura/test-light.py
@@ -24,11 +24,14 @@ from allura.model.repo import CommitDoc, TreeDoc, TreesDoc, DiffInfoDoc
 from allura.model.repo import LastCommitDoc, CommitRunDoc
 from allura.model.repo_refresh import refresh_repo
 
+
 def main():
     if len(sys.argv) > 1:
         h.set_context('test')
-        c.project.install_app('Git', 'code', 'Code', init_from_url='/home/rick446/src/forge')
-        c.project.install_app('Hg', 'code2', 'Code2', init_from_url='/home/rick446/src/Kajiki')
+        c.project.install_app('Git', 'code', 'Code',
+                              init_from_url='/home/rick446/src/forge')
+        c.project.install_app('Hg', 'code2', 'Code2',
+                              init_from_url='/home/rick446/src/Kajiki')
     CommitDoc.m.remove({})
     TreeDoc.m.remove({})
     TreesDoc.m.remove({})

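The test-light.py hunk is typical of the long-line fixes in this commit: a call that overflowed the 79-character limit is wrapped so the continuation lines sit under the opening parenthesis. A small illustration with a made-up function (the names are placeholders, not Allura APIs):

    def install(tool, mount_point, label, init_from_url=None):
        return (tool, mount_point, label, init_from_url)

    # continuation aligned with the opening delimiter
    app = install('Git', 'code', 'Code',
                  init_from_url='/home/example/src/forge')

    # or a uniform hanging indent after breaking at the parenthesis
    app = install(
        'Git', 'code', 'Code', init_from_url='/home/example/src/forge')
    print app
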
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/AlluraTest/alluratest/controller.py
----------------------------------------------------------------------
diff --git a/AlluraTest/alluratest/controller.py b/AlluraTest/alluratest/controller.py
index 0017c48..ca75ae5 100644
--- a/AlluraTest/alluratest/controller.py
+++ b/AlluraTest/alluratest/controller.py
@@ -86,7 +86,8 @@ def setup_functional_test(config=None, app_name=DFL_APP_NAME):
     wsgiapp = loadapp('config:%s#%s' % (config, app_name),
                       relative_to=conf_dir)
     return wsgiapp
-setup_functional_test.__test__ = False  # sometimes __test__ above isn't sufficient
+# sometimes __test__ above isn't sufficient
+setup_functional_test.__test__ = False
 
 
 def setup_unit_test():
@@ -96,10 +97,11 @@ def setup_unit_test():
     except:
         pass
     REGISTRY.prepare()
-    REGISTRY.register(ew.widget_context, ew.core.WidgetContext('http', ew.ResourceManager()))
+    REGISTRY.register(ew.widget_context,
+                      ew.core.WidgetContext('http', ew.ResourceManager()))
     REGISTRY.register(g, Globals())
     REGISTRY.register(c, mock.Mock())
-    REGISTRY.register(url, lambda:None)
+    REGISTRY.register(url, lambda: None)
     REGISTRY.register(response, Response())
     REGISTRY.register(session, beaker.session.SessionObject({}))
     REGISTRY.register(allura.credentials, allura.lib.security.Credentials())
@@ -123,7 +125,8 @@ class TestController(object):
 
     def setUp(self):
         """Method called by nose before running each test"""
-        self.app = ValidatingTestApp(setup_functional_test(app_name=self.application_under_test))
+        self.app = ValidatingTestApp(
+            setup_functional_test(app_name=self.application_under_test))
         if self.validate_skip:
             self.app.validate_skip = self.validate_skip
         if asbool(tg.config.get('smtp.mock')):
@@ -164,8 +167,9 @@ class TestRestApiBase(TestController):
 
         return self._token_cache[username]
 
-    def _api_getpost(self, method, path, api_key=None, api_timestamp=None, api_signature=None,
-                 wrap_args=None, user='test-admin', status=None, **params):
+    def _api_getpost(
+            self, method, path, api_key=None, api_timestamp=None, api_signature=None,
+            wrap_args=None, user='test-admin', status=None, **params):
         '''
         If you need to use one of the method kwargs as a URL parameter,
         pass params={...} as a dict instead of **kwargs
@@ -177,13 +181,16 @@ class TestRestApiBase(TestController):
         if status is None:
             status = [200, 201, 301, 302, 400, 403, 404]
         params = variabledecode.variable_encode(params, add_repetitions=False)
-        if api_key: params['api_key'] = api_key
-        if api_timestamp: params['api_timestamp'] = api_timestamp
-        if api_signature: params['api_signature'] = api_signature
+        if api_key:
+            params['api_key'] = api_key
+        if api_timestamp:
+            params['api_timestamp'] = api_timestamp
+        if api_signature:
+            params['api_signature'] = api_signature
 
         params = self.token(user).sign_request(path, params)
 
-        fn = self.app.post if method=='POST' else self.app.get
+        fn = self.app.post if method == 'POST' else self.app.get
 
         response = fn(
             str(path),
@@ -194,10 +201,12 @@ class TestRestApiBase(TestController):
         else:
             return response
 
-    def api_get(self, path, api_key=None, api_timestamp=None, api_signature=None,
-                 wrap_args=None, user='test-admin', status=None, **params):
+    def api_get(
+            self, path, api_key=None, api_timestamp=None, api_signature=None,
+            wrap_args=None, user='test-admin', status=None, **params):
         return self._api_getpost('GET', path, api_key, api_timestamp, api_signature, wrap_args, user, status, **params)
 
-    def api_post(self, path, api_key=None, api_timestamp=None, api_signature=None,
-                 wrap_args=None, user='test-admin', status=None, **params):
+    def api_post(
+            self, path, api_key=None, api_timestamp=None, api_signature=None,
+            wrap_args=None, user='test-admin', status=None, **params):
         return self._api_getpost('POST', path, api_key, api_timestamp, api_signature, wrap_args, user, status, **params)

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/AlluraTest/alluratest/test_syntax.py
----------------------------------------------------------------------
diff --git a/AlluraTest/alluratest/test_syntax.py b/AlluraTest/alluratest/test_syntax.py
index b72052c..a4c03ee 100644
--- a/AlluraTest/alluratest/test_syntax.py
+++ b/AlluraTest/alluratest/test_syntax.py
@@ -22,6 +22,7 @@ import sys
 
 toplevel_dir = os.path.abspath(os.path.dirname(__file__) + "/../..")
 
+
 def run(cmd):
     proc = Popen(cmd, shell=True, cwd=toplevel_dir, stdout=PIPE, stderr=PIPE)
     # must capture & reprint stdout, so that nosetests can capture it
@@ -34,11 +35,14 @@ find_py = "find Allura Forge* -name '*.py'"
 
 # a recipe from the itertools docs
 from itertools import izip_longest
+
+
 def grouper(n, iterable, fillvalue=None):
     "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
     args = [iter(iterable)] * n
     return izip_longest(fillvalue=fillvalue, *args)
 
+
 def test_pyflakes():
     # skip some that aren't critical errors
     skips = [
@@ -47,7 +51,8 @@ def test_pyflakes():
         'assigned to but never used',
         '__version__',
     ]
-    proc = Popen(find_py, shell=True, cwd=toplevel_dir, stdout=PIPE, stderr=PIPE)
+    proc = Popen(find_py, shell=True, cwd=toplevel_dir,
+                 stdout=PIPE, stderr=PIPE)
     (find_stdout, stderr) = proc.communicate()
     sys.stderr.write(stderr)
     assert proc.returncode == 0, proc.returncode
@@ -57,26 +62,30 @@ def test_pyflakes():
     all_files = [f for f in find_stdout.split('\n')
                  if '/migrations/' not in f and f.strip()]
     for files in grouper(20, all_files, fillvalue=''):
-        cmd = "pyflakes " + ' '.join(files) + " | grep -v '" + "' | grep -v '".join(skips) + "'"
-        #print 'Command was: %s' % cmd
+        cmd = "pyflakes " + \
+            ' '.join(files) + " | grep -v '" + \
+            "' | grep -v '".join(skips) + "'"
+        # print 'Command was: %s' % cmd
         retval = run(cmd)
         if retval != 1:
             print
-            #print 'Command was: %s' % cmd
+            # print 'Command was: %s' % cmd
             print 'Returned %s' % retval
             error = True
 
     if error:
         raise Exception('pyflakes failure, see stdout')
 
+
 def test_no_local_tz_functions():
-    if run(find_py + " | xargs grep '\.now(' ") not in [1,123]:
+    if run(find_py + " | xargs grep '\.now(' ") not in [1, 123]:
         raise Exception("These should use .utcnow()")
-    if run(find_py + " | xargs grep '\.fromtimestamp(' ") not in [1,123]:
+    if run(find_py + " | xargs grep '\.fromtimestamp(' ") not in [1, 123]:
         raise Exception("These should use .utcfromtimestamp()")
-    if run(find_py + " | xargs grep 'mktime(' ") not in [1,123]:
+    if run(find_py + " | xargs grep 'mktime(' ") not in [1, 123]:
         raise Exception("These should use calendar.timegm()")
 
+
 def test_no_prints():
     skips = [
         '/tests/',
@@ -92,6 +101,7 @@ def test_no_prints():
     if run(find_py + " | grep -v '" + "' | grep -v '".join(skips) + "' | xargs grep -v '^ *#' | grep 'print ' | grep -E -v '(pprint|#pragma: ?printok)' ") != 1:
         raise Exception("These should use logging instead of print")
 
+
 def test_no_tabs():
-    if run(find_py + " | xargs grep '	' ") not in [1,123]:
+    if run(find_py + " | xargs grep '	' ") not in [1, 123]:
         raise Exception('These should not use tab chars')

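test_syntax.py batches files into groups of 20 before shelling out to pyflakes, using the grouper recipe from the itertools documentation. A minimal sketch of how that recipe behaves (Python 2's izip_longest; the equivalent on Python 3 is itertools.zip_longest):

    from itertools import izip_longest

    def grouper(n, iterable, fillvalue=None):
        "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
        args = [iter(iterable)] * n
        return izip_longest(fillvalue=fillvalue, *args)

    print list(grouper(3, 'ABCDEFG', 'x'))
    # [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
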
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/AlluraTest/alluratest/validation.py
----------------------------------------------------------------------
diff --git a/AlluraTest/alluratest/validation.py b/AlluraTest/alluratest/validation.py
index 9fbdd84..6742594 100644
--- a/AlluraTest/alluratest/validation.py
+++ b/AlluraTest/alluratest/validation.py
@@ -49,7 +49,9 @@ from allura.lib import utils
 
 log = logging.getLogger(__name__)
 
+
 class Config(object):
+
     "Config to encapsulate flexible/complex test enabled/disabled rules."
     _instance = None
 
@@ -68,7 +70,8 @@ class Config(object):
         if not self.ini_config:
             from . import controller
             import ConfigParser
-            conf = ConfigParser.ConfigParser({'validate_html5': 'false', 'validate_inlinejs': 'false'})
+            conf = ConfigParser.ConfigParser(
+                {'validate_html5': 'false', 'validate_inlinejs': 'false'})
             conf.read(controller.get_config_file())
             self.ini_config = conf
         return self.ini_config
@@ -88,7 +91,8 @@ class Config(object):
         elif env_var is not None:
             return val_type in env_var.split(',')
 
-        enabled = self.test_ini.getboolean('validation', 'validate_' + val_type)
+        enabled = self.test_ini.getboolean(
+            'validation', 'validate_' + val_type)
         return enabled
 
     def fail_on_validation(self, val_type):
@@ -99,7 +103,8 @@ class Config(object):
 
 
 def report_validation_error(val_name, filename, message):
-    message = '%s Validation errors (%s):\n%s\n' % (val_name, filename, message)
+    message = '%s Validation errors (%s):\n%s\n' % (
+        val_name, filename, message)
     if Config.instance().fail_on_validation(val_name):
         ok_(False, message)
     else:
@@ -147,7 +152,7 @@ def validate_html5(html_or_response):
     else:
         html = html_or_response
     register_openers()
-    params = [("out","text"),("content",html)]
+    params = [("out", "text"), ("content", html)]
     datagen, headers = multipart_encode(params)
     request = urllib2.Request("http://html5.validator.nu/", datagen, headers)
     count = 3
@@ -162,7 +167,7 @@ def validate_html5(html_or_response):
                 sys.stderr.write('WARNING: ' + resp + '\n')
                 break
 
-    resp = resp.replace('“','"').replace('”','"').replace('–','-')
+    resp = resp.replace('“', '"').replace('”', '"').replace('–', '-')
 
     ignored_errors = [
         'Required attributes missing on element "object"',
@@ -175,7 +180,7 @@ def validate_html5(html_or_response):
 
     if 'Error:' in resp:
         fname = dump_to_file('html5-', html)
-        message = resp.decode('ascii','ignore')
+        message = resp.decode('ascii', 'ignore')
         report_validation_error('html5', fname, message)
 
 
@@ -207,8 +212,10 @@ def validate_js(html_or_response):
     basedir = path.dirname(path.abspath(__file__))
     jslint_dir = basedir + '/../jslint'
     fname = dump_to_file('jslint-', html)
-    cmd = 'java -jar ' + jslint_dir + '/js.jar '+ jslint_dir +'/jslint.js ' + fname
-    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    cmd = 'java -jar ' + jslint_dir + '/js.jar ' + \
+        jslint_dir + '/jslint.js ' + fname
+    p = subprocess.Popen(cmd, shell=True,
+                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     stdout, stderr = p.communicate(html)
     if stdout.startswith('jslint: No problems found'):
         os.unlink(fname)
@@ -241,6 +248,7 @@ class AntiSpamTestApp(TestApp):
             kwargs['params'] = params
         return super(AntiSpamTestApp, self).post(*args, **kwargs)
 
+
 class PostParamCheckingTestApp(AntiSpamTestApp):
 
     def _validate_params(self, params, method):
@@ -251,9 +259,12 @@ class PostParamCheckingTestApp(AntiSpamTestApp):
             params = params.items()
         for k, v in params:
             if not isinstance(k, basestring):
-                raise TypeError('%s key %s is %s, not str' % (method, k, type(k)))
+                raise TypeError('%s key %s is %s, not str' %
+                                (method, k, type(k)))
             if not isinstance(v, (basestring, webtest.app.File)):
-                raise TypeError('%s key %s has value %s of type %s, not str. ' % (method, k, v, type(v)))
+                raise TypeError(
+                    '%s key %s has value %s of type %s, not str. ' %
+                    (method, k, v, type(v)))
 
     def get(self, *args, **kwargs):
         self._validate_params(kwargs.get('params'), 'get')
@@ -263,6 +274,7 @@ class PostParamCheckingTestApp(AntiSpamTestApp):
         self._validate_params(kwargs.get('params'), 'post')
         return super(PostParamCheckingTestApp, self).post(*args, **kwargs)
 
+
 class ValidatingTestApp(PostParamCheckingTestApp):
 
     # Subclasses may set this to True to skip validation altogether
@@ -286,7 +298,7 @@ class ValidatingTestApp(PostParamCheckingTestApp):
             pass
         elif content_type.startswith('application/json'):
             validate_json(content)
-        elif content_type.startswith(('application/x-javascript','application/javascript', 'text/javascript')):
+        elif content_type.startswith(('application/x-javascript', 'application/javascript', 'text/javascript')):
             validate_js(content)
         elif content_type.startswith('application/xml'):
             import feedparser

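validate_js() above assembles a jslint command line and runs it through subprocess.Popen with stderr folded into stdout before checking the combined output. A minimal sketch of that pattern, with a placeholder echo command standing in for the real java/jslint invocation (assumes a POSIX shell):

    import subprocess

    p = subprocess.Popen('echo jslint: No problems found', shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    stdout, _ = p.communicate()
    print stdout.startswith('jslint: No problems found')  # True
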
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/AlluraTest/setup.py
----------------------------------------------------------------------
diff --git a/AlluraTest/setup.py b/AlluraTest/setup.py
index 421cd39..9958a75 100644
--- a/AlluraTest/setup.py
+++ b/AlluraTest/setup.py
@@ -16,14 +16,16 @@
 #       under the License.
 
 from setuptools import setup, find_packages
-import sys, os
+import sys
+import os
 
 setup(name='AlluraTest',
       version='0.1',
       description="Allura testing support",
       long_description="""\
 """,
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+      classifiers=[],
       keywords='',
       author='',
       author_email='',
@@ -33,7 +35,7 @@ setup(name='AlluraTest',
       include_package_data=True,
       zip_safe=False,
       install_requires=[
-        "poster",
+          "poster",
           # -*- Extra requirements: -*-
       ]
       )

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeActivity/forgeactivity/config/resources.py
----------------------------------------------------------------------
diff --git a/ForgeActivity/forgeactivity/config/resources.py b/ForgeActivity/forgeactivity/config/resources.py
index 11c100d..5d98bd3 100644
--- a/ForgeActivity/forgeactivity/config/resources.py
+++ b/ForgeActivity/forgeactivity/config/resources.py
@@ -17,6 +17,7 @@
 
 import pkg_resources
 
+
 def register_ew_resources(manager):
     manager.register_directory(
         'activity_js', pkg_resources.resource_filename('forgeactivity', 'widgets/resources/js'))

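register_ew_resources() above resolves a directory shipped inside the forgeactivity package via pkg_resources.resource_filename. A minimal sketch of that call against a stdlib package, since the real widgets/resources path only exists in an Allura checkout:

    import pkg_resources

    # returns a filesystem path for a file or directory bundled with a package
    print pkg_resources.resource_filename('email', 'mime')
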
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/ForgeActivity/forgeactivity/main.py
----------------------------------------------------------------------
diff --git a/ForgeActivity/forgeactivity/main.py b/ForgeActivity/forgeactivity/main.py
index 3a60190..9188b66 100644
--- a/ForgeActivity/forgeactivity/main.py
+++ b/ForgeActivity/forgeactivity/main.py
@@ -38,6 +38,7 @@ log = logging.getLogger(__name__)
 
 
 class ForgeActivityApp(Application):
+
     """Project Activity page for projects."""
     __version__ = version.__version__
     default_mount_point = 'activity'
@@ -49,19 +50,22 @@ class ForgeActivityApp(Application):
         self.root = ForgeActivityController(self)
         self.api_root = ForgeActivityRestController(self)
 
-    def admin_menu(self): # pragma no cover
+    def admin_menu(self):  # pragma no cover
         return []
 
     def install(self, project):
-        pass # pragma no cover
+        pass  # pragma no cover
 
     def uninstall(self, project):
-        pass # pragma no cover
+        pass  # pragma no cover
+
 
 class W:
     follow_toggle = FollowToggle()
 
+
 class ForgeActivityController(BaseController):
+
     def __init__(self, app, *args, **kw):
         super(ForgeActivityController, self).__init__(*args, **kw)
         self.app = app
@@ -77,7 +81,8 @@ class ForgeActivityController(BaseController):
 
     def _get_activities_data(self, **kw):
         activity_enabled = config.get('activitystream.enabled', False)
-        activity_enabled = request.cookies.get('activitystream.enabled', activity_enabled)
+        activity_enabled = request.cookies.get(
+            'activitystream.enabled', activity_enabled)
         activity_enabled = asbool(activity_enabled)
         if not activity_enabled:
             raise exc.HTTPNotFound()
@@ -92,8 +97,8 @@ class ForgeActivityController(BaseController):
 
         following = g.director.is_connected(c.user, followee)
         timeline = g.director.get_timeline(followee, page=kw.get('page', 0),
-                limit=kw.get('limit', 100), actor_only=actor_only,
-                filter_func=perm_check(c.user))
+                                           limit=kw.get('limit', 100), actor_only=actor_only,
+                                           filter_func=perm_check(c.user))
         return dict(followee=followee, following=following, timeline=timeline)
 
     @expose('jinja:forgeactivity:templates/index.html')
@@ -108,11 +113,11 @@ class ForgeActivityController(BaseController):
         response.headers['Content-Type'] = ''
         response.content_type = 'application/xml'
         d = {
-                'title': 'Activity for %s' % data['followee'].shortname,
-                'link': h.absurl(self.app.url),
-                'description': 'Recent activity for %s' % data['followee'].shortname,
-                'language': u'en',
-            }
+            'title': 'Activity for %s' % data['followee'].shortname,
+            'link': h.absurl(self.app.url),
+            'description': 'Recent activity for %s' % data['followee'].shortname,
+            'language': u'en',
+        }
         if request.environ['PATH_INFO'].endswith('.atom'):
             feed = FG.Atom1Feed(**d)
         else:
@@ -121,23 +126,24 @@ class ForgeActivityController(BaseController):
             url = h.absurl(t.obj.activity_url.encode('utf-8'))
             feed.add_item(title=u'%s %s %s%s' % (
                                 t.actor.activity_name,
-                                t.verb,
-                                t.obj.activity_name,
-                                ' on %s' % t.target.activity_name if t.target.activity_name else '',
-                            ),
-                          link=url,
-                          pubdate=t.published,
-                          description=t.obj.activity_extras.get('summary'),
-                          unique_id=url,
-                          author_name=t.actor.activity_name,
-                          author_link=h.absurl(t.actor.activity_url))
+                t.verb,
+                t.obj.activity_name,
+                ' on %s' % t.target.activity_name if t.target.activity_name else '',
+            ),
+                link=url,
+                pubdate=t.published,
+                description=t.obj.activity_extras.get('summary'),
+                unique_id=url,
+                author_name=t.actor.activity_name,
+                author_link=h.absurl(t.actor.activity_url))
         return feed.writeString('utf-8')
 
     @expose('json:')
     @validate(W.follow_toggle)
     def follow(self, follow, **kw):
         activity_enabled = config.get('activitystream.enabled', False)
-        activity_enabled = request.cookies.get('activitystream.enabled', activity_enabled)
+        activity_enabled = request.cookies.get(
+            'activitystream.enabled', activity_enabled)
         activity_enabled = asbool(activity_enabled)
         if not activity_enabled:
             raise exc.HTTPNotFound()
@@ -167,6 +173,7 @@ class ForgeActivityController(BaseController):
 
 
 class ForgeActivityRestController(BaseController):
+
     def __init__(self, app, *args, **kw):
         super(ForgeActivityRestController, self).__init__(*args, **kw)
         self.app = app
@@ -175,17 +182,17 @@ class ForgeActivityRestController(BaseController):
     def index(self, **kw):
         data = self.app.root._get_activities_data(**kw)
         return {
-                'following': data['following'],
-                'followee': {
-                    'activity_name': data['followee'].shortname,
-                    'activity_url': data['followee'].url(),
-                    'activity_extras': {},
-                },
-                'timeline': [{
-                        'published': '%s UTC' % a.published,
-                        'actor': a.actor._deinstrument(),
-                        'verb': a.verb,
-                        'obj': a.obj._deinstrument(),
-                        'target': a.target._deinstrument(),
-                    } for a in data['timeline']],
-            }
+            'following': data['following'],
+            'followee': {
+                'activity_name': data['followee'].shortname,
+                'activity_url': data['followee'].url(),
+                'activity_extras': {},
+            },
+            'timeline': [{
+                'published': '%s UTC' % a.published,
+                'actor': a.actor._deinstrument(),
+                'verb': a.verb,
+                'obj': a.obj._deinstrument(),
+                'target': a.target._deinstrument(),
+            } for a in data['timeline']],
+        }