Posted to commits@allura.apache.org by jo...@apache.org on 2014/01/10 22:23:23 UTC

[27/36] PEP8 cleanup

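For orientation: the hunks in this part of the series are formatting-only. Imports are split one per line (pep8 E401), two blank lines are inserted between top-level definitions (E302), over-long lines are wrapped (E501), one-line "if x: return y" bodies are moved onto their own lines (E701), and whitespace around operators and after commas is normalized. The first gravatar.py hunk, for example, amounts to:

    # before
    import re, urllib, hashlib

    _wrapped_email=re.compile(r'.*<(.+)>')
    def id(email):
        ...

    # after
    import re
    import urllib
    import hashlib

    _wrapped_email = re.compile(r'.*<(.+)>')


    def id(email):
        ...
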
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/gravatar.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/gravatar.py b/Allura/allura/lib/gravatar.py
index be8ad3c..0e03be2 100644
--- a/Allura/allura/lib/gravatar.py
+++ b/Allura/allura/lib/gravatar.py
@@ -15,9 +15,12 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-import re, urllib, hashlib
+import re
+import urllib
+import hashlib
+
+_wrapped_email = re.compile(r'.*<(.+)>')
 
-_wrapped_email=re.compile(r'.*<(.+)>')
 
 def id(email):
     """Turn an email address into a Gravatar id as per <http://gravatar.com/site/implement/url>
@@ -33,6 +36,7 @@ def id(email):
         email = match.group(1)
     return hashlib.md5(email.strip().lower().encode('utf8')).hexdigest()
 
+
 def url(email=None, gravatar_id=None, **kw):
     """Build a complete gravatar URL with our favorite defaults.
 
@@ -73,8 +77,10 @@ def url(email=None, gravatar_id=None, **kw):
     assert gravatar_id or email
     if gravatar_id is None:
         gravatar_id = id(email)
-    if 'r' not in kw and 'rating' not in kw: kw['r'] = 'pg'
+    if 'r' not in kw and 'rating' not in kw:
+        kw['r'] = 'pg'
     return ('https://secure.gravatar.com/avatar/%s?%s' % (gravatar_id, urllib.urlencode(kw)))
 
+
 def for_user(user):
     return url(user.get_pref('email_address'))
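
Behavior of the gravatar helpers is untouched by the reformat. A minimal usage sketch, illustrative only and assuming the allura package is importable (hash value and query-string order abbreviated in the comments):

    from allura.lib import gravatar

    # id() unwraps "Name <addr>" forms, then strips, lowercases and md5-hashes
    print(gravatar.id('Jane Doe <Jane@Example.com> '))
    # -> 32-character hex digest of 'jane@example.com'

    # url() defaults the rating to 'pg' unless r= or rating= is supplied
    print(gravatar.url(email='jane@example.com', d='mm', s=48))
    # -> https://secure.gravatar.com/avatar/<digest>?... with r=pg, d=mm, s=48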

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/helpers.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/helpers.py b/Allura/allura/lib/helpers.py
index 0b4e2dc..ca1d386 100644
--- a/Allura/allura/lib/helpers.py
+++ b/Allura/allura/lib/helpers.py
@@ -67,8 +67,10 @@ re_project_name = re.compile(r'^[a-z][-a-z0-9]{2,14}$')
 # validates tool mount point names
 re_tool_mount_point = re.compile(r'^[a-z][-a-z0-9]{0,62}$')
 re_tool_mount_point_fragment = re.compile(r'[a-z][-a-z0-9]*')
-re_relaxed_tool_mount_point = re.compile(r'^[a-zA-Z0-9][-a-zA-Z0-9_\.\+]{0,62}$')
-re_relaxed_tool_mount_point_fragment = re.compile(r'[a-zA-Z0-9][-a-zA-Z0-9_\.\+]*')
+re_relaxed_tool_mount_point = re.compile(
+    r'^[a-zA-Z0-9][-a-zA-Z0-9_\.\+]{0,62}$')
+re_relaxed_tool_mount_point_fragment = re.compile(
+    r'[a-zA-Z0-9][-a-zA-Z0-9_\.\+]*')
 
 re_clean_vardec_key = re.compile(r'''\A
 ( # first part
@@ -101,6 +103,7 @@ re_angle_bracket_open = re.compile('<')
 re_angle_bracket_close = re.compile('>')
 md_chars_matcher_all = re.compile(r"([`\*_{}\[\]\(\)#!\\\.+-])")
 
+
 def make_safe_path_portion(ustr, relaxed=True):
     """Return an ascii representation of ``ustr`` that conforms to mount point
     naming :attr:`rules <re_tool_mount_point_fragment>`.
@@ -113,7 +116,7 @@ def make_safe_path_portion(ustr, relaxed=True):
 
     """
     regex = (re_relaxed_tool_mount_point_fragment if relaxed else
-                re_tool_mount_point_fragment)
+             re_tool_mount_point_fragment)
     ustr = really_unicode(ustr)
     s = ustr.encode('latin1', 'ignore')
     s = AsciiDammit.asciiDammit(s)
@@ -123,26 +126,31 @@ def make_safe_path_portion(ustr, relaxed=True):
     s = s.replace('--', '-')
     return s
 
+
 def monkeypatch(*objs):
     def patchem(func):
         for obj in objs:
             setattr(obj, func.__name__, func)
     return patchem
 
+
 def urlquote(url, safe="/"):
     try:
         return urllib.quote(str(url), safe=safe)
     except UnicodeEncodeError:
         return urllib.quote(url.encode('utf-8'), safe=safe)
 
+
 def urlquoteplus(url, safe=""):
     try:
         return urllib.quote_plus(str(url), safe=safe)
     except UnicodeEncodeError:
         return urllib.quote_plus(url.encode('utf-8'), safe=safe)
 
+
 def _attempt_encodings(s, encodings):
-    if s is None: return u''
+    if s is None:
+        return u''
     for enc in encodings:
         try:
             if enc is None:
@@ -154,6 +162,7 @@ def _attempt_encodings(s, encodings):
     # Return the repr of the str -- should always be safe
     return unicode(repr(str(s)))[1:-1]
 
+
 def really_unicode(s):
     # Try to guess the encoding
     def encodings():
@@ -164,6 +173,7 @@ def really_unicode(s):
         yield 'latin-1'
     return _attempt_encodings(s, encodings())
 
+
 def find_user(email=None, username=None, display_name=None):
     from allura import model as M
     user = None
@@ -175,6 +185,7 @@ def find_user(email=None, username=None, display_name=None):
         user = M.User.by_display_name(display_name)
     return user
 
+
 def find_project(url_path):
     from allura import model as M
     for n in M.Neighborhood.query.find():
@@ -182,45 +193,55 @@ def find_project(url_path):
             break
     else:
         return None, url_path
-    project_part = n.shortname_prefix + url_path[len(n.url_prefix):] # easily off-by-one, might be better to join together everything but url_prefix
+    # easily off-by-one, might be better to join together everything but
+    # url_prefix
+    project_part = n.shortname_prefix + url_path[len(n.url_prefix):]
     parts = project_part.split('/')
     length = len(parts)
     while length:
         shortname = '/'.join(parts[:length])
         p = M.Project.query.get(shortname=shortname, deleted=False,
                                 neighborhood_id=n._id)
-        if p: return p, parts[length:]
+        if p:
+            return p, parts[length:]
         length -= 1
     return None, url_path.split('/')
 
+
 def make_neighborhoods(ids):
     return _make_xs('Neighborhood', ids)
 
+
 def make_projects(ids):
     return _make_xs('Project', ids)
 
+
 def make_users(ids):
     return _make_xs('User', ids)
 
+
 def make_roles(ids):
     return _make_xs('ProjectRole', ids)
 
+
 def _make_xs(X, ids):
     from allura import model as M
     X = getattr(M, X)
     ids = list(ids)
     results = dict(
         (r._id, r)
-        for r in X.query.find(dict(_id={'$in':ids})))
+        for r in X.query.find(dict(_id={'$in': ids})))
     result = (results.get(i) for i in ids)
     return (r for r in result if r is not None)
 
+
 def make_app_admin_only(app):
     from allura.model.auth import ProjectRole
     admin_role = ProjectRole.by_name('Admin', app.project)
     for ace in [ace for ace in app.acl if ace.role_id != admin_role._id]:
         app.acl.remove(ace)
 
+
 @contextmanager
 def push_config(obj, **kw):
     saved_attrs = {}
@@ -239,12 +260,14 @@ def push_config(obj, **kw):
         for k in new_attrs:
             delattr(obj, k)
 
+
 def sharded_path(name, num_parts=2):
     parts = [
         name[:i + 1]
-        for i in range(num_parts) ]
+        for i in range(num_parts)]
     return '/'.join(parts)
 
+
 def set_context(project_shortname_or_id, mount_point=None, app_config_id=None, neighborhood=None):
     from allura import model
     try:
@@ -258,19 +281,22 @@ def set_context(project_shortname_or_id, mount_point=None, app_config_id=None, n
             n = model.Neighborhood.query.get(name=neighborhood)
             if n is None:
                 try:
-                    n = model.Neighborhood.query.get(_id=ObjectId(str(neighborhood)))
+                    n = model.Neighborhood.query.get(
+                        _id=ObjectId(str(neighborhood)))
                 except InvalidId:
                     pass
             if n is None:
-                raise exc.NoSuchNeighborhoodError("Couldn't find neighborhood %s" %
-                                      repr(neighborhood))
+                raise exc.NoSuchNeighborhoodError(
+                    "Couldn't find neighborhood %s" %
+                    repr(neighborhood))
             neighborhood = n
 
-        query = dict(shortname=project_shortname_or_id, neighborhood_id=neighborhood._id)
+        query = dict(shortname=project_shortname_or_id,
+                     neighborhood_id=neighborhood._id)
         p = model.Project.query.get(**query)
     if p is None:
         raise exc.NoSuchProjectError("Couldn't find project %s nbhd %s" %
-                                 (project_shortname_or_id, neighborhood))
+                                     (project_shortname_or_id, neighborhood))
     c.project = p
 
     if app_config_id is None:
@@ -281,6 +307,7 @@ def set_context(project_shortname_or_id, mount_point=None, app_config_id=None, n
         app_config = model.AppConfig.query.get(_id=app_config_id)
         c.app = p.app_instance(app_config)
 
+
 @contextmanager
 def push_context(project_id, mount_point=None, app_config_id=None, neighborhood=None):
     project = getattr(c, 'project', ())
@@ -298,6 +325,7 @@ def push_context(project_id, mount_point=None, app_config_id=None, neighborhood=
         else:
             c.app = app
 
+
 def encode_keys(d):
     '''Encodes the unicode keys of d, making the result
     a valid kwargs argument'''
@@ -305,29 +333,34 @@ def encode_keys(d):
         (k.encode('utf-8'), v)
         for k, v in d.iteritems())
 
+
 def vardec(fun):
     def vardec_hook(remainder, params):
         new_params = variable_decode(dict(
-                (k, v) for k, v in params.items()
-                if re_clean_vardec_key.match(k)))
+            (k, v) for k, v in params.items()
+            if re_clean_vardec_key.match(k)))
         params.update(new_params)
     before_validate(vardec_hook)(fun)
     return fun
 
+
 def nonce(length=4):
     return sha1(ObjectId().binary + os.urandom(10)).hexdigest()[:length]
 
+
 def cryptographic_nonce(length=40):
     hex_format = '%.2x' * length
     return hex_format % tuple(map(ord, os.urandom(length)))
 
+
 def ago(start_time, show_date_after=7):
     """
     Return time since starting time as a rounded, human readable string.
     E.g., "3 hours ago"
     """
 
-    if start_time is None: return 'unknown'
+    if start_time is None:
+        return 'unknown'
     granularities = ['century', 'decade', 'year', 'month', 'day', 'hour',
                      'minute']
     end_time = datetime.utcnow()
@@ -343,15 +376,18 @@ def ago(start_time, show_date_after=7):
             break
     return ago + ' ago'
 
+
 def ago_ts(timestamp):
     return ago(datetime.utcfromtimestamp(timestamp))
 
+
 def ago_string(s):
     try:
         return ago(parse(s, ignoretz=True))
     except (ValueError, AttributeError):
         return 'unknown'
 
+
 class DateTimeConverter(FancyValidator):
 
     def _to_python(self, value, state):
@@ -363,7 +399,6 @@ class DateTimeConverter(FancyValidator):
             else:
                 raise
 
-
     def _from_python(self, value, state):
         return value.isoformat()
 
@@ -403,13 +438,14 @@ def diff_text(t1, t2, differ=None):
     result = []
     for tag, i1, i2, j1, j2 in differ.get_opcodes():
         if tag in ('delete', 'replace'):
-            result += [ '<del>' ] + t1_words[i1:i2] + [ '</del>' ]
+            result += ['<del>'] + t1_words[i1:i2] + ['</del>']
         if tag in ('insert', 'replace'):
-            result += [ '<ins>' ] + t2_words[j1:j2] + [ '</ins>' ]
+            result += ['<ins>'] + t2_words[j1:j2] + ['</ins>']
         if tag == 'equal':
             result += t1_words[i1:i2]
     return ' '.join(result).replace('\n', '<br/>\n')
 
+
 def gen_message_id(_id=None):
     if not _id:
         _id = nonce(40)
@@ -424,14 +460,18 @@ def gen_message_id(_id=None):
     return '%s@%s.sourceforge.net' % (
         addr, '.'.join(reversed(parts)))
 
+
 class ProxiedAttrMeta(type):
+
     def __init__(cls, name, bases, dct):
         for v in dct.itervalues():
             if isinstance(v, attrproxy):
                 v.cls = cls
 
+
 class attrproxy(object):
     cls = None
+
     def __init__(self, *attrs):
         self.attrs = attrs
 
@@ -448,12 +488,14 @@ class attrproxy(object):
 
     def __getattr__(self, name):
         if self.cls is None:
-            return promised_attrproxy(lambda:self.cls, name)
+            return promised_attrproxy(lambda: self.cls, name)
         return getattr(
             attrproxy(self.cls, *self.attrs),
             name)
 
+
 class promised_attrproxy(attrproxy):
+
     def __init__(self, promise, *attrs):
         super(promised_attrproxy, self).__init__(*attrs)
         self._promise = promise
@@ -465,14 +507,19 @@ class promised_attrproxy(attrproxy):
         cls = self._promise()
         return getattr(cls, name)
 
+
 class proxy(object):
+
     def __init__(self, obj):
         self._obj = obj
+
     def __getattr__(self, name):
         return getattr(self._obj, name)
+
     def __call__(self, *args, **kwargs):
         return self._obj(*args, **kwargs)
 
+
 def render_genshi_plaintext(template_name, **template_vars):
     assert os.path.exists(template_name)
     fd = open(template_name)
@@ -482,11 +529,12 @@ def render_genshi_plaintext(template_name, **template_vars):
         fd.close()
     filepath = os.path.dirname(template_name)
     tt = genshi.template.NewTextTemplate(tpl_text,
-            filepath=filepath, filename=template_name)
+                                         filepath=filepath, filename=template_name)
     stream = tt.generate(**template_vars)
     return stream.render(encoding='utf-8').decode('utf-8')
 
-site_url = None # cannot set it just yet since tg.config is empty
+site_url = None  # cannot set it just yet since tg.config is empty
+
 
 def full_url(url):
     """Make absolute URL from the relative one.
@@ -494,7 +542,8 @@ def full_url(url):
     global site_url
     if site_url is None:
         # XXX: add a separate tg option instead of re-using openid.realm
-        site_url = tg.config.get('openid.realm', 'https://newforge.sf.geek.net/')
+        site_url = tg.config.get(
+            'openid.realm', 'https://newforge.sf.geek.net/')
         site_url = site_url.replace('https:', 'http:')
         if not site_url.endswith('/'):
             site_url += '/'
@@ -502,26 +551,30 @@ def full_url(url):
         url = url[1:]
     return site_url + url
 
+
 @tg.expose(content_type='text/plain')
 def json_validation_error(controller, **kwargs):
     result = dict(status='Validation Error',
-                errors=c.validation_exception.unpack_errors(),
-                value=c.validation_exception.value,
-                params=kwargs)
+                  errors=c.validation_exception.unpack_errors(),
+                  value=c.validation_exception.value,
+                  params=kwargs)
     response.status = 400
     return json.dumps(result, indent=2)
 
+
 def pop_user_notifications(user=None):
     from allura import model as M
     if user is None:
         user = c.user
     mbox = M.Mailbox.query.get(user_id=user._id, is_flash=True)
     if mbox:
-        notifications = M.Notification.query.find(dict(_id={'$in':mbox.queue}))
+        notifications = M.Notification.query.find(
+            dict(_id={'$in': mbox.queue}))
         mbox.queue = []
         mbox.queue_empty = True
         for n in notifications:
-            M.Notification.query.remove({'_id': n._id}) # clean it up so it doesn't hang around
+            # clean it up so it doesn't hang around
+            M.Notification.query.remove({'_id': n._id})
             yield n
 
 
@@ -533,11 +586,12 @@ def config_with_prefix(d, prefix):
     return dict((k[plen:], v) for k, v in d.iteritems()
                 if k.startswith(prefix))
 
+
 @contextmanager
 def twophase_transaction(*engines):
     connections = [
         e.contextual_connect()
-        for e in engines ]
+        for e in engines]
     txns = []
     to_rollback = []
     try:
@@ -557,6 +611,7 @@ def twophase_transaction(*engines):
             txn.rollback()
         raise
 
+
 class log_action(object):
     extra_proto = dict(
         action=None,
@@ -617,7 +672,8 @@ class log_action(object):
                 result['username'] = '*system'
             try:
                 result['url'] = request.url
-                ip_address = request.headers.get('X_FORWARDED_FOR', request.remote_addr)
+                ip_address = request.headers.get(
+                    'X_FORWARDED_FOR', request.remote_addr)
                 if ip_address is not None:
                     ip_address = ip_address.split(',')[0].strip()
                     result['ip_address'] = ip_address
@@ -627,9 +683,11 @@ class log_action(object):
                 pass
             return result
         except:
-            self._logger.warning('Error logging to rtstats, some info may be missing', exc_info=True)
+            self._logger.warning(
+                'Error logging to rtstats, some info may be missing', exc_info=True)
             return result
 
+
 def paging_sanitizer(limit, page, total_count, zero_based_pages=True):
     """Return limit, page - both converted to int and constrained to
     valid ranges based on total_count.
@@ -646,7 +704,9 @@ def paging_sanitizer(limit, page, total_count, zero_based_pages=True):
 def _add_inline_line_numbers_to_text(text):
     markup_text = '<div class="codehilite"><pre>'
     for line_num, line in enumerate(text.splitlines(), 1):
-        markup_text = markup_text + '<span id="l%s" class="code_block"><span class="lineno">%s</span> %s</span>' % (line_num, line_num, line)
+        markup_text = markup_text + \
+            '<span id="l%s" class="code_block"><span class="lineno">%s</span> %s</span>' % (
+                line_num, line_num, line)
     markup_text = markup_text + '</pre></div>'
     return markup_text
 
@@ -662,16 +722,21 @@ def _add_table_line_numbers_to_text(text):
         return '\n'.join(map(_prepend_whitespaces, range(start, max_num), [max_num] * l))
 
     lines = text.splitlines(True)
-    linenumbers = '<td class="linenos"><div class="linenodiv"><pre>' + _len_to_str_column(len(lines)) + '</pre></div></td>'
-    markup_text = '<table class="codehilitetable"><tbody><tr>' + linenumbers + '<td class="code"><div class="codehilite"><pre>'
+    linenumbers = '<td class="linenos"><div class="linenodiv"><pre>' + \
+        _len_to_str_column(len(lines)) + '</pre></div></td>'
+    markup_text = '<table class="codehilitetable"><tbody><tr>' + \
+        linenumbers + '<td class="code"><div class="codehilite"><pre>'
     for line_num, line in enumerate(lines, 1):
-        markup_text = markup_text + '<span id="l%s" class="code_block">%s</span>' % (line_num, line)
+        markup_text = markup_text + \
+            '<span id="l%s" class="code_block">%s</span>' % (line_num, line)
     markup_text = markup_text + '</pre></div></td></tr></tbody></table>'
     return markup_text
 
 
 INLINE = 'inline'
 TABLE = 'table'
+
+
 def render_any_markup(name, text, code_mode=False, linenumbers_style=TABLE):
     """
     renders markdown using allura enhacements if file is in markdown format
@@ -698,6 +763,8 @@ def render_any_markup(name, text, code_mode=False, linenumbers_style=TABLE):
 # copied from jinja2 dev
 # latest release, 2.6, implements this incorrectly
 # can remove and use jinja2 implementation after upgrading to 2.7
+
+
 def do_filesizeformat(value, binary=False):
     """Format the value like a 'human-readable' file size (i.e. 13 kB,
 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
@@ -763,6 +830,7 @@ def datetimeformat(value, format='%Y-%m-%d %H:%M:%S'):
 @contextmanager
 def log_output(log):
     class Writer(object):
+
         def __init__(self, func):
             self.func = func
             self.closed = False
@@ -783,6 +851,7 @@ def log_output(log):
         sys.stdout = _stdout
         sys.stderr = _stderr
 
+
 def topological_sort(items, partial_order):
     """Perform topological sort.
        items is a list of items to be sorted.
@@ -793,7 +862,8 @@ def topological_sort(items, partial_order):
 
        Modified from: http://www.bitformation.com/art/python_toposort.html
     """
-    # Original topological sort code written by Ofer Faigon (www.bitformation.com) and used with permission
+    # Original topological sort code written by Ofer Faigon
+    # (www.bitformation.com) and used with permission
 
     def add_arc(graph, fromnode, tonode):
         """Add an arc to a graph. Can create multiple arcs.
@@ -814,8 +884,8 @@ def topological_sort(items, partial_order):
     # Note that our representation does not contain reference loops to
     # cause GC problems even when the represented graph contains loops,
     # because we keep the node names rather than references to the nodes.
-    graph = defaultdict(lambda:[0])
-    for a,b in partial_order:
+    graph = defaultdict(lambda: [0])
+    for a, b in partial_order:
         add_arc(graph, a, b)
 
     # Step 2 - find all roots (nodes with zero incoming arcs).
@@ -923,6 +993,7 @@ def null_contextmanager(*args, **kw):
 
 
 class exceptionless(object):
+
     '''Decorator making the decorated function return 'error_result' on any
     exceptions rather than propagating exceptions up the stack
     '''
@@ -933,13 +1004,15 @@ class exceptionless(object):
 
     def __call__(self, fun):
         fname = 'exceptionless(%s)' % fun.__name__
+
         def inner(*args, **kwargs):
             try:
                 return fun(*args, **kwargs)
             except Exception as e:
                 if self.log:
-                    self.log.exception('Error calling %s(args=%s, kwargs=%s): %s',
-                            fname, args, kwargs, str(e))
+                    self.log.exception(
+                        'Error calling %s(args=%s, kwargs=%s): %s',
+                        fname, args, kwargs, str(e))
                 return self.error_result
         inner.__name__ = fname
         return inner
@@ -961,7 +1034,7 @@ def urlopen(url, retries=3, codes=(408,), timeout=None):
             return urllib2.urlopen(url, timeout=timeout)
         except (urllib2.HTTPError, socket.timeout) as e:
             if attempts < retries and (isinstance(e, socket.timeout) or
-                    e.code in codes):
+                                       e.code in codes):
                 attempts += 1
                 continue
             else:
@@ -971,7 +1044,9 @@ def urlopen(url, retries=3, codes=(408,), timeout=None):
                     url_string = url
                 if timeout is None:
                     timeout = socket.getdefaulttimeout()
-                log.exception('Failed after %s retries on url with a timeout of %s: %s: %s', attempts, timeout, url_string, e)
+                log.exception(
+                    'Failed after %s retries on url with a timeout of %s: %s: %s',
+                    attempts, timeout, url_string, e)
                 raise e
 
 
@@ -1014,9 +1089,11 @@ def iter_entry_points(group, *a, **kw):
 
     """
     def active_eps():
-        disabled = aslist(tg.config.get('disable_entry_points.' + group), sep=',')
+        disabled = aslist(
+            tg.config.get('disable_entry_points.' + group), sep=',')
         return [ep for ep in pkg_resources.iter_entry_points(group, *a, **kw)
                 if ep.name not in disabled]
+
     def unique_eps(entry_points):
         by_name = defaultdict(list)
         for ep in entry_points:
@@ -1027,6 +1104,7 @@ def iter_entry_points(group, *a, **kw):
                 yield eps[0]
             else:
                 yield subclass(eps)
+
     def subclass(entry_points):
         loaded = dict((ep, ep.load()) for ep in entry_points)
         for ep, cls in loaded.iteritems():
@@ -1035,7 +1113,7 @@ def iter_entry_points(group, *a, **kw):
             if all([issubclass(cls, other) for other in others]):
                 return ep
         raise ImportError('Ambiguous [allura] entry points detected. ' +
-                'Multiple entry points with name "%s".' % entry_points[0].name)
+                          'Multiple entry points with name "%s".' % entry_points[0].name)
     return iter(unique_eps(active_eps()) if group == 'allura' else active_eps())
 
 

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/macro.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/macro.py b/Allura/allura/lib/macro.py
index d4c974e..3f827ed 100644
--- a/Allura/allura/lib/macro.py
+++ b/Allura/allura/lib/macro.py
@@ -35,6 +35,8 @@ from . import security
 log = logging.getLogger(__name__)
 
 _macros = {}
+
+
 class macro(object):
 
     def __init__(self, context=None):
@@ -44,6 +46,7 @@ class macro(object):
         _macros[func.__name__] = (func, self._context)
         return func
 
+
 class parse(object):
 
     def __init__(self, context):
@@ -54,10 +57,13 @@ class parse(object):
             if s.startswith('quote '):
                 return '[[' + s[len('quote '):] + ']]'
             try:
-                parts = [ unicode(x, 'utf-8') for x in shlex.split(s.encode('utf-8')) ]
-                if not parts: return '[[' + s + ']]'
+                parts = [unicode(x, 'utf-8')
+                         for x in shlex.split(s.encode('utf-8'))]
+                if not parts:
+                    return '[[' + s + ']]'
                 macro = self._lookup_macro(parts[0])
-                if not macro: return  '[[' + s + ']]'
+                if not macro:
+                    return '[[' + s + ']]'
                 for t in parts[1:]:
                     if '=' not in t:
                         return '[-%s: missing =-]' % ' '.join(parts)
@@ -81,6 +87,7 @@ class parse(object):
         else:
             return None
 
+
 @macro('neighborhood-wiki')
 def neighborhood_feeds(tool_name, max_number=5, sort='pubdate'):
     from allura import model as M
@@ -91,17 +98,18 @@ def neighborhood_feeds(tool_name, max_number=5, sort='pubdate'):
             neighborhood_id=c.project.neighborhood._id))
     feed = feed.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
     output = ((dict(
-                href=item.link,
-                title=item.title,
-                author=item.author_name,
-                ago=h.ago(item.pubdate),
-                description=g.markdown.cached_convert(item, 'description')))
+        href=item.link,
+        title=item.title,
+        author=item.author_name,
+        ago=h.ago(item.pubdate),
+        description=g.markdown.cached_convert(item, 'description')))
         for item in feed)
     feeds = NeighborhoodFeeds(feeds=output)
     g.resource_manager.register(feeds)
     response = feeds.display(feeds=output)
     return response
 
+
 @macro('neighborhood-wiki')
 def neighborhood_blog_posts(max_number=5, sort='timestamp', summary=False):
     from forgeblog import model as BM
@@ -111,47 +119,50 @@ def neighborhood_blog_posts(max_number=5, sort='timestamp', summary=False):
         state='published'))
     posts = posts.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
     output = ((dict(
-                href=post.url(),
-                title=post.title,
-                author=post.author().display_name,
-                ago=h.ago(post.timestamp),
-                description=summary and '&nbsp;' or g.markdown.cached_convert(post, 'text')))
+        href=post.url(),
+        title=post.title,
+        author=post.author().display_name,
+        ago=h.ago(post.timestamp),
+        description=summary and '&nbsp;' or g.markdown.cached_convert(post, 'text')))
         for post in posts if post.app and
-                             security.has_access(post, 'read', project=post.app.project)() and
-                             security.has_access(post.app.project, 'read', project=post.app.project)())
+        security.has_access(post, 'read', project=post.app.project)() and
+        security.has_access(post.app.project, 'read', project=post.app.project)())
 
     posts = BlogPosts(posts=output)
     g.resource_manager.register(posts)
     response = posts.display(posts=output)
     return response
 
+
 @macro()
 def project_blog_posts(max_number=5, sort='timestamp', summary=False, mount_point=None):
     from forgeblog import model as BM
     from allura.lib.widgets.macros import BlogPosts
     app_config_ids = []
     for conf in c.project.app_configs:
-        if conf.tool_name.lower() == 'blog' and (mount_point is None or conf.options.mount_point==mount_point):
+        if conf.tool_name.lower() == 'blog' and (mount_point is None or conf.options.mount_point == mount_point):
             app_config_ids.append(conf._id)
     posts = BM.BlogPost.query.find({
         'app_config_id': {'$in': app_config_ids},
-        'state':'published',
+        'state': 'published',
     })
     posts = posts.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
     output = ((dict(
-                href=post.url(),
-                title=post.title,
-                author=post.author().display_name,
-                ago=h.ago(post.timestamp),
-                description=summary and '&nbsp;' or g.markdown.cached_convert(post, 'text')))
+        href=post.url(),
+        title=post.title,
+        author=post.author().display_name,
+        ago=h.ago(post.timestamp),
+        description=summary and '&nbsp;' or g.markdown.cached_convert(post, 'text')))
         for post in posts if security.has_access(post, 'read', project=post.app.project)() and
-                             security.has_access(post.app.project, 'read', project=post.app.project)())
+        security.has_access(post.app.project, 'read', project=post.app.project)())
     posts = BlogPosts(posts=output)
     g.resource_manager.register(posts)
     response = posts.display(posts=output)
     return response
 
-def get_projects_for_macro(category=None, display_mode='grid', sort='last_updated',
+
+def get_projects_for_macro(
+        category=None, display_mode='grid', sort='last_updated',
         show_total=False, limit=100, labels='', award='', private=False,
         columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
         grid_view_tools='',
@@ -178,9 +189,9 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
             short=award)).first()
         if aw:
             ids = [grant.granted_to_project_id for grant in
-                M.AwardGrant.query.find(dict(
-                    granted_by_neighborhood_id=c.project.neighborhood_id,
-                    award_id=aw._id))]
+                   M.AwardGrant.query.find(dict(
+                       granted_by_neighborhood_id=c.project.neighborhood_id,
+                       award_id=aw._id))]
             if '_id' in q:
                 ids = list(set(q['_id']['$in']).intersection(ids))
             q['_id'] = {'$in': ids}
@@ -203,7 +214,7 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
         # Can't filter these with a mongo query directly - have to iterate
         # through and check the ACL of each project.
         for chunk in utils.chunked_find(M.Project, q, sort_key=sort_key,
-                sort_dir=sort_dir):
+                                        sort_dir=sort_dir):
             projects.extend([p for p in chunk if p.private])
         total = len(projects)
         if sort == 'random':
@@ -225,7 +236,7 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
             docs = list(collection.find(q, {'_id': 1}))
             if docs:
                 ids = [doc['_id'] for doc in
-                        random.sample(docs, min(limit, len(docs)))]
+                       random.sample(docs, min(limit, len(docs)))]
                 if '_id' in q:
                     ids = list(set(q['_id']['$in']).intersection(ids))
                 q['_id'] = {'$in': ids}
@@ -233,7 +244,7 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
                 random.shuffle(projects)
         else:
             projects = M.Project.query.find(q).limit(limit).sort(sort_key,
-                sort_dir).all()
+                                                                 sort_dir).all()
 
     pl = ProjectList()
     g.resource_manager.register(pl)
@@ -249,27 +260,29 @@ def get_projects_for_macro(category=None, display_mode='grid', sort='last_update
                 if h.has_access(p, 'read')():
                     total = total + 1
         response = '<p class="macro_projects_total">%s Projects</p>%s' % \
-                (total, response)
+            (total, response)
     return response
 
 
 @macro('neighborhood-wiki')
 def projects(category=None, display_mode='grid', sort='last_updated',
-        show_total=False, limit=100, labels='', award='', private=False,
-        columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
-        grid_view_tools=''):
+             show_total=False, limit=100, labels='', award='', private=False,
+             columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
+             grid_view_tools=''):
     initial_q = dict(neighborhood_id=c.project.neighborhood_id)
-    return get_projects_for_macro(category=category, display_mode=display_mode, sort=sort,
-                   show_total=show_total, limit=limit, labels=labels, award=award, private=private,
-                   columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
-                   show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
-                   initial_q=initial_q)
+    return get_projects_for_macro(
+        category=category, display_mode=display_mode, sort=sort,
+        show_total=show_total, limit=limit, labels=labels, award=award, private=private,
+        columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
+        show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
+        initial_q=initial_q)
+
 
 @macro('userproject-wiki')
 def my_projects(category=None, display_mode='grid', sort='last_updated',
-        show_total=False, limit=100, labels='', award='', private=False,
-        columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
-        grid_view_tools=''):
+                show_total=False, limit=100, labels='', award='', private=False,
+                columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
+                grid_view_tools=''):
 
     myproj_user = c.project.user_project_of
     if myproj_user is None:
@@ -280,11 +293,13 @@ def my_projects(category=None, display_mode='grid', sort='last_updated',
         ids.append(p._id)
 
     initial_q = dict(_id={'$in': ids})
-    return get_projects_for_macro(category=category, display_mode=display_mode, sort=sort,
-                   show_total=show_total, limit=limit, labels=labels, award=award, private=private,
-                   columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
-                   show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
-                   initial_q=initial_q)
+    return get_projects_for_macro(
+        category=category, display_mode=display_mode, sort=sort,
+        show_total=show_total, limit=limit, labels=labels, award=award, private=private,
+        columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
+        show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
+        initial_q=initial_q)
+
 
 @macro()
 def project_screenshots():
@@ -294,6 +309,7 @@ def project_screenshots():
     response = ps.display(project=c.project)
     return response
 
+
 @macro()
 def gittip_button(username):
     from allura.lib.widgets.macros import GittipButton
@@ -302,7 +318,10 @@ def gittip_button(username):
     response = button.display(username=username)
     return response
 
-# FIXME: this is SourceForge specific - need to provide a way for macros to come from other packages
+# FIXME: this is SourceForge specific - need to provide a way for macros
+# to come from other packages
+
+
 @macro()
 def download_button():
     from allura.lib.widgets.macros import DownloadButton
@@ -311,7 +330,8 @@ def download_button():
         res_mgr = g.resource_manager
     except TypeError:
         # e.g. "TypeError: No object (name: widget_context) has been registered for this thread"
-        # this is an ugly way to check to see if we're outside of a web request and avoid errors
+        # this is an ugly way to check to see if we're outside of a web request
+        # and avoid errors
         return '[[download_button]]'
     else:
         res_mgr.register(button)
@@ -341,6 +361,7 @@ def include(ref=None, **kw):
     response = sb.display(artifact=artifact, attrs=kw)
     return response
 
+
 @macro()
 def img(src=None, **kw):
     attrs = ('%s="%s"' % t for t in kw.iteritems())
@@ -351,19 +372,21 @@ def img(src=None, **kw):
     else:
         return '<img src="./attachment/%s" %s/>' % (src, ' '.join(attrs))
 
+
 @macro()
 def project_admins():
     admins = c.project.users_with_role('Admin')
     from allura.lib.widgets.macros import ProjectAdmins
     output = ((dict(
-            url=user.url(),
-            name=user.display_name))
+        url=user.url(),
+        name=user.display_name))
         for user in admins)
     users = ProjectAdmins(users=output)
     g.resource_manager.register(users)
     response = users.display(users=output)
     return response
 
+
 @macro()
 def members(limit=20):
     from allura.lib.widgets.macros import Members
@@ -371,10 +394,10 @@ def members(limit=20):
     admins = set(c.project.users_with_role('Admin'))
     members = sorted(c.project.users(), key=attrgetter('display_name'))
     output = [dict(
-            url=user.url(),
-            name=user.display_name,
-            admin=' (admin)' if user in admins else '',
-            )
+        url=user.url(),
+        name=user.display_name,
+        admin=' (admin)' if user in admins else '',
+    )
         for user in members[:limit]]
 
     over_limit = len(members) > limit
@@ -383,10 +406,12 @@ def members(limit=20):
     response = users.display(users=output, over_limit=over_limit)
     return response
 
+
 @macro()
 def embed(url=None):
     consumer = oembed.OEmbedConsumer()
-    endpoint = oembed.OEmbedEndpoint('http://www.youtube.com/oembed', ['http://*.youtube.com/*', 'https://*.youtube.com/*'])
+    endpoint = oembed.OEmbedEndpoint(
+        'http://www.youtube.com/oembed', ['http://*.youtube.com/*', 'https://*.youtube.com/*'])
     consumer.addEndpoint(endpoint)
     try:
         return jinja2.Markup('<div class="grid-20">%s</div>' % consumer.embed(url)['html'])
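
For context on the @macro decorator whose registrations are reindented above: it records the function in the module-level _macros table, which parse() consults when expanding [[...]] markup. A registration sketch with a made-up macro name, not part of the commit:

    from allura.lib import macro

    @macro.macro()
    def greeting(name='world'):
        # wiki text such as [[greeting name=committers]] would dispatch here
        return 'Hello, %s!' % name

    assert 'greeting' in macro._macros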

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/mail_util.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/mail_util.py b/Allura/allura/lib/mail_util.py
index d7d0b4b..5ec98da 100644
--- a/Allura/allura/lib/mail_util.py
+++ b/Allura/allura/lib/mail_util.py
@@ -38,14 +38,16 @@ RE_MESSAGE_ID = re.compile(r'<(?:[^>]*/)?([^>]*)>')
 config = ConfigProxy(
     common_suffix='forgemail.domain',
     return_path='forgemail.return_path')
-EMAIL_VALIDATOR=fev.Email(not_empty=True)
+EMAIL_VALIDATOR = fev.Email(not_empty=True)
+
 
 def Header(text, *more_text):
     '''Helper to make sure we encode headers properly'''
     if isinstance(text, header.Header):
         return text
     # email.header.Header handles str vs unicode differently
-    # see http://docs.python.org/library/email.header.html#email.header.Header.append
+    # see
+    # http://docs.python.org/library/email.header.html#email.header.Header.append
     if type(text) != unicode:
         raise TypeError('This must be unicode: %r' % text)
     head = header.Header(text)
@@ -55,6 +57,7 @@ def Header(text, *more_text):
         head.append(m)
     return head
 
+
 def AddrHeader(fromaddr):
     '''Accepts any of:
         Header() instance
@@ -63,9 +66,9 @@ def AddrHeader(fromaddr):
     '''
     if isinstance(fromaddr, basestring) and ' <' in fromaddr:
         name, addr = fromaddr.rsplit(' <', 1)
-        addr = '<' + addr # restore the char we just split off
+        addr = '<' + addr  # restore the char we just split off
         addrheader = Header(name, addr)
-        if str(addrheader).startswith('=?'): # encoding escape chars
+        if str(addrheader).startswith('=?'):  # encoding escape chars
             # then quoting the name is no longer necessary
             name = name.strip('"')
             addrheader = Header(name, addr)
@@ -111,6 +114,7 @@ def parse_address(addr):
             raise exc.AddressException, 'Unknown tool: ' + domain
     return userpart, project, app
 
+
 def parse_message(data):
     # Parse the email to its constituent parts
     parser = email.feedparser.FeedParser()
@@ -149,10 +153,12 @@ def parse_message(data):
             result['payload'] = result['payload'].decode(charset)
     return result
 
+
 def identify_sender(peer, email_address, headers, msg):
     from allura import model as M
     # Dumb ID -- just look for email address claimed by a particular user
-    addr = M.EmailAddress.query.get(_id=M.EmailAddress.canonical(email_address))
+    addr = M.EmailAddress.query.get(
+        _id=M.EmailAddress.canonical(email_address))
     if addr and addr.claimed_by_user_id:
         return addr.claimed_by_user()
     from_address = headers.get('From', '').strip()
@@ -163,12 +169,14 @@ def identify_sender(peer, email_address, headers, msg):
         return addr.claimed_by_user()
     return M.User.anonymous()
 
+
 def encode_email_part(content, content_type):
     try:
         return MIMEText(content.encode('ascii'), content_type, 'ascii')
     except:
         return MIMEText(content.encode('utf-8'), content_type, 'utf-8')
 
+
 def make_multipart_message(*parts):
     msg = MIMEMultipart('related')
     msg.preamble = 'This is a multi-part message in MIME format.'
@@ -178,18 +186,24 @@ def make_multipart_message(*parts):
         alt.attach(part)
     return msg
 
+
 def _parse_message_id(msgid):
-    if msgid is None: return []
-    return [ mo.group(1)
-             for mo in RE_MESSAGE_ID.finditer(msgid) ]
+    if msgid is None:
+        return []
+    return [mo.group(1)
+            for mo in RE_MESSAGE_ID.finditer(msgid)]
+
 
 def _parse_smtp_addr(addr):
     addr = str(addr)
     addrs = _parse_message_id(addr)
-    if addrs and addrs[0]: return addrs[0]
-    if '@' in addr: return addr
+    if addrs and addrs[0]:
+        return addrs[0]
+    if '@' in addr:
+        return addr
     return u'noreply@in.sf.net'
 
+
 def isvalid(addr):
     '''return True if addr is a (possibly) valid email address, false
     otherwise'''
@@ -199,13 +213,15 @@ def isvalid(addr):
     except fev.Invalid:
         return False
 
+
 class SMTPClient(object):
 
     def __init__(self):
         self._client = None
 
-    def sendmail(self, addrs, fromaddr, reply_to, subject, message_id, in_reply_to, message,
-                 sender=None, references=None, cc=None, to=None):
+    def sendmail(
+            self, addrs, fromaddr, reply_to, subject, message_id, in_reply_to, message,
+            sender=None, references=None, cc=None, to=None):
         if not addrs:
             return
         if to:
@@ -232,7 +248,7 @@ class SMTPClient(object):
             message['References'] = Header(*references)
         content = message.as_string()
         smtp_addrs = map(_parse_smtp_addr, addrs)
-        smtp_addrs = [ a for a in smtp_addrs if isvalid(a) ]
+        smtp_addrs = [a for a in smtp_addrs if isvalid(a)]
         if not smtp_addrs:
             log.warning('No valid addrs in %s, so not sending mail',
                         map(unicode, addrs))
@@ -263,7 +279,8 @@ class SMTPClient(object):
                 timeout=float(tg.config.get('smtp_timeout', 10)),
             )
         if tg.config.get('smtp_user', None):
-            smtp_client.login(tg.config['smtp_user'], tg.config['smtp_password'])
+            smtp_client.login(tg.config['smtp_user'],
+                              tg.config['smtp_password'])
         if asbool(tg.config.get('smtp_tls', False)):
             smtp_client.starttls()
         self._client = smtp_client
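
The mail_util.py changes are likewise layout-only. Two of the pure helpers reformatted above, exercised directly (illustrative; isvalid()'s positive branch is outside the visible hunks, so its return value is assumed truthy for a valid address):

    from allura.lib import mail_util

    # _parse_message_id() pulls bare ids out of angle-bracketed header values
    assert mail_util._parse_message_id('<1234.abcd@example.com>') == ['1234.abcd@example.com']
    assert mail_util._parse_message_id(None) == []

    # isvalid() wraps formencode's Email validator
    assert mail_util.isvalid('dev@example.com')
    assert not mail_util.isvalid('not an address')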

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/markdown_extensions.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/markdown_extensions.py b/Allura/allura/lib/markdown_extensions.py
index 8a959b9..a7af428 100644
--- a/Allura/allura/lib/markdown_extensions.py
+++ b/Allura/allura/lib/markdown_extensions.py
@@ -31,15 +31,16 @@ from allura.lib.utils import ForgeHTMLSanitizer
 
 log = logging.getLogger(__name__)
 
-PLAINTEXT_BLOCK_RE = re.compile( \
+PLAINTEXT_BLOCK_RE = re.compile(
     r'(?P<bplain>\[plain\])(?P<code>.*?)(?P<eplain>\[\/plain\])',
-    re.MULTILINE|re.DOTALL
-    )
+    re.MULTILINE | re.DOTALL
+)
 
 MACRO_PATTERN = r'\[\[([^\]\[]+)\]\]'
 
 
 class CommitMessageExtension(markdown.Extension):
+
     """Markdown extension for processing commit messages.
 
     People don't expect their commit messages to be parsed as Markdown. This
@@ -61,6 +62,7 @@ class CommitMessageExtension(markdown.Extension):
     the :class:`PatternReplacingProcessor` preprocessor.
 
     """
+
     def __init__(self, app):
         markdown.Extension.__init__(self)
         self.app = app
@@ -75,13 +77,13 @@ class CommitMessageExtension(markdown.Extension):
         # remove all inlinepattern processors except short refs and links
         md.inlinePatterns.clear()
         md.inlinePatterns["link"] = markdown.inlinepatterns.LinkPattern(
-                markdown.inlinepatterns.LINK_RE, md)
+            markdown.inlinepatterns.LINK_RE, md)
         md.inlinePatterns['short_reference'] = ForgeLinkPattern(
-                markdown.inlinepatterns.SHORT_REF_RE, md, ext=self)
+            markdown.inlinepatterns.SHORT_REF_RE, md, ext=self)
         # remove all default block processors except for paragraph
         md.parser.blockprocessors.clear()
         md.parser.blockprocessors['paragraph'] = \
-                markdown.blockprocessors.ParagraphProcessor(md.parser)
+            markdown.blockprocessors.ParagraphProcessor(md.parser)
         # wrap artifact link text in square brackets
         self.forge_link_tree_processor = ForgeLinkTreeProcessor(md)
         md.treeprocessors['links'] = self.forge_link_tree_processor
@@ -96,6 +98,7 @@ class CommitMessageExtension(markdown.Extension):
 
 
 class Pattern(object):
+
     """Base class for regex patterns used by the :class:`PatternReplacingProcessor`.
 
     Subclasses must define :attr:`pattern` (a compiled regex), and
@@ -116,6 +119,7 @@ class Pattern(object):
 
 
 class TracRef1(Pattern):
+
     """Replaces Trac-style short refs with links. Example patterns::
 
         #100 (ticket 100)
@@ -128,12 +132,13 @@ class TracRef1(Pattern):
         shortlink = M.Shortlink.lookup(match.group(1))
         if shortlink and not getattr(shortlink.ref.artifact, 'deleted', False):
             return '[{ref}]({url})'.format(
-                    ref=match.group(1),
-                    url=shortlink.url)
+                ref=match.group(1),
+                url=shortlink.url)
         return match.group()
 
 
 class TracRef2(Pattern):
+
     """Replaces Trac-style short refs with links. Example patterns::
 
         ticket:100
@@ -141,22 +146,23 @@ class TracRef2(Pattern):
 
     """
     pattern = re.compile(
-            Pattern.BEGIN + r'((comment:(\d+):)?(ticket:)(\d+))' + Pattern.END)
+        Pattern.BEGIN + r'((comment:(\d+):)?(ticket:)(\d+))' + Pattern.END)
 
     def repl(self, match):
         shortlink = M.Shortlink.lookup('#' + match.group(6))
         if shortlink and not getattr(shortlink.ref.artifact, 'deleted', False):
             url = shortlink.url
             if match.group(4):
-                slug = self.get_comment_slug(shortlink.ref.artifact, match.group(4))
+                slug = self.get_comment_slug(
+                    shortlink.ref.artifact, match.group(4))
                 slug = '#' + slug if slug else ''
                 url = url + slug
 
             return '{front}[{ref}]({url}){back}'.format(
-                    front=match.group(1),
-                    ref=match.group(2),
-                    url=url,
-                    back=match.group(7))
+                front=match.group(1),
+                ref=match.group(2),
+                url=url,
+                back=match.group(7))
         return match.group()
 
     def get_comment_slug(self, ticket, comment_num):
@@ -173,10 +179,11 @@ class TracRef2(Pattern):
             status={'$in': ['ok', 'pending']})).sort('timestamp')
 
         if comment_num <= comments.count():
-            return comments.all()[comment_num-1].slug
+            return comments.all()[comment_num - 1].slug
 
 
 class TracRef3(Pattern):
+
     """Replaces Trac-style short refs with links. Example patterns::
 
         source:trunk/server/file.c@123#L456 (rev 123, lineno 456)
@@ -185,7 +192,7 @@ class TracRef3(Pattern):
 
     """
     pattern = re.compile(
-            Pattern.BEGIN + r'((source:)([^@#\s]+)(@(\w+))?(#L(\d+))?)' + Pattern.END)
+        Pattern.BEGIN + r'((source:)([^@#\s]+)(@(\w+))?(#L(\d+))?)' + Pattern.END)
 
     def __init__(self, app):
         super(Pattern, self).__init__()
@@ -195,22 +202,23 @@ class TracRef3(Pattern):
         if not self.app:
             return match.group()
         file, rev, lineno = (
-                match.group(4),
-                match.group(6) or 'HEAD',
-                '#l' + match.group(8) if match.group(8) else '')
+            match.group(4),
+            match.group(6) or 'HEAD',
+            '#l' + match.group(8) if match.group(8) else '')
         url = '{app_url}{rev}/tree/{file}{lineno}'.format(
-                app_url=self.app.url,
-                rev=rev,
-                file=file,
-                lineno=lineno)
+            app_url=self.app.url,
+            rev=rev,
+            file=file,
+            lineno=lineno)
         return '{front}[{ref}]({url}){back}'.format(
-                front=match.group(1),
-                ref=match.group(2),
-                url=url,
-                back=match.group(9))
+            front=match.group(1),
+            ref=match.group(2),
+            url=url,
+            back=match.group(9))
 
 
 class PatternReplacingProcessor(markdown.preprocessors.Preprocessor):
+
     """A Markdown preprocessor that searches the source lines for patterns and
     replaces matches with alternate text.
 
@@ -238,23 +246,33 @@ class ForgeExtension(markdown.Extension):
 
     def extendMarkdown(self, md, md_globals):
         md.registerExtension(self)
-        # allow markdown within e.g. <div markdown>...</div>  More info at: https://github.com/waylan/Python-Markdown/issues/52
+        # allow markdown within e.g. <div markdown>...</div>  More info at:
+        # https://github.com/waylan/Python-Markdown/issues/52
         md.preprocessors['html_block'].markdown_in_raw = True
         md.preprocessors['fenced-code'] = FencedCodeProcessor()
-        md.preprocessors.add('plain_text_block', PlainTextPreprocessor(md), "_begin")
-        md.preprocessors.add('macro_include', ForgeMacroIncludePreprocessor(md), '_end')
-        # this has to be before the 'escape' processor, otherwise weird placeholders are inserted for escaped chars within urls, and then the autolink can't match the whole url
-        md.inlinePatterns.add('autolink_without_brackets', AutolinkPattern(r'(http(?:s?)://[a-zA-Z0-9./\-\\_%?&=+#;~:!]+)', md), '<escape')
+        md.preprocessors.add('plain_text_block',
+                             PlainTextPreprocessor(md), "_begin")
+        md.preprocessors.add(
+            'macro_include', ForgeMacroIncludePreprocessor(md), '_end')
+        # this has to be before the 'escape' processor, otherwise weird
+        # placeholders are inserted for escaped chars within urls, and then the
+        # autolink can't match the whole url
+        md.inlinePatterns.add('autolink_without_brackets', AutolinkPattern(
+            r'(http(?:s?)://[a-zA-Z0-9./\-\\_%?&=+#;~:!]+)', md), '<escape')
         # replace the link pattern with our extended version
-        md.inlinePatterns['link'] = ForgeLinkPattern(markdown.inlinepatterns.LINK_RE, md, ext=self)
-        md.inlinePatterns['short_reference'] = ForgeLinkPattern(markdown.inlinepatterns.SHORT_REF_RE, md, ext=self)
+        md.inlinePatterns['link'] = ForgeLinkPattern(
+            markdown.inlinepatterns.LINK_RE, md, ext=self)
+        md.inlinePatterns['short_reference'] = ForgeLinkPattern(
+            markdown.inlinepatterns.SHORT_REF_RE, md, ext=self)
         # macro must be processed before links
-        md.inlinePatterns.add('macro', ForgeMacroPattern(MACRO_PATTERN, md, ext=self), '<link')
+        md.inlinePatterns.add(
+            'macro', ForgeMacroPattern(MACRO_PATTERN, md, ext=self), '<link')
         self.forge_link_tree_processor = ForgeLinkTreeProcessor(md)
         md.treeprocessors['links'] = self.forge_link_tree_processor
         # Sanitize HTML
         md.postprocessors['sanitize_html'] = HTMLSanitizer()
-        # Rewrite all relative links that don't start with . to have a '../' prefix
+        # Rewrite all relative links that don't start with . to have a '../'
+        # prefix
         md.postprocessors['rewrite_relative_links'] = RelativeLinkRewriter(
             make_absolute=self._is_email)
         # Put a class around markdown content for custom css
@@ -334,6 +352,7 @@ class ForgeLinkPattern(markdown.inlinepatterns.LinkPattern):
 
 
 class PlainTextPreprocessor(markdown.preprocessors.Preprocessor):
+
     '''
     This was used earlier for [plain] tags that the Blog tool's rss importer
     created, before html2text did good escaping of all special markdown chars.
@@ -347,7 +366,8 @@ class PlainTextPreprocessor(markdown.preprocessors.Preprocessor):
             for m in res:
                 code = self._escape(m.group('code'))
                 placeholder = self.markdown.htmlStash.store(code, safe=True)
-                text = '%s%s%s'% (text[:m.start()], placeholder, text[m.end():])
+                text = '%s%s%s' % (
+                    text[:m.start()], placeholder, text[m.end():])
                 break
             else:
                 break
@@ -393,6 +413,7 @@ class ForgeMacroPattern(markdown.inlinepatterns.Pattern):
 
 
 class ForgeLinkTreeProcessor(markdown.treeprocessors.Treeprocessor):
+
     '''Wraps artifact links with []'''
 
     def __init__(self, parent):
@@ -448,7 +469,8 @@ class RelativeLinkRewriter(markdown.postprocessors.Postprocessor):
 
     def _rewrite(self, tag, attr):
         val = tag.get(attr)
-        if val is None: return
+        if val is None:
+            return
         if ' ' in val:
             # Don't urllib.quote to avoid possible double-quoting
             # just make sure no spaces
@@ -458,18 +480,22 @@ class RelativeLinkRewriter(markdown.postprocessors.Postprocessor):
             if 'sf.net' in val or 'sourceforge.net' in val:
                 return
             else:
-                tag['rel']='nofollow'
+                tag['rel'] = 'nofollow'
                 return
-        if val.startswith('/'): return
-        if val.startswith('.'): return
-        if val.startswith('mailto:'): return
-        if val.startswith('#'): return
+        if val.startswith('/'):
+            return
+        if val.startswith('.'):
+            return
+        if val.startswith('mailto:'):
+            return
+        if val.startswith('#'):
+            return
         tag[attr] = '../' + val
 
     def _rewrite_abs(self, tag, attr):
         self._rewrite(tag, attr)
         val = tag.get(attr)
-        val = urljoin(config.get('base_url', 'http://sourceforge.net/'),val)
+        val = urljoin(config.get('base_url', 'http://sourceforge.net/'), val)
         tag[attr] = val
 
 
@@ -478,7 +504,7 @@ class HTMLSanitizer(markdown.postprocessors.Postprocessor):
     def run(self, text):
         try:
             p = ForgeHTMLSanitizer('utf-8')
-        except TypeError: # $@%## pre-released versions from SOG
+        except TypeError:  # $@%## pre-released versions from SOG
             p = ForgeHTMLSanitizer('utf-8', '')
         p.feed(text.encode('utf-8'))
         return unicode(p.output(), 'utf-8')
@@ -487,7 +513,8 @@ class HTMLSanitizer(markdown.postprocessors.Postprocessor):
 class AutolinkPattern(markdown.inlinepatterns.Pattern):
 
     def __init__(self, pattern, markdown_instance=None):
-        markdown.inlinepatterns.Pattern.__init__(self, pattern, markdown_instance)
+        markdown.inlinepatterns.Pattern.__init__(
+            self, pattern, markdown_instance)
         # override the complete regex, requiring the preceding text (.*?) to end
         # with whitespace or beginning of line "\s|^"
         self.compiled_re = re.compile("^(.*?\s|^)%s(.*?)$" % pattern,
@@ -497,7 +524,8 @@ class AutolinkPattern(markdown.inlinepatterns.Pattern):
         old_link = mo.group(2)
         result = markdown.util.etree.Element('a')
         result.text = old_link
-        # since this is run before the builtin 'escape' processor, we have to do our own unescaping
+        # since this is run before the builtin 'escape' processor, we have to
+        # do our own unescaping
         for char in markdown.Markdown.ESCAPED_CHARS:
             old_link = old_link.replace('\\' + char, char)
         result.set('href', old_link)
@@ -505,6 +533,7 @@ class AutolinkPattern(markdown.inlinepatterns.Pattern):
 
 
 class ForgeMacroIncludePreprocessor(markdown.preprocessors.Preprocessor):
+
     '''Join include statements to prevent extra <br>'s inserted by nl2br extension.
 
     Converts:

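The hunks above only reflow RelativeLinkRewriter._rewrite; the rule itself is unchanged: anchors, mailto: links and paths that already start with '.' or '/' are left alone, absolute links either pass through or get rel=nofollow, and any other relative link gets a '../' prefix. A minimal standalone sketch of that rule, operating on a plain string rather than a parsed tag attribute (function name and sample paths are illustrative, not Allura's):

    def make_relative(val):
        # Same startswith checks as RelativeLinkRewriter._rewrite above;
        # absolute http(s) URLs are out of scope for this sketch.
        if val is None:
            return val
        if val.startswith(('/', '.', 'mailto:', '#')):
            return val
        return '../' + val

    make_relative('WikiPage/attachment/logo.png')  # -> '../WikiPage/attachment/logo.png'
    make_relative('#toc')                          # -> '#toc'
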
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/oid_helper.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/oid_helper.py b/Allura/allura/lib/oid_helper.py
index bf16dd8..8e41a1f 100644
--- a/Allura/allura/lib/oid_helper.py
+++ b/Allura/allura/lib/oid_helper.py
@@ -30,8 +30,9 @@ log = logging.getLogger(__name__)
 from openid import oidutil
 oidutil.log = log.info
 
+
 def verify_oid(oid_url, failure_redirect=None, return_to=None,
-                  **kw):
+               **kw):
     '''Step 1 of OID verification -- redirect to provider site'''
     log.info('Trying to login via %s', oid_url)
     realm = config.get('openid.realm', 'http://localhost:8080/')
@@ -43,7 +44,7 @@ def verify_oid(oid_url, failure_redirect=None, return_to=None,
         log.exception('Error in openid login')
         flash(str(ex[0]), 'error')
         redirect(failure_redirect)
-    if req is None: # pragma no cover
+    if req is None:  # pragma no cover
         flash('No openid services found for <code>%s</code>' % oid_url,
               'error')
         redirect(failure_redirect)
@@ -54,7 +55,8 @@ def verify_oid(oid_url, failure_redirect=None, return_to=None,
         session.save()
         redirect(redirect_url)
     else:
-        return dict(kw, form=req.formMarkup(realm, return_to=return_to))    
+        return dict(kw, form=req.formMarkup(realm, return_to=return_to))
+
 
 def process_oid(failure_redirect=None):
     oidconsumer = consumer.Consumer(g.oid_session(), g.oid_store)
@@ -84,7 +86,7 @@ def process_oid(failure_redirect=None):
             # way their account with you is not compromised if their
             # i-name registration expires and is bought by someone else.
             message += ("  This is an i-name, and its persistent ID is %s"
-                        % info.endpoint.canonicalID )
+                        % info.endpoint.canonicalID)
         flash(message, 'info')
     elif info.status == consumer.CANCEL:
         # cancelled
@@ -109,5 +111,6 @@ def process_oid(failure_redirect=None):
         flash(message, 'error')
         redirect(failure_redirect)
     session.save()
-    oid_obj = M.OpenId.upsert(info.identity_url, display_identifier=display_identifier)
+    oid_obj = M.OpenId.upsert(
+        info.identity_url, display_identifier=display_identifier)
     return oid_obj

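verify_oid/process_oid above are the usual two-step python-openid consumer flow: step 1 discovers the provider and either redirects the browser or renders an auto-submitting form, and step 2 completes the response on the return_to URL. A rough sketch of step 1 outside TurboGears, assuming the python-openid package this module already uses (the function name, store choice and URLs are illustrative; MemoryStore is only suitable for testing):

    from openid.consumer import consumer
    from openid.store.memstore import MemoryStore

    def begin_oid(session, oid_url,
                  realm='http://localhost:8080/',
                  return_to='http://localhost:8080/auth/process_oid'):
        oidconsumer = consumer.Consumer(session, MemoryStore())
        req = oidconsumer.begin(oid_url)   # may raise DiscoveryFailure
        if req is None:                    # no OpenID services found
            return None
        if req.shouldSendRedirect():
            return 'redirect', req.redirectURL(realm, return_to)
        return 'form', req.formMarkup(realm, return_to=return_to)
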
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/package_path_loader.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/package_path_loader.py b/Allura/allura/lib/package_path_loader.py
index 8272acf..9e6548a 100644
--- a/Allura/allura/lib/package_path_loader.py
+++ b/Allura/allura/lib/package_path_loader.py
@@ -132,20 +132,21 @@ from allura.lib.helpers import topological_sort, iter_entry_points
 
 
 class PackagePathLoader(jinja2.BaseLoader):
+
     def __init__(self, override_entrypoint='allura.theme.override',
-                default_paths=None,
-                override_root='override',
-                ):
+                 default_paths=None,
+                 override_root='override',
+                 ):
         '''
         Set up initial values... defaults are for Allura.
         '''
         # TODO: How does one handle project-theme?
         if default_paths is None:
             default_paths = [
-                    #['project-theme', None],
-                    ['site-theme', None],
-                    ['allura', '/'],
-                ]
+                #['project-theme', None],
+                ['site-theme', None],
+                ['allura', '/'],
+            ]
 
         self.override_entrypoint = override_entrypoint
         self.default_paths = default_paths
@@ -161,9 +162,9 @@ class PackagePathLoader(jinja2.BaseLoader):
         """
         paths = self.default_paths[:]  # copy default_paths
         paths[-1:0] = [  # insert all eps just before last item, by default
-                [ep.name, pkg_resources.resource_filename(ep.module_name, "")]
-                for ep in iter_entry_points(self.override_entrypoint)
-            ]
+            [ep.name, pkg_resources.resource_filename(ep.module_name, "")]
+            for ep in iter_entry_points(self.override_entrypoint)
+        ]
         return paths
 
     def _load_rules(self):
@@ -213,7 +214,8 @@ class PackagePathLoader(jinja2.BaseLoader):
         path 'a' should come before path 'b'.
         """
         names = [p[0] for p in paths]
-        # filter rules that reference non-existent paths to prevent "loops" in the graph
+        # filter rules that reference non-existent paths to prevent "loops" in
+        # the graph
         rules = [r for r in rules if r[0] in names and r[1] in names]
         ordered_paths = topological_sort(names, rules)
         if ordered_paths is None:

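The reindented list comprehension and the comment rewrap above belong to the path-ordering step: override paths are sorted topologically from (a, b) rules meaning path 'a' must come before path 'b', and a cycle makes the sort return None. A self-contained sketch of that ordering (Kahn's algorithm; the names and rules below are made up, and Allura's own topological_sort helper is not used):

    from collections import defaultdict

    def topo_sort(names, rules):
        """Order names so that for every (a, b) in rules, a comes before b.
        Returns None if the rules contain a cycle."""
        succ, indeg = defaultdict(set), dict.fromkeys(names, 0)
        for a, b in rules:
            if b not in succ[a]:
                succ[a].add(b)
                indeg[b] += 1
        ready = [n for n in names if indeg[n] == 0]
        ordered = []
        while ready:
            n = ready.pop(0)
            ordered.append(n)
            for m in succ[n]:
                indeg[m] -= 1
                if indeg[m] == 0:
                    ready.append(m)
        return ordered if len(ordered) == len(names) else None

    print(topo_sort(['site-theme', 'my-theme', 'allura'],
                    [('my-theme', 'allura'), ('site-theme', 'my-theme')]))
    # ['site-theme', 'my-theme', 'allura']
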
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/patches.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/patches.py b/Allura/allura/lib/patches.py
index 804c947..19f91b3 100644
--- a/Allura/allura/lib/patches.py
+++ b/Allura/allura/lib/patches.py
@@ -26,6 +26,7 @@ import simplejson
 
 from allura.lib import helpers as h
 
+
 def apply():
     old_lookup_template_engine = tg.decorators.Decoration.lookup_template_engine
 
@@ -67,7 +68,7 @@ def apply():
         '''Monkey-patched to use 301 redirects for SEO'''
         response_type = getattr(request, 'response_type', None)
         if (request.method == 'GET' and request.path.endswith('/')
-                and not response_type and len(request.params)==0):
+                and not response_type and len(request.params) == 0):
             raise webob.exc.HTTPMovedPermanently(location=request.url[:-1])
         return func(*args, **kwargs)
 
@@ -77,18 +78,17 @@ def apply():
         '''Monkey-patched to use 301 redirects for SEO'''
         response_type = getattr(request, 'response_type', None)
         if (request.method == 'GET' and not(request.path.endswith('/'))
-                and not response_type and len(request.params)==0):
-            raise webob.exc.HTTPMovedPermanently(location=request.url+'/')
+                and not response_type and len(request.params) == 0):
+            raise webob.exc.HTTPMovedPermanently(location=request.url + '/')
         return func(*args, **kwargs)
 
-
     # http://blog.watchfire.com/wfblog/2011/10/json-based-xss-exploitation.html
     # change < to its unicode escape when rendering JSON out of turbogears
     # This is to avoid IE9 and earlier, which don't know the json content type
     # and may attempt to render JSON data as HTML if the URL ends in .html
-    
     original_tg_jsonify_GenericJSON_encode = tg.jsonify.GenericJSON.encode
-    escape_pattern_with_lt = re.compile(simplejson.encoder.ESCAPE.pattern.rstrip(']') + '<' + ']')
+    escape_pattern_with_lt = re.compile(
+        simplejson.encoder.ESCAPE.pattern.rstrip(']') + '<' + ']')
 
     @h.monkeypatch(tg.jsonify.GenericJSON)
     def encode(self, o):
@@ -96,8 +96,8 @@ def apply():
         # encode_basestring_ascii() and encode_basestring_ascii may likely be c-compiled
         # and thus not monkeypatchable
         with h.push_config(self, ensure_ascii=False), \
-             h.push_config(simplejson.encoder, ESCAPE=escape_pattern_with_lt), \
-             mock.patch.dict(simplejson.encoder.ESCAPE_DCT, {'<': r'\u003C'}):
+                h.push_config(simplejson.encoder, ESCAPE=escape_pattern_with_lt), \
+                mock.patch.dict(simplejson.encoder.ESCAPE_DCT, {'<': r'\u003C'}):
             return original_tg_jsonify_GenericJSON_encode(self, o)
 
 
@@ -106,11 +106,13 @@ def apply():
 # over and over
 old_controller_call = tg.controllers.DecoratedController._call
 
+
 def newrelic():
     @h.monkeypatch(tg.controllers.DecoratedController,
                    tg.controllers.decoratedcontroller.DecoratedController)
     def _call(self, controller, *args, **kwargs):
         '''Set NewRelic transaction name to actual controller name'''
         import newrelic.agent
-        newrelic.agent.set_transaction_name(newrelic.agent.callable_name(controller))
+        newrelic.agent.set_transaction_name(
+            newrelic.agent.callable_name(controller))
         return old_controller_call(self, controller, *args, **kwargs)

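The JSON-escaping hunks above are formatting-only; the behaviour they preserve is that every '<' in a JSON response is emitted as its unicode escape, so a browser that wrongly sniffs the body as HTML never sees a literal script tag. A minimal standalone illustration of the same idea, post-processing the encoded string with the stdlib json module instead of patching simplejson's encoder:

    import json

    def encode_json_safe(obj):
        # \u003C decodes back to '<', so the payload is unchanged for JSON
        # consumers but harmless to an HTML-sniffing browser.
        return json.dumps(obj).replace('<', '\\u003C')

    print(encode_json_safe({'html': '</script><script>alert(1)</script>'}))
    # {"html": "\u003C/script>\u003Cscript>alert(1)\u003C/script>"}
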
http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/c93733ac/Allura/allura/lib/plugin.py
----------------------------------------------------------------------
diff --git a/Allura/allura/lib/plugin.py b/Allura/allura/lib/plugin.py
index e809fed..809d895 100644
--- a/Allura/allura/lib/plugin.py
+++ b/Allura/allura/lib/plugin.py
@@ -54,7 +54,9 @@ from paste.deploy.converters import asbool
 
 log = logging.getLogger(__name__)
 
+
 class AuthenticationProvider(object):
+
     '''
     An interface to provide authentication services for Allura.
 
@@ -115,7 +117,8 @@ class AuthenticationProvider(object):
 
     def login(self, user=None):
         try:
-            if user is None: user = self._login()
+            if user is None:
+                user = self._login()
             self.session['userid'] = user._id
             self.session.save()
             g.zarkov_event('login', user=user)
@@ -210,7 +213,9 @@ class AuthenticationProvider(object):
         '''
         raise NotImplementedError, 'user_registration_date'
 
+
 class LocalAuthenticationProvider(AuthenticationProvider):
+
     '''
     Stores user passwords on the User model, in mongo.  Uses per-user salt and
     SHA-256 encryption.
@@ -232,11 +237,14 @@ class LocalAuthenticationProvider(AuthenticationProvider):
         return user
 
     def _validate_password(self, user, password):
-        if user is None: return False
-        if not user.password: return False
-        salt = str(user.password[6:6+user.SALT_LEN])
+        if user is None:
+            return False
+        if not user.password:
+            return False
+        salt = str(user.password[6:6 + user.SALT_LEN])
         check = self._encode_password(password, salt)
-        if check != user.password: return False
+        if check != user.password:
+            return False
         return True
 
     def by_username(self, username):
@@ -273,7 +281,9 @@ class LocalAuthenticationProvider(AuthenticationProvider):
             return user._id.generation_time
         return datetime.utcnow()
 
+
 class LdapAuthenticationProvider(AuthenticationProvider):
+
     def register_user(self, user_doc):
         from allura import model as M
         password = user_doc['password'].encode('utf-8')
@@ -289,7 +299,7 @@ class LdapAuthenticationProvider(AuthenticationProvider):
             ldif_u = modlist.addModlist(dict(
                 uid=uname,
                 userPassword=password,
-                objectClass=['account', 'posixAccount' ],
+                objectClass=['account', 'posixAccount'],
                 cn=display_name,
                 uidNumber=uid,
                 gidNumber='10001',
@@ -307,7 +317,8 @@ class LdapAuthenticationProvider(AuthenticationProvider):
             if asbool(config.get('auth.ldap.use_schroot', True)):
                 argv = ('schroot -d / -c %s -u root /ldap-userconfig.py init %s' % (
                     config['auth.ldap.schroot_name'], user_doc['username'])).split()
-                p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                p = subprocess.Popen(
+                    argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                 rc = p.wait()
                 if rc != 0:
                     log.error('Error creating home directory for %s',
@@ -321,8 +332,9 @@ class LdapAuthenticationProvider(AuthenticationProvider):
                 raise NotImplemented, 'SSH keys are not supported'
 
             argv = ('schroot -d / -c %s -u root /ldap-userconfig.py upload %s' % (
-                config['auth.ldap.schroot_name'], username)).split() + [ pubkey ]
-            p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                config['auth.ldap.schroot_name'], username)).split() + [pubkey]
+            p = subprocess.Popen(
+                argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
             rc = p.wait()
             if rc != 0:
                 errmsg = p.stdout.read()
@@ -339,15 +351,18 @@ class LdapAuthenticationProvider(AuthenticationProvider):
             dn = 'uid=%s,%s' % (user.username, config['auth.ldap.suffix'])
             con = ldap.initialize(config['auth.ldap.server'])
             con.bind_s(dn, old_password.encode('utf-8'))
-            con.modify_s(dn, [(ldap.MOD_REPLACE, 'userPassword', new_password.encode('utf-8'))])
+            con.modify_s(
+                dn, [(ldap.MOD_REPLACE, 'userPassword', new_password.encode('utf-8'))])
             con.unbind_s()
         except ldap.INVALID_CREDENTIALS:
             raise exc.HTTPUnauthorized()
 
     def _login(self):
         from allura import model as M
-        user = M.User.query.get(username=self.request.params['username'], disabled=False)
-        if user is None: raise exc.HTTPUnauthorized()
+        user = M.User.query.get(
+            username=self.request.params['username'], disabled=False)
+        if user is None:
+            raise exc.HTTPUnauthorized()
         try:
             dn = 'uid=%s,%s' % (user.username, config['auth.ldap.suffix'])
             con = ldap.initialize(config['auth.ldap.server'])
@@ -369,7 +384,9 @@ class LdapAuthenticationProvider(AuthenticationProvider):
             return user._id.generation_time
         return datetime.utcnow()
 
+
 class ProjectRegistrationProvider(object):
+
     '''
     Project registration services for Allura.  This is a full implementation
     and the default.  Extend this class with your own if you need to add more
@@ -392,7 +409,8 @@ class ProjectRegistrationProvider(object):
     def __init__(self):
         from allura.lib.widgets import forms
         self.add_project_widget = forms.NeighborhoodAddProjectForm
-        self.shortname_validator = forms.NeighborhoodProjectShortNameValidator()
+        self.shortname_validator = \
+            forms.NeighborhoodProjectShortNameValidator()
 
     @classmethod
     def get(cls):
@@ -423,7 +441,8 @@ class ProjectRegistrationProvider(object):
         rate_limits = json.loads(config.get('project.rate_limits', '{}'))
         for rate, count in rate_limits.items():
             user_age = now - user._id.generation_time
-            user_age = (user_age.microseconds + (user_age.seconds + user_age.days * 24 * 3600) * 10**6) / 10**6
+            user_age = (user_age.microseconds +
+                        (user_age.seconds + user_age.days * 24 * 3600) * 10 ** 6) / 10 ** 6
             if user_age < int(rate) and project_count >= count:
                 raise forge_exc.ProjectRatelimitError()
 
@@ -432,14 +451,15 @@ class ProjectRegistrationProvider(object):
         shortname = '--init--'
         name = 'Home Project for %s' % neighborhood.name
         p = M.Project(neighborhood_id=neighborhood._id,
-                    shortname=shortname,
-                    name=name,
-                    short_description='',
-                    description=('You can edit this description in the admin page'),
-                    homepage_title = '# ' + name,
-                    last_updated = datetime.utcnow(),
-                    is_nbhd_project=True,
-                    is_root=True)
+                      shortname=shortname,
+                      name=name,
+                      short_description='',
+                      description=(
+                          'You can edit this description in the admin page'),
+                      homepage_title='# ' + name,
+                      last_updated=datetime.utcnow(),
+                      is_nbhd_project=True,
+                      is_root=True)
         try:
             p.configure_project(
                 users=users,
@@ -461,7 +481,8 @@ class ProjectRegistrationProvider(object):
         '''Register a new project in the neighborhood.  The given user will
         become the project's superuser.
         '''
-        self.validate_project(neighborhood, shortname, project_name, user, user_project, private_project)
+        self.validate_project(neighborhood, shortname,
+                              project_name, user, user_project, private_project)
         return self._create_project(neighborhood, shortname, project_name, user, user_project, private_project, apps)
 
     def validate_project(self, neighborhood, shortname, project_name, user, user_project, private_project):
@@ -472,16 +493,18 @@ class ProjectRegistrationProvider(object):
 
         # Check for private project rights
         if neighborhood.features['private_projects'] == False and private_project:
-            raise ValueError("You can't create private projects for %s neighborhood" % neighborhood.name)
+            raise ValueError(
+                "You can't create private projects for %s neighborhood" %
+                neighborhood.name)
 
         # Check for project limit creation
         nb_max_projects = neighborhood.get_max_projects()
         if nb_max_projects is not None:
             count = M.Project.query.find(dict(
-                    neighborhood_id=neighborhood._id,
-                    deleted=False,
-                    is_nbhd_project=False,
-                    )).count()
+                neighborhood_id=neighborhood._id,
+                deleted=False,
+                is_nbhd_project=False,
+            )).count()
             if count >= nb_max_projects:
                 log.exception('Error registering project %s' % project_name)
                 raise forge_exc.ProjectOverlimitError()
@@ -492,11 +515,14 @@ class ProjectRegistrationProvider(object):
             check_shortname = shortname.replace('u/', '', 1)
         else:
             check_shortname = shortname
-        self.shortname_validator.to_python(check_shortname, neighborhood=neighborhood)
+        self.shortname_validator.to_python(
+            check_shortname, neighborhood=neighborhood)
 
-        p = M.Project.query.get(shortname=shortname, neighborhood_id=neighborhood._id)
+        p = M.Project.query.get(
+            shortname=shortname, neighborhood_id=neighborhood._id)
         if p:
-            raise forge_exc.ProjectConflict('%s already exists in nbhd %s' % (shortname, neighborhood._id))
+            raise forge_exc.ProjectConflict(
+                '%s already exists in nbhd %s' % (shortname, neighborhood._id))
 
     def _create_project(self, neighborhood, shortname, project_name, user, user_project, private_project, apps):
         '''
@@ -507,18 +533,20 @@ class ProjectRegistrationProvider(object):
 
         project_template = neighborhood.get_project_template()
         p = M.Project(neighborhood_id=neighborhood._id,
-                    shortname=shortname,
-                    name=project_name,
-                    short_description='',
-                    description=('You can edit this description in the admin page'),
-                    homepage_title=shortname,
-                    last_updated = datetime.utcnow(),
-                    is_nbhd_project=False,
-                    is_root=True)
+                      shortname=shortname,
+                      name=project_name,
+                      short_description='',
+                      description=(
+                          'You can edit this description in the admin page'),
+                      homepage_title=shortname,
+                      last_updated=datetime.utcnow(),
+                      is_nbhd_project=False,
+                      is_root=True)
         p.configure_project(
             users=[user],
             is_user_project=user_project,
-            is_private_project=private_project or project_template.get('private', False),
+            is_private_project=private_project or project_template.get(
+                'private', False),
             apps=apps or [] if 'tools' in project_template else None)
 
         # Setup defaults from neighborhood project template if applicable
@@ -527,22 +555,25 @@ class ProjectRegistrationProvider(object):
             for obj in project_template['groups']:
                 name = obj.get('name')
                 permissions = set(obj.get('permissions', [])) & \
-                              set(p.permissions)
+                    set(p.permissions)
                 usernames = obj.get('usernames', [])
                 # Must provide a group name
-                if not name: continue
+                if not name:
+                    continue
                 # If the group already exists, we'll add users to it,
                 # but we won't change permissions on the group
                 group = M.ProjectRole.by_name(name, project=p)
                 if not group:
                     # If creating a new group, *must* specify permissions
-                    if not permissions: continue
+                    if not permissions:
+                        continue
                     group = M.ProjectRole(project_id=p._id, name=name)
                     p.acl += [M.ACE.allow(group._id, perm)
-                            for perm in permissions]
+                              for perm in permissions]
                 for username in usernames:
                     guser = M.User.by_username(username)
-                    if not (guser and guser._id): continue
+                    if not (guser and guser._id):
+                        continue
                     pr = M.ProjectRole.by_user(guser, project=p, upsert=True)
                     if group._id not in pr.roles:
                         pr.roles.append(group._id)
@@ -553,19 +584,20 @@ class ProjectRegistrationProvider(object):
                 for k, v in tool_options.iteritems():
                     if isinstance(v, basestring):
                         tool_options[k] = \
-                                string.Template(v).safe_substitute(
-                                    p.__dict__.get('root_project', {}))
+                            string.Template(v).safe_substitute(
+                                p.__dict__.get('root_project', {}))
                 if p.app_instance(tool) is None:
                     app = p.install_app(tool,
-                        mount_label=tool_config['label'],
-                        mount_point=tool_config['mount_point'],
-                        ordinal=i + offset,
-                    **tool_options)
+                                        mount_label=tool_config['label'],
+                                        mount_point=tool_config['mount_point'],
+                                        ordinal=i + offset,
+                                        **tool_options)
                     if tool == 'wiki':
                         from forgewiki import model as WM
                         text = tool_config.get('home_text',
-                            '[[members limit=20]]\n[[download_button]]')
-                        WM.Page.query.get(app_config_id=app.config._id).text = text
+                                               '[[members limit=20]]\n[[download_button]]')
+                        WM.Page.query.get(
+                            app_config_id=app.config._id).text = text
 
         if 'tool_order' in project_template:
             for i, tool in enumerate(project_template['tool_order']):
@@ -576,9 +608,11 @@ class ProjectRegistrationProvider(object):
             for trove_type in project_template['trove_cats'].keys():
                 troves = getattr(p, 'trove_%s' % trove_type)
                 for trove_id in project_template['trove_cats'][trove_type]:
-                    troves.append(M.TroveCategory.query.get(trove_cat_id=trove_id)._id)
+                    troves.append(
+                        M.TroveCategory.query.get(trove_cat_id=trove_id)._id)
         if 'icon' in project_template:
-            icon_file = StringIO(urlopen(project_template['icon']['url']).read())
+            icon_file = StringIO(
+                urlopen(project_template['icon']['url']).read())
             M.ProjectFile.save_image(
                 project_template['icon']['filename'], icon_file,
                 square=True, thumbnail_size=(48, 48),
@@ -592,14 +626,15 @@ class ProjectRegistrationProvider(object):
             home_app = p.app_instance('wiki')
             home_page = WM.Page.query.get(app_config_id=home_app.config._id)
             home_page.text = ("This is the personal project of %s."
-            " This project is created automatically during user registration"
-            " as an easy place to store personal data that doesn't need its own"
-            " project such as cloned repositories.") % user.display_name
+                              " This project is created automatically during user registration"
+                              " as an easy place to store personal data that doesn't need its own"
+                              " project such as cloned repositories.") % user.display_name
 
         # clear the RoleCache for the user so this project will
         # be picked up by user.my_projects()
         g.credentials.clear_user(user._id, None)  # unnamed roles for this user
-        g.credentials.clear_user(user._id, p._id)  # named roles for this project + user
+        # named roles for this project + user
+        g.credentials.clear_user(user._id, p._id)
         with h.push_config(c, project=p, user=user):
             ThreadLocalORMSession.flush_all()
             # have to add user to context, since this may occur inside auth code
@@ -611,13 +646,14 @@ class ProjectRegistrationProvider(object):
         from allura import model as M
         assert h.re_project_name.match(name), 'Invalid subproject shortname'
         shortname = project.shortname + '/' + name
-        ordinal = int(project.ordered_mounts(include_hidden=True)[-1]['ordinal']) + 1
+        ordinal = int(project.ordered_mounts(
+            include_hidden=True)[-1]['ordinal']) + 1
         sp = M.Project(
             parent_id=project._id,
             neighborhood_id=project.neighborhood_id,
             shortname=shortname,
             name=project_name or name,
-            last_updated = datetime.utcnow(),
+            last_updated=datetime.utcnow(),
             is_root=False,
             ordinal=ordinal,
         )
@@ -644,7 +680,9 @@ class ProjectRegistrationProvider(object):
            It should be overridden for your specific environment'''
         return None
 
+
 class ThemeProvider(object):
+
     '''
     Theme information for Allura.  This is a full implementation
     and the default.  Extend this class with your own if you need to add more
@@ -881,7 +919,7 @@ class ThemeProvider(object):
             return None
         cookie = request.cookies.get('site-notification', '').split('-')
         if len(cookie) == 3 and cookie[0] == str(note._id):
-            views = asint(cookie[1])+1
+            views = asint(cookie[1]) + 1
             closed = asbool(cookie[2])
         else:
             views = 1
@@ -889,15 +927,18 @@ class ThemeProvider(object):
         if closed or note.impressions > 0 and views > note.impressions:
             return None
         response.set_cookie(
-                'site-notification',
-                '-'.join(map(str, [note._id, views, closed])),
-                max_age=timedelta(days=365))
+            'site-notification',
+            '-'.join(map(str, [note._id, views, closed])),
+            max_age=timedelta(days=365))
         return note
 
+
 class LocalProjectRegistrationProvider(ProjectRegistrationProvider):
     pass
 
+
 class UserPreferencesProvider(object):
+
     '''
     An interface for user preferences, like display_name and email_address
 
@@ -937,7 +978,9 @@ class UserPreferencesProvider(object):
         '''
         raise NotImplementedError, 'find_by_display_name'
 
+
 class LocalUserPreferencesProvider(UserPreferencesProvider):
+
     '''
     The default UserPreferencesProvider, storing preferences on the User object
     in mongo.
@@ -959,11 +1002,12 @@ class LocalUserPreferencesProvider(UserPreferencesProvider):
         from allura import model as M
         name_regex = re.compile('(?i)%s' % re.escape(name))
         users = M.User.query.find(dict(
-                display_name=name_regex)).sort('username').all()
+            display_name=name_regex)).sort('username').all()
         return users
 
 
 class AdminExtension(object):
+
     """
     A base class for extending the admin areas in Allura.
 
@@ -994,7 +1038,9 @@ class AdminExtension(object):
         """
         pass
 
+
 class ImportIdConverter(object):
+
     '''
     An interface to convert to and from import_id values for indexing,
     searching, or displaying.
@@ -1021,7 +1067,7 @@ class ImportIdConverter(object):
 
     def expand(self, source_id, app_instance):
         import_id = {
-                'source_id': source_id,
-            }
+            'source_id': source_id,
+        }
         import_id.update(app_instance.config.options.get('import_id', {}))
         return import_id
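
The final hunk reformats ImportIdConverter.expand(), which merges a tool's stored import_id options with the per-artifact source id. A standalone toy version of that pattern (the 'ticket:42' source id and 'trac_url' option are made up for illustration):

    def expand_import_id(source_id, tool_import_id_options):
        import_id = {'source_id': source_id}
        import_id.update(tool_import_id_options)
        return import_id

    full_id = expand_import_id('ticket:42',
                               {'trac_url': 'http://example.com/trac/'})
    assert full_id == {'source_id': 'ticket:42',
                       'trac_url': 'http://example.com/trac/'}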