You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@allura.apache.org by he...@apache.org on 2015/05/29 22:40:30 UTC
[08/45] allura git commit: [#7878] Used 2to3 to see what issues would
come up
http://git-wip-us.apache.org/repos/asf/allura/blob/d52f8e2a/tests/test_globals.py
----------------------------------------------------------------------
diff --git a/tests/test_globals.py b/tests/test_globals.py
new file mode 100644
index 0000000..5a5f4b4
--- /dev/null
+++ b/tests/test_globals.py
@@ -0,0 +1,858 @@
+# -*- coding: utf-8 -*-
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import re
+import os
+import allura
+import unittest
+import hashlib
+import datetime as dt
+from mock import patch, Mock
+
+from bson import ObjectId
+from nose.tools import with_setup, assert_equal, assert_in, assert_not_in
+from pylons import tmpl_context as c, app_globals as g
+import tg
+
+from ming.orm import ThreadLocalORMSession
+from alluratest.controller import (
+ setup_basic_test,
+ setup_global_objects,
+ setup_unit_test,
+ setup_functional_test,
+ setup_trove_categories,
+)
+
+from allura import model as M
+from allura.lib import helpers as h
+from allura.lib.app_globals import ForgeMarkdown, NeighborhoodCache
+from allura.tests import decorators as td
+
+from forgewiki import model as WM
+from forgeblog import model as BM
+
+
def setUp():
    """Package-level nose fixture: build the shared test environment.

    Runs the basic and unit-test setup and then installs the wiki tool.
    Some tests call this again directly to reset data they mutate.
    """
    for initialize in (setup_basic_test, setup_unit_test, setup_with_tools):
        initialize()
+
+
def tearDown():
    """Package-level nose teardown: reset fixture data by re-running setUp."""
    setUp()
+
+
@td.with_wiki
def setup_with_tools():
    """Install the wiki tool (via decorator) and set up global objects."""
    setup_global_objects()
+
+
@td.with_wiki
def test_app_globals():
    """Sanity-check g.app_static and g.url inside a wiki app context."""
    with h.push_context('test', 'wiki', neighborhood='Projects'):
        static_url = g.app_static('css/wiki.css')
        assert static_url == '/nf/_static_/wiki/css/wiki.css', static_url
        url_with_param = g.url('/foo', a='foo bar')
        assert url_with_param == 'http://localhost/foo?a=foo+bar', url_with_param
        plain_url = g.url('/foo')
        assert plain_url == 'http://localhost/foo', plain_url
+
+
def _save_project_icon(project, file_path, file_name):
    """Make ``project`` current and attach ``file_path`` as its icon.

    Opens the image in binary mode (required for PNG data on Python 3) and
    uses a context manager so the handle is closed even if save_image raises.
    """
    c.project = project
    with open(file_path, 'rb') as icon_file:
        M.ProjectFile.save_image(
            file_name, icon_file, content_type='image/png',
            square=True, thumbnail_size=(48, 48),
            thumbnail_meta=dict(project_id=project._id, category='icon'))


@with_setup(setUp)
def test_macro_projects():
    """Exercise the [[projects]] macro: label filtering, totals, privacy,
    and the show_proj_icon option."""
    file_name = 'neo-icon-set-454545-256x350.png'
    file_path = os.path.join(
        allura.__path__[0], 'nf', 'allura', 'images', file_name)

    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    _save_project_icon(p_test, file_path, file_name)
    p_test2 = M.Project.query.get(
        shortname='test2', neighborhood_id=p_nbhd._id)
    _save_project_icon(p_test2, file_path, file_name)
    p_sub1 = M.Project.query.get(
        shortname='test/sub1', neighborhood_id=p_nbhd._id)
    _save_project_icon(p_sub1, file_path, file_name)
    p_test.labels = ['test', 'root']
    p_sub1.labels = ['test', 'sub1']
    # Make one project private so private=True filtering can be verified
    p_test.private = False
    p_sub1.private = False
    p_test2.private = True

    ThreadLocalORMSession.flush_all()

    with h.push_config(c,
                       project=p_nbhd.neighborhood_project,
                       user=M.User.by_username('test-admin')):
        r = g.markdown_wiki.convert('[[projects]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' in r, r
        # labels= is an AND filter within a group; | separates OR groups
        r = g.markdown_wiki.convert('[[projects labels=root]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=sub1]]')
        assert 'alt="Test Project Logo"' not in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,sub1]]')
        assert 'alt="Test Project Logo"' not in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=root|sub1]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root|root,sub1]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' not in r, r
        r = g.markdown_wiki.convert('[[projects labels=test,root|test,sub1]]')
        assert 'alt="Test Project Logo"' in r, r
        assert 'alt="A Subproject Logo"' in r, r
        # show_total counts all matching projects; private=True shows only
        # the private one
        r = g.markdown_wiki.convert('[[projects show_total=True sort=random]]')
        assert '<p class="macro_projects_total">3 Projects' in r, r
        r = g.markdown_wiki.convert(
            '[[projects show_total=True private=True sort=random]]')
        assert '<p class="macro_projects_total">1 Projects' in r, r
        assert 'alt="Test 2 Logo"' in r, r
        assert 'alt="Test Project Logo"' not in r, r
        assert 'alt="A Subproject Logo"' not in r, r

        r = g.markdown_wiki.convert('[[projects show_proj_icon=True]]')
        assert 'alt="Test Project Logo"' in r
        r = g.markdown_wiki.convert('[[projects show_proj_icon=False]]')
        assert 'alt="Test Project Logo"' not in r
+
+
def test_macro_gittip_button():
    """[[gittip_button]] renders the gittip widget iframe for the user."""
    expected = ('<div class="markdown_content"><p><iframe height="22pt" '
                'src="https://www.gittip.com/test/widget.html" '
                'style="border: 0; margin: 0; padding: 0;" width="48pt">'
                '</iframe>\n</p></div>')
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    with h.push_config(c, project=p_test):
        rendered = g.markdown_wiki.convert('[[gittip_button username=test]]')
        assert_equal(rendered, expected)
+
+
def test_macro_neighborhood_feeds():
    """[[neighborhood_feeds]] must honor per-project read permissions."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    with h.push_context('--init--', 'wiki', neighborhood='Projects'):
        # baseline feed should mention the seeded Home page edit
        r = g.markdown_wiki.convert('[[neighborhood_feeds tool_name=wiki]]')
        assert 'Home modified by' in r, r
        orig_len = len(r)
        # Make project private & verify we don't see its new feed items
        anon = M.User.anonymous()
        p_test.acl.insert(0, M.ACE.deny(
            M.ProjectRole.anonymous(p_test)._id, 'read'))
        ThreadLocalORMSession.flush_all()
        pg = WM.Page.query.get(title='Home', app_config_id=c.app.config._id)
        pg.text = 'Change'
        with h.push_config(c, user=M.User.by_username('test-admin')):
            pg.commit()
        r = g.markdown_wiki.convert('[[neighborhood_feeds tool_name=wiki]]')
        new_len = len(r)
        # feed output is unchanged: the private project's edit is hidden
        assert new_len == orig_len
        # a published blog post, by contrast, shows up for anonymous users
        p = BM.BlogPost(title='test me',
                        neighborhood_id=p_test.neighborhood_id)
        p.text = 'test content'
        p.state = 'published'
        p.make_slug()
        with h.push_config(c, user=M.User.by_username('test-admin')):
            p.commit()
        ThreadLocalORMSession.flush_all()
        with h.push_config(c, user=anon):
            r = g.markdown_wiki.convert('[[neighborhood_blog_posts]]')
        assert 'test content' in r
+
+
@with_setup(setUp)
def test_macro_members():
    """[[members limit=N]] shows N members plus an "All Members" link."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    p_test.add_user(M.User.by_username('test-user'), ['Developer'])
    p_test.add_user(M.User.by_username('test-user-0'), ['Member'])
    ThreadLocalORMSession.flush_all()
    # limit=2: admin + first user listed, the rest elided behind the link
    r = g.markdown_wiki.convert('[[members limit=2]]')
    assert_equal(r, '<div class="markdown_content"><h6>Project Members:</h6>\n'
                 '<ul class="md-users-list">\n'
                 '<li><a href="/u/test-admin">Test Admin</a> (admin)</li><li><a href="/u/test-user">Test User</a></li>\n'
                 '<li class="md-users-list-more"><a href="/p/test/_members">All Members</a></li>\n'
                 '</ul>\n'
                 '</div>')
+
+
@with_setup(setUp)
def test_macro_members_escaping():
    """A display name containing markup must not break the members list."""
    user = M.User.by_username('test-admin')
    user.display_name = 'Test Admin <script>'
    r = g.markdown_wiki.convert('[[members]]')
    # NOTE(review): the pinned expectation shows the name unescaped here;
    # confirm against the canonical source whether entities were lost in
    # transit or this is the intended output.
    assert_equal(r, '<div class="markdown_content"><h6>Project Members:</h6>\n'
                 '<ul class="md-users-list">\n'
                 '<li><a href="/u/test-admin">Test Admin <script></a> (admin)</li>\n'
                 '</ul>\n</div>')
+
+
@with_setup(setUp)
def test_macro_project_admins():
    """[[project_admins]] handles non-ASCII characters in display names."""
    user = M.User.by_username('test-admin')
    user.display_name = 'Test Ådmin <script>'
    with h.push_context('test', neighborhood='Projects'):
        r = g.markdown_wiki.convert('[[project_admins]]')
    # '\xc5' is 'Å' -- the non-ASCII character must round-trip intact
    assert_equal(
        r, '<div class="markdown_content"><h6>Project Admins:</h6>\n<ul class="md-users-list">\n<li><a href="/u/test-admin">Test \xc5dmin <script></a></li>\n</ul>\n</div>')
+
+
@with_setup(setUp)
def test_macro_project_admins_one_br():
    """Adjacent user-list macros must render as list items, not be glued
    together with repeated <br /> tags."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    p_test.add_user(M.User.by_username('test-user'), ['Admin'])
    ThreadLocalORMSession.flush_all()
    with h.push_config(c, project=p_test):
        r = g.markdown_wiki.convert('[[project_admins]]\n[[download_button]]')

    # idiomatic membership test (was `assert not ... in r`, PEP 8 E713)
    assert '</a><br /><br /><a href=' not in r, r
    assert '</a></li><li><a href=' in r, r
+
+
@td.with_wiki
def test_macro_include_no_extra_br():
    """Consecutive [[include]] macros must not be separated by extra <br>s."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    wiki = p_test.app_instance('wiki')
    with h.push_context(p_test._id, app_config_id=wiki.config._id):
        # create three pages to be included on one source page
        for num in (1, 2, 3):
            page = WM.Page.upsert(title='Include_%s' % num)
            page.text = 'included page %s' % num
            page.commit()
        ThreadLocalORMSession.flush_all()
        md = '[[include ref=Include_1]]\n[[include ref=Include_2]]\n[[include ref=Include_3]]'
        html = g.markdown_wiki.convert(md)

    expected_html = '''
<div class="markdown_content">
<p>
<div><div class="markdown_content"><p>included page 1</p></div></div>
<div><div class="markdown_content"><p>included page 2</p></div></div>
<div><div class="markdown_content"><p>included page 3</p></div></div>
</p>
<p></p>
</div>
'''.strip().replace('\n', '')
    assert html.strip().replace('\n', '') == expected_html, html
+
@with_setup(setUp, tearDown)
@td.with_wiki
@td.with_tool('test', 'Wiki', 'wiki2')
def test_macro_include_permissions():
    """[[include]] of an unreadable page shows a notice, not the content."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    p_test = M.Project.query.get(shortname='test', neighborhood_id=p_nbhd._id)
    wiki = p_test.app_instance('wiki')
    wiki2 = p_test.app_instance('wiki2')
    with h.push_context(p_test._id, app_config_id=wiki.config._id):
        p = WM.Page.upsert(title='CanRead')
        p.text = 'Can see this!'
        p.commit()
        ThreadLocalORMSession.flush_all()

    # strip anonymous read access from the second wiki only
    with h.push_context(p_test._id, app_config_id=wiki2.config._id):
        role = M.ProjectRole.by_name('*anonymous')._id
        read_perm = M.ACE.allow(role, 'read')
        acl = c.app.config.acl
        if read_perm in acl:
            acl.remove(read_perm)
        p = WM.Page.upsert(title='CanNotRead')
        p.text = 'Can not see this!'
        p.commit()
        ThreadLocalORMSession.flush_all()

    with h.push_context(p_test._id, app_config_id=wiki.config._id):
        c.user = M.User.anonymous()
        md = '[[include ref=CanRead]]\n[[include ref=wiki2:CanNotRead]]'
        html = g.markdown_wiki.convert(md)
        # readable include renders; unreadable one becomes a notice
        assert_in('Can see this!', html)
        assert_not_in('Can not see this!', html)
        assert_in("[[include: you don't have a read permission for wiki2:CanNotRead]]", html)
+
+
@patch('oembed.OEmbedEndpoint.fetch')
def test_macro_embed(oembed_fetch):
    """[[embed]] renders the oembed-provided iframe for a supported URL."""
    # canned oembed response; note the http src -- the assertion below
    # expects the macro to emit an https src
    oembed_fetch.return_value = {
        "html": '<iframe width="480" height="270" src="http://www.youtube.com/embed/kOLpSPEA72U?feature=oembed" frameborder="0" allowfullscreen></iframe>)',
        "title": "Nature's 3D Printer: MIND BLOWING Cocoon in Rainforest - Smarter Every Day 94",
    }
    r = g.markdown_wiki.convert(
        '[[embed url=http://www.youtube.com/watch?v=kOLpSPEA72U]]')
    assert_in('<div class="grid-20"><iframe height="270" src="https://www.youtube.com/embed/kOLpSPEA72U?feature=oembed" width="480"></iframe>\n</div>',
              r)
+
+
def test_macro_embed_notsupported():
    """An [[embed]] of an unsupported provider is left as literal text."""
    expected = ('<div class="markdown_content"><p>'
                '[[embed url=http://vimeo.com/46163090]]</p></div>')
    rendered = g.markdown_wiki.convert('[[embed url=http://vimeo.com/46163090]]')
    assert_equal(rendered, expected)
+
+
def test_markdown_toc():
    """[TOC] expands into a nested list of header anchors."""
    source = '[TOC]\n\n# Header 1\n\n## Header 2'
    with h.push_context('test', neighborhood='Projects'):
        r = g.markdown_wiki.convert(source)
    expected = ('<ul>\n'
                '<li><a href="#header-1">Header 1</a><ul>\n'
                '<li><a href="#header-2">Header 2</a></li>\n'
                '</ul>\n'
                '</li>\n'
                '</ul>')
    assert expected in r, r
+
+
@td.with_wiki
def test_wiki_artifact_links():
    """Artifact-link / shortlink syntax resolves to wiki page URLs."""
    # a [h:m:s] timestamp is not an artifact link; it is wrapped as a span
    text = g.markdown.convert('See [18:13:49]')
    assert 'See <span>[18:13:49]</span>' in text, text
    with h.push_context('test', 'wiki', neighborhood='Projects'):
        # relative markdown link to a page title
        text = g.markdown.convert('Read [here](Home) about our project')
        assert '<a class="" href="/p/test/wiki/Home">here</a>' in text, text
        # fully-qualified project:tool:page target in a markdown link
        text = g.markdown.convert('[Go home](test:wiki:Home)')
        assert '<a class="" href="/p/test/wiki/Home">Go home</a>' in text, text
        # bare [project:tool:page] shortlink
        text = g.markdown.convert('See [test:wiki:Home]')
        assert '<a class="alink" href="/p/test/wiki/Home">[test:wiki:Home]</a>' in text, text
+
+
def test_markdown_links():
    """rel="nofollow" is applied to external links except exempt domains."""
    # with foobar.net exempt, no nofollow is added
    with patch.dict(tg.config, {'nofollow_exempt_domains': 'foobar.net'}):
        text = g.markdown.convert(
            'Read [here](http://foobar.net/) about our project')
        assert_in('class="" href="http://foobar.net">here</a> about', text)

    # without the exemption the same link gets rel="nofollow"
    text = g.markdown.convert(
        'Read [here](http://foobar.net/) about our project')
    assert_in('class="" href="http://foobar.net" rel="nofollow">here</a> about', text)

    # internal (relative) links never get nofollow
    text = g.markdown.convert('Read [here](/p/foobar/blah) about our project')
    assert_in('class="" href="/p/foobar/blah">here</a> about', text)

    # autolinked <url> form is external, so it gets nofollow too
    text = g.markdown.convert('Read <http://foobar.net/> about our project')
    assert_in(
        'href="http://foobar.net" rel="nofollow">http://foobar.net/</a> about', text)
+
+
def test_markdown_and_html():
    """Inline HTML with a style attribute survives conversion (normalized)."""
    with h.push_context('test', neighborhood='Projects'):
        rendered = g.markdown_wiki.convert('<div style="float:left">blah</div>')
        assert '<div style="float: left;">blah</div>' in rendered, rendered
+
+
def test_markdown_within_html():
    """The markdown attribute on an HTML block renders markdown inside it."""
    expected = ('<div style="float: left;">\n'
                '<p><strong>blah</strong></p>\n'
                '</div>')
    with h.push_context('test', neighborhood='Projects'):
        rendered = g.markdown_wiki.convert(
            '<div style="float:left" markdown>**blah**</div>')
        assert expected in rendered, rendered
+
+
def test_markdown_with_html_comments():
    """HTML comments are stripped from the rendered output."""
    rendered = g.markdown.convert('test <!-- comment -->')
    expected = '<div class="markdown_content"><p>test </p></div>'
    assert expected == rendered, rendered
+
+
def test_markdown_big_text():
    '''If text is too big, conversion should fall back to plain <pre> text'''
    text = 'a' * 40001
    expected = '<pre>%s</pre>' % text
    for converter in (g.markdown, g.markdown_wiki):
        assert_equal(converter.convert(text), expected)
+
+
@td.with_wiki
def test_markdown_basics():
    """Core markdown behaviors: artifact links, line breaks, code blocks."""
    with h.push_context('test', 'wiki', neighborhood='Projects'):
        # existing page -> alink; unknown page -> no link at all
        text = g.markdown.convert('# Foo!\n[Home]')
        assert '<a class="alink" href="/p/test/wiki/Home">[Home]</a>' in text, text
        text = g.markdown.convert('# Foo!\n[Rooted]')
        assert '<a href=' not in text, text

    # a single newline becomes <br>; a blank line starts a new paragraph
    assert '<br' in g.markdown.convert(
        'Multi\nLine'), g.markdown.convert('Multi\nLine')
    assert '<br' not in g.markdown.convert('Multi\n\nLine')

    g.markdown.convert("<class 'foo'>")  # should not raise an exception
    # no spurious <br> inside a :::python code block (the sample inside the
    # string is deliberately Python 2 source)
    assert '<br>' not in g.markdown.convert('''# Header

Some text in a regular paragraph

    :::python
    for i in range(10):
        print i
''')
    # email rendering uses absolute URLs
    assert_in('http://localhost:8080/', g.forge_markdown(email=True).convert('[Home]'))
    # fenced (~~~~) code blocks get pygments highlighting
    assert 'class="codehilite"' in g.markdown.convert('''
~~~~
def foo(): pass
~~~~''')
+
+
def test_markdown_autolink():
    """Bare URLs are auto-linked (with nofollow) except inside code spans."""
    tgt = 'http://everything2.com/?node=nate+oostendorp'
    s = g.markdown.convert('This is %s' % tgt)
    assert_equal(
        s, '<div class="markdown_content"><p>This is <a href="%s" rel="nofollow">%s</a></p></div>' % (tgt, tgt))
    assert '<a href=' in g.markdown.convert('This is http://domain.net')
    # beginning of doc
    assert_in('<a href=', g.markdown.convert('http://domain.net abc'))
    # beginning of a line
    assert_in('<br />\n<a href="http://',
              g.markdown.convert('foobar\nhttp://domain.net abc'))
    # no conversion of these urls:
    assert_in('a blahttp://sdf.com z',
              g.markdown.convert('a blahttp://sdf.com z'))
    assert_in('literal <code>http://domain.net</code> literal',
              g.markdown.convert('literal `http://domain.net` literal'))
    assert_in('<pre>preformatted http://domain.net\n</pre>',
              g.markdown.convert(' :::text\n'
                                 ' preformatted http://domain.net'))
+
+
def test_markdown_autolink_with_escape():
    r"""Backslash-escaped chars in an auto-linked URL resolve literally.

    ``\_`` is unnecessary but valid markdown escaping and should be treated
    as a regular underscore (it occurs during html2text conversion during
    project migrations).  The input is a raw string so the backslashes reach
    the converter verbatim, instead of relying on Python's leniency for
    invalid escape sequences (a DeprecationWarning on Python 3).
    """
    r = g.markdown.convert(
        r'a http://www.phpmyadmin.net/home\_page/security/\#target b')
    assert 'href="http://www.phpmyadmin.net/home_page/security/#target"' in r, r
+
+
def test_markdown_invalid_script():
    """Pinned output for raw <script> input (XSS regression check)."""
    rendered = g.markdown.convert('<script>alert(document.cookies)</script>')
    expected = ('<div class="markdown_content"><script>'
                'alert(document.cookies)</script>\n</div>')
    assert_equal(expected, rendered)
+
+
def test_markdown_invalid_onerror():
    """Event-handler attributes like onerror must be stripped (XSS guard)."""
    rendered = g.markdown.convert('<img src=x onerror=alert(document.cookie)>')
    assert_not_in('onerror', rendered)
+
+
def test_markdown_invalid_tagslash():
    """A slash-mangled tag must not smuggle an onerror handler through."""
    rendered = g.markdown.convert(
        '<div/onload><img src=x onerror=alert(document.cookie)>')
    assert_not_in('onerror', rendered)
+
+
@td.with_wiki
def test_macro_include():
    """[[include]] wraps included content in a div with the given id."""
    r = g.markdown.convert('[[include ref=Home id=foo]]')
    assert '<div id="foo">' in r, r
    # relative links resolve against the current page URL
    assert 'href="../foo"' in g.markdown.convert('[My foo](foo)')
    assert 'href="..' not in g.markdown.convert('[My foo](./foo)')
+
+
def test_macro_nbhd_feeds():
    """Feed items from [[neighborhood_feeds]] are not double-wrapped."""
    with h.push_context('--init--', 'wiki', neighborhood='Projects'):
        rendered = g.markdown_wiki.convert('[[neighborhood_feeds tool_name=wiki]]')
        # the feed mentions the seeded Home page edit...
        assert 'Home modified by ' in rendered, rendered
        # ...without nesting another markdown_content wrapper
        assert '<div class="markdown_content">' not in rendered
+
+
def test_sort_alpha():
    """[[projects sort=alpha]] lists projects in alphabetical order."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')

    with h.push_context(p_nbhd.neighborhood_project._id):
        rendered = g.markdown_wiki.convert('[[projects sort=alpha]]')
        names = get_project_names(rendered)
        assert names == sorted(names)
+
+
def test_sort_registered():
    """[[projects sort=last_registered]] orders newest-first (by _id)."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')

    with h.push_context(p_nbhd.neighborhood_project._id):
        names = get_project_names(
            g.markdown_wiki.convert('[[projects sort=last_registered]]'))
        ids = get_projects_property_in_the_same_order(names, '_id')
        assert ids == sorted(ids, reverse=True)
+
+
def test_sort_updated():
    """[[projects sort=last_updated]] orders most-recently-updated first."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')

    with h.push_context(p_nbhd.neighborhood_project._id):
        names = get_project_names(
            g.markdown_wiki.convert('[[projects sort=last_updated]]'))
        timestamps = get_projects_property_in_the_same_order(
            names, 'last_updated')
        assert timestamps == sorted(timestamps, reverse=True)
+
+
@with_setup(setup_functional_test)
def test_filtering():
    """[[projects category=...]] returns only projects in that trove."""
    # set up for test
    from random import choice
    setup_trove_categories()
    # tag the test project with a randomly chosen trove category
    random_trove = choice(M.TroveCategory.query.find().all())
    test_project = M.Project.query.get(shortname='test')
    test_project_troves = getattr(test_project, 'trove_' + random_trove.type)
    test_project_troves.append(random_trove._id)
    ThreadLocalORMSession.flush_all()

    p_nbhd = M.Neighborhood.query.get(name='Projects')
    with h.push_config(c,
                       project=p_nbhd.neighborhood_project,
                       user=M.User.by_username('test-admin')):
        r = g.markdown_wiki.convert(
            '[[projects category="%s"]]' % random_trove.fullpath)
        project_names = get_project_names(r)
        # only the project we just tagged should match the filter
        assert_equal([test_project.name], project_names)
+
+
def test_projects_macro():
    """The columns= option of [[projects]] toggles two-column styling."""
    two_column_style = 'width: 330px;'
    p_nbhd = M.Neighborhood.query.get(name='Projects')
    with h.push_config(c,
                       project=p_nbhd.neighborhood_project,
                       user=M.User.anonymous()):
        # two columns -> narrow (330px) tiles
        assert two_column_style in g.markdown_wiki.convert(
            '[[projects display_mode=list columns=2]]')
        # three columns -> a different width
        assert two_column_style not in g.markdown_wiki.convert(
            '[[projects display_mode=list columns=3]]')
+
+
@td.with_wiki
def test_limit_tools_macro():
    """grid_view_tools= restricts which tool links [[projects]] shows."""
    adobe_nbhd = M.Neighborhood.query.get(name='Adobe')
    with h.push_context(adobe_nbhd.neighborhood_project._id, 'wiki'):
        # by default every tool, including Admin, is listed
        assert '<span>Admin</span>' in g.markdown_wiki.convert('[[projects]]')
        # limiting to wiki hides Admin
        assert '<span>Admin</span>' not in g.markdown_wiki.convert(
            '[[projects grid_view_tools=wiki]]')
        # explicitly listing admin brings it back
        assert '<span>Admin</span>' in g.markdown_wiki.convert(
            '[[projects grid_view_tools=wiki,admin]]')
+
+
def _assert_my_projects_listed(user, rendered):
    """Assert each live, non-neighborhood project of ``user`` appears as an
    <h2> link in ``rendered``."""
    for proj in user.my_projects():
        if proj.deleted or proj.is_nbhd_project:
            continue
        proj_title = '<h2><a href="%s">%s</a></h2>' % (proj.url().rstrip('/'),
                                                       proj.name)
        assert_in(proj_title, rendered)


@td.with_user_project('test-admin')
@td.with_user_project('test-user-1')
def test_myprojects_macro():
    """[[my_projects]] lists the context user's projects on their user wiki."""
    # current user's own user-project wiki
    h.set_context('u/%s' % (c.user.username), 'wiki', neighborhood='Users')
    r = g.markdown_wiki.convert('[[my_projects]]')
    _assert_my_projects_listed(c.user, r)

    # another user's user-project wiki shows that user's projects
    h.set_context('u/test-user-1', 'wiki', neighborhood='Users')
    user = M.User.query.get(username='test-user-1')
    r = g.markdown_wiki.convert('[[my_projects]]')
    _assert_my_projects_listed(user, r)
+
+
@td.with_wiki
def test_hideawards_macro():
    """show_awards_banner=False hides a project's award in [[projects]]."""
    p_nbhd = M.Neighborhood.query.get(name='Projects')

    # create an award and grant it to the test project
    app_config_id = ObjectId()
    award = M.Award(app_config_id=app_config_id)
    award.short = 'Award short'
    award.full = 'Award full'
    award.created_by_neighborhood_id = p_nbhd._id

    project = M.Project.query.get(
        neighborhood_id=p_nbhd._id, shortname='test')

    M.AwardGrant(
        award=award,
        award_url='http://award.org',
        comment='Winner!',
        granted_by_neighborhood=p_nbhd,
        granted_to_project=project)

    ThreadLocalORMSession.flush_all()

    with h.push_context(p_nbhd.neighborhood_project._id):
        # award banner shown by default...
        r = g.markdown_wiki.convert('[[projects]]')
        assert '<div class="feature">\n<a href="http://award.org" title="Winner!" rel="nofollow">Award short</a>\n</div>' in r, r
        # ...and hidden when the option disables it
        r = g.markdown_wiki.convert('[[projects show_awards_banner=False]]')
        assert '<div class="feature">\n<a href="http://award.org" title="Winner!" rel="nofollow">Award short</a>\n</div>' not in r, r
+
+
def get_project_names(r):
    """
    Extract project names from rendered wiki-page HTML.

    Project short names appear in ``<h2>`` elements whose ``<a>`` child has
    attributes; the page's one other ``<h2>`` carries a ``class`` attribute
    on the ``h2`` itself, so it does not match this pattern.
    """
    # raw string: '/' needs no escaping in Python regexes, and '\/' is an
    # invalid escape in a plain string literal
    re_proj_names = re.compile(r'<h2><a[^>]+>(.+)</a></h2>')
    return re_proj_names.findall(r)
+
+
def get_projects_property_in_the_same_order(names, prop):
    """
    Return each project's ``prop`` value, ordered to match ``names``.

    Required because query results do not come back in the same order as
    ``names``.
    """
    projects = M.Project.query.find(dict(name={'$in': names})).all()
    # dict comprehension instead of dict([(k, v) for ...]) (C404 idiom)
    prop_by_name = {p['name']: p[prop] for p in projects}
    return [prop_by_name[name] for name in names]
+
+
class TestCachedMarkdown(unittest.TestCase):
    """Tests for ForgeMarkdown.cached_convert and its cache-threshold config.

    markdown_cache_threshold (seconds) decides whether a rendering is worth
    caching: '0' caches everything; unset, unparseable, or very large values
    cache nothing.
    """

    def setUp(self):
        # a post with markdown source and its known rendering
        self.md = ForgeMarkdown()
        self.post = M.Post()
        self.post.text = '**bold**'
        self.expected_html = '<p><strong>bold</strong></p>'

    def test_bad_source_field_name(self):
        # asking for a nonexistent source attribute propagates AttributeError
        self.assertRaises(AttributeError, self.md.cached_convert,
                          self.post, 'no_such_field')

    def test_missing_cache_field(self):
        # a missing <field>_cache attribute must not break conversion
        delattr(self.post, 'text_cache')
        html = self.md.cached_convert(self.post, 'text')
        self.assertEqual(html, self.expected_html)

    @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='0')
    def test_non_ascii(self):
        # non-ASCII source must render the same with cold and warm cache
        self.post.text = 'å∫ç'
        expected = '<p>å∫ç</p>'
        # test with empty cache
        self.assertEqual(expected, self.md.cached_convert(self.post, 'text'))
        # test with primed cache
        self.assertEqual(expected, self.md.cached_convert(self.post, 'text'))

    @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='0')
    def test_empty_cache(self):
        # threshold '0': the first conversion populates html/md5/render_time
        html = self.md.cached_convert(self.post, 'text')
        self.assertEqual(html, self.expected_html)
        self.assertEqual(html, self.post.text_cache.html)
        # NOTE(review): md5 of a unicode str only works on Python 2; this
        # will need an encode step as part of the py3 migration
        self.assertEqual(hashlib.md5(self.post.text).hexdigest(),
                         self.post.text_cache.md5)
        self.assertTrue(self.post.text_cache.render_time > 0)

    @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='0')
    def test_stale_cache(self):
        # changing the source invalidates the cached rendering
        old = self.md.cached_convert(self.post, 'text')
        self.post.text = 'new, different source text'
        html = self.md.cached_convert(self.post, 'text')
        self.assertNotEqual(old, html)
        self.assertEqual(html, self.post.text_cache.html)
        self.assertEqual(hashlib.md5(self.post.text).hexdigest(),
                         self.post.text_cache.md5)
        self.assertTrue(self.post.text_cache.render_time > 0)

    @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='0')
    def test_valid_cache(self):
        from jinja2 import Markup
        # prime the cache, then verify convert() is bypassed on a warm hit
        self.md.cached_convert(self.post, 'text')
        with patch.object(self.md, 'convert') as convert_func:
            html = self.md.cached_convert(self.post, 'text')
            self.assertEqual(html, self.expected_html)
            self.assertIsInstance(html, Markup)
            self.assertFalse(convert_func.called)
            # FIX: these three statements must run while convert is still
            # patched -- outside the with-block the mock is restored and
            # convert_func.called could never become True
            self.post.text = "text [[macro]] pass"
            html = self.md.cached_convert(self.post, 'text')
            self.assertTrue(convert_func.called)

    @patch.dict('allura.lib.app_globals.config', {})
    def test_no_threshold_defined(self):
        # no threshold configured -> nothing is cached
        html = self.md.cached_convert(self.post, 'text')
        self.assertEqual(html, self.expected_html)
        self.assertIsNone(self.post.text_cache.md5)
        self.assertIsNone(self.post.text_cache.html)
        self.assertIsNone(self.post.text_cache.render_time)

    @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='foo')
    def test_invalid_threshold(self):
        # an unparseable threshold is treated as "don't cache"
        html = self.md.cached_convert(self.post, 'text')
        self.assertEqual(html, self.expected_html)
        self.assertIsNone(self.post.text_cache.md5)
        self.assertIsNone(self.post.text_cache.html)
        self.assertIsNone(self.post.text_cache.render_time)

    @patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='99999')
    def test_render_time_below_threshold(self):
        # renders faster than the threshold are not worth caching
        html = self.md.cached_convert(self.post, 'text')
        self.assertEqual(html, self.expected_html)
        self.assertIsNone(self.post.text_cache.md5)
        self.assertIsNone(self.post.text_cache.html)
        self.assertIsNone(self.post.text_cache.render_time)

    @patch.dict('allura.lib.app_globals.config', {})
    def test_all_expected_keys_exist_in_cache(self):
        # the cache schema is exactly these four keys
        self.md.cached_convert(self.post, 'text')
        required_keys = ['fix7528', 'html', 'md5', 'render_time']
        keys = sorted(self.post.text_cache.keys())
        self.assertEqual(required_keys, keys)
+
+
class TestHandlePaging(unittest.TestCase):
    """Tests for g.handle_paging's (limit, page, start) computation."""

    def setUp(self):
        # stand-in user whose preferences live in a plain dict; dict.get and
        # dict.__setitem__ match the get_pref/set_pref call signatures
        prefs = {}
        c.user = Mock()
        c.user.get_pref = prefs.get
        c.user.set_pref = prefs.__setitem__

    def test_with_limit(self):
        # explicit limit is used as-is; start = limit * page
        self.assertEqual(g.handle_paging(10, 0), (10, 0, 0))
        self.assertEqual(g.handle_paging(10, 2), (10, 2, 20))
        # handle paging must not mess up user preferences
        self.assertEqual(c.user.get_pref('results_per_page'), None)

    def test_without_limit(self):
        # no limit given -> built-in default of 25
        self.assertEqual(g.handle_paging(None, 0), (25, 0, 0))
        self.assertEqual(g.handle_paging(None, 2), (25, 2, 50))
        # handle paging must not mess up user preferences
        self.assertEqual(c.user.get_pref('results_per_page'), None)

        # a stored per-user page size wins over the built-in default
        c.user.set_pref('results_per_page', 100)
        self.assertEqual(g.handle_paging(None, 0), (100, 0, 0))
        self.assertEqual(g.handle_paging(None, 2), (100, 2, 200))
        # and the stored preference itself is left untouched
        self.assertEqual(c.user.get_pref('results_per_page'), 100)

    def test_without_limit_with_default(self):
        # an explicit default overrides the built-in 25
        self.assertEqual(g.handle_paging(None, 0, 30), (30, 0, 0))
        self.assertEqual(g.handle_paging(None, 2, 30), (30, 2, 60))
        # handle paging must not mess up user preferences
        self.assertEqual(c.user.get_pref('results_per_page'), None)

        # but a user preference still beats the supplied default
        c.user.set_pref('results_per_page', 25)
        self.assertEqual(g.handle_paging(None, 0, 30), (25, 0, 0))
        self.assertEqual(g.handle_paging(None, 2, 30), (25, 2, 50))
        self.assertEqual(c.user.get_pref('results_per_page'), 25)
+
+
class TestNeighborhoodCache(object):
    """Tests for the TTL-based NeighborhoodCache lookup helper.

    Note: the datetime arguments use plain ints 2 and 5; the previous
    0o2/0o5 octal literals were a 2to3 artifact of zero-padded "02, 05"
    (they evaluate to the same values, but octal notation is misleading
    in a calendar date).
    """

    @patch('allura.lib.app_globals.M', autospec=True)
    @patch('allura.lib.app_globals.datetime', autospec=True)
    def test_lookup(self, dt_mock, M):
        # _lookup always hits mongo and refreshes the cached timestamp
        dt_mock.datetime.utcnow.side_effect = [
            dt.datetime(2015, 2, 5, 11, 32),
            dt.datetime(2015, 2, 5, 11, 34),
        ]
        ret = M.Neighborhood.query.get.return_value
        cache = NeighborhoodCache(30)
        assert_equal(cache._data, {})

        n = cache._lookup('/p/')
        M.Neighborhood.query.get.assert_called_once_with(url_prefix='/p/')
        assert_equal(n, ret)
        assert_equal(cache._data, {'/p/': {
            'object': ret,
            'ts': dt.datetime(2015, 2, 5, 11, 32),
        }})

        # hits mongo every time
        n = cache._lookup('/p/')
        assert_equal(M.Neighborhood.query.get.call_count, 2)
        assert_equal(n, ret)
        assert_equal(cache._data, {'/p/': {
            'object': ret,
            'ts': dt.datetime(2015, 2, 5, 11, 34),
        }})

    @patch('allura.lib.app_globals.M', autospec=True)
    @patch('allura.lib.app_globals.datetime', autospec=True)
    def test_get(self, dt_mock, M):
        # get() serves from cache until the entry expires
        dt_mock.datetime.utcnow.side_effect = [
            dt.datetime(2015, 2, 5, 11, 32),
            dt.datetime(2015, 2, 5, 11, 34),
        ]
        ret = M.Neighborhood.query.get.return_value
        cache = NeighborhoodCache(30)
        cache._expired = Mock(return_value=False)

        n = cache.get('/p/')
        M.Neighborhood.query.get.assert_called_once_with(url_prefix='/p/')
        assert_equal(n, ret)

        # don't hit mongo second time
        n = cache.get('/p/')
        assert_equal(M.Neighborhood.query.get.call_count, 1)
        assert_equal(n, ret)

        # and hits if cache is expired
        cache._expired.return_value = True
        n = cache.get('/p/')
        assert_equal(M.Neighborhood.query.get.call_count, 2)
        assert_equal(n, ret)

    @patch('allura.lib.app_globals.datetime', autospec=True)
    def test_expired(self, dt_mock):
        # entries expire once their age reaches the configured TTL
        dt_mock.timedelta = dt.timedelta  # restore original
        _now = dt.datetime(2015, 2, 5, 11, 53)
        dt_mock.datetime.utcnow.return_value = _now

        # zero TTL: even a brand-new entry counts as expired
        cache = NeighborhoodCache(0)
        assert_equal(cache._expired({'ts': _now}), True)
        assert_equal(cache._expired({'ts': _now - dt.timedelta(seconds=1)}), True)

        # 30s TTL: expired at exactly 30 seconds, not before
        cache = NeighborhoodCache(30)
        assert_equal(cache._expired({'ts': _now - dt.timedelta(seconds=29)}), False)
        assert_equal(cache._expired({'ts': _now - dt.timedelta(seconds=30)}), True)
http://git-wip-us.apache.org/repos/asf/allura/blob/d52f8e2a/tests/test_helpers.py
----------------------------------------------------------------------
diff --git a/tests/test_helpers.py b/tests/test_helpers.py
new file mode 100644
index 0000000..768a53b
--- /dev/null
+++ b/tests/test_helpers.py
@@ -0,0 +1,568 @@
+# -*- coding: utf-8 -*-
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from unittest import TestCase
+from os import path
+from datetime import datetime, timedelta
+import time
+
+from mock import Mock, patch
+from pylons import tmpl_context as c
+from nose.tools import eq_, assert_equals
+from IPython.testing.decorators import skipif, module_not_available
+from datadiff import tools as dd
+from webob import Request
+from webob.exc import HTTPUnauthorized
+from ming.orm import ThreadLocalORMSession
+from jinja2 import Markup
+
+from allura import model as M
+from allura.lib import helpers as h
+from allura.lib.search import inject_user
+from allura.lib.security import has_access
+from allura.lib.security import Credentials
+from allura.tests import decorators as td
+from alluratest.controller import setup_basic_test
+
+
def setUp():
    """Module-level fixture run by nose before each test in this module.

    The stray ``self`` parameter is removed: nose invokes module-level
    setup functions with no arguments.
    """
    setup_basic_test()
+
+
class TestMakeSafePathPortion(TestCase):
    """Exercise h.make_safe_path_portion in strict and relaxed modes."""

    def setUp(self):
        self.f = h.make_safe_path_portion

    def test_no_latin1_chars(self):
        # nothing in the input maps to ASCII, so the result is empty
        self.assertEqual(self.f('Задачи', relaxed=False), '')

    def test_some_latin1_chars(self):
        # only the characters with an ASCII transliteration survive
        self.assertEqual(self.f('åß∂ƒ', relaxed=False), 'ab')

    def test_strict_mount_point_names(self):
        # strict mode: lowercased, must not start with a digit, '+'/'.'
        # replaced with '-'
        for raw, cleaned in [('1this+is.illegal', 'this-is-illegal'),
                             ('this-1-is-legal', 'this-1-is-legal'),
                             ('THIS-IS-Illegal', 'this-is-illegal')]:
            self.assertEqual(self.f(raw, relaxed=False), cleaned)

    def test_relaxed_mount_point_names(self):
        # relaxed (default) mode: digits, '_', '+', '.' and case are kept
        for raw, cleaned in [('1_this+is.legal', '1_this+is.legal'),
                             ('not*_legal', 'not-legal'),
                             ('THIS-IS-Illegal', 'THIS-IS-Illegal')]:
            self.assertEqual(self.f(raw), cleaned)
+
+
def test_escape_json():
    """escape_json must neutralize '<' so the output is safe to embed in HTML."""
    payload = {"foo": "bar</script><img src=foobar onerror=alert(1)>"}
    expected = '{"foo": "bar\\u003C/script>\\u003Cimg src=foobar onerror=alert(1)>"}'
    assert_equals(h.escape_json(payload), expected)
+
def test_really_unicode():
    """really_unicode must decode byte input (incl. BOM) to text."""
    here_dir = path.dirname(__file__)
    # the BOM input must be bytes: under unicode_literals the 2to3 output
    # silently turned this into a text literal, defeating the decode and
    # making the startswith('\ufeff') assertion unsatisfiable
    s = h.really_unicode(b'\xef\xbb\xbf<?xml version="1.0" encoding="utf-8" ?>')
    assert s.startswith('\ufeff')
    # close the fixture file instead of leaking the handle
    with open(path.join(here_dir, 'data/unicode_test.txt')) as f:
        s = h.really_unicode(f.read())
    assert isinstance(s, str)
    # try non-ascii string in legacy 8bit encoding
    h.really_unicode('\u0410\u0401'.encode('cp1251'))
    # ensure invalid encodings are handled gracefully
    s = h._attempt_encodings('foo', ['LKDJFLDK'])
    assert isinstance(s, str)
+
+
def test_render_genshi_plaintext():
    """A plaintext genshi template renders with the supplied context vars."""
    template = path.join(path.dirname(__file__), 'data/genshi_hello_tmpl')
    eq_('Hello, world!\n', h.render_genshi_plaintext(template, object='world'))
+
+
def test_find_project():
    """find_project resolves an existing project URL and returns None otherwise."""
    project, remainder = h.find_project('/p/test/foo')
    assert_equals(project.shortname, 'test')
    assert_equals(project.neighborhood.name, 'Projects')
    # 'testable' is not a project shortname in the fixtures
    missing, remainder = h.find_project('/p/testable/foo')
    assert missing is None
+
+
def test_make_users():
    """A None user id maps to the anonymous user."""
    first_user = next(h.make_users([None]))
    assert first_user.username == '*anonymous', first_user
+
+
def test_make_roles():
    """make_roles resolves ProjectRole ids back to role objects in context."""
    h.set_context('test', 'wiki', neighborhood='Projects')
    anon_role = M.ProjectRole.anonymous()
    assert next(h.make_roles([anon_role._id])) == anon_role
+
+
@td.with_wiki
def test_make_app_admin_only():
    """make_app_admin_only should leave only admins with access to the app.

    The first batch of assertions establishes the baseline (everyone reads,
    Developers create); after the call every check must pass for the admin
    only.  Ordering matters: ACL changes must be flushed and the credential
    cache cleared before re-checking.
    """
    h.set_context('test', 'wiki', neighborhood='Projects')
    anon = M.User.anonymous()
    dev = M.User.query.get(username='test-user')
    admin = M.User.query.get(username='test-admin')
    c.project.add_user(dev, ['Developer'])
    ThreadLocalORMSession.flush_all()
    # permission results are cached; clear so the new Developer role is seen
    Credentials.get().clear()
    # baseline: everyone can read, devs and admins can create
    assert has_access(c.app, 'read', user=anon)()
    assert has_access(c.app, 'read', user=dev)()
    assert has_access(c.app, 'read', user=admin)()
    assert not has_access(c.app, 'create', user=anon)()
    assert has_access(c.app, 'create', user=dev)()
    assert has_access(c.app, 'create', user=admin)()
    assert c.app.is_visible_to(anon)
    assert c.app.is_visible_to(dev)
    assert c.app.is_visible_to(admin)
    h.make_app_admin_only(c.app)
    ThreadLocalORMSession.flush_all()
    # clear again: the ACL was just rewritten
    Credentials.get().clear()
    # post-condition: read/create/visibility restricted to the admin
    assert not has_access(c.app, 'read', user=anon)()
    assert not has_access(c.app, 'read', user=dev)()
    assert has_access(c.app, 'read', user=admin)()
    assert not has_access(c.app, 'create', user=anon)()
    assert not has_access(c.app, 'create', user=dev)()
    assert has_access(c.app, 'create', user=admin)()
    assert not c.app.is_visible_to(anon)
    assert not c.app.is_visible_to(dev)
    assert c.app.is_visible_to(admin)
+
+
@td.with_wiki
def test_context_setters():
    """set_context/push_context populate c.project and c.app; push_context
    must restore the previous values (or their absence) on exit."""
    h.set_context('test', 'wiki', neighborhood='Projects')
    assert c.project is not None
    assert c.app is not None
    cfg_id = c.app.config._id
    # the app can also be located by config id, as ObjectId or string
    h.set_context('test', app_config_id=cfg_id, neighborhood='Projects')
    assert c.project is not None
    assert c.app is not None
    h.set_context('test', app_config_id=str(cfg_id), neighborhood='Projects')
    assert c.project is not None
    assert c.app is not None
    c.project = c.app = None
    with h.push_context('test', 'wiki', neighborhood='Projects'):
        assert c.project is not None
        assert c.app is not None
    # identity checks replace the old `c.project == c.app == None`
    # chained-equality idiom (PEP 8: comparisons to None use `is`)
    assert c.project is None and c.app is None
    with h.push_context('test', app_config_id=cfg_id, neighborhood='Projects'):
        assert c.project is not None
        assert c.app is not None
    assert c.project is None and c.app is None
    with h.push_context('test', app_config_id=str(cfg_id), neighborhood='Projects'):
        assert c.project is not None
        assert c.app is not None
    assert c.project is None and c.app is None
    del c.project
    del c.app
    with h.push_context('test', app_config_id=str(cfg_id), neighborhood='Projects'):
        assert c.project is not None
        assert c.app is not None
    # attributes absent before the push must be removed again afterwards
    assert not hasattr(c, 'project')
    assert not hasattr(c, 'app')
+
+
def test_encode_keys():
    # encode_keys converts dict keys so they are usable as **kwargs.
    # NOTE(review): under Python 2 the original assertion meant "the key is
    # a byte string, not unicode"; after 2to3 + unicode_literals,
    # ``type(...) != str`` reads as "not text", which inverts the intent on
    # Python 3 — confirm the intended key type against h.encode_keys before
    # porting further.
    kw = h.encode_keys({'foo': 5})
    assert type(list(kw.keys())[0]) != str
+
+
def test_ago():
    """Human-readable relative times from datetimes, epoch floats and strings."""
    two_days_back = datetime.utcnow() - timedelta(days=2)
    assert_equals(h.ago(two_days_back), '2 days ago')
    assert_equals(h.ago_ts(time.time() - 60 * 60 * 2), '2 hours ago')
    three_hours_back = (datetime.utcnow() - timedelta(hours=3)).isoformat()
    assert_equals(h.ago_string(three_hours_back), '3 hours ago')
    # unparsable or missing input degrades to 'unknown'
    assert_equals(h.ago_string('bad format'), 'unknown')
    assert_equals(h.ago_string(None), 'unknown')

    monthish = datetime.utcnow() - timedelta(days=32)
    # past the default cutoff an absolute date is shown instead of '... ago'
    assert 'ago' not in h.ago(monthish)
    assert_equals(h.ago(monthish, show_date_after=90), '1 month ago')
    assert_equals(h.ago(monthish, show_date_after=None), '1 month ago')
+
+
def test_urlquote_unicode():
    """Both quoting helpers must accept non-ASCII text without raising."""
    for quote in (h.urlquote, h.urlquoteplus):
        quote('\u0410')
+
+
def test_sharded_path():
    """A name is sharded into first-char / first-two-chars directories."""
    assert_equals(h.sharded_path('foobar'), 'f/fo')
+
+
def test_paging_sanitizer():
    """paging_sanitizer clamps limit/page against the total row count."""
    # renamed the loop variable: ``input`` shadowed the builtin
    cases = {
        # args (limit, page, total[, zero_based]): expected (limit, page)
        (0, 0, 0): (1, 0),
        ('1', '1', 1): (1, 0),
        (5, '10', 25): (5, 4),
        ('5', 10, 25, False): (5, 5),
        (5, '-1', 25): (5, 0),
        ('5', -1, 25, False): (5, 1),
        (5, '3', 25): (5, 3),
        ('5', 3, 25, False): (5, 3),
    }
    for args, expected in cases.items():
        assert (h.paging_sanitizer(*args)) == expected
+
+
def test_render_any_markup_empty():
    """An empty file renders as an italicized 'Empty File' notice."""
    rendered = h.render_any_markup('foo', '')
    assert_equals(rendered, '<p><em>Empty File</em></p>')
+
+
def test_render_any_markup_plain():
    # Non-markup text files render inside a <pre> block.
    # NOTE(review): the expected string shows raw <b>/<script> tags; the
    # renderer presumably HTML-escapes them and this literal may have been
    # entity-decoded by the mailing-list archive — verify against the
    # original commit before trusting this fixture.
    assert_equals(
        h.render_any_markup(
            'readme.txt', '<b>blah</b>\n<script>alert(1)</script>\nfoo'),
        '<pre><b>blah</b>\n<script>alert(1)</script>\nfoo</pre>')
+
+
def test_render_any_markup_formatting():
    # Markdown files go through the markdown/codehilite pipeline.
    # NOTE(review): the expected '<script>' inside the codehilite span is
    # almost certainly '&lt;script&gt;' in the original commit and was
    # entity-decoded by the mailing-list archive — verify before relying
    # on this fixture.
    assert_equals(h.render_any_markup('README.md', '### foo\n'
                                     '    <script>alert(1)</script> bar'),
                  '<div class="markdown_content"><h3 id="foo">foo</h3>\n'
                  '<div class="codehilite"><pre><span class="nt">'
                  '<script></span>alert(1)<span class="nt">'
                  '</script></span> bar\n</pre></div>\n</div>')
+
+
class AuditLogMock(Mock):
    """Stand-in for allura.model.AuditLog that records messages in memory."""

    # deliberately class-level: entries accumulate across calls so tests
    # can inspect everything logged while the model is patched
    logs = []

    @classmethod
    def log(cls, message):
        cls.logs.append(message)
+
+
@patch('allura.model.AuditLog', new=AuditLogMock)
def test_log_if_changed():
    """log_if_changed writes the attribute and audits only on a real change."""
    artifact = Mock()
    artifact.value = 'test'

    # first call changes the value and records exactly one audit entry
    h.log_if_changed(artifact, 'value', 'changed', 'updated value')
    assert artifact.value == 'changed'
    assert AuditLogMock.logs == ['updated value']

    # second call is a no-op: the value already matches, nothing new logged
    h.log_if_changed(artifact, 'value', 'changed', 'updated value')
    assert artifact.value == 'changed'
    assert AuditLogMock.logs == ['updated value']
+
+
def test_get_tool_packages():
    """Tool-name lookup is case-insensitive and empty for unknown tools."""
    for tool, packages in [('tickets', ['forgetracker']),
                           ('Tickets', ['forgetracker']),
                           ('wrong_tool', [])]:
        assert h.get_tool_packages(tool) == packages
+
+
def test_get_first():
    """get_first unwraps single-element lists and maps missing/empty to None."""
    assert_equals(h.get_first({}, 'title'), None)
    assert_equals(h.get_first({'title': None}, 'title'), None)
    assert_equals(h.get_first({'title': 'Value'}, 'title'), 'Value')
    assert_equals(h.get_first({'title': []}, 'title'), None)
    # (removed an exact duplicate of the list-unwrapping assertion)
    assert_equals(h.get_first({'title': ['Value']}, 'title'), 'Value')
+
+
@patch('allura.lib.search.c')
def test_inject_user(context):
    """$USER placeholders are replaced with the effective username."""
    user = Mock(username='user01')
    # trivial inputs pass through untouched
    for query in (None, '', 'query'):
        assert_equals(inject_user(query, user), query)
    assert_equals(
        inject_user('reported_by_s:$USER OR assigned_to_s:$USER', user),
        'reported_by_s:"user01" OR assigned_to_s:"user01"')
    # without an explicit user argument, the request context's user is used
    for username in ('admin1', '*anonymous'):
        context.user = Mock(username=username)
        assert_equals(
            inject_user('reported_by_s:$USER OR assigned_to_s:$USER'),
            'reported_by_s:"%s" OR assigned_to_s:"%s"' % (username, username))
+
+
def test_datetimeformat():
    """A bare date formats as midnight in 'YYYY-MM-DD HH:MM:SS' form."""
    from datetime import date
    # plain ints: 2to3 had rewritten the zero-padded 01 literals as octal 0o1
    assert h.datetimeformat(date(2013, 1, 1)) == '2013-01-01 00:00:00'
+
+
def test_nl2br_jinja_filter():
    """Newlines become <br> tags; the input markup itself passes through."""
    rendered = h.nl2br_jinja_filter('foo<script>alert(1)</script>\nbar\nbaz')
    assert_equals(rendered,
                  Markup('foo<script>alert(1)</script><br>\nbar<br>\nbaz'))
+
+
def test_split_select_field_options():
    """Quoted options keep embedded spaces; unbalanced quotes split on spaces."""
    assert_equals(h.split_select_field_options('"test message" test2'),
                  ['test message', 'test2'])
    # an unclosed quote falls back to plain whitespace splitting
    assert_equals(h.split_select_field_options('"test message test2'),
                  ['test', 'message', 'test2'])
+
+
def test_notifications_disabled():
    """The context manager disables notifications only inside the block."""
    proj = Mock(notifications_disabled=False)
    with h.notifications_disabled(proj):
        # flipped on entry...
        assert_equals(proj.notifications_disabled, True)
    # ...and restored on exit
    assert_equals(proj.notifications_disabled, False)
+
+
@skipif(module_not_available('html2text'))
def test_plain2markdown_with_html2text():
    """Test plain2markdown using html2text to escape markdown, if available."""
    # NOTE(review): the 'Literal > ...' input line shows decoded entities
    # while the expected output keeps them ('&gt;' etc.) — the input literal
    # was probably entity-decoded by the mailing-list archive; verify
    # against the original commit.  Also, \* and \( live in non-raw
    # strings: Python keeps unknown escapes verbatim but warns on 3.6+ —
    # consider raw strings when porting.
    text = '''paragraph

    4 spaces before this

    *blah*

here's a <tag> that should be <b>preserved</b>
Literal > Ò ¼ & & ሿ
M & Ms - doesn't get escaped
http://blah.com/?x=y&a=b - not escaped either
'''

    # html2text escapes the MD-significant '*' but leaves tags, entities,
    # ampersands and URLs alone; leading indentation is stripped
    expected = '''paragraph

4 spaces before this

\*blah\*

here's a <tag> that should be <b>preserved</b>
Literal &gt; &Ograve; &frac14; &amp; &\#38; &\#x123F;
M & Ms - doesn't get escaped
http://blah.com/?x=y&a=b - not escaped either
'''

    dd.assert_equal(h.plain2markdown(text), expected)

    # multiple internal spaces survive only when explicitly requested
    dd.assert_equal(
        h.plain2markdown('a foo bar\n\n    code here?',
                         preserve_multiple_spaces=True),
        'a foo bar\n\n    code here?')

    # a tab expands to four spaces in preserve mode
    dd.assert_equal(
        h.plain2markdown('\ttab before (stuff)',
                         preserve_multiple_spaces=True),
        '    tab before \(stuff\)')

    dd.assert_equal(
        h.plain2markdown('\ttab before (stuff)',
                         preserve_multiple_spaces=False),
        'tab before \(stuff\)')
+
+
@td.without_module('html2text')
def test_plain2markdown():
    """Test plain2markdown using fallback regexp to escape markdown.

    Potentially MD-special characters are aggresively escaped, as without
    knowledge of the MD parsing rules it's better to be excessive but safe.
    """
    # NOTE(review): as in the html2text variant, the 'Literal > ...' input
    # line looks entity-decoded by the mail archive while the expected
    # output retains the entities — verify against the original commit.
    text = '''paragraph

    4 spaces before this

    *blah*

here's a <tag> that should be <b>preserved</b>
Literal > Ò ¼ & & ሿ
M & Ms - amp doesn't get escaped
http://blah.com/?x=y&a=b - not escaped either
back\\-slash escaped
'''

    # unlike html2text, the regexp fallback also escapes '-' and '.'
    # and doubles existing backslashes
    expected = '''paragraph

4 spaces before this

\*blah\*

here's a <tag> that should be <b>preserved</b>
Literal &gt; &Ograve; &frac14; &amp; &\#38; &\#x123F;
M & Ms \- amp doesn't get escaped
http://blah\.com/?x=y&a=b \- not escaped either
back\\\\\-slash escaped
'''

    dd.assert_equal(h.plain2markdown(text), expected)

    # multiple internal spaces survive only when explicitly requested
    dd.assert_equal(
        h.plain2markdown('a foo bar\n\n    code here?',
                         preserve_multiple_spaces=True),
        'a foo bar\n\n    code here?')

    # a tab expands to four spaces in preserve mode
    dd.assert_equal(
        h.plain2markdown('\ttab before (stuff)',
                         preserve_multiple_spaces=True),
        '    tab before \(stuff\)')

    dd.assert_equal(
        h.plain2markdown('\ttab before (stuff)',
                         preserve_multiple_spaces=False),
        'tab before \(stuff\)')
+
+
class TestUrlOpen(TestCase):
    """h.urlopen wraps urllib2.urlopen, retrying transient failures."""

    @patch('allura.lib.helpers.urllib2')
    def test_no_error(self, urllib2):
        """The successful result of urlopen is returned unchanged."""
        r = h.urlopen('myurl')
        # refer to the patched mock; 2to3 had rewritten these two lines to
        # the unimported urllib.request, causing a NameError at test time
        self.assertEqual(r, urllib2.urlopen.return_value)
        urllib2.urlopen.assert_called_once_with('myurl', timeout=None)

    @patch('allura.lib.helpers.urllib2.urlopen')
    def test_socket_timeout(self, urlopen):
        """Socket timeouts are retried (4 attempts) before propagating."""
        import socket

        def side_effect(url, timeout=None):
            raise socket.timeout()
        urlopen.side_effect = side_effect
        self.assertRaises(socket.timeout, h.urlopen, 'myurl')
        self.assertEqual(urlopen.call_count, 4)

    @patch('allura.lib.helpers.urllib2.urlopen')
    def test_handled_http_error(self, urlopen):
        """HTTP 408 (request timeout) is treated as transient and retried."""
        from urllib.error import HTTPError

        def side_effect(url, timeout=None):
            raise HTTPError('url', 408, 'timeout', None, None)
        urlopen.side_effect = side_effect
        self.assertRaises(HTTPError, h.urlopen, 'myurl')
        self.assertEqual(urlopen.call_count, 4)

    @patch('allura.lib.helpers.urllib2.urlopen')
    def test_unhandled_http_error(self, urlopen):
        """Non-transient HTTP errors (404) propagate immediately, no retries."""
        from urllib.error import HTTPError

        def side_effect(url, timeout=None):
            raise HTTPError('url', 404, 'timeout', None, None)
        urlopen.side_effect = side_effect
        self.assertRaises(HTTPError, h.urlopen, 'myurl')
        self.assertEqual(urlopen.call_count, 1)
+
+
def test_absurl_no_request():
    """Outside a web request, absurl falls back to the configured base host."""
    absolute = h.absurl('/p/test/foobar')
    assert_equals(absolute, 'http://localhost/p/test/foobar')
+
+
@patch.object(h, 'request',
              new=Request.blank('/p/test/foobar', base_url='https://www.mysite.com/p/test/foobar'))
def test_absurl_with_request():
    """Inside a web request, absurl uses that request's scheme and host."""
    expected = 'https://www.mysite.com/p/test/foobar'
    assert_equals(h.absurl('/p/test/foobar'), expected)
+
+
def test_daterange():
    """daterange yields each day from start (inclusive) to end (exclusive)."""
    days = list(h.daterange(datetime(2013, 1, 1), datetime(2013, 1, 4)))
    assert_equals(days, [datetime(2013, 1, 1),
                         datetime(2013, 1, 2),
                         datetime(2013, 1, 3)])
+
+
@patch.object(h, 'request',
              new=Request.blank('/p/test/foobar', base_url='https://www.mysite.com/p/test/foobar'))
def test_login_overlay():
    # login_overlay swallows HTTPUnauthorized raised inside the block
    # (the test reaching the next statement is the assertion)
    with h.login_overlay():
        raise HTTPUnauthorized()
    # still swallowed when the exceptions list does not apply
    # ('foo' — presumably matched against the request path — TODO confirm)
    with h.login_overlay(exceptions=['foo']):
        raise HTTPUnauthorized()
    # but a matching entry ('foobar' vs /p/test/foobar) lets the
    # HTTPUnauthorized propagate to the caller
    with td.raises(HTTPUnauthorized):
        with h.login_overlay(exceptions=['foobar']):
            raise HTTPUnauthorized()
+
+
class TestIterEntryPoints(TestCase):
    """h.iter_entry_points: config-driven disabling and subclass shadowing."""

    def _make_ep(self, name, cls):
        """Build a mock entry point whose load() returns *cls*."""
        m = Mock()
        m.name = name
        m.load.return_value = cls
        return m

    @patch('allura.lib.helpers.pkg_resources')
    @patch.dict(h.tg.config, {'disable_entry_points.allura': 'myapp'})
    def test_disabled(self, pkg_resources):
        """Entry points named in disable_entry_points.* config are dropped."""
        pkg_resources.iter_entry_points.return_value = [
            self._make_ep('myapp', object)]
        self.assertEqual([], list(h.iter_entry_points('allura')))

    @patch('allura.lib.helpers.pkg_resources')
    def test_subclassed_ep(self, pkg_resources):
        """Of two same-name eps where one class subclasses the other,
        only the subclass survives."""
        class App(object):
            pass

        class BetterApp(App):
            pass

        pkg_resources.iter_entry_points.return_value = [
            self._make_ep('myapp', App),
            self._make_ep('myapp', BetterApp)]

        eps = list(h.iter_entry_points('allura'))
        self.assertEqual(len(eps), 1)
        self.assertEqual(BetterApp, eps[0].load())

    @patch('allura.lib.helpers.pkg_resources')
    def test_ambiguous_eps(self, pkg_resources):
        """Same-name eps with no subclass relationship raise ImportError."""
        class App(object):
            pass

        class BetterApp(App):
            pass

        class BestApp(object):
            pass

        pkg_resources.iter_entry_points.return_value = [
            self._make_ep('myapp', App),
            self._make_ep('myapp', BetterApp),
            self._make_ep('myapp', BestApp)]

        # raw string: \[ and \. are regex escapes, not string escapes
        self.assertRaisesRegex(ImportError,
                               r'Ambiguous \[allura\] entry points detected. '
                               'Multiple entry points with name "myapp".',
                               list, h.iter_entry_points('allura'))
+
def test_get_user_status():
    """User status string: enabled / disabled / pending (disabled wins)."""
    real_admin = M.User.by_username('test-admin')
    assert_equals(h.get_user_status(real_admin), 'enabled')

    cases = [
        (dict(disabled=True, pending=False), 'disabled'),
        (dict(disabled=False, pending=True), 'pending'),
        # disabled+pending is not an expected combination, but disabled
        # takes precedence
        (dict(disabled=True, pending=True), 'disabled'),
    ]
    for attrs, expected in cases:
        assert_equals(h.get_user_status(Mock(**attrs)), expected)
http://git-wip-us.apache.org/repos/asf/allura/blob/d52f8e2a/tests/test_mail_util.py
----------------------------------------------------------------------
diff --git a/tests/test_mail_util.py b/tests/test_mail_util.py
new file mode 100644
index 0000000..46b28c4
--- /dev/null
+++ b/tests/test_mail_util.py
@@ -0,0 +1,238 @@
+# -*- coding: utf-8 -*-
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
import unittest
# email.MIMEMultipart / email.MIMEText were removed in Python 3;
# the lowercase email.mime.* paths work on both Python 2 and 3
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

import mock
from nose.tools import raises, assert_equal, assert_false, assert_true
from ming.orm import ThreadLocalORMSession

from alluratest.controller import setup_basic_test, setup_global_objects
from allura.lib.utils import ConfigProxy
from allura.app import Application
from allura.lib.mail_util import (
    parse_address,
    parse_message,
    Header,
    is_autoreply,
    identify_sender,
    _parse_message_id,
)
from allura.lib.exceptions import AddressException
from allura.tests import decorators as td

# config keys used to build/validate inbound email addresses
config = ConfigProxy(
    common_suffix='forgemail.domain',
    return_path='forgemail.return_path')
+
+
class TestReactor(unittest.TestCase):
    """Inbound email handling: address routing and unicode-safe parsing."""

    def setUp(self):
        # rebuild test db/app state and detach sessions so each test
        # starts from a clean slate
        setup_basic_test()
        setup_global_objects()
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()

    @raises(AddressException)
    def test_parse_address_bad_domain(self):
        # domain does not end with the configured forgemail.domain suffix
        parse_address('foo@bar.com')

    @raises(AddressException)
    def test_parse_address_bad_project(self):
        # no project named 'unicorns' in the fixtures
        parse_address('foo@wiki.unicorns.p' + config.common_suffix)

    @raises(AddressException)
    def test_parse_address_missing_tool(self):
        # the address must carry a tool mount point before the project
        parse_address('foo@test.p' + config.common_suffix)

    @raises(AddressException)
    def test_parse_address_bad_tool(self):
        # 'hammer' is not a tool mounted on project 'test'
        parse_address('foo@hammer.test.p' + config.common_suffix)

    @td.with_wiki
    def test_parse_address_good(self):
        # topic@tool.project.nbhd+domain resolves to (topic, project, app)
        topic, project, app = parse_address(
            'foo@wiki.test.p' + config.common_suffix)
        assert_equal(topic, 'foo')
        assert_equal(project.shortname, 'test')
        assert_true(isinstance(app, Application))

    def test_unicode_simple_message(self):
        # a single-part message with an encoded cyrillic body must come
        # back from parse_message as text, not bytes
        charset = 'utf-8'
        msg1 = MIMEText('''По оживлённым берегам
Громады стройные теснятся
Дворцов и башен; корабли
Толпой со всех концов земли
К богатым пристаням стремятся;'''.encode(charset),
                        'plain',
                        charset)
        # NOTE(review): '<fo...@bar.com>' looks truncated by the
        # mailing-list archive; the original commit likely used a full
        # message id — confirm before relying on this literal
        msg1['Message-ID'] = '<fo...@bar.com>'
        s_msg = msg1.as_string()
        msg2 = parse_message(s_msg)
        assert isinstance(msg2['payload'], str)

    def test_unicode_complex_message(self):
        # same guarantee for every part of a multipart message
        charset = 'utf-8'
        p1 = MIMEText('''По оживлённым берегам
Громады стройные теснятся
Дворцов и башен; корабли
Толпой со всех концов земли
К богатым пристаням стремятся;'''.encode(charset),
                      'plain',
                      charset)
        p2 = MIMEText('''<p>По оживлённым берегам
Громады стройные теснятся
Дворцов и башен; корабли
Толпой со всех концов земли
К богатым пристаням стремятся;</p>'''.encode(charset),
                      'plain',
                      charset)
        msg1 = MIMEMultipart()
        # NOTE(review): see the truncated-id note in the simple-message test
        msg1['Message-ID'] = '<fo...@bar.com>'
        msg1.attach(p1)
        msg1.attach(p2)
        s_msg = msg1.as_string()
        msg2 = parse_message(s_msg)
        for part in msg2['parts']:
            # the multipart container itself has no payload
            if part['payload'] is None:
                continue
            assert isinstance(part['payload'], str)
+
+
class TestHeader(object):
    """allura.lib.mail_util.Header construction and RFC 2047 encoding."""

    @raises(TypeError)
    def test_bytestring(self):
        # Header must reject byte strings.  Under unicode_literals the
        # original literal silently became text, making this test an
        # exact duplicate of test_ascii that could never raise; the byte
        # prefix restores the intent (and the unreachable assert after
        # the expected raise is dropped).
        Header(b'[asdf2:wiki] Discussion for Home page')

    def test_ascii(self):
        our_header = Header('[asdf2:wiki] Discussion for Home page')
        assert_equal(str(our_header), '[asdf2:wiki] Discussion for Home page')

    def test_utf8(self):
        # non-ASCII text is base64 RFC 2047-encoded
        our_header = Header('теснятся')
        assert_equal(str(our_header), '=?utf-8?b?0YLQtdGB0L3Rj9GC0YHRjw==?=')

    def test_name_addr(self):
        # only the display-name part is encoded; the addr-spec stays plain
        our_header = Header('"теснятся"', '<da...@b.com>')
        assert_equal(str(our_header),
                     '=?utf-8?b?ItGC0LXRgdC90Y/RgtGB0Y8i?= <da...@b.com>')
+
+
class TestIsAutoreply(object):
    """is_autoreply must recognize auto-responder headers from many MTAs."""

    def setUp(self):
        self.msg = {'headers': {}}

    def _assert_autoreply(self, headers):
        """Merge *headers* into the message and expect it to be flagged."""
        self.msg['headers'].update(headers)
        assert_true(is_autoreply(self.msg))

    def test_empty(self):
        # no headers at all: a regular message
        assert_false(is_autoreply(self.msg))

    def test_gmail(self):
        self._assert_autoreply({'Auto-Submitted': 'auto-replied',
                                'Precedence': 'bulk',
                                'X-Autoreply': 'yes'})

    def test_qmail(self):
        self._assert_autoreply({'Delivered-To': 'Autoresponder'})

    def test_mailtraq(self):
        self._assert_autoreply({'X-POST-MessageClass': '9; Autoresponder'})

    def test_firstclass(self):
        self._assert_autoreply({'X-FC-MachineGenerated': 'true'})

    def test_domain_technologies_control(self):
        self._assert_autoreply({'X-AutoReply-From': 'something',
                                'X-Mail-Autoreply': 'something'})

    def test_communicate_pro(self):
        self._assert_autoreply({'X-Autogenerated': 'Forward'})

    def test_boxtrapper_cpanel(self):
        self._assert_autoreply({'Preference': 'auto_reply',
                                'X-Precedence': 'auto_reply',
                                'X-Autorespond': 'auto_reply'})

    def test_return_path(self):
        # the empty return path convention for bounces/auto-replies
        self._assert_autoreply({'Return-Path': '<>'})
+
+
class TestIdentifySender(object):
    """identify_sender resolution order: explicit address argument first,
    then the From header, finally the anonymous user."""

    @mock.patch('allura.model.EmailAddress')
    def test_arg(self, EA):
        # the explicit address matches a confirmed, claimed EmailAddress
        EA.canonical = lambda e: e
        EA.get.side_effect = [
            mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
        assert_equal(identify_sender(None, 'arg', None, None), 'user')
        # only one lookup: the explicit argument, confirmed addresses only
        EA.get.assert_called_once_with(email='arg', confirmed=True)

    @mock.patch('allura.model.EmailAddress')
    def test_header(self, EA):
        # the arg lookup misses (first side_effect is None), so the
        # From header is tried next and succeeds
        EA.canonical = lambda e: e
        EA.get.side_effect = [
            None, mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
        assert_equal(
            identify_sender(None, 'arg', {'From': 'from'}, None), 'user')
        assert_equal(EA.get.call_args_list,
                     [mock.call(email='arg', confirmed=True), mock.call(email='from')])

    @mock.patch('allura.model.User')
    @mock.patch('allura.model.EmailAddress')
    def test_no_header(self, EA, User):
        # no From header: after the single failed arg lookup, the
        # anonymous user is returned (second side_effect is never consumed)
        anon = User.anonymous()
        EA.canonical = lambda e: e
        EA.get.side_effect = [
            None, mock.Mock(claimed_by_user_id=True, claimed_by_user=lambda:'user')]
        assert_equal(identify_sender(None, 'arg', {}, None), anon)
        assert_equal(EA.get.call_args_list, [mock.call(email='arg', confirmed=True)])

    @mock.patch('allura.model.User')
    @mock.patch('allura.model.EmailAddress')
    def test_no_match(self, EA, User):
        # neither the arg nor the From header resolves: anonymous user
        anon = User.anonymous()
        EA.canonical = lambda e: e
        EA.get.side_effect = [None, None]
        assert_equal(
            identify_sender(None, 'arg', {'From': 'from'}, None), anon)
        assert_equal(EA.get.call_args_list,
                     [mock.call(email='arg', confirmed=True), mock.call(email='from')])
+
+
def test_parse_message_id():
    # _parse_message_id splits a comma-separated Message-ID header value
    # and normalizes each entry to a bare id (angle brackets and any
    # path-style prefix stripped).
    # NOTE(review): the input literal ('<de...', '</p...') appears
    # truncated by the mailing-list archive; restore the full ids from
    # the original commit before relying on this test.
    assert_equal(_parse_message_id('<de...@libjpeg-turbo.p.domain.net>, </p...@libjpeg-turbo.p.domain.net>'), [
        'de31888f6be2d87dc377d9e713876bb514548625.patches@libjpeg-turbo.p.domain.net',
        'de31888f6be2d87dc377d9e713876bb514548625.patches@libjpeg-turbo.p.domain.net',
    ])
http://git-wip-us.apache.org/repos/asf/allura/blob/d52f8e2a/tests/test_markdown.py
----------------------------------------------------------------------
diff --git a/tests/test_markdown.py b/tests/test_markdown.py
new file mode 100644
index 0000000..3a519f1
--- /dev/null
+++ b/tests/test_markdown.py
@@ -0,0 +1,152 @@
+# -*- coding: utf-8 -*-
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import unicode_literals
+import unittest
+import mock
+
+from allura.lib import markdown_extensions as mde
+
+
class TestTracRef1(unittest.TestCase):
    """TracRef1 turns Trac-style #NNN / rNNN references into markdown links."""

    @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
    def test_no_such_artifact(self, lookup):
        # unknown references are left untouched
        lookup.return_value = None
        self.assertEqual(mde.TracRef1().sub('#100'), '#100')

    def test_skip_if_brackets(self):
        # already-bracketed refs must not be linked again
        for text in ('[#100]', '[r123]'):
            self.assertEqual(mde.TracRef1().sub(text), text)

    def test_word_boundaries(self):
        # refs embedded inside words are not references
        for text in ('foo#100', 'r123bar'):
            self.assertEqual(mde.TracRef1().sub(text), text)

    @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
    def test_legit_refs(self, lookup):
        # a resolvable, non-deleted shortlink becomes a markdown link
        link = mock.Mock(url='/p/project/tool/artifact')
        link.ref.artifact.deleted = False
        lookup.return_value = link
        for ref in ('#100', 'r123'):
            self.assertEqual(mde.TracRef1().sub(ref),
                             '[%s](/p/project/tool/artifact)' % ref)
+
+
class TestTracRef2(unittest.TestCase):
    """TracRef2 links ticket:N and comment:M:ticket:N references."""

    @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
    def test_no_such_artifact(self, lookup):
        # unresolvable refs pass through unchanged
        lookup.return_value = None
        self.assertEqual(mde.TracRef2().sub('ticket:100'), 'ticket:100')

    def test_word_boundaries(self):
        # a ref glued to surrounding word characters is not a ref
        for text in ('myticket:100', 'ticket:100th'):
            self.assertEqual(mde.TracRef2().sub(text), text)

    @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
    def test_legit_refs(self, lookup):
        link = mock.Mock(url='/p/project/tool/artifact/')
        link.ref.artifact.deleted = False
        lookup.return_value = link
        ref2 = mde.TracRef2()
        ref2.get_comment_slug = lambda *args: 'abc'
        self.assertEqual(ref2.sub('ticket:100'),
                         '[ticket:100](/p/project/tool/artifact/)')
        # surrounding brackets are preserved around the generated link
        self.assertEqual(ref2.sub('[ticket:100]'),
                         '[[ticket:100](/p/project/tool/artifact/)]')
        # a resolvable comment slug becomes a page anchor
        self.assertEqual(ref2.sub('comment:13:ticket:100'),
                         '[comment:13:ticket:100](/p/project/tool/artifact/#abc)')
        # without a slug the link falls back to the bare artifact URL
        ref2.get_comment_slug = lambda *args: None
        self.assertEqual(ref2.sub('comment:13:ticket:100'),
                         '[comment:13:ticket:100](/p/project/tool/artifact/)')
+
+
class TestTracRef3(unittest.TestCase):
    """TracRef3 links source:path[@rev][#Lnn] refs into the repo browser."""

    def test_no_app_context(self):
        # without an app there is no repository URL to link against
        self.assertEqual(
            mde.TracRef3(None).sub('source:file.py'), 'source:file.py')

    def test_legit_refs(self):
        repo_app = mock.Mock(url='/p/project/tool/')
        # (ref, target): rev defaults to HEAD, #Lnn becomes a #lnn anchor
        cases = [
            ('source:file.py', '/p/project/tool/HEAD/tree/file.py'),
            ('source:file.py@123', '/p/project/tool/123/tree/file.py'),
            ('source:file.py@123#L456', '/p/project/tool/123/tree/file.py#l456'),
            ('source:file.py#L456', '/p/project/tool/HEAD/tree/file.py#l456'),
        ]
        for ref, url in cases:
            self.assertEqual(mde.TracRef3(repo_app).sub(ref),
                             '[%s](%s)' % (ref, url))
+
+
class TestPatternReplacingProcessor(unittest.TestCase):
    """The processor applies every configured pattern across input lines."""

    @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
    def test_run(self, lookup):
        link = mock.Mock(url='/p/project/tool/artifact')
        link.ref.artifact.deleted = False
        lookup.return_value = link
        processor = mde.PatternReplacingProcessor(mde.TracRef1(), mde.TracRef2())
        self.assertEqual(
            processor.run(['#100', 'ticket:100']),
            ['[#100](/p/project/tool/artifact)',
             '[ticket:100](/p/project/tool/artifact)'])
+
+
class TestCommitMessageExtension(unittest.TestCase):
    """End-to-end render of a commit message through ForgeMarkdown with
    CommitMessageExtension: Trac-style refs become links while markdown
    heading/emphasis syntax is rendered literally."""

    @mock.patch('allura.lib.markdown_extensions.TracRef2.get_comment_slug')
    @mock.patch('allura.lib.markdown_extensions.M.Shortlink.lookup')
    def test_convert(self, lookup, get_comment_slug):
        from allura.lib.app_globals import ForgeMarkdown

        # every shortlink lookup resolves to the same live artifact
        shortlink = mock.Mock(url='/p/project/tool/artifact/')
        shortlink.ref.artifact.deleted = False
        lookup.return_value = shortlink
        get_comment_slug.return_value = 'abc'
        app = mock.Mock(url='/p/project/tool/')

        # exercise headings, hr, tickets, revs, comments, source refs
        # and emphasis markers in one message
        text = """\
# Not A Heading #
---
* #100, r2
* ticket:100
* comment:13:ticket:2
* source:test.py@2#L3

Not *strong* or _underlined_."""

        # NOTE(review): the expected anchors use unquoted href attributes
        # (<a href=/p/...>); the quotes may have been stripped by the
        # mailing-list archive — confirm against the original commit.
        expected_html = """\
<div class="markdown_content"><p># Not A Heading #<br>
---<br>
* <a href=/p/project/tool/artifact/>#100</a>, <a href=/p/project/tool/artifact/>r2</a><br>
* <a href=/p/project/tool/artifact/>ticket:100</a><br>
* <a href=/p/project/tool/artifact/#abc>comment:13:ticket:2</a><br>
* <a href=/p/project/tool/2/tree/test.py#l3>source:test.py@2#L3</a></p>
<p>Not *strong* or _underlined_.</div>"""

        md = ForgeMarkdown(
            extensions=[mde.CommitMessageExtension(app), 'nl2br'],
            output_format='html4')
        self.assertEqual(md.convert(text), expected_html)