You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bloodhound.apache.org by rj...@apache.org on 2014/11/15 02:14:53 UTC
svn commit: r1639823 [28/29] - in
/bloodhound/branches/trac-1.0.2-integration/trac: ./ contrib/
contrib/cgi-bin/ contrib/workflow/ doc/ doc/dev/ doc/utils/ sample-plugins/
sample-plugins/permissions/ sample-plugins/workflow/ trac/ trac/admin/
trac/admi...
Modified: bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/git_fs.py
URL: http://svn.apache.org/viewvc/bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/git_fs.py?rev=1639823&r1=1639822&r2=1639823&view=diff
==============================================================================
--- bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/git_fs.py (original)
+++ bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/git_fs.py Sat Nov 15 01:14:46 2014
@@ -15,20 +15,25 @@
from __future__ import with_statement
from datetime import datetime
+import itertools
import os
import sys
from genshi.builder import tag
+from genshi.core import Markup
+from trac.cache import cached
from trac.config import BoolOption, IntOption, PathOption, Option
from trac.core import *
from trac.util import TracError, shorten_line
from trac.util.datefmt import FixedOffset, to_timestamp, format_datetime
-from trac.util.text import to_unicode
+from trac.util.text import to_unicode, exception_to_unicode
+from trac.util.translation import _
from trac.versioncontrol.api import Changeset, Node, Repository, \
IRepositoryConnector, NoSuchChangeset, \
NoSuchNode, IRepositoryProvider
-from trac.versioncontrol.cache import CachedRepository, CachedChangeset
+from trac.versioncontrol.cache import CACHE_YOUNGEST_REV, CachedRepository, \
+ CachedChangeset
from trac.versioncontrol.web_ui import IPropertyRenderer
from trac.web.chrome import Chrome
from trac.wiki import IWikiSyntaxProvider
@@ -37,10 +42,7 @@ from tracopt.versioncontrol.git import P
class GitCachedRepository(CachedRepository):
- """Git-specific cached repository.
-
- Passes through {display,short,normalize}_rev
- """
+ """Git-specific cached repository."""
def display_rev(self, rev):
return self.short_rev(rev)
@@ -50,15 +52,113 @@ class GitCachedRepository(CachedReposito
def normalize_rev(self, rev):
if not rev:
- return self.repos.get_youngest_rev()
+ return self.get_youngest_rev()
normrev = self.repos.git.verifyrev(rev)
if normrev is None:
raise NoSuchChangeset(rev)
return normrev
+ def get_youngest_rev(self):
+ # return None if repository is empty
+ return CachedRepository.get_youngest_rev(self) or None
+
+ def child_revs(self, rev):
+ return self.repos.child_revs(rev)
+
+ def get_changesets(self, start, stop):
+ for key, csets in itertools.groupby(
+ CachedRepository.get_changesets(self, start, stop),
+ key=lambda cset: cset.date):
+ csets = list(csets)
+ if len(csets) == 1:
+ yield csets[0]
+ continue
+ rev_csets = dict((cset.rev, cset) for cset in csets)
+ while rev_csets:
+ revs = [rev for rev in rev_csets
+ if not any(r in rev_csets
+ for r in self.repos.child_revs(rev))]
+ for rev in sorted(revs):
+ yield rev_csets.pop(rev)
+
def get_changeset(self, rev):
return GitCachedChangeset(self, self.normalize_rev(rev), self.env)
+ def sync(self, feedback=None, clean=False):
+ if clean:
+ self.remove_cache()
+
+ metadata = self.metadata
+ self.save_metadata(metadata)
+ meta_youngest = metadata.get(CACHE_YOUNGEST_REV)
+ repos = self.repos
+
+ def is_synced(rev):
+ for count, in self.env.db_query("""
+ SELECT COUNT(*) FROM revision WHERE repos=%s AND rev=%s
+ """, (self.id, rev)):
+ return count > 0
+ return False
+
+ def traverse(rev, seen, revs=None):
+ if revs is None:
+ revs = []
+ while True:
+ if rev in seen:
+ return revs
+ seen.add(rev)
+ if is_synced(rev):
+ return revs
+ revs.append(rev)
+ parent_revs = repos.parent_revs(rev)
+ if not parent_revs:
+ return revs
+ if len(parent_revs) == 1:
+ rev = parent_revs[0]
+ continue
+ idx = len(revs)
+ traverse(parent_revs.pop(), seen, revs)
+ for parent in parent_revs:
+ revs[idx:idx] = traverse(parent, seen)
+
+ while True:
+ repos.sync()
+ repos_youngest = repos.youngest_rev
+ updated = False
+ seen = set()
+
+ for rev in repos.git.all_revs():
+ if repos.child_revs(rev):
+ continue
+ revs = traverse(rev, seen) # topology ordered
+ while revs:
+ # sync revision from older revision to newer revision
+ rev = revs.pop()
+ self.log.info("Trying to sync revision [%s]", rev)
+ cset = repos.get_changeset(rev)
+ with self.env.db_transaction as db:
+ try:
+ self._insert_changeset(db, rev, cset)
+ updated = True
+ except self.env.db_exc.IntegrityError, e:
+ self.log.info('Revision %s already cached: %r',
+ rev, e)
+ db.rollback()
+ continue
+ if feedback:
+ feedback(rev)
+
+ if updated:
+ continue # sync again
+
+ if meta_youngest != repos_youngest:
+ with self.env.db_transaction as db:
+ db("""
+ UPDATE repository SET value=%s WHERE id=%s AND name=%s
+ """, (repos_youngest, self.id, CACHE_YOUNGEST_REV))
+ del self.metadata
+ return
+
class GitCachedChangeset(CachedChangeset):
"""Git-specific cached changeset.
@@ -250,7 +350,7 @@ class GitConnector(Component):
def rlookup_uid(_):
return None
- repos = GitRepository(dir, params, self.log,
+ repos = GitRepository(self.env, dir, params, self.log,
persistent_cache=self.persistent_cache,
git_bin=self.git_bin,
git_fs_encoding=self.git_fs_encoding,
@@ -320,9 +420,9 @@ class CsetPropertyRenderer(Component):
parent_links = intersperse(', ', \
((sha_link(rev),
' (',
- tag.a('diff',
- title="Diff against this parent (show the " \
- "changes merged from the other parents)",
+ tag.a(_("diff"),
+ title=_("Diff against this parent (show the "
+ "changes merged from the other parents)"),
href=context.href.changeset(current_sha, reponame,
old=rev)),
')')
@@ -330,15 +430,16 @@ class CsetPropertyRenderer(Component):
return tag(list(parent_links),
tag.br(),
- tag.span(tag("Note: this is a ",
- tag.strong("merge"), " changeset, "
- "the changes displayed below "
- "correspond to the merge itself."),
+ tag.span(Markup(_("Note: this is a <strong>merge"
+ "</strong> changeset, the "
+ "changes displayed below "
+ "correspond to the merge "
+ "itself.")),
class_='hint'),
tag.br(),
- tag.span(tag("Use the ", tag.tt("(diff)"),
- " links above to see all the changes "
- "relative to each parent."),
+ tag.span(Markup(_("Use the <tt>(diff)</tt> links "
+ "above to see all the changes "
+ "relative to each parent.")),
class_='hint'))
# simple non-merge commit
@@ -357,7 +458,7 @@ class CsetPropertyRenderer(Component):
class GitRepository(Repository):
"""Git repository"""
- def __init__(self, path, params, log,
+ def __init__(self, env, path, params, log,
persistent_cache=False,
git_bin='git',
git_fs_encoding='utf-8',
@@ -367,27 +468,43 @@ class GitRepository(Repository):
use_committer_time=False,
):
+ self.env = env
self.logger = log
self.gitrepo = path
self.params = params
+ self.persistent_cache = persistent_cache
self.shortrev_len = max(4, min(shortrev_len, 40))
self.rlookup_uid = rlookup_uid
self.use_committer_time = use_committer_time
self.use_committer_id = use_committer_id
try:
- self.git = PyGIT.StorageFactory(path, log, not persistent_cache,
- git_bin=git_bin,
- git_fs_encoding=git_fs_encoding) \
- .getInstance()
+ factory = PyGIT.StorageFactory(path, log, not persistent_cache,
+ git_bin=git_bin,
+ git_fs_encoding=git_fs_encoding)
+ self._git = factory.getInstance()
except PyGIT.GitError, e:
+ log.error(exception_to_unicode(e))
raise TracError("%s does not appear to be a Git "
"repository." % path)
- Repository.__init__(self, 'git:'+path, self.params, log)
+ Repository.__init__(self, 'git:' + path, self.params, log)
+ self._cached_git_id = str(self.id)
def close(self):
- self.git = None
+ self._git = None
+
+ @property
+ def git(self):
+ if self.persistent_cache:
+ return self._cached_git
+ else:
+ return self._git
+
+ @cached('_cached_git_id')
+ def _cached_git(self):
+ self._git.invalidate_rev_cache()
+ return self._git
def get_youngest_rev(self):
return self.git.youngest_rev()
@@ -434,6 +551,9 @@ class GitRepository(Repository):
"""GitChangeset factory method"""
return GitChangeset(self, rev)
+ def get_changeset_uid(self, rev):
+ return self.normalize_rev(rev)
+
def get_changes(self, old_path, old_rev, new_path, new_rev,
ignore_ancestry=0):
# TODO: handle renames/copies, ignore_ancestry
@@ -477,8 +597,8 @@ class GitRepository(Repository):
return self.git.children(rev)
def rev_older_than(self, rev1, rev2):
- rc = self.git.rev_is_anchestor_of(rev1, rev2)
- return rc
+ return self.git.rev_is_anchestor_of(self.normalize_rev(rev1),
+ self.normalize_rev(rev2))
# def clear(self, youngest_rev=None):
# self.youngest = None
@@ -493,6 +613,8 @@ class GitRepository(Repository):
if rev_callback:
revs = set(self.git.all_revs())
+ if self.persistent_cache:
+ del self._cached_git # invalidate persistent cache
if not self.git.sync():
return None # nothing expected to change
@@ -511,11 +633,16 @@ class GitNode(Node):
self.fs_sha = None # points to either tree or blobs
self.fs_perm = None
self.fs_size = None
- rev = rev and str(rev) or 'HEAD'
+ if rev:
+ rev = repos.normalize_rev(to_unicode(rev))
+ else:
+ rev = repos.youngest_rev
kind = Node.DIRECTORY
p = path.strip('/')
- if p: # ie. not the root-tree
+ if p: # ie. not the root-tree
+ if not rev:
+ raise NoSuchNode(path, rev)
if not ls_tree_info:
ls_tree_info = repos.git.ls_tree(rev, p) or None
if ls_tree_info:
@@ -574,6 +701,8 @@ class GitNode(Node):
self.repos.git.blame(self.rev,self.__git_path())]
def get_entries(self):
+ if not self.rev: # if empty repository
+ return
if not self.isdir:
return
@@ -599,6 +728,8 @@ class GitNode(Node):
return self.fs_size
def get_history(self, limit=None):
+ if not self.rev: # if empty repository
+ return
# TODO: find a way to follow renames/copies
for is_last, rev in _last_iterable(self.repos.git.history(self.rev,
self.__git_path(), limit)):
Modified: bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/tests/PyGIT.py
URL: http://svn.apache.org/viewvc/bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/tests/PyGIT.py?rev=1639823&r1=1639822&r2=1639823&view=diff
==============================================================================
--- bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/tests/PyGIT.py (original)
+++ bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/tests/PyGIT.py Sat Nov 15 01:14:46 2014
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2012 Edgewall Software
+# Copyright (C) 2012-2013 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
@@ -11,26 +11,38 @@
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
+from __future__ import with_statement
+
import os
-import shutil
import tempfile
import unittest
+from datetime import datetime
from subprocess import Popen, PIPE
+import trac.tests.compat
from trac.test import locate, EnvironmentStub
+from trac.tests.compat import rmtree
from trac.util import create_file
from trac.util.compat import close_fds
-from tracopt.versioncontrol.git.PyGIT import GitCore, Storage, parse_commit
+from trac.versioncontrol.api import Changeset, DbRepositoryProvider, \
+ RepositoryManager
+from tracopt.versioncontrol.git.git_fs import GitConnector
+from tracopt.versioncontrol.git.PyGIT import GitCore, GitError, Storage, \
+ StorageFactory, parse_commit
+from tracopt.versioncontrol.git.tests.git_fs import GitCommandMixin
+
+
+git_bin = None
class GitTestCase(unittest.TestCase):
def test_is_sha(self):
- self.assertTrue(not GitCore.is_sha('123'))
+ self.assertFalse(GitCore.is_sha('123'))
self.assertTrue(GitCore.is_sha('1a3f'))
self.assertTrue(GitCore.is_sha('f' * 40))
- self.assertTrue(not GitCore.is_sha('x' + 'f' * 39))
- self.assertTrue(not GitCore.is_sha('f' * 41))
+ self.assertFalse(GitCore.is_sha('x' + 'f' * 39))
+ self.assertFalse(GitCore.is_sha('f' * 41))
def test_git_version(self):
v = Storage.git_version()
@@ -91,17 +103,17 @@ prettier. I'll tell Ted to use nicer ta
msg, props = parse_commit(self.commit2240a7b)
self.assertTrue(msg)
self.assertTrue(props)
- self.assertEquals(
+ self.assertEqual(
['30aaca4582eac20a52ac7b2ec35bdb908133e5b1',
'5a0dc7365c240795bf190766eba7a27600be3b3e'],
props['parent'])
- self.assertEquals(
+ self.assertEqual(
['Linus Torvalds <to...@linux-foundation.org> 1323915958 -0800'],
props['author'])
- self.assertEquals(props['author'], props['committer'])
+ self.assertEqual(props['author'], props['committer'])
# Merge tag
- self.assertEquals(['''\
+ self.assertEqual(['''\
object 5a0dc7365c240795bf190766eba7a27600be3b3e
type commit
tag tytso-for-linus-20111214A
@@ -127,7 +139,7 @@ dQpo6WWG9HIJ23hOGAGR
-----END PGP SIGNATURE-----'''], props['mergetag'])
# Message
- self.assertEquals("""Merge tag 'tytso-for-linus-20111214' of git://git.kernel.org/pub/scm/linux/kernel/git/tytso/ext4
+ self.assertEqual("""Merge tag 'tytso-for-linus-20111214' of git://git.kernel.org/pub/scm/linux/kernel/git/tytso/ext4
* tag 'tytso-for-linus-20111214' of git://git.kernel.org/pub/scm/linux/kernel/git/tytso/ext4:
ext4: handle EOF correctly in ext4_bio_write_page()
@@ -144,75 +156,260 @@ signature automatically. Yay. The bran
prettier. I'll tell Ted to use nicer tag names for future cases.""", msg)
-class UnicodeNameTestCase(unittest.TestCase):
+class NormalTestCase(unittest.TestCase, GitCommandMixin):
def setUp(self):
self.env = EnvironmentStub()
- self.repos_path = tempfile.mkdtemp(prefix='trac-gitrepos')
- self.git_bin = locate('git')
+ self.repos_path = tempfile.mkdtemp(prefix='trac-gitrepos-')
# create git repository and master branch
- self._git('init', self.repos_path)
+ self._git('init')
+ self._git('config', 'core.quotepath', 'true') # ticket:11198
+ self._git('config', 'user.name', "Joe")
+ self._git('config', 'user.email', "joe@example.com")
create_file(os.path.join(self.repos_path, '.gitignore'))
self._git('add', '.gitignore')
- self._git('commit', '-a', '-m', 'test')
+ self._git_commit('-a', '-m', 'test',
+ date=datetime(2013, 1, 1, 9, 4, 56))
def tearDown(self):
+ RepositoryManager(self.env).reload_repositories()
+ StorageFactory._clean()
+ self.env.reset_db()
if os.path.isdir(self.repos_path):
- shutil.rmtree(self.repos_path)
+ rmtree(self.repos_path)
+
+ def _factory(self, weak, path=None):
+ if path is None:
+ path = os.path.join(self.repos_path, '.git')
+ return StorageFactory(path, self.env.log, weak)
+
+ def _storage(self, path=None):
+ if path is None:
+ path = os.path.join(self.repos_path, '.git')
+ return Storage(path, self.env.log, git_bin, 'utf-8')
+
+ def test_control_files_detection(self):
+ # Exception not raised when path points to ctrl file dir
+ self.assertIsInstance(self._storage().repo, GitCore)
+ # Exception not raised when path points to parent of ctrl files dir
+ self.assertIsInstance(self._storage(self.repos_path).repo, GitCore)
+ # Exception raised when path points to dir with no ctrl files
+ path = tempfile.mkdtemp(dir=self.repos_path)
+ self.assertRaises(GitError, self._storage, path)
+ # Exception raised if a ctrl file is missing
+ os.remove(os.path.join(self.repos_path, '.git', 'HEAD'))
+ self.assertRaises(GitError, self._storage, self.repos_path)
+
+ def test_get_branches_with_cr_in_commitlog(self):
+ # regression test for #11598
+ message = 'message with carriage return'.replace(' ', '\r')
+
+ create_file(os.path.join(self.repos_path, 'ticket11598.txt'))
+ self._git('add', 'ticket11598.txt')
+ self._git_commit('-m', message,
+ date=datetime(2013, 5, 9, 11, 5, 21))
+
+ storage = self._storage()
+ branches = sorted(storage.get_branches())
+ self.assertEqual('master', branches[0][0])
+ self.assertEqual(1, len(branches))
+
+ if os.name == 'nt':
+ del test_get_branches_with_cr_in_commitlog
+
+ def test_rev_is_anchestor_of(self):
+ # regression test for #11215
+ path = os.path.join(self.repos_path, '.git')
+ DbRepositoryProvider(self.env).add_repository('gitrepos', path, 'git')
+ repos = self.env.get_repository('gitrepos')
+ parent_rev = repos.youngest_rev
+
+ create_file(os.path.join(self.repos_path, 'ticket11215.txt'))
+ self._git('add', 'ticket11215.txt')
+ self._git_commit('-m', 'ticket11215',
+ date=datetime(2013, 6, 27, 18, 26, 2))
+ repos.sync()
+ rev = repos.youngest_rev
+
+ self.assertNotEqual(rev, parent_rev)
+ self.assertFalse(repos.rev_older_than(None, None))
+ self.assertFalse(repos.rev_older_than(None, rev[:7]))
+ self.assertFalse(repos.rev_older_than(rev[:7], None))
+ self.assertTrue(repos.rev_older_than(parent_rev, rev))
+ self.assertTrue(repos.rev_older_than(parent_rev[:7], rev[:7]))
+ self.assertFalse(repos.rev_older_than(rev, parent_rev))
+ self.assertFalse(repos.rev_older_than(rev[:7], parent_rev[:7]))
+
+ def test_node_get_history_with_empty_commit(self):
+ # regression test for #11328
+ path = os.path.join(self.repos_path, '.git')
+ DbRepositoryProvider(self.env).add_repository('gitrepos', path, 'git')
+ repos = self.env.get_repository('gitrepos')
+ parent_rev = repos.youngest_rev
+
+ self._git_commit('-m', 'ticket:11328', '--allow-empty',
+ date=datetime(2013, 10, 15, 9, 46, 27))
+ repos.sync()
+ rev = repos.youngest_rev
+
+ node = repos.get_node('', rev)
+ self.assertEqual(rev, repos.git.last_change(rev, ''))
+ history = list(node.get_history())
+ self.assertEqual(u'', history[0][0])
+ self.assertEqual(rev, history[0][1])
+ self.assertEqual(Changeset.EDIT, history[0][2])
+ self.assertEqual(u'', history[1][0])
+ self.assertEqual(parent_rev, history[1][1])
+ self.assertEqual(Changeset.ADD, history[1][2])
+ self.assertEqual(2, len(history))
+
+ def test_sync_after_removing_branch(self):
+ self._git('checkout', '-b', 'b1', 'master')
+ self._git('checkout', 'master')
+ create_file(os.path.join(self.repos_path, 'newfile.txt'))
+ self._git('add', 'newfile.txt')
+ self._git_commit('-m', 'added newfile.txt to master',
+ date=datetime(2013, 12, 23, 6, 52, 23))
+
+ storage = self._storage()
+ storage.sync()
+ self.assertEqual(['b1', 'master'],
+ sorted(b[0] for b in storage.get_branches()))
+ self._git('branch', '-D', 'b1')
+ self.assertEqual(True, storage.sync())
+ self.assertEqual(['master'],
+ sorted(b[0] for b in storage.get_branches()))
+ self.assertEqual(False, storage.sync())
+
+ def test_turn_off_persistent_cache(self):
+ # persistent_cache is enabled
+ parent_rev = self._factory(False).getInstance().youngest_rev()
+
+ create_file(os.path.join(self.repos_path, 'newfile.txt'))
+ self._git('add', 'newfile.txt')
+ self._git_commit('-m', 'test_turn_off_persistent_cache',
+ date=datetime(2014, 1, 29, 13, 13, 25))
+
+ # persistent_cache is disabled
+ rev = self._factory(True).getInstance().youngest_rev()
+ self.assertNotEqual(rev, parent_rev)
- def _git(self, *args):
- args = [self.git_bin] + list(args)
- proc = Popen(args, stdout=PIPE, stderr=PIPE, close_fds=close_fds,
- cwd=self.repos_path)
- proc.wait()
- assert proc.returncode == 0
- return proc
+
+class UnicodeNameTestCase(unittest.TestCase, GitCommandMixin):
+
+ def setUp(self):
+ self.env = EnvironmentStub()
+ self.repos_path = tempfile.mkdtemp(prefix='trac-gitrepos-')
+ # create git repository and master branch
+ self._git('init')
+ self._git('config', 'core.quotepath', 'true') # ticket:11198
+ self._git('config', 'user.name', "Joé") # passing utf-8 bytes
+ self._git('config', 'user.email', "joe@example.com")
+ create_file(os.path.join(self.repos_path, '.gitignore'))
+ self._git('add', '.gitignore')
+ self._git_commit('-a', '-m', 'test',
+ date=datetime(2013, 1, 1, 9, 4, 57))
+
+ def tearDown(self):
+ self.env.reset_db()
+ if os.path.isdir(self.repos_path):
+ rmtree(self.repos_path)
def _storage(self):
path = os.path.join(self.repos_path, '.git')
- return Storage(path, self.env.log, self.git_bin, 'utf-8')
+ return Storage(path, self.env.log, git_bin, 'utf-8')
def test_unicode_verifyrev(self):
storage = self._storage()
self.assertNotEqual(None, storage.verifyrev(u'master'))
- self.assertEquals(None, storage.verifyrev(u'tété'))
+ self.assertIsNone(storage.verifyrev(u'tété'))
def test_unicode_filename(self):
create_file(os.path.join(self.repos_path, 'tickét.txt'))
self._git('add', 'tickét.txt')
- self._git('commit', '-m', 'unicode-filename')
+ self._git_commit('-m', 'unicode-filename', date='1359912600 +0100')
storage = self._storage()
filenames = sorted(fname for mode, type, sha, size, fname
in storage.ls_tree('HEAD'))
- self.assertEquals(unicode, type(filenames[0]))
- self.assertEquals(unicode, type(filenames[1]))
- self.assertEquals(u'.gitignore', filenames[0])
- self.assertEquals(u'tickét.txt', filenames[1])
+ self.assertEqual(unicode, type(filenames[0]))
+ self.assertEqual(unicode, type(filenames[1]))
+ self.assertEqual(u'.gitignore', filenames[0])
+ self.assertEqual(u'tickét.txt', filenames[1])
+ # check commit author, for good measure
+ self.assertEqual(u'Joé <jo...@example.com> 1359912600 +0100',
+ storage.read_commit(storage.head())[1]['author'][0])
def test_unicode_branches(self):
self._git('checkout', '-b', 'tickét10980', 'master')
storage = self._storage()
branches = sorted(storage.get_branches())
- self.assertEquals(unicode, type(branches[0][0]))
- self.assertEquals(unicode, type(branches[1][0]))
- self.assertEquals(u'master', branches[0][0])
- self.assertEquals(u'tickét10980', branches[1][0])
+ self.assertEqual(unicode, type(branches[0][0]))
+ self.assertEqual(unicode, type(branches[1][0]))
+ self.assertEqual(u'master', branches[0][0])
+ self.assertEqual(u'tickét10980', branches[1][0])
contains = sorted(storage.get_branch_contains(branches[1][1],
resolve=True))
- self.assertEquals(unicode, type(contains[0][0]))
- self.assertEquals(unicode, type(contains[1][0]))
- self.assertEquals(u'master', contains[0][0])
- self.assertEquals(u'tickét10980', contains[1][0])
+ self.assertEqual(unicode, type(contains[0][0]))
+ self.assertEqual(unicode, type(contains[1][0]))
+ self.assertEqual(u'master', contains[0][0])
+ self.assertEqual(u'tickét10980', contains[1][0])
def test_unicode_tags(self):
self._git('tag', 'täg-t10980', 'master')
storage = self._storage()
tags = tuple(storage.get_tags())
- self.assertEquals(unicode, type(tags[0]))
- self.assertEquals(u'täg-t10980', tags[0])
+ self.assertEqual(unicode, type(tags[0]))
+ self.assertEqual(u'täg-t10980', tags[0])
self.assertNotEqual(None, storage.verifyrev(u'täg-t10980'))
+ def test_ls_tree(self):
+ paths = [u'normal-path.txt',
+ u'tickét.tx\\t',
+ u'\a\b\t\n\v\f\r\x1b"\\.tx\\t']
+ for path in paths:
+ path_utf8 = path.encode('utf-8')
+ create_file(os.path.join(self.repos_path, path_utf8))
+ self._git('add', path_utf8)
+ self._git_commit('-m', 'ticket:11180 and ticket:11198',
+ date=datetime(2013, 4, 30, 13, 48, 57))
+
+ storage = self._storage()
+ rev = storage.head()
+ entries = storage.ls_tree(rev, '/')
+ self.assertEqual(4, len(entries))
+ self.assertEqual(u'\a\b\t\n\v\f\r\x1b"\\.tx\\t', entries[0][4])
+ self.assertEqual(u'.gitignore', entries[1][4])
+ self.assertEqual(u'normal-path.txt', entries[2][4])
+ self.assertEqual(u'tickét.tx\\t', entries[3][4])
+
+ def test_get_historian(self):
+ paths = [u'normal-path.txt',
+ u'tickét.tx\\t',
+ u'\a\b\t\n\v\f\r\x1b"\\.tx\\t']
+
+ for path in paths:
+ path_utf8 = path.encode('utf-8')
+ create_file(os.path.join(self.repos_path, path_utf8))
+ self._git('add', path_utf8)
+ self._git_commit('-m', 'ticket:11180 and ticket:11198',
+ date=datetime(2013, 4, 30, 17, 48, 57))
+
+ def validate(path, quotepath):
+ self._git('config', 'core.quotepath', quotepath)
+ storage = self._storage()
+ rev = storage.head()
+ with storage.get_historian('HEAD', path) as historian:
+ hrev = storage.last_change('HEAD', path, historian)
+ self.assertEquals(rev, hrev)
+
+ validate(paths[0], 'true')
+ validate(paths[0], 'false')
+ validate(paths[1], 'true')
+ validate(paths[1], 'false')
+ validate(paths[2], 'true')
+ validate(paths[2], 'false')
+
#class GitPerformanceTestCase(unittest.TestCase):
# """Performance test. Not really a unit test.
@@ -232,7 +429,7 @@ class UnicodeNameTestCase(unittest.TestC
# i = str(i)
# s = g.shortrev(i, min_len=4)
# self.assertTrue(i.startswith(s))
-# self.assertEquals(g.fullrev(s), i)
+# self.assertEqual(g.fullrev(s), i)
#
# iters = 1
# t = timeit.Timer("shortrev_test()",
@@ -260,7 +457,7 @@ class UnicodeNameTestCase(unittest.TestC
# t = open(__proc_statm)
# result = t.read().split()
# t.close()
-# assert len(result) == 7
+# self.assertEqual(7, len(result))
# return tuple([ __pagesize*int(p) for p in result ])
# except:
# raise RuntimeError("failed to get memory stats")
@@ -363,14 +560,16 @@ class UnicodeNameTestCase(unittest.TestC
def suite():
+ global git_bin
suite = unittest.TestSuite()
- git = locate("git")
- if git:
- suite.addTest(unittest.makeSuite(GitTestCase, 'test'))
- suite.addTest(unittest.makeSuite(TestParseCommit, 'test'))
+ git_bin = locate('git')
+ if git_bin:
+ suite.addTest(unittest.makeSuite(GitTestCase))
+ suite.addTest(unittest.makeSuite(TestParseCommit))
+ suite.addTest(unittest.makeSuite(NormalTestCase))
if os.name != 'nt':
# Popen doesn't accept unicode path and arguments on Windows
- suite.addTest(unittest.makeSuite(UnicodeNameTestCase, 'test'))
+ suite.addTest(unittest.makeSuite(UnicodeNameTestCase))
else:
print("SKIP: tracopt/versioncontrol/git/tests/PyGIT.py (git cli "
"binary, 'git', not found)")
Modified: bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/tests/__init__.py
URL: http://svn.apache.org/viewvc/bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/tests/__init__.py?rev=1639823&r1=1639822&r2=1639823&view=diff
==============================================================================
--- bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/tests/__init__.py (original)
+++ bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/git/tests/__init__.py Sat Nov 15 01:14:46 2014
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2012 Edgewall Software
+# Copyright (C) 2012-2013 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
@@ -13,12 +13,13 @@
import unittest
-from tracopt.versioncontrol.git.tests import PyGIT
+from tracopt.versioncontrol.git.tests import PyGIT, git_fs
def suite():
suite = unittest.TestSuite()
suite.addTest(PyGIT.suite())
+ suite.addTest(git_fs.suite())
return suite
Modified: bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/svn_fs.py
URL: http://svn.apache.org/viewvc/bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/svn_fs.py?rev=1639823&r1=1639822&r2=1639823&view=diff
==============================================================================
--- bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/svn_fs.py (original)
+++ bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/svn_fs.py Sat Nov 15 01:14:46 2014
@@ -47,11 +47,15 @@ Warning:
those properties...
"""
+from __future__ import with_statement
+
import os.path
+import re
import weakref
import posixpath
+from urllib import quote
-from trac.config import ListOption
+from trac.config import ListOption, ChoiceOption
from trac.core import *
from trac.env import ISystemInfoProvider
from trac.versioncontrol import Changeset, Node, Repository, \
@@ -59,19 +63,25 @@ from trac.versioncontrol import Changese
NoSuchChangeset, NoSuchNode
from trac.versioncontrol.cache import CachedRepository
from trac.util import embedded_numbers
+from trac.util.concurrency import threading
from trac.util.text import exception_to_unicode, to_unicode
from trac.util.translation import _
-from trac.util.datefmt import from_utimestamp
+from trac.util.datefmt import from_utimestamp, to_datetime, utc
application_pool = None
+application_pool_lock = threading.Lock()
def _import_svn():
- global fs, repos, core, delta, _kindmap
+ global fs, repos, core, delta, _kindmap, _svn_uri_canonicalize
from svn import fs, repos, core, delta
_kindmap = {core.svn_node_dir: Node.DIRECTORY,
core.svn_node_file: Node.FILE}
+ try:
+ _svn_uri_canonicalize = core.svn_uri_canonicalize # Subversion 1.7+
+ except AttributeError:
+ _svn_uri_canonicalize = lambda v: v
# Protect svn.core methods from GC
Pool.apr_pool_clear = staticmethod(core.apr_pool_clear)
Pool.apr_pool_destroy = staticmethod(core.apr_pool_destroy)
@@ -150,19 +160,21 @@ class Pool(object):
"""Create a new memory pool"""
global application_pool
- self._parent_pool = parent_pool or application_pool
- # Create pool
- if self._parent_pool:
- self._pool = core.svn_pool_create(self._parent_pool())
- else:
- # If we are an application-level pool,
- # then initialize APR and set this pool
- # to be the application-level pool
- core.apr_initialize()
- application_pool = self
+ with application_pool_lock:
+ self._parent_pool = parent_pool or application_pool
+
+ # Create pool
+ if self._parent_pool:
+ self._pool = core.svn_pool_create(self._parent_pool())
+ else:
+ # If we are an application-level pool,
+ # then initialize APR and set this pool
+ # to be the application-level pool
+ core.apr_initialize()
+ self._pool = core.svn_pool_create(None)
+ application_pool = self
- self._pool = core.svn_pool_create(None)
self._mark_valid()
def __call__(self):
@@ -265,6 +277,17 @@ class SubversionConnector(Component):
Example: `/tags/*, /projectAlpha/tags/A-1.0, /projectAlpha/tags/A-v1.1`
""")
+ eol_style = ChoiceOption(
+ 'svn', 'eol_style', ['native', 'LF', 'CRLF', 'CR'], doc=
+ """End-of-Line character sequences when `svn:eol-style` property is
+ `native`.
+
+ If `native` (the default), substitute with the native EOL marker on
+ the server. Otherwise, if `LF`, `CRLF` or `CR`, substitute with the
+ specified EOL marker.
+
+ (''since 1.0.2'')""")
+
error = None
def __init__(self):
@@ -307,6 +330,7 @@ class SubversionConnector(Component):
'direct-svnfs'.
"""
params.update(tags=self.tags, branches=self.branches)
+ params.setdefault('eol_style', self.eol_style)
repos = SubversionRepository(dir, params, self.log)
if type != 'direct-svnfs':
repos = SvnCachedRepository(self.env, repos, self.log)
@@ -328,7 +352,8 @@ class SubversionRepository(Repository):
else: # note that this should usually not happen (unicode arg expected)
path_utf8 = to_unicode(path).encode('utf-8')
- path_utf8 = os.path.normpath(path_utf8).replace('\\', '/')
+ path_utf8 = core.svn_path_canonicalize(
+ os.path.normpath(path_utf8).replace('\\', '/'))
self.path = path_utf8.decode('utf-8')
root_path_utf8 = repos.svn_repos_find_root_path(path_utf8, self.pool())
@@ -361,7 +386,8 @@ class SubversionRepository(Repository):
assert self.scope[0] == '/'
# we keep root_path_utf8 for RA
ra_prefix = 'file:///' if os.name == 'nt' else 'file://'
- self.ra_url_utf8 = ra_prefix + root_path_utf8
+ self.ra_url_utf8 = _svn_uri_canonicalize(ra_prefix +
+ quote(root_path_utf8))
self.clear()
def clear(self, youngest_rev=None):
@@ -475,7 +501,7 @@ class SubversionRepository(Repository):
specifications. No revision given means use the latest.
"""
path = path or ''
- if path and path[-1] == '/':
+ if path and path != '/' and path[-1] == '/':
path = path[:-1]
rev = self.normalize_rev(rev) or self.youngest_rev
return SubversionNode(path, rev, self, self.pool)
@@ -493,6 +519,18 @@ class SubversionRepository(Repository):
revs.append(r)
return revs
+ def _get_changed_revs(self, node_infos):
+ path_revs = {}
+ for node, first in node_infos:
+ path = node.path
+ revs = []
+ for p, r, chg in node.get_history():
+ if p != path or r < first:
+ break
+ revs.append(r)
+ path_revs[path] = revs
+ return path_revs
+
def _history(self, path, start, end, pool):
"""`path` is a unicode path in the scope.
@@ -640,14 +678,6 @@ class SubversionRepository(Repository):
(wraps ``repos.svn_repos_dir_delta``)
"""
- def key(value):
- return value[1].path if value[1] is not None else value[0].path
- return iter(sorted(self._get_changes(old_path, old_rev, new_path,
- new_rev, ignore_ancestry),
- key=key))
-
- def _get_changes(self, old_path, old_rev, new_path, new_rev,
- ignore_ancestry):
old_node = new_node = None
old_rev = self.normalize_rev(old_rev)
new_rev = self.normalize_rev(new_rev)
@@ -688,8 +718,12 @@ class SubversionRepository(Repository):
entry_props,
ignore_ancestry,
subpool())
- for path, kind, change in editor.deltas:
- path = _from_svn(path)
+ # sort deltas by path before creating `SubversionNode`s to reduce
+ # memory usage (#10978)
+ deltas = sorted(((_from_svn(path), kind, change)
+ for path, kind, change in editor.deltas),
+ key=lambda entry: entry[0])
+ for path, kind, change in deltas:
old_node = new_node = None
if change != Changeset.ADD:
old_node = self.get_node(posixpath.join(old_path, path),
@@ -753,13 +787,15 @@ class SubversionNode(Node):
"""Retrieve raw content as a "read()"able object."""
if self.isdir:
return None
- pool = Pool(self.pool)
- s = core.Stream(fs.file_contents(self.root, self._scoped_path_utf8,
- pool()))
- # The stream object needs to reference the pool to make sure the pool
- # is not destroyed before the former.
- s._pool = pool
- return s
+ return FileContentStream(self)
+
def get_processed_content(self, keyword_substitution=True, eol_hint=None):
    """Retrieve processed content as a "read()"able object.

    Unlike `get_content`, the returned stream performs svn:keywords
    substitution (unless `keyword_substitution` is false) and EOL
    translation; `eol_hint` overrides the repository's `eol_style`
    parameter when given.  Returns `None` for directories.
    """
    if self.isdir:
        return None
    # the explicit hint is stronger than the per-repository default
    eol_style = self.repos.params.get('eol_style') if eol_hint is None \
                else eol_hint
    return FileContentStream(self, keyword_substitution, eol_style)
def get_entries(self):
"""Yield `SubversionNode` corresponding to entries in this directory.
@@ -811,7 +847,10 @@ class SubversionNode(Node):
rev = _svn_rev(self.rev)
start = _svn_rev(0)
file_url_utf8 = posixpath.join(self.repos.ra_url_utf8,
- self._scoped_path_utf8)
+ quote(self._scoped_path_utf8))
+ # svn_client_blame2() requires a canonical uri since
+ # Subversion 1.7 (#11167)
+ file_url_utf8 = _svn_uri_canonicalize(file_url_utf8)
self.repos.log.info('opening ra_local session to %r',
file_url_utf8)
from svn import client
@@ -1006,7 +1045,7 @@ class SubversionChangeset(Changeset):
action = Changeset.EDIT
# identify the most interesting base_path/base_rev
# in terms of last changed information (see r2562)
- if revroots.has_key(base_rev):
+ if base_rev in revroots:
b_root = revroots[base_rev]
else:
b_root = fs.revision_root(self.fs_ptr, base_rev, pool())
@@ -1094,3 +1133,186 @@ def DiffChangeEditor():
return DiffChangeEditor()
+
class FileContentStream(object):
    """Read-only file-like object over a Subversion file's raw contents.

    Optionally performs svn:keywords substitution and, for files stored
    with ``svn:eol-style = native``, EOL translation while reading.
    """

    # group name -> keyword aliases that all expand to the same value
    KEYWORD_GROUPS = {
        'rev': ['LastChangedRevision', 'Rev', 'Revision'],
        'date': ['LastChangedDate', 'Date'],
        'author': ['LastChangedBy', 'Author'],
        'url': ['HeadURL', 'URL'],
        'id': ['Id'],
        'header': ['Header'],
    }
    KEYWORDS = reduce(set.union, map(set, KEYWORD_GROUPS.values()))
    NATIVE_EOL = '\r\n' if os.name == 'nt' else '\n'
    NEWLINES = {'LF': '\n', 'CRLF': '\r\n', 'CR': '\r', 'native': NATIVE_EOL}
    # upper bound used when scanning for a complete "$Keyword...$" sequence
    KEYWORD_MAX_SIZE = 256
    CHUNK_SIZE = 4096

    # class-level defaults; overridden per-instance in __init__ when
    # substitution / translation is actually requested
    keywords_re = None
    native_eol = None
    newline = '\n'

    def __init__(self, node, keyword_substitution=None, eol=None):
        """Wrap the contents of `node` (a file node).

        :param keyword_substitution: when truthy, expand the keywords
            listed in the node's svn:keywords property
        :param eol: 'LF', 'CRLF', 'CR' or 'native'; any other value
            leaves line endings untouched
        """
        self.translated = ''
        self.buffer = ''
        self.repos = node.repos
        self.node = node
        self.fs_ptr = node.fs_ptr
        self.pool = Pool()
        # Note: we _must_ use a detached pool here, as the lifetime of
        # this object can exceed those of the node or even the repository
        if keyword_substitution:
            keywords = (node._get_prop(core.SVN_PROP_KEYWORDS) or '').split()
            self.keywords = self._get_keyword_values(set(keywords) &
                                                     set(self.KEYWORDS))
            self.keywords_re = self._build_keywords_re(self.keywords)
        # EOL translation only applies to svn:eol-style=native files and
        # only when the requested marker differs from plain LF
        if self.NEWLINES.get(eol, '\n') != '\n' and \
                node._get_prop(core.SVN_PROP_EOL_STYLE) == 'native':
            self.native_eol = True
            self.newline = self.NEWLINES[eol]
        self.stream = core.Stream(fs.file_contents(node.root,
                                                   node._scoped_path_utf8,
                                                   self.pool()))

    def __del__(self):
        self.close()

    def close(self):
        """Release the underlying stream and destroy the detached pool."""
        self.stream = None
        self.fs_ptr = None
        if self.pool:
            self.pool.destroy()
            self.pool = None

    def read(self, n=None):
        """Read up to `n` bytes (all remaining data if `n` is None)."""
        if self.stream is None:
            raise ValueError('I/O operation on closed file')
        # fast path: nothing to rewrite, delegate straight to the stream
        if self.keywords_re is None and not self.native_eol:
            return self._read_dumb(self.stream, n)
        else:
            return self._read_substitute(self.stream, n)

    def _get_revprop(self, name):
        """Return revision property `name` for the node's revision."""
        return fs.revision_prop(self.fs_ptr, self.node.rev, name, self.pool())

    def _get_keyword_values(self, keywords):
        """Map each enabled keyword alias to its UTF-8 encoded value.

        Returns None when no keyword from `keywords` applies.
        """
        if not keywords:
            return None

        node = self.node
        mtime = to_datetime(node.last_modified, utc)
        shortdate = mtime.strftime('%Y-%m-%d %H:%M:%SZ')
        created_rev = unicode(node.created_rev)
        # Note that the `to_unicode` has a small probability to mess-up binary
        # properties, see #4321.
        author = to_unicode(self._get_revprop(core.SVN_PROP_REVISION_AUTHOR))
        url = node.repos.get_path_url(node.path, node.rev) or node.path
        data = {
            'rev': created_rev, 'author': author, 'url': url,
            'date': mtime.strftime('%Y-%m-%d %H:%M:%S +0000 (%a, %d %b %Y)'),
            'id': ' '.join((posixpath.basename(node.path), created_rev,
                            shortdate, author)),
            'header': ' '.join((url, created_rev, shortdate, author)),
        }
        values = {}
        for name, aliases in self.KEYWORD_GROUPS.iteritems():
            # enabling any alias of a group enables the whole group
            if any(kw for kw in aliases if kw in keywords):
                for kw in aliases:
                    values[kw] = data[name]
        if values:
            return dict((key, value.encode('utf-8'))
                        for key, value in values.iteritems())
        else:
            return None

    def _build_keywords_re(self, keywords):
        """Compile a pattern matching "$Keyword$", "$Keyword: ... $" and
        the fixed-width "$Keyword:: ... $" forms for `keywords`."""
        if keywords:
            return re.compile("""
                [$]
                (?P<keyword>%s)
                (?P<rest>
                    (?: :[ ][^$\r\n]+?[ ]
                     | ::[ ][^$\r\n]+?[ #]
                    )
                )?
                [$]""" % '|'.join(keywords),
                re.VERBOSE)
        else:
            return None

    def _read_dumb(self, stream, n):
        """Pass-through read used when no substitution is needed."""
        return stream.read(n)

    def _read_substitute(self, stream, n):
        """Read with keyword substitution and/or EOL translation.

        Keeps up to KEYWORD_MAX_SIZE untranslated bytes buffered so a
        "$Keyword...$" sequence split across chunk boundaries is still
        recognized.
        """
        if n is None:
            n = -1

        buffer = self.buffer
        translated = self.translated
        while True:
            if 0 <= n <= len(translated):
                # enough translated data to satisfy the caller
                self.buffer = buffer
                self.translated = translated[n:]
                return translated[:n]

            if len(buffer) < self.KEYWORD_MAX_SIZE:
                buffer += stream.read(self.CHUNK_SIZE) or ''
                if not buffer:
                    # EOF: return whatever has been translated so far
                    self.buffer = buffer
                    self.translated = ''
                    return translated

            # search first "$" character
            pos = buffer.find('$') if self.keywords_re else -1
            if pos == -1:
                translated += self._translate_newline(buffer)
                buffer = ''
                continue
            if pos > 0:
                # move to the first "$" character
                translated += self._translate_newline(buffer[:pos])
                buffer = buffer[pos:]

            match = None
            while True:
                # search second "$" character
                pos = buffer.find('$', 1)
                if pos == -1:
                    translated += self._translate_newline(buffer)
                    buffer = ''
                    break
                if pos < self.KEYWORD_MAX_SIZE:
                    match = self.keywords_re.match(buffer)
                    if match:
                        break  # found "$Keyword$" in the first 255 bytes
                # move to the second "$" character
                translated += self._translate_newline(buffer[:pos])
                buffer = buffer[pos:]
            if pos == -1 or not match:
                continue

            # move to the next character of the second "$" character
            pos += 1
            translated += self._translate_keyword(buffer[:pos], match)
            buffer = buffer[pos:]
            continue

    def _translate_newline(self, data):
        """Replace LF with the requested EOL marker when enabled."""
        if self.native_eol:
            data = data.replace('\n', self.newline)
        return data

    def _translate_keyword(self, buffer, match):
        """Expand one matched "$Keyword...$" occurrence.

        "$Keyword$" and "$Keyword: old $" become "$Keyword: value $";
        the fixed-width "$Keyword:: ... $" form keeps its width, padding
        the value or truncating it with a trailing "#".
        """
        keyword = match.group('keyword')
        value = self.keywords.get(keyword)
        if value is None:
            # keyword matched the pattern but is not enabled: keep as-is
            return buffer
        rest = match.group('rest')
        if rest is None or not rest.startswith('::'):
            return '$%s: %s $' % (keyword, value)
        elif len(rest) - 4 >= len(value):
            # pad the value to the reserved width
            return '$%s:: %-*s $' % (keyword, len(rest) - 4, value)
        else:
            # truncate and mark the cut with "#"
            return '$%s:: %s#$' % (keyword, value[:len(rest) - 4])
Modified: bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/svn_prop.py
URL: http://svn.apache.org/viewvc/bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/svn_prop.py?rev=1639823&r1=1639822&r2=1639823&view=diff
==============================================================================
--- bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/svn_prop.py (original)
+++ bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/svn_prop.py Sat Nov 15 01:14:46 2014
@@ -154,7 +154,7 @@ class SubversionPropertyRenderer(Compone
def _render_needslock(self, context):
return tag.img(src=context.href.chrome('common/lock-locked.png'),
- alt="needs lock", title="needs lock")
+ alt=_("needs lock"), title=_("needs lock"))
def _render_mergeinfo(self, name, mode, context, props):
rows = []
@@ -197,6 +197,7 @@ class SubversionMergePropertyRenderer(Co
if path not in branch_starts:
branch_starts[path] = rev + 1
rows = []
+ eligible_infos = []
if name.startswith('svnmerge-'):
sources = props[name].split()
else:
@@ -232,9 +233,9 @@ class SubversionMergePropertyRenderer(Co
if blocked:
eligible -= set(Ranges(blocked))
if eligible:
- nrevs = repos._get_node_revs(spath, max(eligible),
- min(eligible))
- eligible &= set(nrevs)
+ node = repos.get_node(spath, max(eligible))
+ eligible_infos.append((spath, node, eligible, row))
+ continue
eligible = to_ranges(eligible)
row.append(_get_revs_link(_('eligible'), context,
spath, eligible))
@@ -246,6 +247,22 @@ class SubversionMergePropertyRenderer(Co
rows.append((deleted, spath,
[tag.td('/' + spath),
tag.td(revs, colspan=revs_cols)]))
+
+ # fetch eligible revisions for each path at a time
+ changed_revs = {}
+ changed_nodes = [(node, min(eligible))
+ for spath, node, eligible, row in eligible_infos]
+ if changed_nodes:
+ changed_revs = repos._get_changed_revs(changed_nodes)
+ for spath, node, eligible, row in eligible_infos:
+ if spath in changed_revs:
+ eligible &= set(changed_revs[spath])
+ else:
+ eligible.clear()
+ row.append(_get_revs_link(_("eligible"), context, spath,
+ to_ranges(eligible)))
+ rows.append((False, spath, [tag.td(each) for each in row]))
+
if not rows:
return None
rows.sort()
@@ -346,33 +363,52 @@ class SubversionMergePropertyDiffRendere
removed_label = [_("reverse-merged: "), _("un-blocked: ")][blocked]
added_ni_label = _("marked as non-inheritable: ")
removed_ni_label = _("unmarked as non-inheritable: ")
- def revs_link(revs, context):
- if revs:
- revs = to_ranges(revs)
- return _get_revs_link(revs.replace(',', u',\u200b'),
- context, spath, revs)
- modified_sources = []
+
+ sources = []
+ changed_revs = {}
+ changed_nodes = []
for spath, (new_revs, new_revs_ni) in new_sources.iteritems():
- if spath in old_sources:
- (old_revs, old_revs_ni), status = old_sources.pop(spath), None
- else:
+ new_spath = spath not in old_sources
+ if new_spath:
old_revs = old_revs_ni = set()
- status = _(' (added)')
+ else:
+ old_revs, old_revs_ni = old_sources.pop(spath)
added = new_revs - old_revs
removed = old_revs - new_revs
+ # unless new revisions differ from old revisions
+ if not added and not removed:
+ continue
added_ni = new_revs_ni - old_revs_ni
removed_ni = old_revs_ni - new_revs_ni
+ revs = sorted(added | removed | added_ni | removed_ni)
try:
- all_revs = set(repos._get_node_revs(spath))
- # TODO: also pass first_rev here, for getting smaller a set
- # (this is an optmization fix, result is already correct)
- added &= all_revs
- removed &= all_revs
- added_ni &= all_revs
- removed_ni &= all_revs
+ node = repos.get_node(spath, revs[-1])
+ changed_nodes.append((node, revs[0]))
except NoSuchNode:
pass
+ sources.append((spath, new_spath, added, removed, added_ni,
+ removed_ni))
+ if changed_nodes:
+ changed_revs = repos._get_changed_revs(changed_nodes)
+
+ def revs_link(revs, context):
+ if revs:
+ revs = to_ranges(revs)
+ return _get_revs_link(revs.replace(',', u',\u200b'),
+ context, spath, revs)
+ modified_sources = []
+ for spath, new_spath, added, removed, added_ni, removed_ni in sources:
+ if spath in changed_revs:
+ revs = set(changed_revs[spath])
+ added &= revs
+ removed &= revs
if added or removed:
+ added_ni &= revs
+ removed_ni &= revs
+ if new_spath:
+ status = _(" (added)")
+ else:
+ status = None
modified_sources.append((
spath, [_get_source_link(spath, new_context), status],
added and tag(added_label, revs_link(added, new_context)),
Modified: bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/tests/__init__.py
URL: http://svn.apache.org/viewvc/bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/tests/__init__.py?rev=1639823&r1=1639822&r2=1639823&view=diff
==============================================================================
--- bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/tests/__init__.py (original)
+++ bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/tests/__init__.py Sat Nov 15 01:14:46 2014
@@ -1,3 +1,16 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2013 Edgewall Software
+# All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution. The terms
+# are also available at http://trac.edgewall.org/wiki/TracLicense.
+#
+# This software consists of voluntary contributions made by many
+# individuals. For the exact contribution history, see the revision
+# history and logs, available at http://trac.edgewall.org/log/.
+
import unittest
from tracopt.versioncontrol.svn.tests import svn_fs
Modified: bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/tests/svn_fs.py
URL: http://svn.apache.org/viewvc/bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/tests/svn_fs.py?rev=1639823&r1=1639822&r2=1639823&view=diff
==============================================================================
--- bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/tests/svn_fs.py (original)
+++ bloodhound/branches/trac-1.0.2-integration/trac/tracopt/versioncontrol/svn/tests/svn_fs.py Sat Nov 15 01:14:46 2014
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C)2005-2009 Edgewall Software
+# Copyright (C) 2005-2013 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <cm...@gmx.de>
# All rights reserved.
#
@@ -17,8 +17,6 @@
from datetime import datetime
import new
import os.path
-import stat
-import shutil
import tempfile
import unittest
@@ -30,20 +28,35 @@ try:
except ImportError:
has_svn = False
-from trac.test import EnvironmentStub, TestSetup
+from genshi.core import Stream
+
+import trac.tests.compat
+from trac.test import EnvironmentStub, Mock, MockPerm, TestSetup
from trac.core import TracError
+from trac.mimeview.api import Context
from trac.resource import Resource, resource_exists
from trac.util.concurrency import get_thread_id
from trac.util.datefmt import utc
-from trac.versioncontrol import DbRepositoryProvider, Changeset, Node, \
- NoSuchChangeset
-from tracopt.versioncontrol.svn import svn_fs
+from trac.versioncontrol.api import DbRepositoryProvider, Changeset, Node, \
+ NoSuchChangeset, RepositoryManager
+from trac.versioncontrol import svn_fs, svn_prop
+from trac.web.href import Href
-REPOS_PATH = os.path.join(tempfile.gettempdir(), 'trac-svnrepos')
+REPOS_PATH = None
REPOS_NAME = 'repo'
+URL = 'svn://test'
+
+HEAD = 29
+TETE = 26
+
+NATIVE_EOL = '\r\n' if os.name == 'nt' else '\n'
+
-HEAD = 22
-TETE = 21
def _create_context():
    """Build a rendering `Context` from a mocked anonymous request."""
    req = Mock(base_path='', chrome={}, args={}, session={},
               abs_href=Href('/'), href=Href('/'), locale=None,
               perm=MockPerm(), authname='anonymous', tz=utc)
    return Context.from_request(req)
class SubversionRepositoryTestSetup(TestSetup):
@@ -57,8 +70,6 @@ class SubversionRepositoryTestSetup(Test
pool = core.svn_pool_create(None)
dumpstream = None
try:
- if os.path.exists(REPOS_PATH):
- print 'trouble ahead with db/rep-cache.db... see #8278'
r = repos.svn_repos_create(REPOS_PATH, '', '', None, None, pool)
if hasattr(repos, 'svn_repos_load_fs2'):
repos.svn_repos_load_fs2(r, dumpfile, StringIO(),
@@ -85,14 +96,14 @@ class NormalTests(object):
def test_resource_exists(self):
repos = Resource('repository', REPOS_NAME)
- self.assertEqual(True, resource_exists(self.env, repos))
- self.assertEqual(False, resource_exists(self.env, repos(id='xxx')))
+ self.assertTrue(resource_exists(self.env, repos))
+ self.assertFalse(resource_exists(self.env, repos(id='xxx')))
node = repos.child('source', u'tête')
- self.assertEqual(True, resource_exists(self.env, node))
- self.assertEqual(False, resource_exists(self.env, node(id='xxx')))
+ self.assertTrue(resource_exists(self.env, node))
+ self.assertFalse(resource_exists(self.env, node(id='xxx')))
cset = repos.child('changeset', HEAD)
- self.assertEqual(True, resource_exists(self.env, cset))
- self.assertEqual(False, resource_exists(self.env, cset(id=123456)))
+ self.assertTrue(resource_exists(self.env, cset))
+ self.assertFalse(resource_exists(self.env, cset(id=123456)))
def test_repos_normalize_path(self):
self.assertEqual('/', self.repos.normalize_path('/'))
@@ -115,42 +126,49 @@ class NormalTests(object):
def test_rev_navigation(self):
self.assertEqual(1, self.repos.oldest_rev)
- self.assertEqual(None, self.repos.previous_rev(0))
- self.assertEqual(None, self.repos.previous_rev(1))
+ self.assertIsNone(self.repos.previous_rev(0))
+ self.assertIsNone(self.repos.previous_rev(1))
self.assertEqual(HEAD, self.repos.youngest_rev)
self.assertEqual(6, self.repos.next_rev(5))
self.assertEqual(7, self.repos.next_rev(6))
# ...
- self.assertEqual(None, self.repos.next_rev(HEAD))
+ self.assertIsNone(self.repos.next_rev(HEAD))
self.assertRaises(NoSuchChangeset, self.repos.normalize_rev, HEAD + 1)
def test_rev_path_navigation(self):
self.assertEqual(1, self.repos.oldest_rev)
- self.assertEqual(None, self.repos.previous_rev(0, u'tête'))
- self.assertEqual(None, self.repos.previous_rev(1, u'tête'))
+ self.assertIsNone(self.repos.previous_rev(0, u'tête'))
+ self.assertIsNone(self.repos.previous_rev(1, u'tête'))
self.assertEqual(HEAD, self.repos.youngest_rev)
self.assertEqual(6, self.repos.next_rev(5, u'tête'))
self.assertEqual(13, self.repos.next_rev(6, u'tête'))
# ...
- self.assertEqual(None, self.repos.next_rev(HEAD, u'tête'))
+ self.assertIsNone(self.repos.next_rev(HEAD, u'tête'))
# test accentuated characters
- self.assertEqual(None,
- self.repos.previous_rev(17, u'tête/R\xe9sum\xe9.txt'))
+ self.assertIsNone(self.repos.previous_rev(17, u'tête/R\xe9sum\xe9.txt'))
self.assertEqual(17, self.repos.next_rev(16, u'tête/R\xe9sum\xe9.txt'))
def test_has_node(self):
- self.assertEqual(False, self.repos.has_node(u'/tête/dir1', 3))
- self.assertEqual(True, self.repos.has_node(u'/tête/dir1', 4))
- self.assertEqual(True, self.repos.has_node(u'/tête/dir1'))
+ self.assertFalse(self.repos.has_node(u'/tête/dir1', 3))
+ self.assertTrue(self.repos.has_node(u'/tête/dir1', 4))
+ self.assertTrue(self.repos.has_node(u'/tête/dir1'))
def test_get_node(self):
+ node = self.repos.get_node(u'/')
+ self.assertEqual(u'', node.name)
+ self.assertEqual(u'/', node.path)
+ self.assertEqual(Node.DIRECTORY, node.kind)
+ self.assertEqual(HEAD, node.rev)
+ self.assertEqual(HEAD, node.created_rev)
+ self.assertEqual(datetime(2014, 4, 14, 16, 49, 44, 990695, utc),
+ node.last_modified)
node = self.repos.get_node(u'/tête')
self.assertEqual(u'tête', node.name)
self.assertEqual(u'/tête', node.path)
self.assertEqual(Node.DIRECTORY, node.kind)
self.assertEqual(HEAD, node.rev)
self.assertEqual(TETE, node.created_rev)
- self.assertEqual(datetime(2007, 4, 30, 17, 45, 26, 234375, utc),
+ self.assertEqual(datetime(2013, 4, 28, 5, 36, 6, 29637, utc),
node.last_modified)
node = self.repos.get_node(u'/tête/README.txt')
self.assertEqual('README.txt', node.name)
@@ -195,9 +213,9 @@ class NormalTests(object):
def test_get_dir_content(self):
node = self.repos.get_node(u'/tête')
- self.assertEqual(None, node.content_length)
- self.assertEqual(None, node.content_type)
- self.assertEqual(None, node.get_content())
+ self.assertIsNone(node.content_length)
+ self.assertIsNone(node.content_type)
+ self.assertIsNone(node.get_content())
def test_get_file_content(self):
node = self.repos.get_node(u'/tête/README.txt')
@@ -216,6 +234,141 @@ class NormalTests(object):
self.assertEqual('native', props['svn:eol-style'])
self.assertEqual('text/plain', props['svn:mime-type'])
def test_get_file_content_without_native_eol_style(self):
    """Without svn:eol-style, raw and processed contents are identical."""
    f = self.repos.get_node(u'/tête/README.txt', 2)
    props = f.get_properties()
    self.assertIsNone(props.get('svn:eol-style'))
    self.assertEqual('A text.\n', f.get_content().read())
    self.assertEqual('A text.\n', f.get_processed_content().read())
+
def test_get_file_content_with_native_eol_style(self):
    """With svn:eol-style=native, `get_processed_content` translates EOLs
    following the repository's `eol_style` parameter (or the explicit
    `eol_hint`), while `get_content` always returns the raw LF data."""
    f = self.repos.get_node(u'/tête/README.txt', 3)
    props = f.get_properties()
    self.assertEqual('native', props.get('svn:eol-style'))

    self.repos.params['eol_style'] = 'native'
    self.assertEqual('A test.\n', f.get_content().read())
    self.assertEqual('A test.' + NATIVE_EOL,
                     f.get_processed_content().read())

    self.repos.params['eol_style'] = 'LF'
    self.assertEqual('A test.\n', f.get_content().read())
    self.assertEqual('A test.\n', f.get_processed_content().read())

    self.repos.params['eol_style'] = 'CRLF'
    self.assertEqual('A test.\n', f.get_content().read())
    self.assertEqual('A test.\r\n', f.get_processed_content().read())

    self.repos.params['eol_style'] = 'CR'
    self.assertEqual('A test.\n', f.get_content().read())
    self.assertEqual('A test.\r', f.get_processed_content().read())
    # check that the hint is stronger than the repos default
    self.assertEqual('A test.\r\n',
                     f.get_processed_content(eol_hint='CRLF').read())
+
def test_get_file_content_with_native_eol_style_and_no_keywords_28(self):
    """$Keyword$ placeholders stay untouched when svn:keywords is unset,
    while EOL translation still applies."""
    f = self.repos.get_node(u'/branches/v4/README.txt', 28)
    props = f.get_properties()
    self.assertEqual('native', props.get('svn:eol-style'))
    self.assertIsNone(props.get('svn:keywords'))

    self.assertEqual(
        'A test.\n' +
        '# $Rev$ is not substituted with no svn:keywords.\n',
        f.get_content().read())
    self.assertEqual(
        'A test.\r\n' +
        '# $Rev$ is not substituted with no svn:keywords.\r\n',
        f.get_processed_content(eol_hint='CRLF').read())
+
def test_get_file_content_with_keyword_substitution_23(self):
    """Keywords listed in svn:keywords are substituted; $Date$ is left
    alone since it is not in the property value (#717)."""
    f = self.repos.get_node(u'/tête/Résumé.txt', 23)
    props = f.get_properties()
    self.assertEqual('Revision Author URL', props['svn:keywords'])
    self.assertEqual('''\
# Simple test for svn:keywords property substitution (#717)
# $Rev: 23 $: Revision of last commit
# $Author: cboos $: Author of last commit
# $Date$: Date of last commit (not substituted)

Now with fixed width fields:
# $URL:: svn://test/tête/Résumé.txt $ the configured URL
# $HeadURL:: svn://test/tête/Résumé.txt $ same
# $URL:: svn://test/tê#$ same, but truncated

En r\xe9sum\xe9 ... \xe7a marche.
''', f.get_processed_content().read())
    # Note: "En résumé ... ça marche." in the content is really encoded in
    # latin1 in the file, and our substitutions are UTF-8 encoded...
    # This is expected.
+
def test_get_file_content_with_keyword_substitution_24(self):
    """Adding 'Id' to svn:keywords substitutes the combined
    "basename rev date author" value."""
    f = self.repos.get_node(u'/tête/Résumé.txt', 24)
    props = f.get_properties()
    self.assertEqual('Revision Author URL Id', props['svn:keywords'])
    self.assertEqual('''\
# Simple test for svn:keywords property substitution (#717)
# $Rev: 24 $: Revision of last commit
# $Author: cboos $: Author of last commit
# $Date$: Date of last commit (now substituted)
# $Id: Résumé.txt 24 2013-04-27 14:38:50Z cboos $: Combination

Now with fixed width fields:
# $URL:: svn://test/t\xc3\xaate/R\xc3\xa9sum\xc3\xa9.txt $ the configured URL
# $HeadURL:: svn://test/t\xc3\xaate/R\xc3\xa9sum\xc3\xa9.txt $ same
# $URL:: svn://test/t\xc3\xaa#$ same, but truncated
# $Header:: $ combination with URL

En r\xe9sum\xe9 ... \xe7a marche.
''', f.get_processed_content().read())
+
def test_get_file_content_with_keyword_substitution_25(self):
    """With Date and Header also enabled, those keywords are expanded
    too ($Header$ in its fixed-width, truncated form)."""
    f = self.repos.get_node(u'/tête/Résumé.txt', 25)
    props = f.get_properties()
    self.assertEqual('Revision Author URL Date Id Header',
                     props['svn:keywords'])
    self.assertEqual('''\
# Simple test for svn:keywords property substitution (#717)
# $Rev: 25 $: Revision of last commit
# $Author: cboos $: Author of last commit
# $Date: 2013-04-27 14:43:15 +0000 (Sat, 27 Apr 2013) $: Date of last commit (now really substituted)
# $Id: Résumé.txt 25 2013-04-27 14:43:15Z cboos $: Combination

Now with fixed width fields:
# $URL:: svn://test/tête/Résumé.txt $ the configured URL
# $HeadURL:: svn://test/tête/Résumé.txt $ same
# $URL:: svn://test/tê#$ same, but truncated
# $Header:: svn://test/t\xc3\xaate/R\xc3\xa9sum\xc3\xa9.txt 25 2013-04-#$ combination with URL

En r\xe9sum\xe9 ... \xe7a marche.
''', f.get_processed_content().read())
+
def test_get_file_content_with_keyword_substitution_27(self):
    """Keyword values reflect the file's last-changed revision (26, not
    the requested 27), and overlapping "$...$" sequences are expanded
    one at a time."""
    f = self.repos.get_node(u'/tête/Résumé.txt', 27)
    props = f.get_properties()
    self.assertEqual('Revision Author URL Date Id Header',
                     props['svn:keywords'])
    self.assertEqual('''\
# Simple test for svn:keywords property substitution (#717)
# $Rev: 26 $: Revision of last commit
# $Author: jomae $: Author of last commit
# $Date: 2013-04-28 05:36:06 +0000 (Sun, 28 Apr 2013) $: Date of last commit (now really substituted)
# $Id: Résumé.txt 26 2013-04-28 05:36:06Z jomae $: Combination

Now with fixed width fields:
# $URL:: svn://test/tête/Résumé.txt $ the configured URL
# $HeadURL:: svn://test/tête/Résumé.txt $ same
# $URL:: svn://test/tê#$ same, but truncated
# $Header:: svn://test/t\xc3\xaate/R\xc3\xa9sum\xc3\xa9.txt 26 2013-04-#$ combination with URL

Overlapped keywords:
# $Xxx$Rev: 26 $Xxx$
# $Rev: 26 $Xxx$Rev: 26 $
# $Rev: 26 $Rev$Rev: 26 $

En r\xe9sum\xe9 ... \xe7a marche.
''', f.get_processed_content().read())
+
def test_created_path_rev(self):
node = self.repos.get_node(u'/tête/README3.txt', 15)
self.assertEqual(15, node.rev)
@@ -230,6 +383,32 @@ class NormalTests(object):
self.assertEqual(3, node.created_rev)
self.assertEqual(u'tête/README.txt', node.created_path)
def test_get_annotations(self):
    """Blame succeeds against the canonicalized file URL."""
    # svn_client_blame2() requires a canonical uri since Subversion 1.7.
    # If the uri is not canonical, assertion raises (#11167).
    node = self.repos.get_node(u'/tête/R\xe9sum\xe9.txt', 25)
    self.assertEqual([23, 23, 23, 25, 24, 23, 23, 23, 23, 23, 24, 23, 20],
                     node.get_annotations())
+
def test_get_annotations_lower_drive_letter(self):
    """Blame succeeds for a repository registered with a lower-case
    drive letter (Windows only)."""
    # If the drive letter in the uri is lower case on Windows, a
    # SubversionException raises (#10514).
    drive, tail = os.path.splitdrive(REPOS_PATH)
    repos_path = drive.lower() + tail
    DbRepositoryProvider(self.env).add_repository('lowercase', repos_path,
                                                  'direct-svnfs')
    repos = self.env.get_repository('lowercase')
    node = repos.get_node(u'/tête/R\xe9sum\xe9.txt', 25)
    self.assertEqual([23, 23, 23, 25, 24, 23, 23, 23, 23, 23, 24, 23, 20],
                     node.get_annotations())

# drive letters only exist on Windows; drop the test elsewhere
if os.name != 'nt':
    del test_get_annotations_lower_drive_letter
+
def test_get_annotations_with_urlencoded_percent_sign(self):
    """Blame on a path containing a literal '%' ("READ%ME.txt",
    url-encoded as %25 in the node path)."""
    node = self.repos.get_node(u'/branches/t10386/READ%25ME.txt')
    self.assertEqual([14], node.get_annotations())
+
# Revision Log / node history
def test_get_node_history(self):
@@ -538,6 +717,122 @@ class NormalTests(object):
self.assertEqual(u'Chez moi ça marche\n', chgset.message)
self.assertEqual(u'Jonas Borgström', chgset.author)
def test_canonical_repos_path(self):
    """Repository paths are canonicalized: a leading '//' collapses to
    a single '/'."""
    # Assertion `svn_dirent_is_canonical` with leading double slashes
    # in repository path if os.name == 'posix' (#10390)
    DbRepositoryProvider(self.env).add_repository(
        'canonical-path', '//' + REPOS_PATH.lstrip('/'), 'direct-svnfs')
    repos = self.env.get_repository('canonical-path')
    self.assertEqual(REPOS_PATH, repos.path)

# the leading-double-slash scenario only arises on posix paths
if os.name != 'posix':
    del test_canonical_repos_path
+
def test_merge_prop_renderer_without_deleted_branches(self):
    """svn:mergeinfo rendering: one row per existing source branch with
    'merged' and 'eligible' revision links."""
    context = _create_context()
    context = context(self.repos.get_node('branches/v1x', HEAD).resource)
    renderer = svn_prop.SubversionMergePropertyRenderer(self.env)
    props = {'svn:mergeinfo': u"""\
/tête:1-20,23-26
/branches/v3:22
/branches/v2:16
"""}
    result = Stream(renderer.render_property('svn:mergeinfo', 'browser',
                                             context, props))

    node = unicode(result.select('//tr[1]//td[1]'))
    self.assertIn(' href="/browser/repo/branches/v2?rev=%d"' % HEAD, node)
    self.assertIn('>/branches/v2</a>', node)
    node = unicode(result.select('//tr[1]//td[2]'))
    self.assertIn(' title="16"', node)
    self.assertIn('>merged</a>', node)
    node = unicode(result.select('//tr[1]//td[3]'))
    self.assertIn(' title="No revisions"', node)
    self.assertIn('>eligible</span>', node)

    node = unicode(result.select('//tr[3]//td[1]'))
    self.assertIn(' href="/browser/repo/%s?rev=%d"' % ('t%C3%AAte', HEAD),
                  node)
    self.assertIn(u'>/tête</a>', node)
    node = unicode(result.select('//tr[3]//td[2]'))
    self.assertIn(' title="1-20, 23-26"', node)
    self.assertIn(' href="/log/repo/t%C3%AAte?revs=1-20%2C23-26"', node)
    self.assertIn('>merged</a>', node)
    node = unicode(result.select('//tr[3]//td[3]'))
    self.assertIn(' title="21"', node)
    self.assertIn(' href="/changeset/21/repo/t%C3%AAte"', node)
    self.assertIn('>eligible</a>', node)

    # no source branch was deleted, so no toggle link
    self.assertNotIn('(toggle deleted branches)', unicode(result))
+
def test_merge_prop_renderer_with_deleted_branches(self):
    """svn:mergeinfo rendering: deleted merge sources are collapsed
    into a toggleable row without merged/eligible links."""
    context = _create_context()
    context = context(self.repos.get_node('branches/v1x', HEAD).resource)
    renderer = svn_prop.SubversionMergePropertyRenderer(self.env)
    props = {'svn:mergeinfo': u"""\
/tête:19
/branches/v3:22
/branches/deleted:1,3-5,22
"""}
    result = Stream(renderer.render_property('svn:mergeinfo', 'browser',
                                             context, props))

    node = unicode(result.select('//tr[1]//td[1]'))
    self.assertIn(' href="/browser/repo/branches/v3?rev=%d"' % HEAD, node)
    self.assertIn('>/branches/v3</a>', node)
    node = unicode(result.select('//tr[1]//td[2]'))
    self.assertIn(' title="22"', node)
    self.assertIn('>merged</a>', node)
    node = unicode(result.select('//tr[1]//td[3]'))
    self.assertIn(' title="No revisions"', node)
    self.assertIn('>eligible</span>', node)

    node = unicode(result.select('//tr[2]//td[1]'))
    self.assertIn(' href="/browser/repo/%s?rev=%d"' % ('t%C3%AAte', HEAD),
                  node)
    self.assertIn(u'>/tête</a>', node)
    node = unicode(result.select('//tr[2]//td[2]'))
    self.assertIn(' title="19"', node)
    self.assertIn(' href="/changeset/19/repo/t%C3%AAte"', node)
    self.assertIn('>merged</a>', node)
    node = unicode(result.select('//tr[2]//td[3]'))
    self.assertIn(' title="13-14, 17-18, 20-21, 23-26"', node)
    self.assertIn(' href="/log/repo/t%C3%AAte?revs='
                  '13-14%2C17-18%2C20-21%2C23-26"', node)
    self.assertIn('>eligible</a>', node)

    self.assertIn('(toggle deleted branches)', unicode(result))
    self.assertIn('<td>/branches/deleted</td>',
                  unicode(result.select('//tr[3]//td[1]')))
    self.assertIn(u'<td colspan="2">1,\u200b3-5,\u200b22</td>',
                  unicode(result.select('//tr[3]//td[2]')))
+
def test_merge_prop_diff_renderer_added(self):
    """svn:mergeinfo diff rendering shows only the newly merged
    revision (16) of the changed source branch."""
    context = _create_context()
    old_context = context(self.repos.get_node(u'tête', 20).resource)
    old_props = {'svn:mergeinfo': u"""\
/branches/v2:1,8-9,12-15
/branches/v1x:12
/branches/deleted:1,3-5,22
"""}
    new_context = context(self.repos.get_node(u'tête', 21).resource)
    new_props = {'svn:mergeinfo': u"""\
/branches/v2:1,8-9,12-16
/branches/v1x:12
/branches/deleted:1,3-5,22
"""}
    options = {}
    renderer = svn_prop.SubversionMergePropertyDiffRenderer(self.env)
    result = Stream(renderer.render_property_diff(
        'svn:mergeinfo', old_context, old_props, new_context,
        new_props, options))

    node = unicode(result.select('//tr[1]//td[1]'))
    self.assertIn(' href="/browser/repo/branches/v2?rev=21"', node)
    self.assertIn('>/branches/v2</a>', node)
    node = unicode(result.select('//tr[1]//td[2]'))
    self.assertIn(' title="16"', node)
    self.assertIn(' href="/changeset/16/repo/branches/v2"', node)
class ScopedTests(object):
@@ -561,17 +856,17 @@ class ScopedTests(object):
def test_rev_navigation(self):
self.assertEqual(1, self.repos.oldest_rev)
- self.assertEqual(None, self.repos.previous_rev(0))
+ self.assertIsNone(self.repos.previous_rev(0))
self.assertEqual(1, self.repos.previous_rev(2))
self.assertEqual(TETE, self.repos.youngest_rev)
self.assertEqual(2, self.repos.next_rev(1))
self.assertEqual(3, self.repos.next_rev(2))
# ...
- self.assertEqual(None, self.repos.next_rev(TETE))
+ self.assertIsNone(self.repos.next_rev(TETE))
def test_has_node(self):
- self.assertEqual(False, self.repos.has_node('/dir1', 3))
- self.assertEqual(True, self.repos.has_node('/dir1', 4))
+ self.assertFalse(self.repos.has_node('/dir1', 3))
+ self.assertTrue(self.repos.has_node('/dir1', 4))
def test_get_node(self):
node = self.repos.get_node('/dir1')
@@ -625,9 +920,9 @@ class ScopedTests(object):
def test_get_dir_content(self):
node = self.repos.get_node('/dir1')
- self.assertEqual(None, node.content_length)
- self.assertEqual(None, node.content_type)
- self.assertEqual(None, node.get_content())
+ self.assertIsNone(node.content_length)
+ self.assertIsNone(node.content_type)
+ self.assertIsNone(node.get_content())
def test_get_file_content(self):
node = self.repos.get_node('/README.txt')
@@ -808,14 +1103,14 @@ class ScopedTests(object):
class RecentPathScopedTests(object):
def test_rev_navigation(self):
- self.assertEqual(False, self.repos.has_node('/', 1))
- self.assertEqual(False, self.repos.has_node('/', 2))
- self.assertEqual(False, self.repos.has_node('/', 3))
- self.assertEqual(True, self.repos.has_node('/', 4))
+ self.assertFalse(self.repos.has_node('/', 1))
+ self.assertFalse(self.repos.has_node('/', 2))
+ self.assertFalse(self.repos.has_node('/', 3))
+ self.assertTrue(self.repos.has_node('/', 4))
# We can't make this work anymore because of #5213.
# self.assertEqual(4, self.repos.oldest_rev)
self.assertEqual(1, self.repos.oldest_rev) # should really be 4...
- self.assertEqual(None, self.repos.previous_rev(4))
+ self.assertIsNone(self.repos.previous_rev(4))
class NonSelfContainedScopedTests(object):
@@ -847,11 +1142,17 @@ class SubversionRepositoryTestCase(unitt
def setUp(self):
self.env = EnvironmentStub()
repositories = self.env.config['repositories']
- DbRepositoryProvider(self.env).add_repository(REPOS_NAME, self.path,
- 'direct-svnfs')
+ dbprovider = DbRepositoryProvider(self.env)
+ dbprovider.add_repository(REPOS_NAME, self.path, 'direct-svnfs')
+ dbprovider.modify_repository(REPOS_NAME, {'url': URL})
self.repos = self.env.get_repository(REPOS_NAME)
+
def tearDown(self):
+ self.repos.close()
+ self.repos = None
+ # clear cached repositories to avoid TypeError on termination (#11505)
+ RepositoryManager(self.env).reload_repositories()
self.env.reset_db()
# needed to avoid issue with 'WindowsError: The process cannot access
# the file ... being used by another process: ...\rep-cache.db'
@@ -864,8 +1165,9 @@ class SvnCachedRepositoryTestCase(unitte
def setUp(self):
self.env = EnvironmentStub()
- DbRepositoryProvider(self.env).add_repository(REPOS_NAME, self.path,
- 'svn')
+ dbprovider = DbRepositoryProvider(self.env)
+ dbprovider.add_repository(REPOS_NAME, self.path, 'svn')
+ dbprovider.modify_repository(REPOS_NAME, {'url': URL})
self.repos = self.env.get_repository(REPOS_NAME)
self.repos.sync()
@@ -873,11 +1175,16 @@ class SvnCachedRepositoryTestCase(unitte
self.env.reset_db()
self.repos.close()
self.repos = None
+ # clear cached repositories to avoid TypeError on termination (#11505)
+ RepositoryManager(self.env).reload_repositories()
def suite():
+ global REPOS_PATH
suite = unittest.TestSuite()
if has_svn:
+ REPOS_PATH = tempfile.mkdtemp(prefix='trac-svnrepos-')
+ os.rmdir(REPOS_PATH)
tests = [(NormalTests, ''),
(ScopedTests, u'/tête'),
(RecentPathScopedTests, u'/tête/dir1'),
@@ -898,19 +1205,18 @@ def suite():
(SubversionRepositoryTestCase, test),
{'path': REPOS_PATH + scope})
suite.addTest(unittest.makeSuite(
- tc, 'test', suiteClass=SubversionRepositoryTestSetup))
+ tc, suiteClass=SubversionRepositoryTestSetup))
tc = new.classobj('SvnCachedRepository' + test.__name__,
(SvnCachedRepositoryTestCase, test),
{'path': REPOS_PATH + scope})
for skip in skipped.get(tc.__name__, []):
setattr(tc, skip, lambda self: None) # no skip, so we cheat...
suite.addTest(unittest.makeSuite(
- tc, 'test', suiteClass=SubversionRepositoryTestSetup))
+ tc, suiteClass=SubversionRepositoryTestSetup))
else:
print("SKIP: tracopt/versioncontrol/svn/tests/svn_fs.py (no svn "
"bindings)")
return suite
if __name__ == '__main__':
- runner = unittest.TextTestRunner()
- runner.run(suite())
+ unittest.main(defaultTest='suite')