You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@subversion.apache.org by ar...@apache.org on 2012/07/30 08:39:38 UTC
svn commit: r1367002 [19/21] - in /subversion/branches/svn-bisect: ./ build/
build/ac-macros/ build/generator/ build/generator/templates/
contrib/client-side/emacs/ contrib/server-side/mod_dontdothat/ notes/
notes/api-errata/1.7/ notes/http-and-webdav/...
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/merge_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/merge_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/merge_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/merge_tests.py Mon Jul 30 06:39:28 2012
@@ -861,7 +861,8 @@ def merge_similar_unrelated_trees(sbox):
#----------------------------------------------------------------------
def merge_one_file_helper(sbox, arg_flav, record_only = 0):
- "ARG_FLAV is one of 'r' (revision range) or 'c' (single change)."
+ """ARG_FLAV is one of 'r' (revision range) or 'c' (single change) or
+ '*' (no revision specified)."""
if arg_flav not in ('r', 'c', '*'):
raise svntest.Failure("Unrecognized flavor of merge argument")
@@ -998,9 +999,13 @@ def merge_record_only(sbox):
merge_one_file_helper(sbox, 'r', 1)
#----------------------------------------------------------------------
-# This is a regression for the enhancement added in issue #785.
+# This is a regression test for the enhancement added in issue #785 "add
+# friendly enhancement to 'svn merge'", which is about inferring that
+# the default target of "svn merge [-r...] FILE" should not be "." but
+# rather should be "FILE".
def merge_with_implicit_target_helper(sbox, arg_flav):
- "ARG_FLAV is one of 'r' (revision range) or 'c' (single change)."
+ """ARG_FLAV is one of 'r' (revision range) or 'c' (single change) or
+ '*' (no revision specified)."""
if arg_flav not in ('r', 'c', '*'):
raise svntest.Failure("Unrecognized flavor of merge argument")
@@ -5599,7 +5604,7 @@ def merge_to_switched_path(sbox):
# 3188: Mergeinfo on switched targets/subtrees should
# elide to repos
@SkipUnless(server_has_mergeinfo)
-@Issue(2823,2839,3187,3188)
+@Issue(2823,2839,3187,3188,4056)
def merge_to_path_with_switched_children(sbox):
"merge to path with switched children"
@@ -5694,18 +5699,18 @@ def merge_to_path_with_switched_children
'omega' : Item(status=' U')
})
expected_elision_output = wc.State(A_COPY_H_path, {
+ 'omega' : Item(status=' U')
})
expected_status = wc.State(A_COPY_H_path, {
'' : Item(status=' M', wc_rev=8),
'psi' : Item(status=' ', wc_rev=8, switched='S'),
- 'omega' : Item(status='MM', wc_rev=8),
+ 'omega' : Item(status='M ', wc_rev=8),
'chi' : Item(status=' ', wc_rev=8),
})
expected_disk = wc.State('', {
- '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8*'}),
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8'}),
'psi' : Item("This is the file 'psi'.\n"),
- 'omega' : Item("New content",
- props={SVN_PROP_MERGEINFO : '/A/D/H/omega:8'}),
+ 'omega' : Item("New content"),
'chi' : Item("This is the file 'chi'.\n"),
})
expected_skip = wc.State(A_COPY_H_path, { })
@@ -5739,7 +5744,7 @@ def merge_to_path_with_switched_children
'' : Item(status=' M', wc_rev=8),
'H' : Item(status=' M', wc_rev=8),
'H/chi' : Item(status=' ', wc_rev=8),
- 'H/omega' : Item(status='MM', wc_rev=8),
+ 'H/omega' : Item(status='M ', wc_rev=8),
'H/psi' : Item(status=' ', wc_rev=8, switched='S'),
'G' : Item(status=' M', wc_rev=8, switched='S'),
'G/pi' : Item(status=' ', wc_rev=8),
@@ -5749,10 +5754,9 @@ def merge_to_path_with_switched_children
})
expected_disk_D = wc.State('', {
'' : Item(props={SVN_PROP_MERGEINFO : '/A/D:6*'}),
- 'H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8*'}),
+ 'H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8'}),
'H/chi' : Item("This is the file 'chi'.\n"),
- 'H/omega' : Item("New content",
- props={SVN_PROP_MERGEINFO : '/A/D/H/omega:8'}),
+ 'H/omega' : Item("New content"),
'H/psi' : Item("This is the file 'psi'.\n",),
'G' : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:6*'}),
'G/pi' : Item("This is the file 'pi'.\n"),
@@ -5786,10 +5790,10 @@ def merge_to_path_with_switched_children
})
expected_elision_output = wc.State(A_COPY_D_path, {
})
- expected_disk_D.tweak('', props={SVN_PROP_MERGEINFO : '/A/D:5-6*'})
- expected_disk_D.tweak('H', props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8*'})
+ expected_disk_D.tweak('', props={SVN_PROP_MERGEINFO : '/A/D:5,6*'})
+ expected_disk_D.tweak('H', props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8'})
expected_disk_D.tweak('H/psi', contents="New content",
- props={SVN_PROP_MERGEINFO :'/A/D/H/psi:5'})
+ props={SVN_PROP_MERGEINFO :'/A/D/H/psi:5,8'})
expected_status_D.tweak('H/psi', status='MM')
svntest.actions.run_and_verify_merge(A_COPY_D_path, '4', '5',
sbox.repo_url + '/A/D', None,
@@ -5830,7 +5834,7 @@ def merge_to_path_with_switched_children
'D/H' : Item(status=' M', wc_rev=8),
'D/H/chi' : Item(status=' ', wc_rev=8),
'D/H/psi' : Item(status='MM', wc_rev=8, switched='S'),
- 'D/H/omega' : Item(status='MM', wc_rev=8),
+ 'D/H/omega' : Item(status='M ', wc_rev=8),
})
expected_disk = wc.State('', {
'' : Item(props={SVN_PROP_MERGEINFO : '/A:5-8'}),
@@ -5842,19 +5846,18 @@ def merge_to_path_with_switched_children
'B/lambda' : Item("This is the file 'lambda'.\n"),
'B/F' : Item(),
'C' : Item(),
- 'D' : Item(props={SVN_PROP_MERGEINFO : '/A/D:5-6*'}),
+ 'D' : Item(props={SVN_PROP_MERGEINFO : '/A/D:5,6*'}),
'D/G' : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:6*'}),
'D/G/pi' : Item("This is the file 'pi'.\n"),
'D/G/rho' : Item("New content",
props={SVN_PROP_MERGEINFO : '/A/D/G/rho:6'}),
'D/G/tau' : Item("This is the file 'tau'.\n"),
'D/gamma' : Item("This is the file 'gamma'.\n"),
- 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8*'}),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8'}),
'D/H/chi' : Item("This is the file 'chi'.\n"),
'D/H/psi' : Item("New content",
- props={SVN_PROP_MERGEINFO : '/A/D/H/psi:5'}),
- 'D/H/omega' : Item("New content",
- props={SVN_PROP_MERGEINFO : '/A/D/H/omega:8'}),
+ props={SVN_PROP_MERGEINFO : '/A/D/H/psi:5,8'}),
+ 'D/H/omega' : Item("New content"),
})
expected_skip = wc.State(A_COPY_path, { })
svntest.actions.run_and_verify_merge(A_COPY_path, '4', '8',
@@ -5865,7 +5868,6 @@ def merge_to_path_with_switched_children
expected_disk,
expected_status, expected_skip,
None, None, None, None, None, 1)
-
# Commit changes thus far.
expected_output = svntest.wc.State(wc_dir, {
'A_COPY' : Item(verb='Sending'),
@@ -5892,17 +5894,16 @@ def merge_to_path_with_switched_children
wc_disk.tweak("A_COPY/B/E/beta",
contents="New content")
wc_disk.tweak("A_COPY/D",
- props={SVN_PROP_MERGEINFO : '/A/D:5-6*'})
+ props={SVN_PROP_MERGEINFO : '/A/D:5,6*'})
wc_disk.tweak("A_COPY/D/G",
props={SVN_PROP_MERGEINFO : '/A/D/G:6*'})
wc_disk.tweak("A_COPY/D/G/rho",
contents="New content",
props={SVN_PROP_MERGEINFO : '/A/D/G/rho:6'})
wc_disk.tweak("A_COPY/D/H",
- props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8*'})
+ props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8'})
wc_disk.tweak("A_COPY/D/H/omega",
- contents="New content",
- props={SVN_PROP_MERGEINFO : '/A/D/H/omega:8'})
+ contents="New content")
wc_disk.tweak("A_COPY_2", props={})
svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_psi_path,
sbox.repo_url + "/A_COPY/D/H/psi",
@@ -5941,8 +5942,7 @@ def merge_to_path_with_switched_children
expected_disk = wc.State('', {
'' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5-8'}),
'psi' : Item("New content"),
- 'omega' : Item("New content",
- props={SVN_PROP_MERGEINFO : '/A/D/H/omega:8'}),
+ 'omega' : Item("New content"),
'chi' : Item("This is the file 'chi'.\n"),
})
expected_skip = wc.State(A_COPY_H_path, { })
@@ -5992,14 +5992,11 @@ def merge_to_path_with_switched_children
expected_status_D.tweak('H/psi', wc_rev=10, switched=None)
expected_status_D.tweak('H/omega', wc_rev=9)
expected_status_D.tweak('G', 'G/rho', switched='S', wc_rev=9)
- expected_disk_D.tweak('', props={SVN_PROP_MERGEINFO : '/A/D:5-6*,10*',
+ expected_disk_D.tweak('', props={SVN_PROP_MERGEINFO : '/A/D:5,6*,10',
"prop:name" : "propval"})
expected_disk_D.tweak('G/rho',
props={SVN_PROP_MERGEINFO : '/A/D/G/rho:6'})
expected_disk_D.tweak('H', props={SVN_PROP_MERGEINFO : '/A/D/H:5-8'})
-
- expected_disk_D.tweak('H/omega',
- props={SVN_PROP_MERGEINFO : '/A/D/H/omega:8'})
expected_disk_D.tweak('H/psi', contents="New content", props={})
svntest.actions.run_and_verify_merge(A_COPY_D_path, '9', '10',
sbox.repo_url + '/A/D', None,
@@ -6060,7 +6057,6 @@ def merge_to_path_with_switched_children
'D/G' : Item(status=' U'),
'D/G/rho' : Item(status=' U'),
'D/H' : Item(status=' U'),
- 'D/H/omega' : Item(status=' U'),
})
expected_elision_output = wc.State(A_COPY_path, {
'' : Item(status=' U'),
@@ -6068,7 +6064,6 @@ def merge_to_path_with_switched_children
'D/G' : Item(status=' U'),
'D/G/rho' : Item(status=' U'),
'D/H' : Item(status=' U'),
- 'D/H/omega' : Item(status=' U'),
})
expected_status = wc.State(A_COPY_path, {
'' : Item(status=' M', wc_rev=10),
@@ -6089,7 +6084,7 @@ def merge_to_path_with_switched_children
'D/H' : Item(status=' M', wc_rev=10),
'D/H/chi' : Item(status=' ', wc_rev=10),
'D/H/psi' : Item(status='M ', wc_rev=10),
- 'D/H/omega' : Item(status='MM', wc_rev=10),
+ 'D/H/omega' : Item(status='M ', wc_rev=10),
})
expected_disk = wc.State('', {
'B' : Item(),
@@ -7286,7 +7281,7 @@ def merge_with_depth_files(sbox):
#
# Test issue #3407 'Shallow merges incorrectly set mergeinfo on children'.
@SkipUnless(server_has_mergeinfo)
-@Issues(2976,3392,3407)
+@Issues(2976,3392,3407,4057)
def merge_away_subtrees_noninheritable_ranges(sbox):
"subtrees can lose non-inheritable ranges"
@@ -7605,8 +7600,9 @@ def merge_away_subtrees_noninheritable_r
svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir)
svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir)
- # Merge r8 from A/D/H to A_COPY_D/H at depth empty, creating non-inheritable
- # mergeinfo on the target. Commit this merge as r13.
+ # Merge r8 from A/D/H to A_COPY_D/H at depth empty. Since r8 affects only
+ # A_COPY/D/H itself, the resulting mergeinfo is inheritable. Commit this
+ # merge as r13.
expected_output = wc.State(H_COPY_2_path, {
'' : Item(status=' U'),
})
@@ -7622,7 +7618,7 @@ def merge_away_subtrees_noninheritable_r
'chi' : Item(status=' ', wc_rev=12),
})
expected_disk = wc.State('', {
- '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8*',
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8',
"prop:name" : "propval"}),
'psi' : Item("This is the file 'psi'.\n"),
'omega' : Item("This is the file 'omega'.\n"),
@@ -13453,6 +13449,12 @@ def natural_history_filtering(sbox):
#
# To set up a situation where this can occur we'll do the following:
#
+ # trunk -1-----3-4-5-6-------8----------- A
+ # \ \ \
+ # branch1 2-----------\-------9-------- A_COPY
+ # \ \
+ # branch2 7--------10---- A_COPY_2
+ #
# 1) Create a 'trunk'.
#
# 2) Copy 'trunk' to 'branch1'.
@@ -16855,7 +16857,7 @@ def merge_adds_subtree_with_mergeinfo(sb
#----------------------------------------------------------------------
# A test for issue #3978 'reverse merge which adds subtree fails'.
-@Issue(3978)
+@Issue(3978,4057)
@SkipUnless(server_has_mergeinfo)
def reverse_merge_adds_subtree(sbox):
"reverse merge adds subtree"
@@ -16882,6 +16884,9 @@ def reverse_merge_adds_subtree(sbox):
'Cherry-pick r7 from A to A_COPY', wc_dir)
# r9 - File depth sync merge from A/D/H to A_COPY/D/H/
+ # This shallow merge does not create non-inheritable mergeinfo because of
+ # the issue #4057 fix; all subtrees affected by the diff are present, so
+ # non-inheritable mergeinfo is not required.
svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir)
svntest.actions.run_and_verify_svn(None, None, [], 'merge',
sbox.repo_url + '/A/D/H',
@@ -16919,7 +16924,6 @@ def reverse_merge_adds_subtree(sbox):
# ..\..\..\subversion\libsvn_subr\kitchensink.c:57: (apr_err=200022)
# svn: E200022: Negative revision number found parsing '-7'
svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir)
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir)
expected_output = wc.State(A_COPY_path, {
'D/H/chi' : Item(status='A '),
})
@@ -16968,12 +16972,10 @@ def reverse_merge_adds_subtree(sbox):
'D/G/rho' : Item("This is the file 'rho'.\n"),
'D/G/tau' : Item("This is the file 'tau'.\n"),
'D/gamma' : Item("This is the file 'gamma'.\n"),
- 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:2-6*,8*'}),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:2-6,8'}),
'D/H/chi' : Item("This is the file 'chi'.\n"),
- 'D/H/psi' : Item("New content",
- props={SVN_PROP_MERGEINFO : '/A/D/H/psi:2-8'}),
- 'D/H/omega' : Item("New content",
- props={SVN_PROP_MERGEINFO : '/A/D/H/omega:2-8'}),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
})
expected_skip = wc.State('.', { })
svntest.actions.run_and_verify_merge(A_COPY_path, 7, 6,
@@ -17173,8 +17175,10 @@ def noninheritable_mergeinfo_test_set_up
})
expected_mergeinfo_output = wc.State(B_branch_path, {
'' : Item(status=' U'),
+ 'lambda' : Item(status=' U'),
})
expected_elision_output = wc.State(B_branch_path, {
+ 'lambda' : Item(status=' U'),
})
expected_status = wc.State(B_branch_path, {
'' : Item(status=' M'),
@@ -17203,7 +17207,6 @@ def noninheritable_mergeinfo_test_set_up
# Test for issue #4056 "don't record non-inheritable mergeinfo if missing
# subtrees are not touched by the full-depth diff".
@Issue(4056)
-@XFail()
@SkipUnless(server_has_mergeinfo)
def unnecessary_noninheritable_mergeinfo_missing_subtrees(sbox):
"missing subtrees untouched by infinite depth merge"
@@ -17222,19 +17225,16 @@ def unnecessary_noninheritable_mergeinfo
# Merge r3 from ^/A/B to branch/B
#
- # Currently this fails because merge isn't smart enough to
- # realize that despite the shallow merge target, the diff can
- # only affect branch/B/lambda, which is still present, so there
+ # Merge is smart enough to realize that despite the shallow merge target,
+ # the diff can only affect branch/B/lambda, which is still present, so there
# is no need to record non-inheritable mergeinfo on the target
# or any subtree mergeinfo whatsoever:
#
# >svn pg svn:mergeinfo -vR
# Properties on 'branch\B':
# svn:mergeinfo
- # /A/B:3* <-- Should be inheritable
- # Properties on 'branch\B\lambda':
- # svn:mergeinfo
- # /A/B/lambda:3 <-- Not neccessary
+ # /A/B:3 <-- Nothing was skipped, so doesn't need
+ # to be non-inheritable.
svntest.actions.run_and_verify_merge(B_branch_path,
'2', '3',
sbox.repo_url + '/A/B', None,
@@ -17251,12 +17251,12 @@ def unnecessary_noninheritable_mergeinfo
# Test for issue #4057 "don't record non-inheritable mergeinfo in shallow
# merge if entire diff is within requested depth".
@Issue(4057)
-@XFail()
@SkipUnless(server_has_mergeinfo)
def unnecessary_noninheritable_mergeinfo_shallow_merge(sbox):
- "shallow merge reaches all neccessary subtrees"
+ "shallow merge reaches all necessary subtrees"
B_branch_path = os.path.join(sbox.wc_dir, 'branch', 'B')
+ E_path = os.path.join(sbox.wc_dir, 'A', 'B', 'E')
# Setup a simple branch to which
expected_output, expected_mergeinfo_output, expected_elision_output, \
@@ -17265,7 +17265,7 @@ def unnecessary_noninheritable_mergeinfo
# Merge r3 from ^/A/B to branch/B at operational depth=files
#
- # Currently this fails because merge isn't smart enough to
+ # Previously this failed because merge wasn't smart enough to
# realize that despite being a shallow merge, the diff can
# only affect branch/B/lambda, which is within the specified
# depth, so there is no need to record non-inheritable mergeinfo
@@ -17277,7 +17277,7 @@ def unnecessary_noninheritable_mergeinfo
# /A/B:3* <-- Should be inheritable
# Properties on 'branch\B\lambda':
# svn:mergeinfo
- # /A/B/lambda:3 <-- Not neccessary
+ # /A/B/lambda:3 <-- Not necessary
expected_skip = wc.State(B_branch_path, {})
svntest.actions.run_and_verify_merge(B_branch_path, '2', '3',
sbox.repo_url + '/A/B', None,
@@ -17290,6 +17290,71 @@ def unnecessary_noninheritable_mergeinfo
None, None, None, None, None, 1, 1,
'--depth', 'files', B_branch_path)
+ # Revert the merge and then make a prop change to A/B/E in r4.
+ svntest.actions.run_and_verify_svn(None, None, [],
+ 'revert', '--recursive', sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None,
+ ["property 'prop:name' set on '" +
+ E_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', E_path)
+ svntest.actions.run_and_verify_svn(None, None, [],
+ 'ci', '-m', 'A new property on a dir',
+ sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, None, [],
+ 'up', sbox.wc_dir)
+
+ # Merge r4 from ^/A/B to branch/B at operational depth=immediates
+ #
+ # Previously this failed because the mergetracking logic didn't realize
+ # that despite being a shallow merge, the diff only affected branch/B/E,
+ # which was within the specified depth, so there was no need to record
+ # non-inheritable mergeinfo or subtree mergeinfo:
+ #
+ # >svn pg svn:mergeinfo -vR
+ # Properties on 'branch\B':
+ # svn:mergeinfo
+ # /A/B:4* <-- Should be inheritable
+ # Properties on 'branch\B\E':
+ # svn:mergeinfo
+ # /A/B/E:4 <-- Not necessary
+ expected_output = wc.State(B_branch_path, {
+ 'E' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(B_branch_path, {
+ '' : Item(status=' U'),
+ 'E' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(B_branch_path, {
+ 'E' : Item(status=' U'),
+ })
+ expected_status = wc.State(B_branch_path, {
+ '' : Item(status=' M'),
+ 'lambda' : Item(status=' '),
+ 'E' : Item(status=' M'),
+ 'E/alpha' : Item(status=' '),
+ 'E/beta' : Item(status=' '),
+ 'F' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev='4')
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:4'}),
+ 'lambda' : Item("This is the file 'lambda'.\n"),
+ 'E' : Item(props={'prop:name' : 'propval'}),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ })
+ svntest.actions.run_and_verify_merge(B_branch_path, '3', '4',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, None, None, None, None, 1, 1,
+ '--depth', 'immediates', B_branch_path)
+
########################################################################
# Run the tests
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/mergeinfo_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/mergeinfo_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/mergeinfo_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/mergeinfo_tests.py Mon Jul 30 06:39:28 2012
@@ -68,8 +68,11 @@ def no_mergeinfo(sbox):
"'mergeinfo' on a URL that lacks mergeinfo"
sbox.build(create_wc=False)
+ sbox.simple_repo_copy('A', 'A2')
svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
- [], sbox.repo_url, sbox.repo_url)
+ [],
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/A2')
def mergeinfo(sbox):
"'mergeinfo' on a path with mergeinfo"
@@ -77,41 +80,65 @@ def mergeinfo(sbox):
sbox.build()
wc_dir = sbox.wc_dir
+ # make a branch 'A2'
+ sbox.simple_repo_copy('A', 'A2') # r2
+ # make a change in branch 'A'
+ sbox.simple_mkdir('A/newdir')
+ sbox.simple_commit() # r3
+ sbox.simple_update()
+
# Dummy up some mergeinfo.
- svntest.actions.run_and_verify_svn(None, None, [], 'ps', SVN_PROP_MERGEINFO,
- '/:1', wc_dir)
+ svntest.actions.run_and_verify_svn(None, None, [],
+ 'ps', SVN_PROP_MERGEINFO, '/A:3',
+ sbox.ospath('A2'))
svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
- ['1'], sbox.repo_url, wc_dir)
+ ['3'],
+ sbox.repo_url + '/A',
+ sbox.ospath('A2'))
@SkipUnless(server_has_mergeinfo)
def explicit_mergeinfo_source(sbox):
"'mergeinfo' with source selection"
+ # The idea is the target has mergeinfo pertaining to two or more different
+ # source branches and we're asking about just one of them.
+
sbox.build()
- wc_dir = sbox.wc_dir
- H_path = os.path.join(wc_dir, 'A', 'D', 'H')
- H2_path = os.path.join(wc_dir, 'A', 'D', 'H2')
- B_url = sbox.repo_url + '/A/B'
- B_path = os.path.join(wc_dir, 'A', 'B')
- G_url = sbox.repo_url + '/A/D/G'
- G_path = os.path.join(wc_dir, 'A', 'D', 'G')
- H2_url = sbox.repo_url + '/A/D/H2'
- # Make a copy, and dummy up some mergeinfo.
- mergeinfo = '/A/B:1\n/A/D/G:1\n'
- svntest.actions.set_prop(SVN_PROP_MERGEINFO, mergeinfo, H_path)
- svntest.main.run_svn(None, "cp", H_path, H2_path)
- svntest.main.run_svn(None, "ci", "-m", "r2", wc_dir)
+ def url(relpath):
+ return sbox.repo_url + '/' + relpath
+ def path(relpath):
+ return sbox.ospath(relpath)
+
+ B = 'A/B'
+
+ # make some branches
+ B2 = 'A/B2'
+ B3 = 'A/B3'
+ sbox.simple_repo_copy(B, B2) # r2
+ sbox.simple_repo_copy(B, B3) # r3
+ sbox.simple_update()
+
+ # make changes in the branches
+ sbox.simple_mkdir('A/B2/newdir')
+ sbox.simple_commit() # r4
+ sbox.simple_mkdir('A/B3/newdir')
+ sbox.simple_commit() # r5
+
+ # Put dummy mergeinfo on branch root
+ mergeinfo = '/A/B2:2-5\n/A/B3:2-5\n'
+ sbox.simple_propset(SVN_PROP_MERGEINFO, mergeinfo, B)
+ sbox.simple_commit()
# Check using each of our recorded merge sources (as paths and URLs).
svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
- ['1'], B_url, H_path)
+ ['2', '4'], url(B2), path(B))
svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
- ['1'], B_path, H_path)
+ ['2', '4'], path(B2), path(B))
svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
- ['1'], G_url, H_path)
+ ['3', '5'], url(B3), path(B))
svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
- ['1'], G_path, H_path)
+ ['3', '5'], path(B3), path(B))
@SkipUnless(server_has_mergeinfo)
def mergeinfo_non_source(sbox):
@@ -683,7 +710,6 @@ def natural_history_is_not_eligible_nor_
# A test for issue 4050 "'svn mergeinfo' always considers non-inheritable
# ranges as partially merged".
@Issue(4050)
-@XFail()
@SkipUnless(server_has_mergeinfo)
def noninheritabled_mergeinfo_not_always_eligible(sbox):
"noninheritabled mergeinfo not always eligible"
@@ -693,11 +719,11 @@ def noninheritabled_mergeinfo_not_always
A_path = os.path.join(wc_dir, 'A')
branch_path = os.path.join(wc_dir, 'branch')
-
+
# r2 - Branch ^/A to ^/branch.
svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A',
sbox.repo_url + '/branch', '-m', 'make a branch')
-
+
# r3 - Make prop edit to A.
svntest.main.run_svn(None, 'ps', 'prop', 'val', A_path)
svntest.main.run_svn(None, 'commit', '-m', 'file edit', wc_dir)
@@ -707,13 +733,16 @@ def noninheritabled_mergeinfo_not_always
svntest.actions.run_and_verify_svn(None, None, [], 'merge',
sbox.repo_url + '/A', branch_path,
'-c3', '--depth=empty')
+ # Forcibly set non-inheritable mergeinfo to replicate the pre-1.8 behavior,
+ # where prior to the fix for issue #4057, non-inheritable mergeinfo was
+ # unconditionally set for merges with shallow operational depths.
+ svntest.actions.run_and_verify_svn(None, None, [],
+ 'propset', SVN_PROP_MERGEINFO,
+ '/A:3*\n', branch_path)
svntest.main.run_svn(None, 'commit', '-m', 'shallow merge', wc_dir)
# Now check that r3 is reported as fully merged from ^/A to ^/branch
# and does not show up all when asking for eligible revs.
- #
- # Currently this fails because r3 shows up as partially merged, even
- # though it is fully merged to ^/branch.
svntest.actions.run_and_verify_mergeinfo(
adjust_error_for_server_version(''),
['3'], sbox.repo_url + '/A', sbox.repo_url + '/branch',
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/patch_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/patch_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/patch_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/patch_tests.py Mon Jul 30 06:39:28 2012
@@ -230,7 +230,7 @@ def patch(sbox):
def patch_absolute_paths(sbox):
"patch containing absolute paths"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -932,7 +932,7 @@ def patch_no_index_line(sbox):
def patch_add_new_dir(sbox):
"patch with missing dirs"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -1032,7 +1032,7 @@ def patch_add_new_dir(sbox):
def patch_remove_empty_dirs(sbox):
"patch deleting all children of a directory"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -1581,7 +1581,7 @@ def patch_reverse(sbox):
def patch_no_svn_eol_style(sbox):
"patch target with no svn:eol-style"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -1798,7 +1798,7 @@ def patch_with_svn_eol_style(sbox):
def patch_with_svn_eol_style_uncommitted(sbox):
"patch target with uncommitted svn:eol-style"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -2571,7 +2571,7 @@ def patch_dir_properties(sbox):
def patch_add_path_with_props(sbox):
"patch that adds paths with props"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -2822,12 +2822,13 @@ def patch_prop_offset(sbox):
os.chdir(wc_dir)
- expected_output = [
+ # Changing two properties so output order not well defined.
+ expected_output = svntest.verify.UnorderedOutput([
' U iota\n',
'> applied hunk ## -6,6 +6,9 ## with offset -1 (prop1)\n',
'> applied hunk ## -14,11 +17,8 ## with offset 4 (prop1)\n',
'> applied hunk ## -5,6 +5,7 ## with offset -3 (prop2)\n',
- ]
+ ])
expected_disk = svntest.main.greek_state.copy()
expected_disk.tweak('iota', props = {'prop1' : prop1_content,
@@ -2993,7 +2994,7 @@ def patch_prop_with_fuzz(sbox):
def patch_git_empty_files(sbox):
"patch that contains empty files"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -3363,7 +3364,7 @@ def patch_one_property(sbox, trailing_eo
"""Helper. Apply a patch that sets the property 'k' to 'v\n' or to 'v',
and check the results."""
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -3427,7 +3428,6 @@ def patch_strip_cwd(sbox):
"patch --strip propchanges cwd"
return patch_one_property(sbox, True)
-@XFail()
@Issue(3814)
def patch_set_prop_no_eol(sbox):
"patch doesn't append newline to properties"
@@ -3439,7 +3439,7 @@ def patch_set_prop_no_eol(sbox):
def patch_add_symlink(sbox):
"patch that adds a symlink"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -3615,7 +3615,6 @@ def patch_moved_away(sbox):
1, # check-props
1) # dry-run
-@XFail()
@Issue(3991)
def patch_lacking_trailing_eol(sbox):
"patch file lacking trailing eol"
@@ -3650,12 +3649,11 @@ def patch_lacking_trailing_eol(sbox):
expected_output = [
'U %s\n' % os.path.join(wc_dir, 'iota'),
- 'svn: W[0-9]+: .*', # warning about appending a newline to iota's last line
]
# Expect a newline to be appended
expected_disk = svntest.main.greek_state.copy()
- expected_disk.tweak('iota', contents=iota_contents+"Some more bytes\n")
+ expected_disk.tweak('iota', contents=iota_contents + "Some more bytes")
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.tweak('iota', status='M ')
@@ -3758,7 +3756,7 @@ def patch_deletes_prop(sbox):
def patch_reversed_add_with_props(sbox):
"reverse patch new file+props atop uncommitted"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -3861,7 +3859,7 @@ def patch_reversed_add_with_props2(sbox)
def patch_dev_null(sbox):
"patch with /dev/null filenames"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -3917,7 +3915,7 @@ def patch_dev_null(sbox):
def patch_delete_and_skip(sbox):
"patch that deletes and skips"
- sbox.build()
+ sbox.build(read_only = True)
wc_dir = sbox.wc_dir
patch_file_path = make_patch_path(sbox)
@@ -3991,6 +3989,116 @@ def patch_delete_and_skip(sbox):
1, # check-props
1) # dry-run
+def patch_target_no_eol_at_eof(sbox):
+ "patch target with no eol at eof"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = make_patch_path(sbox)
+ iota_path = os.path.join(wc_dir, 'iota')
+
+ iota_contents = [
+ "This is the file iota."
+ ]
+
+ svntest.main.file_write(iota_path, ''.join(iota_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status, None, wc_dir)
+ unidiff_patch = [
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "@@ -1,7 +1,7 @@\n",
+ "-This is the file iota.\n"
+ "\\ No newline at end of file\n",
+ "+It is really the file 'iota'.\n",
+ "\\ No newline at end of file\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ iota_contents = [
+ "It is really the file 'iota'."
+ ]
+ expected_output = [
+ 'U %s\n' % os.path.join(wc_dir, 'iota'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents=''.join(iota_contents))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status='M ', wc_rev=2)
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_add_and_delete(sbox):
+ "patch add multiple levels and delete"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ patch_file_path = make_patch_path(sbox)
+
+ unidiff_patch = [
+ "Index: foo\n",
+ "===================================================================\n",
+ "--- P/Q/foo\t(revision 0)\n"
+ "+++ P/Q/foo\t(working copy)\n"
+ "@@ -0,0 +1 @@\n",
+ "+This is the file 'foo'.\n",
+ "Index: iota\n"
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n"
+ "+++ iota\t(working copy)\n"
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'iota'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'A %s\n' % os.path.join(wc_dir, 'P'),
+ 'A %s\n' % os.path.join(wc_dir, 'P', 'Q'),
+ 'A %s\n' % os.path.join(wc_dir, 'P', 'Q', 'foo'),
+ 'D %s\n' % os.path.join(wc_dir, 'iota'),
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk.add({'P/Q/foo' : Item(contents="This is the file 'foo'.\n")})
+ expected_status.tweak('iota', status='D ')
+ expected_status.add({
+ 'P' : Item(status='A ', wc_rev=0),
+ 'P/Q' : Item(status='A ', wc_rev=0),
+ 'P/Q/foo' : Item(status='A ', wc_rev=0),
+ })
+ expected_skip = wc.State('', { })
+
+ # Failed with "The node 'P' was not found" when erroneously checking
+ # whether 'P/Q' should be deleted.
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
########################################################################
#Run the tests
@@ -4033,6 +4141,8 @@ test_list = [ None,
patch_reversed_add_with_props2,
patch_dev_null,
patch_delete_and_skip,
+ patch_target_no_eol_at_eof,
+ patch_add_and_delete,
]
if __name__ == '__main__':
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/prop_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/prop_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/prop_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/prop_tests.py Mon Jul 30 06:39:28 2012
@@ -893,8 +893,7 @@ def prop_value_conversions(sbox):
svntest.actions.set_prop('svn:executable', '*', lambda_path)
for pval in (' ', '', 'no', 'off', 'false'):
svntest.actions.set_prop('svn:executable', pval, mu_path,
- ["svn: warning: To turn off the svn:executable property, use 'svn propdel';\n",
- "setting the property to '" + pval + "' will not turn it off.\n"])
+ "svn: warning: W125005.*use 'svn propdel'")
# Anything else should be untouched
svntest.actions.set_prop('svn:some-prop', 'bar', lambda_path)
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/special_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/special_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/special_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/special_tests.py Mon Jul 30 06:39:28 2012
@@ -30,7 +30,7 @@ import sys, os, re
# Our testing module
import svntest
-from svntest.main import server_has_mergeinfo
+from svntest.main import server_has_mergeinfo, run_svn, file_write
# (abbreviation)
Skip = svntest.testcase.Skip_deco
@@ -551,7 +551,9 @@ def diff_symlink_to_dir(sbox):
"___________________________________________________________________\n",
"Added: svn:special\n",
"## -0,0 +1 ##\n",
- "+*\n" ]
+ "+*\n",
+ "\\ No newline at end of property\n"
+ ]
svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff',
'.')
# We should get the same output if we the diff the symlink itself.
@@ -904,6 +906,167 @@ def update_symlink(sbox):
None, None, None,
None, None, 1)
+@XFail()
+@Issue(4091)
+@SkipUnless(svntest.main.is_posix_os)
+def replace_symlinks(sbox):
+ "replace symlinks"
+ sbox.build()
+ wc = sbox.ospath
+
+ # Some of these tests are implemented for git (in test script
+ # t/t9100-git-svn-basic.sh) using the Perl bindings for Subversion.
+ # Our issue #4091 is about 'svn update' failures in the git tests.
+
+ sbox.simple_mkdir('A/D/G/Z')
+ sbox.simple_mkdir('A/D/Gx')
+ sbox.simple_mkdir('A/D/Gx/Z')
+ sbox.simple_mkdir('A/D/Hx')
+ sbox.simple_mkdir('A/D/Y')
+ sbox.simple_mkdir('Ax')
+
+ os.symlink('../Y', wc('A/D/H/Z'))
+ os.symlink('../Y', wc('A/D/Hx/Z'))
+ sbox.simple_add('A/D/H/Z',
+ 'A/D/Hx/Z')
+
+ for p in ['Ax/mu',
+ 'A/D/Gx/pi',
+ 'A/D/Hx/chi',
+ ]:
+ file_write(wc(p), 'This starts as a normal file.\n')
+ sbox.simple_add(p)
+ for p in ['iota.sh',
+ 'A/mu.sh',
+ 'Ax/mu.sh',
+ 'A/D/gamma.sh',
+ 'A/B/E/beta.sh',
+ 'A/D/G/rho.sh',
+ 'A/D/Gx/rho.sh',
+ 'A/D/H/psi.sh',
+ 'A/D/Hx/psi.sh',
+ ]:
+ file_write(wc(p), '#!/bin/sh\necho "hello, svn!"\n')
+ os.chmod(wc(p), 0775)
+ sbox.simple_add(p)
+ sbox.simple_commit() # r2
+
+ # Failing git-svn test: 'new symlink is added to a file that was
+ # also just made executable', i.e., in the same revision.
+ sbox.simple_propset("svn:executable", "*", 'A/B/E/alpha')
+ os.symlink('alpha', wc('A/B/E/sym-alpha'))
+ sbox.simple_add('A/B/E/sym-alpha')
+
+ # Add a symlink to a file made non-executable in the same revision.
+ sbox.simple_propdel("svn:executable", 'A/B/E/beta.sh')
+ os.symlink('beta.sh', wc('A/B/E/sym-beta.sh'))
+ sbox.simple_add('A/B/E/sym-beta.sh')
+
+ # Replace a normal {file, exec, dir} with a symlink to the same kind
+ # via Subversion replacement.
+ sbox.simple_rm('A/D/G/pi',
+ 'A/D/G/rho.sh',
+ #'A/D/G/Z', # Ooops, not compatible with --bin=svn1.6.
+ )
+ os.symlink(wc('../gamma'), wc('A/D/G/pi'))
+ os.symlink(wc('../gamma.sh'), wc('A/D/G/rho.sh'))
+ #os.symlink(wc('../Y'), wc('A/D/G/Z'))
+ sbox.simple_add('A/D/G/pi',
+ 'A/D/G/rho.sh',
+ #'A/D/G/Z',
+ )
+
+ # Replace a symlink to {file, exec, dir} with a normal item of the
+ # same kind via Subversion replacement.
+ sbox.simple_rm('A/D/H/chi',
+ 'A/D/H/psi.sh',
+ #'A/D/H/Z',
+ )
+ os.symlink(wc('../gamma'), wc('A/D/H/chi'))
+ os.symlink(wc('../gamma.sh'), wc('A/D/H/psi.sh'))
+ #os.symlink(wc('../Y'), wc('A/D/H/Z'))
+ sbox.simple_add('A/D/H/chi',
+ 'A/D/H/psi.sh',
+ #'A/D/H/Z',
+ )
+
+ # Replace a normal {file, exec} with a symlink to {exec, file} via
+ # Subversion replacement.
+ sbox.simple_rm('A/mu',
+ 'A/mu.sh')
+ os.symlink('../iota2', wc('A/mu'))
+ os.symlink('../iota', wc('A/mu.sh'))
+ sbox.simple_add('A/mu',
+ 'A/mu.sh')
+
+ # Ditto, without the Subversion replacement. Failing git-svn test
+ # 'executable file becomes a symlink to bar/zzz (file)'.
+ os.remove(wc('Ax/mu'))
+ os.remove(wc('Ax/mu.sh'))
+ os.symlink('../iota2', wc('Ax/mu'))
+ os.symlink('../iota', wc('Ax/mu.sh'))
+ sbox.simple_propset('svn:special', '*',
+ 'Ax/mu',
+ 'Ax/mu.sh')
+ sbox.simple_propdel('svn:executable', 'Ax/mu.sh')
+
+ ### TODO Replace a normal {file, exec, dir, dir} with a symlink to
+ ### {dir, dir, file, exec}. And the same symlink-to-normal.
+
+ ### Commit fails as of r1226697 with either "svn: E145001: Entry
+ ### '.../A/D/Gx/Z' has unexpectedly changed special status" or "svn:
+ ### E155010: The node '.../Ax/mu' was not found".
+ sbox.simple_commit() # r3
+
+ # Try updating from HEAD-1 to HEAD.
+ run_svn(None, 'up', '-r2', sbox.wc_dir)
+ sbox.simple_update()
+
+
+@Issue(4102)
+@SkipUnless(svntest.main.is_posix_os)
+def externals_as_symlink_targets(sbox):
+ "externals as symlink targets"
+ sbox.build()
+ wc = sbox.ospath
+
+ # Control: symlink to normal dir and file.
+ os.symlink('E', wc('sym_E'))
+ os.symlink('mu', wc('sym_mu'))
+
+ # Test case: symlink to external dir and file.
+ sbox.simple_propset("svn:externals",
+ '^/A/B/E ext_E\n'
+ '^/A/mu ext_mu',
+ '')
+ sbox.simple_update()
+ os.symlink('ext_E', wc('sym_ext_E'))
+ os.symlink('ext_mu', wc('sym_ext_mu'))
+
+ # Adding symlinks to normal items and to a file external is OK.
+ sbox.simple_add('sym_E', 'sym_mu', 'sym_ext_mu')
+
+ ### Adding a symlink to an external dir failed with
+ ### svn: E200009: Could not add all targets because some targets are
+ ### already versioned
+ sbox.simple_add('sym_ext_E')
+
+ sbox.simple_commit()
+
+@XFail()
+@Issue(4119)
+@SkipUnless(svntest.main.is_posix_os)
+def cat_added_symlink(sbox):
+ "cat added symlink"
+
+ sbox.build(read_only = True)
+
+ kappa_path = sbox.ospath('kappa')
+ os.symlink('iota', kappa_path)
+ sbox.simple_add('kappa')
+ svntest.actions.run_and_verify_svn(None, "link iota", [],
+ "cat", kappa_path)
+
########################################################################
# Run the tests
@@ -931,6 +1094,9 @@ test_list = [ None,
symlink_to_wc_basic,
symlink_to_wc_svnversion,
update_symlink,
+ replace_symlinks,
+ externals_as_symlink_targets,
+ cat_added_symlink,
]
if __name__ == '__main__':
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/stat_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/stat_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/stat_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/stat_tests.py Mon Jul 30 06:39:28 2012
@@ -117,6 +117,12 @@ def status_update_with_nested_adds(sbox)
svntest.actions.run_and_verify_unquiet_status(wc_backup,
expected_status)
+ # At one time an obstructing 'newdir' caused a SEGV on 'newdir/newfile'
+ os.makedirs(os.path.join(wc_backup, 'newdir'))
+ expected_status.tweak('newdir', status='? ')
+ svntest.actions.run_and_verify_unquiet_status(wc_backup,
+ expected_status)
+
#----------------------------------------------------------------------
# svn status -vN should include all entries in a directory
@@ -899,7 +905,7 @@ def missing_dir_in_anchor(sbox):
def status_in_xml(sbox):
"status output in XML format"
- sbox.build(read_only = True)
+ sbox.build()
wc_dir = sbox.wc_dir
file_name = "iota"
@@ -918,38 +924,27 @@ def status_in_xml(sbox):
else:
raise svntest.Failure
- template = ['<?xml version="1.0" encoding="UTF-8"?>\n',
- "<status>\n",
- "<target\n",
- " path=\"%s\">\n" % (file_path),
- "<entry\n",
- " path=\"%s\">\n" % (file_path),
- "<wc-status\n",
- " props=\"none\"\n",
- " item=\"modified\"\n",
- " revision=\"1\">\n",
- "<commit\n",
- " revision=\"1\">\n",
- "<author>%s</author>\n" % svntest.main.wc_author,
- time_str,
- "</commit>\n",
- "</wc-status>\n",
- "</entry>\n",
- "<against\n",
- " revision=\"1\"/>\n",
- "</target>\n",
- "</status>\n",
- ]
+ expected_entries = {file_path : {'wcprops' : 'none',
+ 'wcitem' : 'modified',
+ 'wcrev' : '1',
+ 'crev' : '1',
+ 'author' : svntest.main.wc_author}}
- exit_code, output, error = svntest.actions.run_and_verify_svn(None, None, [],
- 'status',
- file_path,
- '--xml', '-u')
-
- for i in range(0, len(output)):
- if output[i] != template[i]:
- print("ERROR: expected: %s actual: %s" % (template[i], output[i]))
- raise svntest.Failure
+ svntest.actions.run_and_verify_status_xml(expected_entries, file_path, '-u')
+
+ svntest.actions.run_and_verify_svn(None, None, [],
+ 'cp', '-m', 'repo-to-repo copy',
+ sbox.repo_url + '/iota',
+ sbox.repo_url + '/iota2')
+
+ file_path = sbox.ospath('iota2')
+
+ expected_entries = {file_path : {'wcprops' : 'none',
+ 'wcitem' : 'none',
+ 'rprops' : 'none',
+ 'ritem' : 'added'}}
+
+ svntest.actions.run_and_verify_status_xml(expected_entries, file_path, '-u')
#----------------------------------------------------------------------
@@ -1225,53 +1220,23 @@ def status_update_with_incoming_props(sb
else:
raise svntest.Failure
- xout = ['<?xml version="1.0" encoding="UTF-8"?>\n',
- "<status>\n",
- "<target\n",
- " path=\"%s\">\n" % (wc_dir),
- "<entry\n",
- " path=\"%s\">\n" % (A_path),
- "<wc-status\n",
- " props=\"none\"\n",
- " item=\"normal\"\n",
- " revision=\"1\">\n",
- "<commit\n",
- " revision=\"1\">\n",
- "<author>%s</author>\n" % svntest.main.wc_author,
- time_str,
- "</commit>\n",
- "</wc-status>\n",
- "<repos-status\n",
- " props=\"modified\"\n",
- " item=\"none\">\n",
- "</repos-status>\n",
- "</entry>\n",
- "<entry\n",
- " path=\"%s\">\n" % (wc_dir),
- "<wc-status\n",
- " props=\"none\"\n",
- " item=\"normal\"\n",
- " revision=\"1\">\n",
- "<commit\n",
- " revision=\"1\">\n",
- "<author>%s</author>\n" % svntest.main.wc_author,
- time_str,
- "</commit>\n",
- "</wc-status>\n",
- "<repos-status\n",
- " props=\"modified\"\n",
- " item=\"none\">\n",
- "</repos-status>\n",
- "</entry>\n",
- "<against\n",
- " revision=\"2\"/>\n",
- "</target>\n",
- "</status>\n",]
-
- exit_code, output, error = svntest.actions.run_and_verify_svn(None, xout, [],
- 'status',
- wc_dir,
- '--xml', '-uN')
+ expected_entries ={wc_dir : {'wcprops' : 'none',
+ 'wcitem' : 'normal',
+ 'wcrev' : '1',
+ 'crev' : '1',
+ 'author' : svntest.main.wc_author,
+ 'rprops' : 'modified',
+ 'ritem' : 'none'},
+ A_path : {'wcprops' : 'none',
+ 'wcitem' : 'normal',
+ 'wcrev' : '1',
+ 'crev' : '1',
+ 'author' : svntest.main.wc_author,
+ 'rprops' : 'modified',
+ 'ritem' : 'none'},
+ }
+
+ svntest.actions.run_and_verify_status_xml(expected_entries, wc_dir, '-uN')
# more incoming prop updates.
def status_update_verbose_with_incoming_props(sbox):
@@ -1924,6 +1889,50 @@ def wclock_status(sbox):
'status', wc_dir)
+@Issue(4072)
+@XFail()
+def modified_modulo_translation(sbox):
+ "modified before translation, unmodified after"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make 'iota' subject to eol-style translation.
+ sbox.simple_propset('svn:eol-style', 'LF', 'iota')
+ sbox.simple_commit()
+
+ # CRLF it.
+ open(sbox.ospath('iota'), 'wb').write("This is the file 'iota'.\r\n")
+
+ # Run status. Expect some output.
+ # TODO: decide how such files should show in the output; whether they
+ # always show, or only with some --flag; and adjust this accordingly.
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [],
+ 'status', wc_dir)
+
+ # Expect the file to be renormalized (to LF) after a revert.
+ sbox.simple_revert('iota')
+ svntest.actions.run_and_verify_svn(None, [], [], 'status', wc_dir)
+
+def status_not_present(sbox):
+ "no status on not-present and excluded nodes"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Delete some nodes and exclude others from the working copy.
+ sbox.simple_rm('iota', 'A/C')
+ svntest.main.run_svn(None, 'up', '--set-depth', 'exclude',
+ sbox.ospath('A/mu'), sbox.ospath('A/B'))
+ sbox.simple_commit()
+
+ svntest.actions.run_and_verify_svn(None, [], [],'status',
+ sbox.ospath('iota'),
+ sbox.ospath('A/B'),
+ sbox.ospath('A/C'),
+ sbox.ospath('A/mu'),
+ sbox.ospath('no-file'))
+
########################################################################
# Run the tests
@@ -1965,6 +1974,8 @@ test_list = [ None,
status_locked_deleted,
wc_wc_copy_timestamp,
wclock_status,
+ modified_modulo_translation,
+ status_not_present,
]
if __name__ == '__main__':
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/svnadmin_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/svnadmin_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/svnadmin_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/svnadmin_tests.py Mon Jul 30 06:39:28 2012
@@ -46,6 +46,85 @@ Issue = svntest.testcase.Issue_deco
Wimp = svntest.testcase.Wimp_deco
Item = svntest.wc.StateItem
+def check_hotcopy_bdb(src, dst):
+ "Verify that the SRC BDB repository has been correctly copied to DST."
+ ### TODO: This function should be extended to verify all hotcopied files,
+ ### not just compare the output of 'svnadmin dump'. See check_hotcopy_fsfs().
+ exit_code, origout, origerr = svntest.main.run_svnadmin("dump", src,
+ '--quiet')
+ exit_code, backout, backerr = svntest.main.run_svnadmin("dump", dst,
+ '--quiet')
+ if origerr or backerr or origout != backout:
+ raise svntest.Failure
+
+def check_hotcopy_fsfs(src, dst):
+ "Verify that the SRC FSFS repository has been correctly copied to DST."
+ # Walk the source and compare all files to the destination
+ for src_dirpath, src_dirs, src_files in os.walk(src):
+ # Verify that the current directory exists in the destination
+ dst_dirpath = src_dirpath.replace(src, dst)
+ if not os.path.isdir(dst_dirpath):
+ raise svntest.Failure("%s does not exist in hotcopy "
+ "destination" % dst_dirpath)
+ # Verify that all dirents in the current directory also exist in source
+ for dst_dirent in os.listdir(dst_dirpath):
+ src_dirent = os.path.join(src_dirpath, dst_dirent)
+ if not os.path.exists(src_dirent):
+ raise svntest.Failure("%s does not exist in hotcopy "
+ "source" % src_dirent)
+ # Compare all files in this directory
+ for src_file in src_files:
+ src_path = os.path.join(src_dirpath, src_file)
+ dst_path = os.path.join(dst_dirpath, src_file)
+ if not os.path.isfile(dst_path):
+ raise svntest.Failure("%s does not exist in hotcopy "
+ "destination" % dst_path)
+
+ # Special case for rep-cache: It will always differ in a byte-by-byte
+ # comparison, so compare db tables instead.
+ if src_file == 'rep-cache.db':
+ db1 = svntest.sqlite3.connect(src_path)
+ db2 = svntest.sqlite3.connect(dst_path)
+ rows1 = []
+ rows2 = []
+ for row in db1.execute("select * from rep_cache order by hash"):
+ rows1.append(row)
+ for row in db2.execute("select * from rep_cache order by hash"):
+ rows2.append(row)
+ if len(rows1) != len(rows2):
+ raise svntest.Failure("number of rows in rep-cache differs")
+ for i in range(len(rows1)):
+ if rows1[i] != rows2[i]:
+ raise svntest.Failure("rep-cache row %i differs: '%s' vs. '%s'"
+ % (i, rows1[i], rows2[i]))
+ continue
+
+ f1 = open(src_path, 'r')
+ f2 = open(dst_path, 'r')
+ offset = 0
+ BUFSIZE = 1024
+ while True:
+ buf1 = f1.read(BUFSIZE)
+ buf2 = f2.read(BUFSIZE)
+ if not buf1 or not buf2:
+ if not buf1 and not buf2:
+ # both at EOF
+ break
+ elif buf1:
+ raise svntest.Failure("%s differs at offset %i" %
+ (dst_path, offset))
+ elif buf2:
+ raise svntest.Failure("%s differs at offset %i" %
+ (dst_path, offset))
+ if len(buf1) != len(buf2):
+ raise svntest.Failure("%s differs in length" % dst_path)
+ for i in range(len(buf1)):
+ if buf1[i] != buf2[i]:
+ raise svntest.Failure("%s differs at offset %i"
+ % (dst_path, offset + i))
+ offset += len(buf1)
+ f1.close()
+ f2.close()
#----------------------------------------------------------------------
@@ -359,17 +438,16 @@ def hotcopy_dot(sbox):
os.chdir(cwd)
- exit_code, origout, origerr = svntest.main.run_svnadmin("dump",
- sbox.repo_dir,
- '--quiet')
- exit_code, backout, backerr = svntest.main.run_svnadmin("dump",
- backup_dir,
- '--quiet')
- if origerr or backerr or origout != backout:
- raise svntest.Failure
+ if svntest.main.is_fs_type_fsfs():
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+ else:
+ check_hotcopy_bdb(sbox.repo_dir, backup_dir)
#----------------------------------------------------------------------
+# This test is redundant for FSFS. The hotcopy_dot and hotcopy_incremental
+# tests cover this check for FSFS already.
+@SkipUnless(svntest.main.is_fs_type_bdb)
def hotcopy_format(sbox):
"'svnadmin hotcopy' checking db/format file"
sbox.build()
@@ -1344,9 +1422,12 @@ def verify_non_utf8_paths(sbox):
if line == "A\n":
# replace 'A' with a latin1 character -- the new path is not valid UTF-8
fp_new.write("\xE6\n")
- elif line == "text: 1 279 32 32 d63ecce65d8c428b86f4f8b0920921fe\n":
+ elif line == "text: 1 279 32 0 d63ecce65d8c428b86f4f8b0920921fe\n":
# fix up the representation checksum
- fp_new.write("text: 1 279 32 32 b50b1d5ed64075b5f632f3b8c30cd6b2\n")
+ fp_new.write("text: 1 279 32 0 b50b1d5ed64075b5f632f3b8c30cd6b2\n")
+ elif line == "text: 1 292 44 32 a6be7b4cf075fd39e6a99eb69a31232b\n":
+ # fix up the representation checksum
+ fp_new.write("text: 1 292 44 32 f2e93e73272cac0f18fccf16f224eb93\n")
elif line == "cpath: /A\n":
# also fix up the 'created path' field
fp_new.write("cpath: /\xE6\n")
@@ -1510,6 +1591,195 @@ def load_ranges(sbox):
svntest.verify.compare_and_display_lines("Dump files", "DUMP",
expected_dump, new_dumpdata)
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def hotcopy_incremental(sbox):
+ "'svnadmin hotcopy --incremental PATH .'"
+ sbox.build()
+
+ backup_dir, backup_url = sbox.add_repo_path('backup')
+ os.mkdir(backup_dir)
+ cwd = os.getcwd()
+
+ for i in [1, 2, 3]:
+ os.chdir(backup_dir)
+ svntest.actions.run_and_verify_svnadmin(
+ None, None, [],
+ "hotcopy", "--incremental", os.path.join(cwd, sbox.repo_dir), '.')
+
+ os.chdir(cwd)
+
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+
+ if i < 3:
+ sbox.simple_mkdir("newdir-%i" % i)
+ sbox.simple_commit()
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def hotcopy_incremental_packed(sbox):
+ "'svnadmin hotcopy --incremental' with packing"
+ sbox.build()
+
+ backup_dir, backup_url = sbox.add_repo_path('backup')
+ os.mkdir(backup_dir)
+ cwd = os.getcwd()
+ # Configure two files per shard to trigger packing
+ format_file = open(os.path.join(sbox.repo_dir, 'db', 'format'), 'wb')
+ format_file.write("4\nlayout sharded 2\n")
+ format_file.close()
+
+ # Pack revisions 0 and 1.
+ svntest.actions.run_and_verify_svnadmin(
+ None, None, [], "pack", os.path.join(cwd, sbox.repo_dir))
+
+ # Commit 5 more revs, hotcopy and pack after each commit.
+ for i in [1, 2, 3, 4, 5]:
+ os.chdir(backup_dir)
+ svntest.actions.run_and_verify_svnadmin(
+ None, None, [],
+ "hotcopy", "--incremental", os.path.join(cwd, sbox.repo_dir), '.')
+
+ os.chdir(cwd)
+
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+
+ if i < 5:
+ sbox.simple_mkdir("newdir-%i" % i)
+ sbox.simple_commit()
+ svntest.actions.run_and_verify_svnadmin(
+ None, None, [], "pack", os.path.join(cwd, sbox.repo_dir))
+
+
+def locking(sbox):
+ "svnadmin lock tests"
+ sbox.build(create_wc=False)
+
+ comment_path = os.path.join(svntest.main.temp_dir, "comment")
+ svntest.main.file_write(comment_path, "dummy comment")
+
+ invalid_comment_path = os.path.join(svntest.main.temp_dir, "invalid_comment")
+ svntest.main.file_write(invalid_comment_path, "character is invalid")
+
+ # Test illegal character in comment file.
+ expected_error = ".*svnadmin: E130004:.*"
+ svntest.actions.run_and_verify_svnadmin(None, None,
+ expected_error, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ invalid_comment_path)
+
+ # Test locking path with --bypass-hooks
+ expected_output = "'iota' locked by user 'jrandom'."
+ svntest.actions.run_and_verify_svnadmin(None, expected_output,
+ None, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ comment_path,
+ "--bypass-hooks")
+
+ # Remove lock
+ svntest.actions.run_and_verify_svnadmin(None, None,
+ None, "rmlocks",
+ sbox.repo_dir, "iota")
+
+ # Test locking path without --bypass-hooks
+ expected_output = "'iota' locked by user 'jrandom'."
+ svntest.actions.run_and_verify_svnadmin(None, expected_output,
+ None, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ comment_path)
+
+ # Test locking already locked path.
+ expected_error = ".*svnadmin: E160035:.*"
+ svntest.actions.run_and_verify_svnadmin(None, None,
+ expected_error, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ comment_path)
+
+ # Test locking non-existent path.
+ expected_error = ".*svnadmin: E160013:.*"
+ svntest.actions.run_and_verify_svnadmin(None, None,
+ expected_error, "lock",
+ sbox.repo_dir,
+ "non-existent", "jrandom",
+ comment_path)
+
+ # Test locking a path while specifying a lock token.
+ expected_output = "'A/D/G/rho' locked by user 'jrandom'."
+ lock_token = "opaquelocktoken:01234567-89ab-cdef-89ab-cdef01234567"
+ svntest.actions.run_and_verify_svnadmin(None, expected_output,
+ None, "lock",
+ sbox.repo_dir,
+ "A/D/G/rho", "jrandom",
+ comment_path, lock_token)
+
+ # Test unlocking a path, but provide the wrong lock token.
+ expected_error = ".*svnadmin: E160040:.*"
+ wrong_lock_token = "opaquelocktoken:12345670-9ab8-defc-9ab8-def01234567c"
+ svntest.actions.run_and_verify_svnadmin(None, None,
+ expected_error, "unlock",
+ sbox.repo_dir,
+ "A/D/G/rho", "jrandom",
+ wrong_lock_token)
+
+ # Test unlocking the path again, but this time provide the correct
+ # lock token.
+ expected_output = "'A/D/G/rho' unlocked."
+ svntest.actions.run_and_verify_svnadmin(None, expected_output,
+ None, "unlock",
+ sbox.repo_dir,
+ "A/D/G/rho", "jrandom",
+ lock_token)
+
+ # Install lock/unlock prevention hooks.
+ hook_path = svntest.main.get_pre_lock_hook_path(sbox.repo_dir)
+ svntest.main.create_python_hook_script(hook_path, 'import sys; sys.exit(1)')
+ hook_path = svntest.main.get_pre_unlock_hook_path(sbox.repo_dir)
+ svntest.main.create_python_hook_script(hook_path, 'import sys; sys.exit(1)')
+
+ # Test locking a path. Don't use --bypass-hooks, though, as we wish
+ # to verify that hook script is really getting executed.
+ expected_error = ".*svnadmin: E165001:.*"
+ svntest.actions.run_and_verify_svnadmin(None, None,
+ expected_error, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ comment_path)
+
+ # Fetch the lock token for our remaining locked path. (We didn't
+ # explicitly set it, so it will vary from test run to test run.)
+ exit_code, output, errput = svntest.main.run_svnadmin("lslocks",
+ sbox.repo_dir,
+ "iota")
+ iota_token = None
+ for line in output:
+ if line.startswith("UUID Token: opaquelocktoken:"):
+ iota_token = line[12:].rstrip()
+ break
+ if iota_token is None:
+ raise svntest.Failure("Unable to lookup lock token for 'iota'")
+
+ # Try to unlock a path while providing the correct lock token but
+ # with a preventative hook in place.
+ expected_error = ".*svnadmin: E165001:.*"
+ svntest.actions.run_and_verify_svnadmin(None, None,
+ expected_error, "unlock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ iota_token)
+
+ # Finally, use --bypass-hooks to unlock the path (again using the
+ # correct lock token).
+ expected_output = "'iota' unlocked."
+ svntest.actions.run_and_verify_svnadmin(None, expected_output,
+ None, "unlock",
+ "--bypass-hooks",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ iota_token)
+
+
########################################################################
# Run the tests
@@ -1541,6 +1811,9 @@ test_list = [ None,
verify_non_utf8_paths,
test_lslocks_and_rmlocks,
load_ranges,
+ hotcopy_incremental,
+ hotcopy_incremental_packed,
+ locking,
]
if __name__ == '__main__':
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/svndumpfilter_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/svndumpfilter_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/svndumpfilter_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/svndumpfilter_tests.py Mon Jul 30 06:39:28 2012
@@ -164,6 +164,9 @@ def svndumpfilter_loses_mergeinfo(sbox):
def _simple_dumpfilter_test(sbox, dumpfile, *dumpargs):
+ """Run svndumpfilter with arguments DUMPARGS, taking input from DUMPFILE.
+ Check that the output consists of the standard Greek tree excluding
+ all paths that start with 'A/B/E', 'A/D/G' or 'A/D/H'."""
wc_dir = sbox.wc_dir
filtered_output, filtered_err = filter_and_return_output(dumpfile, 0,
@@ -596,6 +599,58 @@ def dropped_but_not_renumbered_empty_rev
'propget', 'svn:mergeinfo', '-R',
sbox.repo_url)
+#----------------------------------------------------------------------
+def match_empty_prefix(sbox):
+ "svndumpfilter with an empty prefix"
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'greek_tree.dump')
+ dumpfile = open(dumpfile_location).read()
+
+ def test(sbox, dumpfile, *dumpargs):
+ """Run svndumpfilter with DUMPFILE as the input lines, load
+ the result and check it matches EXPECTED_DISK, EXPECTED_OUTPUT,
+ EXPECTED_STATUS."""
+
+ # Filter the Greek tree dump
+ filtered_output, filtered_err = filter_and_return_output(dumpfile, 0,
+ '--quiet',
+ *dumpargs)
+ if filtered_err:
+ raise verify.UnexpectedStderr(filtered_err)
+
+ # Load the filtered dump into a repo and check the result
+ test_create(sbox)
+ load_and_verify_dumpstream(sbox, [], [], None, filtered_output,
+ '--ignore-uuid')
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # Test excluding everything
+ expected_disk = svntest.wc.State(sbox.wc_dir, {})
+ expected_output = svntest.wc.State(sbox.wc_dir, {})
+ expected_status = svntest.wc.State(sbox.wc_dir, {
+ '': Item(status=' ', wc_rev=1) })
+
+ test(sbox, dumpfile, 'exclude', '')
+
+ # Test including everything
+ expected_disk = svntest.main.greek_state.copy()
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.tweak(status='A ')
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+
+ test(sbox, dumpfile, 'include', '', '/A/D/G')
+
+ # Note: We also ought to test the '--pattern' option, including or
+ # excluding a pattern of '*'. However, passing a wildcard parameter
+ # is troublesome on Windows: it may be expanded, depending on whether
+ # the svndumpfilter executable was linked with 'setargv.obj', and there
+ # doesn't seem to be a consistent way to quote such an argument to
+ # prevent expansion.
+
########################################################################
# Run the tests
@@ -608,6 +663,7 @@ test_list = [ None,
dumpfilter_with_patterns,
filter_mergeinfo_revs_outside_of_dump_stream,
dropped_but_not_renumbered_empty_revs,
+ match_empty_prefix,
]
if __name__ == '__main__':
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/svnlook_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/svnlook_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/svnlook_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/svnlook_tests.py Mon Jul 30 06:39:28 2012
@@ -117,35 +117,39 @@ def test_misc(sbox):
# the 'svnlook tree --full-paths' output if demanding the whole repository
treelist = run_svnlook('tree', repo_dir)
treelistfull = run_svnlook('tree', '--full-paths', repo_dir)
+
path = ''
- n = 0
+ treelistexpand = []
for entry in treelist:
len1 = len(entry)
len2 = len(entry.lstrip())
- path = path[0:2*(len1-len2)-1] + entry.strip()
- test = treelistfull[n].rstrip()
- if n != 0:
- test = "/" + test
- if not path == test:
- print("Unexpected result from tree with --full-paths:")
- print(" entry : %s" % entry.rstrip())
- print(" with --full-paths: %s" % treelistfull[n].rstrip())
- raise svntest.Failure
- n = n + 1
+ path = path[0:2*(len1-len2)-1] + entry.strip() + '\n'
+ if path == '/\n':
+ treelistexpand.append(path)
+ else:
+ treelistexpand.append(path[1:])
+
+ treelistexpand = svntest.verify.UnorderedOutput(treelistexpand)
+ svntest.verify.compare_and_display_lines('Unexpected result from tree', '',
+ treelistexpand, treelistfull)
# check if the 'svnlook tree' output is the ending of
# the 'svnlook tree --full-paths' output if demanding
# any part of the repository
- n = 0
treelist = run_svnlook('tree', repo_dir, '/A/B')
treelistfull = run_svnlook('tree', '--full-paths', repo_dir, '/A/B')
+
+ path = ''
+ treelistexpand = []
for entry in treelist:
- if not treelistfull[n].endswith(entry.lstrip()):
- print("Unexpected result from tree with --full-paths:")
- print(" entry : %s" % entry.rstrip())
- print(" with --full-paths: %s" % treelistfull[n].rstrip())
- raise svntest.Failure
- n = n + 1
+ len1 = len(entry)
+ len2 = len(entry.lstrip())
+ path = path[0:2*(len1-len2)] + entry.strip() + '\n'
+ treelistexpand.append('/A/' + path)
+
+ treelistexpand = svntest.verify.UnorderedOutput(treelistexpand)
+ svntest.verify.compare_and_display_lines('Unexpected result from tree', '',
+ treelistexpand, treelistfull)
treelist = run_svnlook('tree', repo_dir, '/')
if treelist[0] != '/\n':
@@ -695,7 +699,7 @@ fp.close()"""
# internal property, not really expected
' svn:check-locks\n',
' bogus_rev_prop\n', ' svn:date\n']
- verify_logfile(logfilepath, expected_data)
+ verify_logfile(logfilepath, svntest.verify.UnorderedOutput(expected_data))
########################################################################
# Run the tests
Modified: subversion/branches/svn-bisect/subversion/tests/cmdline/svnrdump_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/svn-bisect/subversion/tests/cmdline/svnrdump_tests.py?rev=1367002&r1=1367001&r2=1367002&view=diff
==============================================================================
--- subversion/branches/svn-bisect/subversion/tests/cmdline/svnrdump_tests.py (original)
+++ subversion/branches/svn-bisect/subversion/tests/cmdline/svnrdump_tests.py Mon Jul 30 06:39:28 2012
@@ -70,8 +70,31 @@ def build_repos(sbox):
# Create an empty repository.
svntest.main.create_repos(sbox.repo_dir)
+def compare_repos_dumps(svnrdump_sbox, svnadmin_dumpfile):
+ """Compare two dumpfiles, one created from SVNRDUMP_SBOX, and other given
+ by SVNADMIN_DUMPFILE. The dumpfiles do not need to match linewise, as the
+ SVNADMIN_DUMPFILE contents will first be loaded into a repository and then
+ re-dumped to do the match, which should generate the same dumpfile as
+ dumping SVNRDUMP_SBOX."""
+
+ svnrdump_contents = svntest.actions.run_and_verify_dump(
+ svnrdump_sbox.repo_dir)
+
+ svnadmin_sbox = svnrdump_sbox.clone_dependent()
+ svntest.main.safe_rmtree(svnadmin_sbox.repo_dir)
+ svntest.main.create_repos(svnadmin_sbox.repo_dir)
+
+ svntest.actions.run_and_verify_load(svnadmin_sbox.repo_dir, svnadmin_dumpfile)
+
+ svnadmin_contents = svntest.actions.run_and_verify_dump(
+ svnadmin_sbox.repo_dir)
+
+ svntest.verify.compare_dump_files(
+ "Dump files", "DUMP", svnadmin_contents, svnrdump_contents)
+
def run_dump_test(sbox, dumpfile_name, expected_dumpfile_name = None,
- subdir = None, bypass_prop_validation = False):
+ subdir = None, bypass_prop_validation = False,
+ ignore_base_checksums = False):
"""Load a dumpfile using 'svnadmin load', dump it with 'svnrdump
dump' and check that the same dumpfile is produced or that
expected_dumpfile_name is produced if provided. Additionally, the
@@ -107,12 +130,21 @@ def run_dump_test(sbox, dumpfile_name, e
svnadmin_dumpfile = open(os.path.join(svnrdump_tests_dir,
expected_dumpfile_name),
'rb').readlines()
+ # Compare the output from stdout
+ if ignore_base_checksums:
+ svnadmin_dumpfile = [l for l in svnadmin_dumpfile
+ if not l.startswith('Text-delta-base-md5')]
+ svnrdump_dumpfile = [l for l in svnrdump_dumpfile
+ if not l.startswith('Text-delta-base-md5')]
+
svnadmin_dumpfile = svntest.verify.UnorderedOutput(svnadmin_dumpfile)
- # Compare the output from stdout
- svntest.verify.compare_and_display_lines(
- "Dump files", "DUMP", svnadmin_dumpfile, svnrdump_dumpfile,
- None, mismatched_headers_re)
+ svntest.verify.compare_and_display_lines(
+ "Dump files", "DUMP", svnadmin_dumpfile, svnrdump_dumpfile,
+ None, mismatched_headers_re)
+
+ else:
+ compare_repos_dumps(sbox, svnadmin_dumpfile)
def run_load_test(sbox, dumpfile_name, expected_dumpfile_name = None,
expect_deltas = True):
@@ -155,13 +187,17 @@ def run_load_test(sbox, dumpfile_name, e
expected_dumpfile_name),
'rb').readlines()
- # Compare the output from stdout
- svntest.verify.compare_and_display_lines(
- "Dump files", "DUMP", svnrdump_dumpfile, svnadmin_dumpfile)
+ # Compare the output from stdout
+ svntest.verify.compare_and_display_lines(
+ "Dump files", "DUMP", svnrdump_dumpfile, svnadmin_dumpfile)
+
+ else:
+ compare_repos_dumps(sbox, svnrdump_dumpfile)
######################################################################
# Tests
+@Skip(svntest.main.is_ra_type_dav_serf)
def basic_dump(sbox):
"dump: standard sbox repos"
sbox.build(read_only = True, create_wc = False)
@@ -174,6 +210,7 @@ def basic_dump(sbox):
if not out[0].startswith('SVN-fs-dump-format-version:'):
raise svntest.Failure('No valid output')
+@Skip(svntest.main.is_ra_type_dav_serf)
def revision_0_dump(sbox):
"dump: revision zero"
run_dump_test(sbox, "revision-0.dump")
@@ -192,6 +229,7 @@ def revision_0_load(sbox):
# docs/ (Added r6)
# README (Added r6)
+@Skip(svntest.main.is_ra_type_dav_serf)
def skeleton_dump(sbox):
"dump: skeleton repository"
run_dump_test(sbox, "skeleton.dump")
@@ -200,6 +238,7 @@ def skeleton_load(sbox):
"load: skeleton repository"
run_load_test(sbox, "skeleton.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def sparse_propchanges_dump(sbox):
"dump: sparse file/dir propchanges"
run_dump_test(sbox, "sparse-propchanges.dump")
@@ -209,6 +248,7 @@ def sparse_propchanges_load(sbox):
"load: sparse file/dir propchanges"
run_load_test(sbox, "sparse-propchanges.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def copy_and_modify_dump(sbox):
"dump: copy and modify"
run_dump_test(sbox, "copy-and-modify.dump")
@@ -217,6 +257,7 @@ def copy_and_modify_load(sbox):
"load: copy and modify"
run_load_test(sbox, "copy-and-modify.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def no_author_dump(sbox):
"dump: copy revs with no svn:author revprops"
run_dump_test(sbox, "no-author.dump")
@@ -225,6 +266,7 @@ def no_author_load(sbox):
"load: copy revs with no svn:author revprops"
run_load_test(sbox, "no-author.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def copy_from_previous_version_and_modify_dump(sbox):
"dump: copy from previous version and modify"
run_dump_test(sbox, "copy-from-previous-version-and-modify.dump")
@@ -233,6 +275,7 @@ def copy_from_previous_version_and_modif
"load: copy from previous version and modify"
run_load_test(sbox, "copy-from-previous-version-and-modify.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def modified_in_place_dump(sbox):
"dump: modified in place"
run_dump_test(sbox, "modified-in-place.dump")
@@ -241,6 +284,7 @@ def modified_in_place_load(sbox):
"load: modified in place"
run_load_test(sbox, "modified-in-place.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def move_and_modify_in_the_same_revision_dump(sbox):
"dump: move parent & modify child file in same rev"
run_dump_test(sbox, "move-and-modify.dump")
@@ -249,6 +293,7 @@ def move_and_modify_in_the_same_revision
"load: move parent & modify child file in same rev"
run_load_test(sbox, "move-and-modify.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def tag_empty_trunk_dump(sbox):
"dump: tag empty trunk"
run_dump_test(sbox, "tag-empty-trunk.dump")
@@ -257,6 +302,7 @@ def tag_empty_trunk_load(sbox):
"load: tag empty trunk"
run_load_test(sbox, "tag-empty-trunk.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def tag_trunk_with_file_dump(sbox):
"dump: tag trunk containing a file"
run_dump_test(sbox, "tag-trunk-with-file.dump")
@@ -265,6 +311,7 @@ def tag_trunk_with_file_load(sbox):
"load: tag trunk containing a file"
run_load_test(sbox, "tag-trunk-with-file.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def tag_trunk_with_file2_dump(sbox):
"dump: tag trunk containing a file (#2)"
run_dump_test(sbox, "tag-trunk-with-file2.dump")
@@ -273,6 +320,7 @@ def tag_trunk_with_file2_load(sbox):
"load: tag trunk containing a file (#2)"
run_load_test(sbox, "tag-trunk-with-file2.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def dir_prop_change_dump(sbox):
"dump: directory property changes"
run_dump_test(sbox, "dir-prop-change.dump")
@@ -281,6 +329,7 @@ def dir_prop_change_load(sbox):
"load: directory property changes"
run_load_test(sbox, "dir-prop-change.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def copy_parent_modify_prop_dump(sbox):
"dump: copy parent and modify prop"
run_dump_test(sbox, "copy-parent-modify-prop.dump")
@@ -289,6 +338,7 @@ def copy_parent_modify_prop_load(sbox):
"load: copy parent and modify prop"
run_load_test(sbox, "copy-parent-modify-prop.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def copy_revprops_dump(sbox):
"dump: copy revprops other than svn:*"
run_dump_test(sbox, "revprops.dump")
@@ -297,16 +347,21 @@ def copy_revprops_load(sbox):
"load: copy revprops other than svn:*"
run_load_test(sbox, "revprops.dump")
+@XFail()
+@Skip(svntest.main.is_ra_type_dav_serf)
def only_trunk_dump(sbox):
"dump: subdirectory"
run_dump_test(sbox, "trunk-only.dump", subdir="/trunk",
expected_dumpfile_name="trunk-only.expected.dump")
+@XFail()
+@Skip(svntest.main.is_ra_type_dav_serf)
def only_trunk_A_with_changes_dump(sbox):
"dump: subdirectory with changes on root"
run_dump_test(sbox, "trunk-A-changes.dump", subdir="/trunk/A",
expected_dumpfile_name="trunk-A-changes.expected.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def url_encoding_dump(sbox):
"dump: url encoding issues"
run_dump_test(sbox, "url-encoding-bug.dump")
@@ -315,18 +370,21 @@ def url_encoding_load(sbox):
"load: url encoding issues"
run_load_test(sbox, "url-encoding-bug.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def copy_bad_line_endings_dump(sbox):
"dump: inconsistent line endings in svn:* props"
run_dump_test(sbox, "copy-bad-line-endings.dump",
expected_dumpfile_name="copy-bad-line-endings.expected.dump",
bypass_prop_validation=True)
+@Skip(svntest.main.is_ra_type_dav_serf)
def copy_bad_line_endings2_dump(sbox):
"dump: non-LF line endings in svn:* props"
run_dump_test(sbox, "copy-bad-line-endings2.dump",
expected_dumpfile_name="copy-bad-line-endings2.expected.dump",
- bypass_prop_validation=True)
+ bypass_prop_validation=True, ignore_base_checksums=True)
+@Skip(svntest.main.is_ra_type_dav_serf)
def commit_a_copy_of_root_dump(sbox):
"dump: commit a copy of root"
run_dump_test(sbox, "repo-with-copy-of-root-dir.dump")
@@ -335,6 +393,7 @@ def commit_a_copy_of_root_load(sbox):
"load: commit a copy of root"
run_load_test(sbox, "repo-with-copy-of-root-dir.dump")
+@Skip(svntest.main.is_ra_type_dav_serf)
def descend_into_replace_dump(sbox):
"dump: descending into replaced dir looks in src"
run_dump_test(sbox, "descend-into-replace.dump", subdir='/trunk/H',
@@ -345,6 +404,7 @@ def descend_into_replace_load(sbox):
run_load_test(sbox, "descend-into-replace.dump")
@Issue(3847)
+@Skip(svntest.main.is_ra_type_dav_serf)
def add_multi_prop_dump(sbox):
"dump: add with multiple props"
run_dump_test(sbox, "add-multi-prop.dump")
@@ -359,6 +419,7 @@ def multi_prop_edit_load(sbox):
# revs in svn:mergeinfo' but uses 'svnrdump load' in place of
# 'svnadmin load'.
@Issue(3890)
+@Skip(svntest.main.is_ra_type_dav_serf)
def reflect_dropped_renumbered_revs(sbox):
"svnrdump renumbers dropped revs in mergeinfo"
@@ -422,6 +483,7 @@ def reflect_dropped_renumbered_revs(sbox
# from incremental dump' but uses 'svnrdump [dump|load]' in place of
# 'svnadmin [dump|load]'.
@Issue(3890)
+@Skip(svntest.main.is_ra_type_dav_serf)
def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox):
"don't drop mergeinfo revs in incremental svnrdump"