You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@subversion.apache.org by ju...@apache.org on 2015/01/20 18:52:21 UTC
svn commit: r1653314 [19/20] - in /subversion/branches/move-tracking-2: ./
notes/ subversion/ subversion/bindings/swig/
subversion/bindings/swig/include/ subversion/bindings/swig/perl/native/
subversion/bindings/swig/perl/native/t/ subversion/bindings/...
Modified: subversion/branches/move-tracking-2/subversion/tests/cmdline/patch_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/cmdline/patch_tests.py?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/cmdline/patch_tests.py (original)
+++ subversion/branches/move-tracking-2/subversion/tests/cmdline/patch_tests.py Tue Jan 20 17:52:18 2015
@@ -4689,6 +4689,757 @@ def patch_git_rename(sbox):
expected_output, expected_disk,
expected_status, expected_skip)
+@Issue(4533)
+def patch_hunk_avoid_reorder(sbox):
+ """avoid reordering hunks"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n', truncate=True)
+ sbox.simple_commit()
+
+ # two hunks, first matches at offset +18, second matches at both -13
+ # and +18 but we want the second match as it is after the first
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -13,6 +13,7 @@\n",
+ " MM\n",
+ " NN\n",
+ " OO\n",
+ "+11111\n",
+ " PP\n",
+ " QQ\n",
+ " RR\n",
+ "@@ -20,6 +20,7 @@\n",
+ " TT\n",
+ " UU\n",
+ " VV\n",
+ "+22222\n",
+ " WW\n",
+ " XX\n",
+ " YY\n",
+ ]
+
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -13,6 +13,7 @@ with offset 18\n',
+ '> applied hunk @@ -20,6 +20,7 @@ with offset 18\n'
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' '11111\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' '22222\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ sbox.simple_revert('A/mu')
+
+ # change patch so second hunk matches at both -14 and +17, we still
+ # want the second match
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -13,6 +13,7 @@\n",
+ " MM\n",
+ " NN\n",
+ " OO\n",
+ "+11111\n",
+ " PP\n",
+ " QQ\n",
+ " RR\n",
+ "@@ -21,6 +21,7 @@\n",
+ " TT\n",
+ " UU\n",
+ " VV\n",
+ "+22222\n",
+ " WW\n",
+ " XX\n",
+ " YY\n",
+ ]
+
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -13,6 +13,7 @@ with offset 18\n',
+ '> applied hunk @@ -21,6 +21,7 @@ with offset 17\n'
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' '11111\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' '22222\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ sbox.simple_revert('A/mu')
+
+@Issue(4533)
+def patch_hunk_avoid_reorder2(sbox):
+ """avoid reordering hunks 2"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n', truncate=True)
+ sbox.simple_commit()
+
+  # two hunks: as before, the first matches at offset +18; the second
+  # hunk now matches at both -12 and +19, and we still want the second
+  # match (the one that keeps the hunks in order)
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -13,6 +13,7 @@\n",
+ " MM\n",
+ " NN\n",
+ " OO\n",
+ "+11111\n",
+ " PP\n",
+ " QQ\n",
+ " RR\n",
+ "@@ -19,6 +19,7 @@\n",
+ " TT\n",
+ " UU\n",
+ " VV\n",
+ "+22222\n",
+ " WW\n",
+ " XX\n",
+ " YY\n",
+ ]
+
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -13,6 +13,7 @@ with offset 18\n',
+ '> applied hunk @@ -19,6 +19,7 @@ with offset 19\n'
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' '11111\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' '22222\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+@Issue(4533)
+def patch_hunk_reorder(sbox):
+ """hunks that reorder"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n' 'GG\n'
+ 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n' 'MM\n' 'NN\n',
+ truncate=True)
+ sbox.simple_commit()
+
+ # Two hunks match in opposite order
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -2,6 +2,7 @@\n",
+ " II\n",
+ " JJ\n",
+ " KK\n",
+ "+11111\n",
+ " LL\n",
+ " MM\n",
+ " NN\n",
+ "@@ -9,6 +10,7 @@\n",
+ " BB\n",
+ " CC\n",
+ " DD\n",
+ "+22222\n",
+ " EE\n",
+ " FF\n",
+ " GG\n",
+ ]
+
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -9,6 +10,7 @@ with offset -7\n',
+ '> applied hunk @@ -2,6 +2,7 @@ with offset 7\n',
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' '22222\n' 'EE\n' 'FF\n' 'GG\n'
+ 'HH\n' 'II\n' 'JJ\n' 'KK\n' '11111\n' 'LL\n' 'MM\n' 'NN\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+  # In the following case the reordered match for hunk2 has a smaller
+  # offset magnitude than the match at the end, so the reorder is preferred.
+ sbox.simple_revert('A/mu')
+ sbox.simple_append('A/mu',
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 100 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n',
+ truncate=True)
+ sbox.simple_commit()
+
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -28,7 +28,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk1\n",
+ "+hunk1-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ "@@ -44,7 +44,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk2\n",
+ "+hunk2-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ ]
+
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -44,7 +44,7 @@ with offset -32\n',
+ '> applied hunk @@ -28,7 +28,7 @@ with offset 1\n',
+ ]
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 100 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n')
+
+ expected_status.tweak('A/mu', status='M ', wc_rev=3)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+ sbox.simple_revert('A/mu')
+
+  # In this case the reordered match for hunk2 is further away than the
+  # match at the end of the file, so the reorder is not preferred.
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -28,7 +28,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk1\n",
+ "+hunk1-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ "@@ -110,7 +110,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk2\n",
+ "+hunk2-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ ]
+
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -28,7 +28,7 @@ with offset 1\n',
+ '> applied hunk @@ -110,7 +110,7 @@ with offset 26\n',
+ ]
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 100 +
+ '1\n' '2\n' '3\n' 'hunk2-mod\n' '4\n' '5\n' '6\n')
+
+ expected_status.tweak('A/mu', status='M ', wc_rev=3)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+@XFail()
+def patch_hunk_overlap(sbox):
+ """hunks that overlap"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'GG\n' 'HH\n' 'II\n', truncate=True)
+ sbox.simple_commit()
+
+ # Two hunks that overlap when applied, GNU patch can apply both hunks.
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -2,6 +2,7 @@\n",
+ " BB\n",
+ " CC\n",
+ " DD\n",
+ "+11111\n",
+ " EE\n",
+ " FF\n",
+ " GG\n",
+ "@@ -9,6 +10,7 @@\n",
+ " DD\n",
+ " EE\n",
+ " FF\n",
+ "+22222\n",
+ " GG\n",
+ " HH\n",
+ " II\n",
+ ]
+
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -9,6 +10,7 @@ with offset -5\n',
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' '11111\n' 'EE\n' 'FF\n'
+ '22222\n' 'GG\n' 'HH\n' 'II\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+def patch_delete_modified(sbox):
+ """patch delete modified"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # A patch that deletes beta.
+ unidiff_patch = [
+ "Index: A/B/E/beta\n",
+ "===================================================================\n",
+ "--- A/B/E/beta (revision 1)\n",
+ "+++ A/B/E/beta (working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ # First application deletes beta
+ expected_output = [
+ 'D %s\n' % sbox.ospath('A/B/E/beta'),
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/beta')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/beta', status='D ')
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Second application skips
+ expected_output = [
+ 'Skipped \'%s\'\n' % sbox.ospath('A/B/E/beta'),
+ ] + svntest.main.summary_of_conflicts(skipped_paths=1)
+ expected_skip = wc.State('', {
+ sbox.ospath('A/B/E/beta') : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Third application, with file present even though state is 'D', also skips
+ sbox.simple_append('A/B/E/beta', 'Modified', truncate=True)
+ expected_disk.add({'A/B/E/beta' : Item(contents='Modified')})
+ expected_output = [
+ 'Skipped \'%s\'\n' % sbox.ospath('A/B/E/beta'),
+ ] + svntest.main.summary_of_conflicts(skipped_paths=1)
+ expected_skip = wc.State('', {
+ sbox.ospath('A/B/E/beta') : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Revert and modify beta, fourth application gives a text conflict.
+ sbox.simple_revert('A/B/E/beta')
+ sbox.simple_append('A/B/E/beta', 'Modified', truncate=True)
+
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/B/E/beta'),
+ '> rejected hunk @@ -1,1 +0,0 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_skip = wc.State('', { })
+ reject_file_contents = [
+ "--- A/B/E/beta\n",
+ "+++ A/B/E/beta\n",
+ "@@ -1,1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+ expected_disk.add({'A/B/E/beta.svnpatch.rej'
+ : Item(contents=''.join(reject_file_contents))
+ })
+ expected_status.tweak('A/B/E/beta', status='M ')
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+def patch_closest(sbox):
+ "find closest hunk"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -47,7 +47,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk1\n",
+ "+hunk1-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ "@@ -66,7 +66,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-rejected-hunk2-\n",
+ "+rejected-hunk2-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ "@@ -180,7 +180,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk3\n",
+ "+hunk3-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ ]
+ patch_file_path = make_patch_path(sbox)
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of -19 and +18, prefer +18 gives final offset +22
+ sbox.simple_append('A/mu',
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 30 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> applied hunk @@ -180,7 +180,7 @@ with offset 22\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'A/mu.svnpatch.rej' : Item(contents=
+ "--- A/mu\n" +
+ "+++ A/mu\n" +
+ "@@ -66,7 +66,7 @@\n" +
+ " 1\n" +
+ " 2\n" +
+ " 3\n" +
+ "-rejected-hunk2-\n" +
+ "+rejected-hunk2-mod\n" +
+ " 4\n" +
+ " 5\n" +
+ " 6\n")})
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 30 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of -19 and +20, prefer -19 gives final offset -15
+ sbox.simple_append('A/mu',
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 32 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> applied hunk @@ -180,7 +180,7 @@ with offset -15\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 32 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=3)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of -19 and +19, prefer -19 gives final offset -15
+ sbox.simple_append('A/mu',
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 31 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> applied hunk @@ -180,7 +180,7 @@ with offset -15\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 31 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=4)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of +173 and -173, prefer +173 gives final offset +177
+ sbox.simple_append('A/mu',
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 33 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 242 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> applied hunk @@ -180,7 +180,7 @@ with offset 177\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 33 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 242 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=5)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of +174 and -173, prefer -173 gives final offset -169
+ sbox.simple_append('A/mu',
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 33 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 243 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -180,7 +180,7 @@ with offset -169\n',
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 33 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 243 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=6)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path),
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
########################################################################
#Run the tests
@@ -4741,7 +5492,13 @@ test_list = [ None,
patch_apply_no_fuz,
patch_lacking_trailing_eol_on_context,
patch_with_custom_keywords,
- patch_git_rename
+ patch_git_rename,
+ patch_hunk_avoid_reorder,
+ patch_hunk_avoid_reorder2,
+ patch_hunk_reorder,
+ patch_hunk_overlap,
+ patch_delete_modified,
+ patch_closest,
]
if __name__ == '__main__':
Modified: subversion/branches/move-tracking-2/subversion/tests/cmdline/svndumpfilter_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/cmdline/svndumpfilter_tests.py?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/cmdline/svndumpfilter_tests.py (original)
+++ subversion/branches/move-tracking-2/subversion/tests/cmdline/svndumpfilter_tests.py Tue Jan 20 17:52:18 2015
@@ -60,15 +60,10 @@ def filter_and_return_output(dump, bufsi
dump = [ dump ]
# Does the caller want the stderr?
- try:
- varargs.index('-q')
+ if '-q' in varargs or '--quiet' in varargs:
expected_errput = None # Stderr with -q or --quiet is a real error!
- except:
- try:
- varargs.index('--quiet')
- expected_errput = None
- except:
- expected_errput = svntest.verify.AnyOutput
+ else:
+ expected_errput = svntest.verify.AnyOutput
## TODO: Should we handle exit_code?
exit_code, output, errput = svntest.main.run_command_stdin(
svntest.main.svndumpfilter_binary, expected_errput, bufsize, True,
@@ -323,9 +318,9 @@ def filter_mergeinfo_revs_outside_of_dum
# --skip-missing-merge-soruces which should strip out any revisions < 6.
# Then we'll load the filtered result into an empty repository. This
# should offset the incoming mergeinfo by -5. In addition, any mergeinfo
- # revisions that are adjusted to r1 should be removed because that implies
- # a merge of -r0:1, which is impossible. The resulting mergeinfo should
- # look like this:
+ # referring to the initial revision in the dump file (r6) should be
+ # removed because the change it refers to (r5:6) is not wholly within the
+ # dumpfile. The resulting mergeinfo should look like this:
#
# Properties on 'branches/B1':
# svn:mergeinfo
Modified: subversion/branches/move-tracking-2/subversion/tests/cmdline/svnrdump_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/cmdline/svnrdump_tests.py?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/cmdline/svnrdump_tests.py (original)
+++ subversion/branches/move-tracking-2/subversion/tests/cmdline/svnrdump_tests.py Tue Jan 20 17:52:18 2015
@@ -70,27 +70,28 @@ def build_repos(sbox):
# Create an empty repository.
svntest.main.create_repos(sbox.repo_dir)
-def compare_repos_dumps(svnrdump_sbox, svnadmin_dumpfile):
- """Compare two dumpfiles, one created from SVNRDUMP_SBOX, and other given
- by SVNADMIN_DUMPFILE. The dumpfiles do not need to match linewise, as the
- SVNADMIN_DUMPFILE contents will first be loaded into a repository and then
+def compare_repos_dumps(sbox, other_dumpfile,
+ bypass_prop_validation=False):
+ """Compare two dumpfiles, one created from SBOX, and other given
+ by OTHER_DUMPFILE. The dumpfiles do not need to match linewise, as the
+ OTHER_DUMPFILE contents will first be loaded into a repository and then
re-dumped to do the match, which should generate the same dumpfile as
- dumping SVNRDUMP_SBOX."""
+ dumping SBOX."""
- svnrdump_contents = svntest.actions.run_and_verify_dump(
- svnrdump_sbox.repo_dir)
- svnadmin_sbox = svnrdump_sbox.clone_dependent()
- svntest.main.safe_rmtree(svnadmin_sbox.repo_dir)
- svntest.main.create_repos(svnadmin_sbox.repo_dir)
+ sbox_dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir)
- svntest.actions.run_and_verify_load(svnadmin_sbox.repo_dir, svnadmin_dumpfile)
-
- svnadmin_contents = svntest.actions.run_and_verify_dump(
- svnadmin_sbox.repo_dir)
+ # Load and dump the other dumpfile (using svnadmin)
+ other_sbox = sbox.clone_dependent()
+ svntest.main.safe_rmtree(other_sbox.repo_dir)
+ svntest.main.create_repos(other_sbox.repo_dir)
+ svntest.actions.run_and_verify_load(other_sbox.repo_dir, other_dumpfile,
+ bypass_prop_validation)
+ other_dumpfile = svntest.actions.run_and_verify_dump(other_sbox.repo_dir)
+ ### This call kind-of assumes EXPECTED is first and ACTUAL is second.
svntest.verify.compare_dump_files(
- "Dump files", "DUMP", svnadmin_contents, svnrdump_contents)
+ "Dump files", "DUMP", other_dumpfile, sbox_dumpfile)
def run_dump_test(sbox, dumpfile_name, expected_dumpfile_name = None,
subdir = None, bypass_prop_validation = False,
@@ -111,11 +112,10 @@ def run_dump_test(sbox, dumpfile_name, e
# Load the specified dump file into the sbox repository using
# svnadmin load
- svnadmin_dumpfile = open(os.path.join(svnrdump_tests_dir,
+ original_dumpfile = open(os.path.join(svnrdump_tests_dir,
dumpfile_name),
'rb').readlines()
-
- svntest.actions.run_and_verify_load(sbox.repo_dir, svnadmin_dumpfile,
+ svntest.actions.run_and_verify_load(sbox.repo_dir, original_dumpfile,
bypass_prop_validation)
repo_url = sbox.repo_url
@@ -129,28 +129,29 @@ def run_dump_test(sbox, dumpfile_name, e
[], 0, *opts)
if expected_dumpfile_name:
- svnadmin_dumpfile = open(os.path.join(svnrdump_tests_dir,
+ expected_dumpfile = open(os.path.join(svnrdump_tests_dir,
expected_dumpfile_name),
'rb').readlines()
# Compare the output from stdout
if ignore_base_checksums:
- svnadmin_dumpfile = [l for l in svnadmin_dumpfile
+ expected_dumpfile = [l for l in expected_dumpfile
if not l.startswith('Text-delta-base-md5')]
svnrdump_dumpfile = [l for l in svnrdump_dumpfile
if not l.startswith('Text-delta-base-md5')]
- svnadmin_dumpfile = [l for l in svnadmin_dumpfile
+ expected_dumpfile = [l for l in expected_dumpfile
if not mismatched_headers_re.match(l)]
svnrdump_dumpfile = [l for l in svnrdump_dumpfile
if not mismatched_headers_re.match(l)]
- svnadmin_dumpfile = svntest.verify.UnorderedOutput(svnadmin_dumpfile)
+ expected_dumpfile = svntest.verify.UnorderedOutput(expected_dumpfile)
svntest.verify.compare_and_display_lines(
- "Dump files", "DUMP", svnadmin_dumpfile, svnrdump_dumpfile,
+ "Dump files", "DUMP", expected_dumpfile, svnrdump_dumpfile,
None)
else:
- compare_repos_dumps(sbox, svnadmin_dumpfile)
+ # The expected dumpfile is the result of dumping SBOX.
+ compare_repos_dumps(sbox, svnrdump_dumpfile, bypass_prop_validation)
def run_load_test(sbox, dumpfile_name, expected_dumpfile_name = None,
expect_deltas = True):
@@ -169,36 +170,37 @@ def run_load_test(sbox, dumpfile_name, e
# Load the specified dump file into the sbox repository using
# svnrdump load
- svnrdump_dumpfile = open(os.path.join(svnrdump_tests_dir,
+ original_dumpfile = open(os.path.join(svnrdump_tests_dir,
dumpfile_name),
'rb').readlines()
# Set the UUID of the sbox repository to the UUID specified in the
# dumpfile ### RA layer doesn't have a set_uuid functionality
- uuid = svnrdump_dumpfile[2].split(' ')[1][:-1]
+ uuid = original_dumpfile[2].split(' ')[1][:-1]
svntest.actions.run_and_verify_svnadmin2("Setting UUID", None, None, 0,
'setuuid', sbox.repo_dir,
uuid)
- svntest.actions.run_and_verify_svnrdump(svnrdump_dumpfile,
+ svntest.actions.run_and_verify_svnrdump(original_dumpfile,
svntest.verify.AnyOutput,
[], 0, 'load', sbox.repo_url)
- # Create a dump file using svnadmin dump
- svnadmin_dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir,
+ # Re-dump the rdump-loaded repo using svnadmin dump
+ resulted_dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir,
expect_deltas)
if expected_dumpfile_name:
- svnrdump_dumpfile = open(os.path.join(svnrdump_tests_dir,
+ expected_dumpfile = open(os.path.join(svnrdump_tests_dir,
expected_dumpfile_name),
'rb').readlines()
# Compare the output from stdout
svntest.verify.compare_and_display_lines(
- "Dump files", "DUMP", svnrdump_dumpfile, svnadmin_dumpfile)
+ "Dump files", "DUMP", expected_dumpfile, resulted_dumpfile)
else:
- compare_repos_dumps(sbox, svnrdump_dumpfile)
+ expected_dumpfile = original_dumpfile
+ compare_repos_dumps(sbox, expected_dumpfile)
######################################################################
# Tests
@@ -799,6 +801,129 @@ def load_prop_change_in_non_deltas_dump(
[], [], 0,
'-q', 'load', sbox.repo_url)
+#----------------------------------------------------------------------
+
+@Issue(4476)
+def dump_mergeinfo_contains_r0(sbox):
+ "dump: mergeinfo that contains r0"
+ ### We pass the original dump file name as 'expected_dumpfile_name' because
+ ### run_dump_test is currently broken when we don't.
+ run_dump_test(sbox, "mergeinfo-contains-r0.dump",
+ bypass_prop_validation=True)
+
+#----------------------------------------------------------------------
+
+@XFail()
+@Issue(4476)
+def load_mergeinfo_contains_r0(sbox):
+ "load: mergeinfo that contains r0"
+ run_load_test(sbox, "mergeinfo-contains-r0.dump",
+ expected_dumpfile_name="mergeinfo-contains-r0.expected.dump")
+
+#----------------------------------------------------------------------
+
+# Regression test for issue 4551 "svnrdump load commits wrong properties,
+# or fails, on a non-deltas dumpfile". In this test, the copy source does
+# not exist and the failure mode is to error out.
+@XFail()
+@Issue(4551)
+def load_non_deltas_copy_with_props(sbox):
+ "load non-deltas copy with props"
+ sbox.build()
+
+ # Set props on a file and on a dir and on a child of the dir to be copied
+ sbox.simple_propset('p', 'v', 'A/mu', 'A/B', 'A/B/E')
+ sbox.simple_commit()
+ sbox.simple_update() # avoid mixed-rev
+
+ # Case (1): Copy file/dir, not replacing anything; the copy target path
+ # at (new rev - 1) does not exist
+ sbox.simple_copy('A/mu@2', 'A/mu_COPY')
+ sbox.simple_copy('A/B@2', 'A/B_COPY')
+ # On the copy, delete a prop
+ sbox.simple_propdel('p', 'A/mu_COPY', 'A/B_COPY', 'A/B_COPY/E')
+
+ sbox.simple_commit()
+
+ # Dump with 'svnadmin' (non-deltas mode)
+ dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir, deltas=False)
+
+ # Load with 'svnrdump'
+ new_repo_dir, new_repo_url = sbox.add_repo_path('new_repo')
+ svntest.main.create_repos(new_repo_dir)
+ svntest.actions.enable_revprop_changes(new_repo_dir)
+ svntest.actions.run_and_verify_svnrdump(dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', new_repo_url)
+ # For regression test purposes, all we require is that the 'load'
+ # doesn't throw an error
+
+# Regression test for issue 4551 "svnrdump load commits wrong properties,
+# or fails, on a non-deltas dumpfile". In this test, the copy source does
+# exist and the failure mode is to fail to delete a property.
+@XFail()
+@Issue(4551)
+def load_non_deltas_replace_copy_with_props(sbox):
+ "load non-deltas replace&copy with props"
+ sbox.build()
+
+ # Set props on a file and on a dir
+ sbox.simple_propset('p', 'v', 'A/mu', 'A/B')
+ sbox.simple_commit()
+ sbox.simple_update() # avoid mixed-rev
+
+ # Case (2): Copy file/dir, replacing something; the copy target path
+ # at (new rev - 1) exists and has no property named 'p'
+ sbox.simple_rm('A/D/gamma', 'A/C')
+ sbox.simple_copy('A/mu@2', 'A/D/gamma')
+ sbox.simple_copy('A/B@2', 'A/C')
+ # On the copy, delete a prop that isn't present on the replaced node
+ sbox.simple_propdel('p', 'A/D/gamma', 'A/C')
+
+ sbox.simple_commit()
+
+ # Dump with 'svnadmin' (non-deltas mode)
+ dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir, deltas=False)
+
+ # Load with 'svnrdump'
+ new_repo_dir, new_repo_url = sbox.add_repo_path('new_repo')
+ svntest.main.create_repos(new_repo_dir)
+ svntest.actions.enable_revprop_changes(new_repo_dir)
+ svntest.actions.run_and_verify_svnrdump(dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', new_repo_url)
+
+ # Check that property 'p' really was deleted on each copied node
+ for tgt_path in ['A/D/gamma', 'A/C']:
+ _, out, _ = svntest.main.run_svn(None, 'proplist',
+ new_repo_url + '/' + tgt_path)
+ expected = []
+ actual = out[1:]
+ svntest.verify.compare_and_display_lines(None, 'PROPS', expected, actual)
+
+# Regression test for issue #4552 "svnrdump writes duplicate headers for a
+# replace-with-copy". 'svnrdump dump' wrote the Node-path and Node-kind
+# headers twice for the 'delete' record of a replace-with-copy.
+@Issue(4552)
+def dump_replace_with_copy(sbox):
+ "dump replace with copy"
+ sbox.build()
+
+ # Copy file/dir, replacing something
+ sbox.simple_rm('A/D/gamma', 'A/C')
+ sbox.simple_copy('A/mu@1', 'A/D/gamma')
+ sbox.simple_copy('A/B@1', 'A/C')
+ sbox.simple_commit()
+
+ # Dump with 'svnrdump'
+ dumpfile = svntest.actions.run_and_verify_svnrdump(
+ None, svntest.verify.AnyOutput, [], 0,
+ 'dump', '--quiet', '--incremental', '-r2',
+ sbox.repo_url)
+
+ # Check the 'delete' record headers: expect this parse to fail if headers
+ # are duplicated
+ svntest.verify.DumpParser(dumpfile).parse()
########################################################################
# Run the tests
@@ -855,6 +980,11 @@ test_list = [ None,
only_trunk_range_dump,
only_trunk_A_range_dump,
load_prop_change_in_non_deltas_dump,
+ dump_mergeinfo_contains_r0,
+ load_mergeinfo_contains_r0,
+ load_non_deltas_copy_with_props,
+ load_non_deltas_replace_copy_with_props,
+ dump_replace_with_copy,
]
if __name__ == '__main__':
Modified: subversion/branches/move-tracking-2/subversion/tests/cmdline/svnsync_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/cmdline/svnsync_tests.py?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/cmdline/svnsync_tests.py (original)
+++ subversion/branches/move-tracking-2/subversion/tests/cmdline/svnsync_tests.py Tue Jan 20 17:52:18 2015
@@ -209,8 +209,12 @@ def setup_and_sync(sbox, dump_file_conte
return dest_sbox
-def verify_mirror(dest_sbox, src_sbox):
- """Compare the contents of the DEST_SBOX repository with EXP_DUMP_FILE_CONTENTS."""
+def verify_mirror(dest_sbox, exp_dump_file_contents):
+ """Compare the contents of the mirror repository in DEST_SBOX with
+ EXP_DUMP_FILE_CONTENTS, by comparing the parsed dump stream content.
+
+ First remove svnsync rev-props from the DEST_SBOX repository.
+ """
# Remove some SVNSync-specific housekeeping properties from the
# mirror repository in preparation for the comparison dump.
@@ -222,10 +226,9 @@ def verify_mirror(dest_sbox, src_sbox):
# Create a dump file from the mirror repository.
dest_dump = svntest.actions.run_and_verify_dump(dest_sbox.repo_dir)
- src_dump = svntest.actions.run_and_verify_dump(src_sbox.repo_dir)
svntest.verify.compare_dump_files(
- "Dump files", "DUMP", src_dump, dest_dump)
+ "Dump files", "DUMP", exp_dump_file_contents, dest_dump)
def run_test(sbox, dump_file_name, subdir=None, exp_dump_file_name=None,
bypass_prop_validation=False, source_prop_encoding=None,
@@ -251,16 +254,12 @@ or another dump file."""
# dump file (used to create the master repository) or another specified dump
# file.
if exp_dump_file_name:
- build_repos(sbox)
- svntest.actions.run_and_verify_load(sbox.repo_dir,
- open(os.path.join(svnsync_tests_dir,
- exp_dump_file_name),
- 'rb').readlines())
- src_sbox = sbox
+ exp_dump_file_contents = open(os.path.join(svnsync_tests_dir,
+ exp_dump_file_name), 'rb').readlines()
else:
- src_sbox = sbox
+ exp_dump_file_contents = master_dumpfile_contents
- verify_mirror(dest_sbox, sbox)
+ verify_mirror(dest_sbox, exp_dump_file_contents)
@@ -564,9 +563,7 @@ def delete_revprops(sbox):
run_copy_revprops(dest_sbox.repo_url, sbox.repo_url)
# Does the result look as we expected?
- build_repos(sbox)
- svntest.actions.run_and_verify_load(sbox.repo_dir, expected_contents)
- verify_mirror(dest_sbox, sbox)
+ verify_mirror(dest_sbox, expected_contents)
@Issue(3870)
@SkipUnless(svntest.main.is_posix_os)
@@ -576,6 +573,15 @@ def fd_leak_sync_from_serf_to_local(sbox
resource.setrlimit(resource.RLIMIT_NOFILE, (128, 128))
run_test(sbox, "largemods.dump", is_src_ra_local=None, is_dest_ra_local=True)
+#----------------------------------------------------------------------
+
+@Issue(4476)
+def mergeinfo_contains_r0(sbox):
+ "mergeinfo contains r0"
+ run_test(sbox, "mergeinfo-contains-r0.dump",
+ exp_dump_file_name="mergeinfo-contains-r0.expected.dump",
+ bypass_prop_validation=True)
+
########################################################################
# Run the tests
@@ -612,6 +618,7 @@ test_list = [ None,
descend_into_replace,
delete_revprops,
fd_leak_sync_from_serf_to_local, # calls setrlimit
+ mergeinfo_contains_r0,
]
if __name__ == '__main__':
Modified: subversion/branches/move-tracking-2/subversion/tests/cmdline/svntest/main.py
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/cmdline/svntest/main.py?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/cmdline/svntest/main.py (original)
+++ subversion/branches/move-tracking-2/subversion/tests/cmdline/svntest/main.py Tue Jan 20 17:52:18 2015
@@ -545,6 +545,15 @@ def run_command_stdin(command, error_exp
and not any(map(lambda arg: 'prop_tests-12' in arg, varargs)):
raise Failure("Repository diskpath in %s: %r" % (name, lines))
+ valgrind_diagnostic = False
+ # A valgrind diagnostic will raise a failure if the command is
+ # expected to run without error. When an error is expected any
+ # subsequent error pattern matching is usually lenient and will not
+ # detect the diagnostic so make sure a failure is raised here.
+ if error_expected and stderr_lines:
+ if any(map(lambda arg: re.match('==[0-9]+==', arg), stderr_lines)):
+ valgrind_diagnostic = True
+
stop = time.time()
logger.info('<TIME = %.6f>' % (stop - start))
for x in stdout_lines:
@@ -552,7 +561,8 @@ def run_command_stdin(command, error_exp
for x in stderr_lines:
logger.info(x.rstrip())
- if (not error_expected) and ((stderr_lines) or (exit_code != 0)):
+ if (((not error_expected) and ((stderr_lines) or (exit_code != 0)))
+ or valgrind_diagnostic):
for x in stderr_lines:
logger.warning(x.rstrip())
if len(varargs) <= 5:
Modified: subversion/branches/move-tracking-2/subversion/tests/cmdline/update_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/cmdline/update_tests.py?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/cmdline/update_tests.py (original)
+++ subversion/branches/move-tracking-2/subversion/tests/cmdline/update_tests.py Tue Jan 20 17:52:18 2015
@@ -6517,7 +6517,6 @@ def windows_update_backslash(sbox):
expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
svntest.actions.run_and_verify_status(wc_dir, expected_status)
-@XFail() # Tries to modify unlocked part of working copy; found via r1561425
def update_moved_away(sbox):
"update subtree of moved away"
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_fs/fs-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_fs/fs-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_fs/fs-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_fs/fs-test.c Tue Jan 20 17:52:18 2015
@@ -6599,6 +6599,103 @@ test_fsfs_config_opts(const svn_test_opt
return SVN_NO_ERROR;
}
+
+static svn_error_t *
+test_txn_pool_lifetime(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ /* Technically, the FS API makes no assumption on the lifetime of logically
+ * dependent objects. In particular, a txn root object may get destroyed
+ * after the FS object that it has been built upon. Actual data access is
+ * implied to be invalid without a valid svn_fs_t.
+ *
+ * This test verifies that at least the destruction order of those two
+ * objects is arbitrary.
+ */
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+
+ /* We will allocate FS in FS_POOL. Using a separate allocator makes
+ * sure that we actually free the memory when destroying the pool.
+ */
+ apr_allocator_t *fs_allocator = svn_pool_create_allocator(FALSE);
+ apr_pool_t *fs_pool = apr_allocator_owner_get(fs_allocator);
+
+ /* Create a new repo. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-pool-lifetime",
+ opts, fs_pool));
+
+ /* Create a TXN_ROOT referencing FS. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Destroy FS. Depending on the actual allocator implementation,
+ * these memory pages become inaccessible. */
+ svn_pool_destroy(fs_pool);
+
+ /* Unclean implementations will try to access FS and may segfault here. */
+ svn_fs_close_root(txn_root);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_modify_txn_being_written(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ /* FSFS has a limitation (and check) that only one file can be
+ * modified in TXN at time: see r861812 and svn_fs_apply_text() docstring.
+ * This is regression test for this behavior. */
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ const char *txn_name;
+ svn_fs_root_t *txn_root;
+ svn_stream_t *foo_contents;
+ svn_stream_t *bar_contents;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, SVN_FS_TYPE_FSFS) != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSFS repositories only");
+
+ /* Create a new repo. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-modify-txn-being-written",
+ opts, pool));
+
+ /* Create a TXN_ROOT referencing FS. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Make file /foo and open for writing.*/
+ SVN_ERR(svn_fs_make_file(txn_root, "/foo", pool));
+ SVN_ERR(svn_fs_apply_text(&foo_contents, txn_root, "/foo", NULL, pool));
+
+ /* Attempt to modify another file '/bar' -- FSFS doesn't allow this. */
+ SVN_ERR(svn_fs_make_file(txn_root, "/bar", pool));
+ SVN_TEST_ASSERT_ERROR(
+ svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool),
+ SVN_ERR_FS_REP_BEING_WRITTEN);
+
+ /* Reopen TXN. */
+ SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Check that file '/bar' still cannot be modified */
+ SVN_TEST_ASSERT_ERROR(
+ svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool),
+ SVN_ERR_FS_REP_BEING_WRITTEN);
+
+ /* Close file '/foo'. */
+ SVN_ERR(svn_stream_close(foo_contents));
+
+ /* Now file '/bar' can be modified. */
+ SVN_ERR(svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
/* ------------------------------------------------------------------------ */
/* The test table. */
@@ -6724,6 +6821,10 @@ static struct svn_test_descriptor_t test
"get merging txns with newer revisions"),
SVN_TEST_OPTS_PASS(test_fsfs_config_opts,
"test creating FSFS repository with different opts"),
+ SVN_TEST_OPTS_PASS(test_txn_pool_lifetime,
+ "test pool lifetime dependencies with txn roots"),
+ SVN_TEST_OPTS_PASS(test_modify_txn_being_written,
+ "test modify txn being written in FSFS"),
SVN_TEST_NULL
};
Propchange: subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Tue Jan 20 17:52:18 2015
@@ -4,8 +4,7 @@ Release
test-repo-*
upgrade_*
fs-pack-test
-fs-fs-fuzzy-test
-fs-fs-pack-test
+fs-fs-*-test
test-get-set-revprop-packed-fs
get_set_multiple_huge_revprops_packed_fs
*.o
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c Tue Jan 20 17:52:18 2015
@@ -133,7 +133,7 @@ fuzzing_1_byte_1_rev(const char *repo_na
/* Let us know where we were too strict ... */
printf("Detected case change in checksum digest at offset 0x%"
APR_UINT64_T_HEX_FMT " (%" APR_OFF_T_FMT ") in r%ld: "
- "%c -> %c\n", i, i, revision, c_old, c_new);
+ "%c -> %c\n", (apr_uint64_t)i, i, revision, c_old, c_new);
SVN_ERR(err);
}
@@ -143,7 +143,7 @@ fuzzing_1_byte_1_rev(const char *repo_na
/* Let us know where we miss changes ... */
printf("Undetected mod at offset 0x%"APR_UINT64_T_HEX_FMT
" (%"APR_OFF_T_FMT") in r%ld: 0x%02x -> 0x%02x\n",
- i, i, revision, c_old, c_new);
+ (apr_uint64_t)i, i, revision, c_old, c_new);
SVN_TEST_ASSERT(err);
}
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c Tue Jan 20 17:52:18 2015
@@ -1079,7 +1079,7 @@ metadata_checksumming(const svn_test_opt
SVN_ERR(svn_stringbuf_from_file2(&r0, r0_path, pool));
r0->data[21] = '1';
SVN_ERR(svn_io_remove_file2(r0_path, FALSE, pool));
- SVN_ERR(svn_io_file_create_binary(r0_path, r0->data, r0->len, pool));
+ SVN_ERR(svn_io_file_create_bytes(r0_path, r0->data, r0->len, pool));
/* Reading the corrupted data on the normal code path triggers no error.
* Use a separate namespace to avoid simply reading data from cache. */
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c Tue Jan 20 17:52:18 2015
@@ -79,7 +79,7 @@ create_greek_repo(svn_repos_t **repos,
static svn_error_t *
verify_representation_stats(const svn_fs_fs__representation_stats_t *stats,
- apr_int64_t expected_count)
+ apr_uint64_t expected_count)
{
/* Small items, no packing (but inefficiency due to packing attempt). */
SVN_TEST_ASSERT(stats->total.count == expected_count);
@@ -110,7 +110,7 @@ verify_representation_stats(const svn_fs
static svn_error_t *
verify_node_stats(const svn_fs_fs__node_stats_t *node_stats,
- apr_int64_t expected_count)
+ apr_uint64_t expected_count)
{
SVN_TEST_ASSERT(node_stats->count == expected_count);
SVN_TEST_ASSERT( node_stats->size > 100 * node_stats->count
@@ -162,8 +162,8 @@ verify_large_change(const svn_fs_fs__lar
static svn_error_t *
verify_histogram(const svn_fs_fs__histogram_t *histogram)
{
- apr_int64_t sum_count = 0;
- apr_int64_t sum_size = 0;
+ apr_uint64_t sum_count = 0;
+ apr_uint64_t sum_size = 0;
int i;
for (i = 0; i < 64; ++i)
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_repos/dump-load-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_repos/dump-load-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_repos/dump-load-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_repos/dump-load-test.c Tue Jan 20 17:52:18 2015
@@ -35,7 +35,114 @@
-/* Notification receiver for test_dump_bad_mergeinfo(). This does not
+/* Test dumping in the presence of the property PROP_NAME:PROP_VAL.
+ * Return the dumped data in *DUMP_DATA_P (if DUMP_DATA_P is not null).
+ * REPOS is an empty repository.
+ * See svn_repos_dump_fs3() for START_REV, END_REV, NOTIFY_FUNC, NOTIFY_BATON.
+ */
+static svn_error_t *
+test_dump_bad_props(svn_stringbuf_t **dump_data_p,
+ svn_repos_t *repos,
+ const char *prop_name,
+ const svn_string_t *prop_val,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ svn_repos_notify_func_t notify_func,
+ void *notify_baton,
+ apr_pool_t *pool)
+{
+ const char *test_path = "/bar";
+ svn_fs_t *fs = svn_repos_fs(repos);
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t youngest_rev = 0;
+ svn_stringbuf_t *dump_data = svn_stringbuf_create_empty(pool);
+ svn_stream_t *stream = svn_stream_from_stringbuf(dump_data, pool);
+ const char *expected_str;
+
+ /* Revision 1: Any commit will do, here */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, test_path , pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Revision 2: Add the bad property */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, test_path , prop_name, prop_val,
+ pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Test that a dump completes without error. */
+ SVN_ERR(svn_repos_dump_fs3(repos, stream, start_rev, end_rev,
+ FALSE, FALSE,
+ notify_func, notify_baton,
+ NULL, NULL,
+ pool));
+ svn_stream_close(stream);
+
+ /* Check that the property appears in the dump data */
+ expected_str = apr_psprintf(pool, "K %d\n%s\n"
+ "V %d\n%s\n"
+ "PROPS-END\n",
+ (int)strlen(prop_name), prop_name,
+ (int)prop_val->len, prop_val->data);
+ SVN_TEST_ASSERT(strstr(dump_data->data, expected_str));
+
+ if (dump_data_p)
+ *dump_data_p = dump_data;
+ return SVN_NO_ERROR;
+}
+
+/* Test loading in the presence of the property PROP_NAME:PROP_VAL.
+ * Load data from DUMP_DATA.
+ * REPOS is an empty repository.
+ */
+static svn_error_t *
+test_load_bad_props(svn_stringbuf_t *dump_data,
+ svn_repos_t *repos,
+ const char *prop_name,
+ const svn_string_t *prop_val,
+ const char *parent_fspath,
+ svn_boolean_t validate_props,
+ svn_repos_notify_func_t notify_func,
+ void *notify_baton,
+ apr_pool_t *pool)
+{
+ const char *test_path = apr_psprintf(pool, "%s%s",
+ parent_fspath ? parent_fspath : "",
+ "/bar");
+ svn_stream_t *stream = svn_stream_from_stringbuf(dump_data, pool);
+ svn_fs_t *fs;
+ svn_fs_root_t *rev_root;
+ svn_revnum_t youngest_rev;
+ svn_string_t *loaded_prop_val;
+
+ SVN_ERR(svn_repos_load_fs5(repos, stream,
+ SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+ svn_repos_load_uuid_default,
+ parent_fspath,
+ FALSE, FALSE, /*use_*_commit_hook*/
+ validate_props,
+ FALSE /*ignore_dates*/,
+ notify_func, notify_baton,
+ NULL, NULL, /*cancellation*/
+ pool));
+ svn_stream_close(stream);
+
+ /* Check the loaded property */
+ fs = svn_repos_fs(repos);
+ SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_node_prop(&loaded_prop_val,
+ rev_root, test_path, prop_name, pool));
+ SVN_TEST_ASSERT(svn_string_compare(loaded_prop_val, prop_val));
+ return SVN_NO_ERROR;
+}
+
+/* Notification receiver for test_dump_r0_mergeinfo(). This does not
need to do anything, it just needs to exist.
*/
static void
@@ -51,44 +158,115 @@ static svn_error_t *
test_dump_r0_mergeinfo(const svn_test_opts_t *opts,
apr_pool_t *pool)
{
- svn_repos_t *repos;
- svn_fs_t *fs;
- svn_fs_txn_t *txn;
- svn_fs_root_t *txn_root;
- svn_revnum_t youngest_rev = 0;
+ const char *prop_name = "svn:mergeinfo";
const svn_string_t *bad_mergeinfo = svn_string_create("/foo:0", pool);
+ svn_repos_t *repos;
SVN_ERR(svn_test__create_repos(&repos, "test-repo-dump-r0-mergeinfo",
opts, pool));
- fs = svn_repos_fs(repos);
-
- /* Revision 1: Any commit will do, here */
- SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool));
- SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
- SVN_ERR(svn_fs_make_dir(txn_root, "/bar", pool));
- SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
- SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
-
- /* Revision 2: Add bad mergeinfo */
- SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool));
- SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
- SVN_ERR(svn_fs_change_node_prop(txn_root, "/bar", "svn:mergeinfo", bad_mergeinfo, pool));
- SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
- SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
-
- /* Test that a dump completes without error. In order to exercise the
+ /* In order to exercise the
functionality under test -- that is, in order for the dump to try to
parse the mergeinfo it is dumping -- the dump must start from a
revision greater than 1 and must take a notification callback. */
+ SVN_ERR(test_dump_bad_props(NULL, repos,
+ prop_name, bad_mergeinfo,
+ 2, SVN_INVALID_REVNUM,
+ dump_r0_mergeinfo_notifier, NULL,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+static void
+load_r0_mergeinfo_notifier(void *baton,
+ const svn_repos_notify_t *notify,
+ apr_pool_t *scratch_pool)
+{
+ svn_boolean_t *had_mergeinfo_warning = baton;
+
+ if (notify->action == svn_repos_notify_warning)
+ {
+ if (notify->warning == svn_repos_notify_warning_invalid_mergeinfo)
+ {
+ *had_mergeinfo_warning = TRUE;
+ }
+ }
+}
+
+/* Regression test for the 'load' part of issue #4476 "Mergeinfo
+ * containing r0 makes svnsync and svnadmin dump fail".
+ *
+ * Bad mergeinfo should not prevent loading a backup, at least when we do not
+ * require mergeinfo revision numbers or paths to be adjusted during loading.
+ */
+static svn_error_t *
+test_load_r0_mergeinfo(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *prop_name = "svn:mergeinfo";
+ const svn_string_t *prop_val = svn_string_create("/foo:0", pool);
+ svn_stringbuf_t *dump_data = svn_stringbuf_create_empty(pool);
+
+ /* Produce a dump file containing bad mergeinfo */
{
- svn_stringbuf_t *stringbuf = svn_stringbuf_create_empty(pool);
- svn_stream_t *stream = svn_stream_from_stringbuf(stringbuf, pool);
+ svn_repos_t *repos;
+
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-1",
+ opts, pool));
+ SVN_ERR(test_dump_bad_props(&dump_data, repos,
+ prop_name, prop_val,
+ SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+ NULL, NULL, pool));
+ }
+
+ /* Test loading without validating properties: should warn and succeed */
+ {
+ svn_repos_t *repos;
+ svn_boolean_t had_mergeinfo_warning = FALSE;
+
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-2",
+ opts, pool));
+
+ /* Without changing revision numbers or paths */
+ SVN_ERR(test_load_bad_props(dump_data, repos,
+ prop_name, prop_val,
+ NULL /*parent_dir*/, FALSE /*validate_props*/,
+ load_r0_mergeinfo_notifier, &had_mergeinfo_warning,
+ pool));
+ SVN_TEST_ASSERT(had_mergeinfo_warning);
+
+ /* With changing revision numbers and/or paths (by loading the same data
+ again, on top of existing revisions, into subdirectory 'bar') */
+ had_mergeinfo_warning = FALSE;
+ SVN_ERR(test_load_bad_props(dump_data, repos,
+ prop_name, prop_val,
+ "/bar", FALSE /*validate_props*/,
+ load_r0_mergeinfo_notifier, &had_mergeinfo_warning,
+ pool));
+ SVN_TEST_ASSERT(had_mergeinfo_warning);
+ }
+
+ /* Test loading with validating properties: should return an error */
+ {
+ svn_repos_t *repos;
+
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-3",
+ opts, pool));
- SVN_ERR(svn_repos_dump_fs3(repos, stream, 2, SVN_INVALID_REVNUM,
- FALSE, FALSE,
- dump_r0_mergeinfo_notifier, NULL,
- NULL, NULL,
- pool));
+ /* Without changing revision numbers or paths */
+ SVN_TEST_ASSERT_ANY_ERROR(test_load_bad_props(dump_data, repos,
+ prop_name, prop_val,
+ NULL /*parent_dir*/, TRUE /*validate_props*/,
+ NULL, NULL,
+ pool));
+
+ /* With changing revision numbers and/or paths (by loading the same data
+ again, on top of existing revisions, into subdirectory 'bar') */
+ SVN_TEST_ASSERT_ANY_ERROR(test_load_bad_props(dump_data, repos,
+ prop_name, prop_val,
+ "/bar", TRUE /*validate_props*/,
+ NULL, NULL,
+ pool));
}
return SVN_NO_ERROR;
@@ -103,6 +281,8 @@ static struct svn_test_descriptor_t test
SVN_TEST_NULL,
SVN_TEST_OPTS_PASS(test_dump_r0_mergeinfo,
"test dumping with r0 mergeinfo"),
+ SVN_TEST_OPTS_PASS(test_load_r0_mergeinfo,
+ "test loading with r0 mergeinfo"),
SVN_TEST_NULL
};
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/cache-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/cache-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/cache-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/cache-test.c Tue Jan 20 17:52:18 2015
@@ -201,6 +201,102 @@ test_membuffer_cache_basic(apr_pool_t *p
return basic_cache_test(cache, FALSE, pool);
}
+/* Implements svn_cache__deserialize_func_t */
+static svn_error_t *
+raise_error_deserialize_func(void **out,
+ void *data,
+ apr_size_t data_len,
+ apr_pool_t *pool)
+{
+ return svn_error_create(APR_EGENERAL, NULL, NULL);
+}
+
+/* Implements svn_cache__partial_getter_func_t */
+static svn_error_t *
+raise_error_partial_getter_func(void **out,
+ const void *data,
+ apr_size_t data_len,
+ void *baton,
+ apr_pool_t *result_pool)
+{
+ return svn_error_create(APR_EGENERAL, NULL, NULL);
+}
+
+/* Implements svn_cache__partial_setter_func_t */
+static svn_error_t *
+raise_error_partial_setter_func(void **data,
+ apr_size_t *data_len,
+ void *baton,
+ apr_pool_t *result_pool)
+{
+ return svn_error_create(APR_EGENERAL, NULL, NULL);
+}
+
+static svn_error_t *
+test_membuffer_serializer_error_handling(apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+ svn_membuffer_t *membuffer;
+ svn_revnum_t twenty = 20;
+ svn_boolean_t found;
+ void *val;
+
+ SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0,
+ TRUE, TRUE, pool));
+
+ /* Create a cache with just one entry. */
+ SVN_ERR(svn_cache__create_membuffer_cache(&cache,
+ membuffer,
+ serialize_revnum,
+ raise_error_deserialize_func,
+ APR_HASH_KEY_STRING,
+ "cache:",
+ SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY,
+ FALSE,
+ pool, pool));
+
+ SVN_ERR(svn_cache__set(cache, "twenty", &twenty, pool));
+
+ /* Test retrieving data from cache using full getter that
+ always raises an error. */
+ SVN_TEST_ASSERT_ERROR(
+ svn_cache__get(&val, &found, cache, "twenty", pool),
+ APR_EGENERAL);
+
+ /* Test retrieving data from cache using partial getter that
+ always raises an error. */
+ SVN_TEST_ASSERT_ERROR(
+ svn_cache__get_partial(&val, &found, cache, "twenty",
+ raise_error_partial_getter_func,
+ NULL, pool),
+ APR_EGENERAL);
+
+ /* Create a new cache. */
+ SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0,
+ TRUE, TRUE, pool));
+ SVN_ERR(svn_cache__create_membuffer_cache(&cache,
+ membuffer,
+ serialize_revnum,
+ deserialize_revnum,
+ APR_HASH_KEY_STRING,
+ "cache:",
+ SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY,
+ FALSE,
+ pool, pool));
+
+ /* Store one entry in cache. */
+ SVN_ERR(svn_cache__set(cache, "twenty", &twenty, pool));
+
+ /* Test setting data in cache using partial setter that
+ always raises an error. */
+ SVN_TEST_ASSERT_ERROR(
+ svn_cache__set_partial(cache, "twenty",
+ raise_error_partial_setter_func,
+ NULL, pool),
+ APR_EGENERAL);
+
+ return SVN_NO_ERROR;
+}
static svn_error_t *
test_memcache_long_key(const svn_test_opts_t *opts,
@@ -274,6 +370,8 @@ static struct svn_test_descriptor_t test
"memcache svn_cache with very long keys"),
SVN_TEST_PASS2(test_membuffer_cache_basic,
"basic membuffer svn_cache test"),
+ SVN_TEST_PASS2(test_membuffer_serializer_error_handling,
+ "test for error handling in membuffer svn_cache"),
SVN_TEST_NULL
};
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/config-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/config-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/config-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/config-test.c Tue Jan 20 17:52:18 2015
@@ -347,6 +347,29 @@ test_read_only_mode(const svn_test_opts_
return SVN_NO_ERROR;
}
+static svn_error_t *
+test_expand(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ const char *cfg_file, *val;
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, FALSE, pool));
+
+ /* Get expanded "g" which requires expanding "c". */
+ svn_config_get(cfg, &val, "section1", "g", NULL);
+
+ /* Get expanded "c". */
+ svn_config_get(cfg, &val, "section1", "c", NULL);
+
+ /* With pool debugging enabled this ensures that the expanded value
+ of "c" was not created in a temporary pool when expanding "g". */
+ SVN_TEST_STRING_ASSERT(val, "bar");
+
+ return SVN_NO_ERROR;
+}
+
/*
====================================================================
If you add a new test to this file, update this array.
@@ -377,6 +400,8 @@ static struct svn_test_descriptor_t test
"test parsing config file with BOM"),
SVN_TEST_OPTS_PASS(test_read_only_mode,
"test r/o mode"),
+ SVN_TEST_OPTS_PASS(test_expand,
+ "test variable expansion"),
SVN_TEST_NULL
};
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/mergeinfo-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/mergeinfo-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/mergeinfo-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/mergeinfo-test.c Tue Jan 20 17:52:18 2015
@@ -114,7 +114,7 @@ verify_mergeinfo_parse(const char *input
}
-#define NBR_MERGEINFO_VALS 24
+#define NBR_MERGEINFO_VALS 25
/* Valid mergeinfo values. */
static const char * const mergeinfo_vals[NBR_MERGEINFO_VALS] =
@@ -148,7 +148,8 @@ static const char * const mergeinfo_vals
"/A/:7-8",
"/A///:7-8",
"/A/.:7-8",
- "/A/./B:7-8"
+ "/A/./B:7-8",
+ ":7-8",
};
/* Paths corresponding to mergeinfo_vals. */
static const char * const mergeinfo_paths[NBR_MERGEINFO_VALS] =
@@ -181,7 +182,8 @@ static const char * const mergeinfo_path
"/A",
"/A",
"/A",
- "/A/B"
+ "/A/B",
+ "/",
};
/* First ranges from the paths identified by mergeinfo_paths. */
static svn_merge_range_t mergeinfo_ranges[NBR_MERGEINFO_VALS][MAX_NBR_RANGES] =
@@ -212,6 +214,7 @@ static svn_merge_range_t mergeinfo_range
{ {6, 8, TRUE} },
{ {6, 8, TRUE} },
{ {6, 8, TRUE} },
+ { {6, 8, TRUE} },
};
static svn_error_t *
@@ -298,7 +301,7 @@ test_parse_combine_rangeinfo(apr_pool_t
}
-#define NBR_BROKEN_MERGEINFO_VALS 27
+#define NBR_BROKEN_MERGEINFO_VALS 26
/* Invalid mergeinfo values. */
static const char * const broken_mergeinfo_vals[NBR_BROKEN_MERGEINFO_VALS] =
{
@@ -330,8 +333,6 @@ static const char * const broken_mergein
"/trunk:",
"/trunk:2-9\n/branch:",
"::",
- /* No path */
- ":1-3",
/* Invalid revisions */
"trunk:a-3",
"branch:3-four",
Modified: subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/string-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/string-test.c?rev=1653314&r1=1653313&r2=1653314&view=diff
==============================================================================
--- subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/string-test.c (original)
+++ subversion/branches/move-tracking-2/subversion/tests/libsvn_subr/string-test.c Tue Jan 20 17:52:18 2015
@@ -604,7 +604,19 @@ test_stringbuf_insert(apr_pool_t *pool)
SVN_TEST_STRING_ASSERT(a->data, "test hello, world");
svn_stringbuf_insert(a, 1200, "!", 1);
- return expect_stringbuf_equal(a, "test hello, world!", pool);
+ SVN_TEST_STRING_ASSERT(a->data, "test hello, world!");
+
+ svn_stringbuf_insert(a, 4, "\0-\0", 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test\0-\0 hello, world!",
+ 21, pool)));
+
+ svn_stringbuf_insert(a, 14, a->data + 4, 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test\0-\0 hello,\0-\0 world!",
+ 24, pool)));
+
+ return SVN_NO_ERROR;
}
static svn_error_t *
@@ -643,8 +655,24 @@ test_stringbuf_replace(apr_pool_t *pool)
SVN_TEST_STRING_ASSERT(a->data, "test hello, world!");
svn_stringbuf_replace(a, 1200, 199, "!!", 2);
+ SVN_TEST_STRING_ASSERT(a->data, "test hello, world!!!");
+
+ svn_stringbuf_replace(a, 10, 2, "\0-\0", 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test hello\0-\0world!!!",
+ 21, pool)));
+
+ svn_stringbuf_replace(a, 10, 3, a->data + 10, 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test hello\0-\0world!!!",
+ 21, pool)));
+
+ svn_stringbuf_replace(a, 19, 1, a->data + 10, 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test hello\0-\0world!\0-\0!",
+ 23, pool)));
- return expect_stringbuf_equal(a, "test hello, world!!!", pool);
+ return SVN_NO_ERROR;
}
static svn_error_t *