You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@subversion.apache.org by hw...@apache.org on 2010/08/11 18:43:31 UTC
svn commit: r984468 [24/25] - in /subversion/branches/ignore-mergeinfo: ./
build/ build/generator/ build/generator/templates/ notes/
notes/tree-conflicts/ notes/wc-ng/ subversion/bindings/javahl/native/
subversion/bindings/javahl/src/org/apache/subvers...
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/cmdline/update_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/cmdline/update_tests.py?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/cmdline/update_tests.py (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/cmdline/update_tests.py Wed Aug 11 16:43:22 2010
@@ -324,17 +324,23 @@ def update_missing(sbox):
svntest.main.safe_rmtree(E_path)
svntest.main.safe_rmtree(H_path)
+ # In single-db mode all missing items will just be restored
+ if svntest.main.wc_is_singledb(wc_dir):
+ A_or_Restored = Item(verb='Restored')
+ else:
+ A_or_Restored = Item(status='A ')
+
# Create expected output tree for an update of the missing items by name
expected_output = svntest.wc.State(wc_dir, {
'A/mu' : Item(verb='Restored'),
'A/D/G/rho' : Item(verb='Restored'),
- 'A/B/E' : Item(status='A '),
- 'A/B/E/alpha' : Item(status='A '),
- 'A/B/E/beta' : Item(status='A '),
- 'A/D/H' : Item(status='A '),
- 'A/D/H/chi' : Item(status='A '),
- 'A/D/H/omega' : Item(status='A '),
- 'A/D/H/psi' : Item(status='A '),
+ 'A/B/E' : A_or_Restored,
+ 'A/B/E/alpha' : A_or_Restored,
+ 'A/B/E/beta' : A_or_Restored,
+ 'A/D/H' : A_or_Restored,
+ 'A/D/H/chi' : A_or_Restored,
+ 'A/D/H/omega' : A_or_Restored,
+ 'A/D/H/psi' : A_or_Restored,
})
# Create expected disk tree for the update.
@@ -793,15 +799,25 @@ def obstructed_update_alters_wc_props(sb
# Update the WC to that newer rev to trigger the obstruction.
#print "Updating WC"
- expected_output = svntest.wc.State(wc_dir, {})
+ # svntest.factory.make(sbox, 'svn update')
+ # exit(0)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/foo' : Item(status=' ', treeconflict='C'),
+ })
+
expected_disk = svntest.main.greek_state.copy()
- expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
- error_re = 'Failed to add directory.*object of the same name already exists'
- svntest.actions.run_and_verify_update(wc_dir,
- expected_output,
- expected_disk,
- expected_status,
- error_re)
+ expected_disk.add({
+ 'A/foo' : Item(contents="an obstruction"),
+ })
+
+ expected_status = actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/foo' : Item(status='? ', treeconflict='C'),
+ })
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status, None, None, None, None, None, False, wc_dir)
+
# Remove the file which caused the obstruction.
#print "Removing obstruction"
@@ -1062,6 +1078,17 @@ def update_deleted_missing_dir(sbox):
'A/D/H' : Item(status='D '),
})
+ # In single-db mode the missing items are restored before the update
+ if svntest.main.wc_is_singledb(wc_dir):
+ expected_output.add({
+ 'A/D/H/psi' : Item(verb='Restored'),
+ 'A/D/H/omega' : Item(verb='Restored'),
+ 'A/D/H/chi' : Item(verb='Restored'),
+ 'A/B/E/beta' : Item(verb='Restored'),
+ 'A/B/E/alpha' : Item(verb='Restored')
+      # A/B/E and A/D/H are also restored, but are then overridden by the delete
+ })
+
# Create expected disk tree for the update.
expected_disk = svntest.main.greek_state.copy()
expected_disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
@@ -1087,6 +1114,12 @@ def update_deleted_missing_dir(sbox):
# This time we're updating the whole working copy
expected_status.tweak(wc_rev=2)
+ # And now we don't expect restore operations
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status='D '),
+ 'A/D/H' : Item(status='D '),
+ })
+
# Do the update, on the whole working copy this time
svntest.actions.run_and_verify_update(wc_dir,
expected_output,
@@ -1134,12 +1167,23 @@ def another_hudson_problem(sbox):
# Update missing directory to receive the delete, this should mark G
# as 'deleted' and should not alter gamma's entry.
+ if not svntest.main.wc_is_singledb(wc_dir):
+ expected_output = ['D '+G_path+'\n',
+ 'Updated to revision 3.\n',
+ ]
+ else:
+ expected_output = ['Restored \'' + G_path + '\'\n',
+ 'Restored \'' + G_path + os.path.sep + 'pi\'\n',
+ 'Restored \'' + G_path + os.path.sep + 'rho\'\n',
+ 'Restored \'' + G_path + os.path.sep + 'tau\'\n',
+ 'D '+G_path+'\n',
+ 'Updated to revision 3.\n',
+ ]
+
# Sigh, I can't get run_and_verify_update to work (but not because
# of issue 919 as far as I can tell)
svntest.actions.run_and_verify_svn(None,
- ['D '+G_path+'\n',
- 'Updated to revision 3.\n',
- ], [],
+ expected_output, [],
'up', G_path)
# Both G and gamma should be 'deleted', update should produce no output
@@ -1545,6 +1589,9 @@ def nested_in_read_only(sbox):
sbox.build()
wc_dir = sbox.wc_dir
+ if svntest.main.wc_is_singledb(wc_dir):
+ raise svntest.Skip('Unsupported in single-db')
+
# Delete/commit a file
alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha')
svntest.actions.run_and_verify_svn(None, None, [], 'rm', alpha_path)
@@ -2176,36 +2223,97 @@ def forced_update_failures(sbox):
# A forced update that tries to add a directory when an unversioned file
# of the same name already exists should fail.
- C_Path = os.path.join(wc_backup, 'A', 'C')
- svntest.actions.run_and_verify_update(C_Path, None, None, None,
- ".*Failed to add directory.*" + \
- "a non-directory object of the " + \
- "same name already exists",
- None, None, None, None, 0, C_Path,
- '--force')
-
- # Clean-up what we have done so far. Remove the unversioned file A/C/I
- # and the unversioned directory A/B/F/nu. Then update the backup to
- # r2, except for A/C, update that to r1 so A/C/I isn't present.
- # working copy.
- os.remove(I_path)
- os.rmdir(nu_path)
- svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [],
- 'up', wc_backup)
- svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [],
- 'up', '-r', '1', C_Path)
-
- # Checkout %URL%/A/C/I@2 directly to A/C/I. A/C, being at r1, views
- # this as an unversioned object.
- I_url = sbox.repo_url + "/A/C/I"
- exit_code, so, se = svntest.actions.run_and_verify_svn(
- "Unexpected error during co",
- ['Checked out revision 2.\n'], [],
- "co", I_url, I_path)
- svntest.actions.run_and_verify_update(C_Path, None, None, None,
- "Failed to add directory '.*I'.*already exists",
- None, None, None, None, 0, C_Path,
- '--force')
+ # svntest.factory.make(sbox, """
+ # svn up --force wc_dir_backup/A/C
+ # rm -rf wc_dir_backup/A/C/I wc_dir_backup/A/B/F/nu
+ # svn up wc_dir_backup
+ # svn up -r1 wc_dir_backup/A/C
+ # svn co url/A/C/I wc_dir_backup/A/C/I
+ # svn up --force wc_dir_backup/A/C
+ # """)
+ # exit(0)
+ url = sbox.repo_url
+ wc_dir_backup = sbox.wc_dir + '.backup'
+
+ backup_A_B_F_nu = os.path.join(wc_dir_backup, 'A', 'B', 'F', 'nu')
+ backup_A_C = os.path.join(wc_dir_backup, 'A', 'C')
+ backup_A_C_I = os.path.join(wc_dir_backup, 'A', 'C', 'I')
+ url_A_C_I = url + '/A/C/I'
+
+ # svn up --force wc_dir_backup/A/C
+ expected_output = svntest.wc.State(wc_dir_backup, {
+ 'A/C/I' : Item(status=' ', treeconflict='C'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/F/nu' : Item(),
+ 'A/C/I' :
+ Item(contents="This is the file 'I'...shouldn't I be a dir?\n"),
+ })
+
+ expected_status = actions.get_virginal_state(wc_dir_backup, 1)
+ expected_status.add({
+ 'A/C/I' : Item(status='? ', treeconflict='C'),
+ 'A/B/F/nu' : Item(status='? ', treeconflict='C'),
+ })
+ expected_status.tweak('A/C', 'A/B/F', wc_rev='2')
+
+ actions.run_and_verify_update(wc_dir_backup, expected_output,
+ expected_disk, expected_status, None, None, None, None, None, False,
+ '--force', backup_A_C)
+
+ # rm -rf wc_dir_backup/A/C/I wc_dir_backup/A/B/F/nu
+ os.remove(backup_A_C_I)
+ svntest.main.safe_rmtree(backup_A_B_F_nu)
+
+ # svn up wc_dir_backup
+ expected_output = svntest.wc.State(wc_dir_backup, {
+ 'A/C/I' : Item(status='A '),
+ 'A/B/F/nu' : Item(status='A '),
+ })
+
+ expected_disk.tweak('A/B/F/nu', contents="This is the file 'nu'\n")
+ expected_disk.tweak('A/C/I', contents=None)
+
+ expected_status.tweak(wc_rev='2', status=' ')
+ expected_status.tweak('A/C/I', 'A/B/F/nu', treeconflict=None)
+
+ actions.run_and_verify_update(wc_dir_backup, expected_output,
+ expected_disk, expected_status, None, None, None, None, None, False,
+ wc_dir_backup)
+
+ # svn up -r1 wc_dir_backup/A/C
+ expected_output = svntest.wc.State(wc_dir_backup, {
+ 'A/C/I' : Item(status='D '),
+ })
+
+ expected_disk.remove('A/C/I')
+
+ expected_status.remove('A/C/I')
+ expected_status.tweak('A/C', wc_rev='1')
+
+ actions.run_and_verify_update(wc_dir_backup, expected_output,
+ expected_disk, expected_status, None, None, None, None, None, False,
+ '-r1', backup_A_C)
+
+ # svn co url/A/C/I wc_dir_backup/A/C/I
+ expected_output = svntest.wc.State(wc_dir_backup, {})
+
+ expected_disk = svntest.wc.State(wc_dir, {})
+
+ actions.run_and_verify_checkout2(False, url_A_C_I, backup_A_C_I,
+ expected_output, expected_disk, None, None, None, None)
+
+ # svn up --force wc_dir_backup/A/C
+ expected_error = (
+ "svn: Failed to add directory .*I.*working copy with the same name "
+ + "already exists"
+ )
+
+ actions.run_and_verify_update(wc_dir_backup, None, None, None,
+ expected_error, None, None, None, None, False, '--force', backup_A_C)
+
#----------------------------------------------------------------------
# Test for issue #2556. The tests maps a virtual drive to a working copy
@@ -4310,6 +4418,8 @@ def tree_conflicts_on_update_1_1(sbox):
# use case 1, as in notes/tree-conflicts/use-cases.txt
# 1.1) local tree delete, incoming leaf edit
+ sbox.build()
+
expected_output = deep_trees_conflict_output.copy()
expected_output.add({
'DDF/D1/D2' : Item(status='D '),
@@ -4324,6 +4434,10 @@ def tree_conflicts_on_update_1_1(sbox):
})
expected_disk = disk_empty_dirs.copy()
+ if svntest.main.wc_is_singledb(sbox.wc_dir):
+ expected_disk.remove('D/D1', 'DF/D1', 'DD/D1', 'DD/D1/D2',
+ 'DDF/D1', 'DDF/D1/D2',
+ 'DDD/D1', 'DDD/D1/D2', 'DDD/D1/D2/D3')
# The files delta, epsilon, and zeta are incoming additions, but since
# they are all within locally deleted trees they should also be schedule
@@ -4392,6 +4506,8 @@ def tree_conflicts_on_update_1_2(sbox):
# 1.2) local tree delete, incoming leaf delete
+ sbox.build()
+
expected_output = deep_trees_conflict_output.copy()
expected_output.add({
'DDD/D1/D2' : Item(status='D '),
@@ -4429,6 +4545,10 @@ def tree_conflicts_on_update_1_2(sbox):
expected_disk.remove('D/D1',
'DD/D1/D2',
'DDD/D1/D2/D3')
+ if svntest.main.wc_is_singledb(sbox.wc_dir):
+ expected_disk.remove('DF/D1', 'DD/D1',
+ 'DDF/D1', 'DDF/D1/D2',
+ 'DDD/D1', 'DDD/D1/D2')
expected_info = {
'F/alpha' : {
@@ -5474,6 +5594,49 @@ def mergeinfo_updates_merge_with_local_m
'pg', SVN_PROP_MERGEINFO, '-R',
A_COPY_path)
+#----------------------------------------------------------------------
+# Test for receiving modified properties on added files that were originally
+# moved from somewhere else. (Triggers locate_copyfrom behavior)
+def add_moved_file_has_props(sbox):
+ """update adding moved file receives modified props"""
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ G = os.path.join(os.path.join(wc_dir, 'A', 'D', 'G'))
+ pi = os.path.join(G, 'pi')
+ G_new = os.path.join(wc_dir, 'G_new')
+
+ # Give pi some property
+ svntest.main.run_svn(None, 'ps', 'svn:eol-style', 'native', pi)
+ svntest.main.run_svn(None, 'ci', wc_dir, '-m', 'added eol-style')
+
+ svntest.actions.run_and_verify_svn(None, 'At revision 2.', [], 'up', wc_dir)
+
+ # Now move pi to a different place
+ svntest.main.run_svn(None, 'mkdir', G_new)
+ svntest.main.run_svn(None, 'mv', pi, G_new)
+ svntest.main.run_svn(None, 'ci', wc_dir, '-m', 'Moved pi to G_new')
+
+ svntest.actions.run_and_verify_svn(None, 'At revision 3.', [], 'up', wc_dir)
+
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.remove('A/D/G/pi')
+ expected_status.add({
+ 'G_new' : Item (status=' ', wc_rev=3),
+ 'G_new/pi' : Item (status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ svntest.main.run_svn(None, 'up', '-r', '0', G_new)
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # This shouldn't show property modifications, but at r982550 it did.
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
#######################################################################
# Run the tests
@@ -5542,6 +5705,7 @@ test_list = [ None,
XFail(update_deleted_locked_files),
XFail(update_empty_hides_entries),
mergeinfo_updates_merge_with_local_mods,
+ XFail(add_moved_file_has_props),
]
if __name__ == '__main__':
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/cmdline/upgrade_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/cmdline/upgrade_tests.py?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/cmdline/upgrade_tests.py (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/cmdline/upgrade_tests.py Wed Aug 11 16:43:22 2010
@@ -107,6 +107,60 @@ def check_dav_cache(dir_path, wc_id, exp
db.close()
+# Very simple working copy property diff handler for single line textual properties
+# Should probably be moved to svntest/actions.py after some major refactoring.
+def simple_property_verify(dir_path, expected_props):
+
+ # Shows all items in dict1 that are not also in dict2
+ def diff_props(dict1, dict2, name, match):
+
+ equal = True;
+ for key in dict1:
+ node = dict1[key]
+ node2 = dict2.get(key, None)
+ if node2:
+ for prop in node:
+ v1 = node[prop]
+ v2 = node2.get(prop, None)
+
+ if not v2:
+ print('\'%s\' property on \'%s\' not found in %s' %
+ (prop, key, name))
+ equal = False
+ if match and v1 != v2:
+ print('Expected \'%s\' on \'%s\' to be \'%s\', but found \'%s\'' %
+ (prop, key, v1, v2))
+ equal = False
+ else:
+ print('\'%s\': %s not found in %s' % (key, dict1[key], name))
+ equal = False
+
+ return equal
+
+
+ exit_code, output, errput = svntest.main.run_svn(None, 'proplist', '-R',
+ '-v', dir_path)
+
+ actual_props = {}
+ target = None
+ name = None
+
+ for i in output:
+ if i.startswith('Properties on '):
+ target = i[15+len(dir_path)+1:-3].replace(os.path.sep, '/')
+ elif not i.startswith(' '):
+ name = i.strip()
+ else:
+ v = actual_props.get(target, {})
+ v[name] = i.strip()
+ actual_props[target] = v
+
+ v1 = diff_props(expected_props, actual_props, 'actual', True)
+ v2 = diff_props(actual_props, expected_props, 'expected', False)
+
+ if not v1 or not v2:
+ print('Actual properties: %s' % actual_props)
+ raise svntest.Failure("Properties unequal")
def run_and_verify_status_no_server(wc_dir, expected_status):
"same as svntest.actions.run_and_verify_status(), but without '-u'"
@@ -401,6 +455,29 @@ def do_x3_upgrade(sbox):
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+ simple_property_verify(sbox.wc_dir, {
+ 'A/B_new/E/beta' : {'x3' : '3x',
+ 'svn:eol-style': 'native'},
+ 'A/B/E/beta' : {'s' : 't',
+ 'svn:eol-style': 'native'},
+ 'A/B_new/B/E/alpha' : {'svn:eol-style': 'native'},
+ 'A/B/E/alpha' : {'q': 'r',
+ 'svn:eol-style': 'native'},
+ 'A_new/alpha' : {'svn:eol-style': 'native'},
+ 'A/B_new/B/new' : {'svn:eol-style': 'native'},
+ 'A/B_new/E/alpha' : {'svn:eol-style': 'native',
+ 'u': 'v'},
+ 'A/B_new/B/E' : {'q': 'r'},
+ 'A/B_new/lambda' : {'svn:eol-style': 'native'},
+ 'A/B_new/E' : {'x3': '3x'},
+ 'A/B_new/new' : {'svn:eol-style': 'native'},
+ 'A/B/lambda' : {'svn:eol-style': 'native'},
+ 'A/B_new/B/E/beta' : {'svn:eol-style': 'native'},
+ 'A/B_new/B/lambda' : {'svn:eol-style': 'native'},
+ 'A/B/new' : {'svn:eol-style': 'native'},
+ 'A/G_new/rho' : {'svn:eol-style': 'native'}
+ })
+
svntest.actions.run_and_verify_svn(None, 'Reverted.*', [],
'revert', '-R', sbox.wc_dir)
@@ -425,6 +502,12 @@ def do_x3_upgrade(sbox):
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+ simple_property_verify(sbox.wc_dir, {
+ 'A/B/E/beta' : {'svn:eol-style': 'native'},
+# 'A/B/lambda' : {'svn:eol-style': 'native'},
+ 'A/B/E/alpha' : {'svn:eol-style': 'native'}
+ })
+
def x3_1_4_0(sbox):
"3x same wc upgrade 1.4.0 test"
@@ -462,7 +545,9 @@ test_list = [ None,
logs_left_1_5,
upgrade_wcprops,
basic_upgrade_1_0,
- x3_1_4_0,
+ # Upgrading from 1.4.0-1.4.5 with specific states fails
+ # See issue #2530
+ XFail(x3_1_4_0),
x3_1_4_6,
x3_1_6_12,
]
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_client/client-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_client/client-test.c?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_client/client-test.c (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_client/client-test.c Wed Aug 11 16:43:22 2010
@@ -433,6 +433,10 @@ test_wc_add_scenarios(const svn_test_opt
pool));
SVN_ERR(svn_dirent_get_absolute(&wc_path, "test-wc-add", pool));
+
+ /* Remove old test data from the previous run */
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
SVN_ERR(svn_io_make_dir_recursively(wc_path, pool));
svn_test_add_dir_cleanup(wc_path);
@@ -466,6 +470,14 @@ test_wc_add_scenarios(const svn_test_opt
SVN_ERR(svn_wc_adm_open3(&adm_access, NULL, wc_path, TRUE, -1, NULL, NULL,
pool));
+ /* ### The above svn_wc_adm_open3 creates a new svn_wc__db_t
+ ### instance. The svn_wc_add3 below doesn't work while the
+ ### original svn_wc__db_t created by svn_client_create_context
+ ### remains open. Closing the wc-context gets around the
+ ### problem but is obviously a hack. */
+ SVN_ERR(svn_wc_context_destroy(ctx->wc_ctx));
+ SVN_ERR(svn_wc_context_create(&ctx->wc_ctx, NULL, pool, pool));
+
/* Fix up copy as add with history */
SVN_ERR(svn_wc_add3(new_dir_path, adm_access, svn_depth_infinity,
repos_url, committed_rev, NULL, NULL, NULL, NULL,
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_diff/parse-diff-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_diff/parse-diff-test.c?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_diff/parse-diff-test.c (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_diff/parse-diff-test.c Wed Aug 11 16:43:22 2010
@@ -86,17 +86,65 @@ static const char *git_tree_and_text_uni
"git --diff a/iota b/iota.copied" NL
"copy from iota" NL
"copy to iota.copied" NL
+ "--- a/iota\t(revision 2)" NL
+ "+++ b/iota.copied\t(working copy)" NL
"@@ -1 +1,2 @@" NL
" This is the file 'iota'." NL
"+some more bytes to 'iota'" NL
"Index: A/mu.moved" NL
"===================================================================" NL
"git --diff a/A/mu b/A/mu.moved" NL
- "move from A/mu" NL
- "move to A/mu.moved" NL
+ "rename from A/mu" NL
+ "rename to A/mu.moved" NL
+ "--- a/A/mu\t(revision 2)" NL
+ "+++ b/A/mu.moved\t(working copy)" NL
"@@ -1 +1,2 @@" NL
" This is the file 'mu'." NL
"+some more bytes to 'mu'" NL
+ "Index: new" NL
+ "===================================================================" NL
+ "git --diff a/new b/new" NL
+ "new file mode 100644" NL
+ "--- /dev/null\t(revision 0)" NL
+ "+++ b/new\t(working copy)" NL
+ "@@ -0,0 +1 @@" NL
+ "+This is the file 'new'." NL
+ "Index: A/B/lambda" NL
+ "===================================================================" NL
+ "git --diff a/A/B/lambda b/A/B/lambda" NL
+ "deleted file mode 100644" NL
+ "--- a/A/B/lambda\t(revision 2)" NL
+ "+++ /dev/null\t(working copy)" NL
+ "@@ -1 +0,0 @@" NL
+ "-This is the file 'lambda'." NL
+ "" NL;
+
+  /* Only the last git diff header is valid. The other ones either miss a
+   * path element or have noise between lines that must be continuous. */
+static const char *bad_git_diff_header =
+ "Index: iota.copied" NL
+ "===================================================================" NL
+ "git --diff a/foo1 b/" NL
+ "git --diff a/foo2 b" NL
+ "git --diff a/foo3 " NL
+ "git --diff a/foo3 " NL
+ "git --diff foo4 b/foo4" NL
+ "git --diff a/foo5 b/foo5" NL
+ "random noise" NL
+ "copy from foo5" NL
+ "copy to foo5" NL
+ "git --diff a/foo6 b/foo6" NL
+ "copy from foo6" NL
+ "random noise" NL
+ "copy to foo6" NL
+ "git --diff a/foo6 b/foo6" NL
+ "copy from foo6" NL
+ "git --diff a/iota b/iota.copied" NL
+ "copy from iota" NL
+ "copy to iota.copied" NL
+ "@@ -1 +1,2 @@" NL
+ " This is the file 'iota'." NL
+ "+some more bytes to 'iota'" NL
"" NL;
static const char *property_unidiff =
@@ -120,9 +168,19 @@ static const char *git_tree_and_text_uni
"Property changes on: iota" NL
"___________________________________________________________________" NL
"Modified: prop_mod" NL
- "## -1 +1 ##" NL
+ "## -1,4 +1,4 ##" NL
+ "-value" NL
+ "+new value" NL
+ " context" NL
+ " context" NL
+ " context" NL
+ "## -10,4 +10,4 ##" NL
+ " context" NL
+ " context" NL
+ " context" NL
"-value" NL
- "+new value" NL;
+ "+new value" NL
+ "" NL;
/* ### Add edge cases like context lines stripped from leading whitespaces
* ### that starts with 'Added: ', 'Deleted: ' or 'Modified: '. */
@@ -141,6 +199,53 @@ static const char *git_tree_and_text_uni
"## -0,0 +1 ##" NL
"+value" NL;
+ /* A unidiff containing diff symbols in the body of the hunks. */
+ static const char *diff_symbols_in_prop_unidiff =
+ "Index: iota" NL
+ "===================================================================" NL
+ "--- iota" NL
+ "+++ iota" NL
+ "" NL
+ "Property changes on: iota" NL
+ "___________________________________________________________________" NL
+ "Added: prop_add" NL
+ "## -0,0 +1,3 ##" NL
+ "+Added: bogus_prop" NL
+ "+## -0,0 +20 ##" NL
+ "+@@ -1,2 +0,0 @@" NL
+ "Deleted: prop_del" NL
+ "## -1,2 +0,0 ##" NL
+ "---- iota" NL
+ "-+++ iota" NL
+ "Modified: non-existent" NL
+ "blah, just noise - no valid hunk header" NL
+ "Modified: prop_mod" NL
+ "## -1,4 +1,4 ##" NL
+ "-## -1,2 +1,2 ##" NL
+ "+## -1,3 +1,3 ##" NL
+ " ## -1,5 -0,0 ##" NL
+ " @@ -1,5 -0,0 @@" NL
+ " Modified: prop_mod" NL
+ "## -10,4 +10,4 ##" NL
+ " context" NL
+ " context" NL
+ " context" NL
+ "-## -0,0 +1 ##" NL
+ "+## -1,2 +1,4 ##" NL
+ "" NL;
+
+ /* A unidiff containing paths with spaces. */
+ static const char *path_with_spaces_unidiff =
+ "git --diff a/path 1 b/path 1" NL
+ "new file mode 100644" NL
+ "git --diff a/path one 1 b/path one 1" NL
+ "new file mode 100644" NL
+ "git --diff a/dir/ b/path b/dir/ b/path" NL
+ "new file mode 100644" NL
+ "git --diff a/ b/path 1 b/ b/path 1" NL
+ "new file mode 100644" NL;
+
+
/* Create a PATCH_FILE with name FNAME containing the contents of DIFF. */
static svn_error_t *
create_patch_file(apr_file_t **patch_file, const char *fname,
@@ -167,15 +272,18 @@ create_patch_file(apr_file_t **patch_fil
return SVN_NO_ERROR;
}
-/* Check that CONTENT equals what's inside EXPECTED. */
+/* Check that reading a line from HUNK equals what's inside EXPECTED.
+ * If ORIGINAL is TRUE, read the original hunk text; else, read the
+ * modified hunk text. */
static svn_error_t *
-check_content(svn_stream_t *content, const char *expected, apr_pool_t *pool)
+check_content(svn_hunk_t *hunk, svn_boolean_t original,
+ const char *expected, apr_pool_t *pool)
{
svn_stream_t *exp;
svn_stringbuf_t *exp_buf;
- svn_stringbuf_t *content_buf;
+ svn_stringbuf_t *hunk_buf;
svn_boolean_t exp_eof;
- svn_boolean_t content_eof;
+ svn_boolean_t hunk_eof;
exp = svn_stream_from_string(svn_string_create(expected, pool),
pool);
@@ -183,18 +291,23 @@ check_content(svn_stream_t *content, con
while (TRUE)
{
SVN_ERR(svn_stream_readline(exp, &exp_buf, NL, &exp_eof, pool));
- SVN_ERR(svn_stream_readline(content, &content_buf, NL, &content_eof,
- pool));
- SVN_TEST_ASSERT(exp_eof == content_eof);
+ if (original)
+ SVN_ERR(svn_diff_hunk_readline_original_text(hunk, &hunk_buf, NULL,
+ &hunk_eof, pool, pool));
+ else
+ SVN_ERR(svn_diff_hunk_readline_modified_text(hunk, &hunk_buf, NULL,
+ &hunk_eof, pool, pool));
+
+ SVN_TEST_ASSERT(exp_eof == hunk_eof);
if (exp_eof)
break;
- if (strcmp(exp_buf->data, content_buf->data))
+ if (strcmp(exp_buf->data, hunk_buf->data))
return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
"Expected '%s' but was '%s'", exp_buf->data,
- content_buf->data);
+ hunk_buf->data);
}
- SVN_TEST_ASSERT(content_buf->len == 0);
+ SVN_TEST_ASSERT(hunk_buf->len == 0);
return SVN_NO_ERROR;
}
@@ -219,8 +332,6 @@ test_parse_unidiff(apr_pool_t *pool)
svn_patch_t *patch;
svn_hunk_t *hunk;
apr_off_t pos;
- svn_stream_t *original_text;
- svn_stream_t *modified_text;
svn_pool_clear(iterpool);
@@ -239,24 +350,11 @@ test_parse_unidiff(apr_pool_t *pool)
SVN_TEST_ASSERT(patch->hunks->nelts == 1);
hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
- if (reverse)
- {
- /* Hunk texts come out of the parser inverted,
- * so this inverts them a second time. */
- original_text = hunk->modified_text;
- modified_text = hunk->original_text;
- }
- else
- {
- original_text = hunk->original_text;
- modified_text = hunk->modified_text;
- }
-
- SVN_ERR(check_content(original_text,
+ SVN_ERR(check_content(hunk, ! reverse,
"This is the file 'gamma'." NL,
pool));
- SVN_ERR(check_content(modified_text,
+ SVN_ERR(check_content(hunk, reverse,
"This is the file 'gamma'." NL
"some more bytes to 'gamma'" NL,
pool));
@@ -278,25 +376,12 @@ test_parse_unidiff(apr_pool_t *pool)
SVN_TEST_ASSERT(patch->hunks->nelts == 1);
hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
- if (reverse)
- {
- /* Hunk texts come out of the parser inverted,
- * so this inverts them a second time. */
- original_text = hunk->modified_text;
- modified_text = hunk->original_text;
- }
- else
- {
- original_text = hunk->original_text;
- modified_text = hunk->modified_text;
- }
-
- SVN_ERR(check_content(original_text,
+ SVN_ERR(check_content(hunk, ! reverse,
"This is the file 'gamma'." NL
"some less bytes to 'gamma'" NL,
pool));
- SVN_ERR(check_content(modified_text,
+ SVN_ERR(check_content(hunk, reverse,
"This is the file 'gamma'." NL,
pool));
@@ -342,11 +427,11 @@ test_parse_git_diff(apr_pool_t *pool)
hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
- SVN_ERR(check_content(hunk->original_text,
+ SVN_ERR(check_content(hunk, TRUE,
"This is the file 'gamma'." NL,
pool));
- SVN_ERR(check_content(hunk->modified_text,
+ SVN_ERR(check_content(hunk, FALSE,
"This is the file 'gamma'." NL
"some more bytes to 'gamma'" NL,
pool));
@@ -404,11 +489,11 @@ test_parse_git_tree_and_text_diff(apr_po
hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
- SVN_ERR(check_content(hunk->original_text,
+ SVN_ERR(check_content(hunk, TRUE,
"This is the file 'iota'." NL,
pool));
- SVN_ERR(check_content(hunk->modified_text,
+ SVN_ERR(check_content(hunk, FALSE,
"This is the file 'iota'." NL
"some more bytes to 'iota'" NL,
pool));
@@ -426,15 +511,90 @@ test_parse_git_tree_and_text_diff(apr_po
hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
- SVN_ERR(check_content(hunk->original_text,
+ SVN_ERR(check_content(hunk, TRUE,
"This is the file 'mu'." NL,
pool));
- SVN_ERR(check_content(hunk->modified_text,
+ SVN_ERR(check_content(hunk, FALSE,
"This is the file 'mu'." NL
"some more bytes to 'mu'" NL,
pool));
+ SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+ FALSE, /* reverse */
+ FALSE, /* ignore_whitespace */
+ pool, pool));
+ SVN_TEST_ASSERT(patch);
+ SVN_TEST_ASSERT(! strcmp(patch->old_filename, "/dev/null"));
+ SVN_TEST_ASSERT(! strcmp(patch->new_filename, "new"));
+ SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+ SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+ hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
+
+ SVN_ERR(check_content(hunk, TRUE,
+ "",
+ pool));
+
+ SVN_ERR(check_content(hunk, FALSE,
+ "This is the file 'new'." NL,
+ pool));
+
+ SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+ FALSE, /* reverse */
+ FALSE, /* ignore_whitespace */
+ pool, pool));
+ SVN_TEST_ASSERT(patch);
+ SVN_TEST_ASSERT(! strcmp(patch->old_filename, "A/B/lambda"));
+ SVN_TEST_ASSERT(! strcmp(patch->new_filename, "/dev/null"));
+ SVN_TEST_ASSERT(patch->operation == svn_diff_op_deleted);
+ SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+ hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
+
+ SVN_ERR(check_content(hunk, TRUE,
+ "This is the file 'lambda'." NL,
+ pool));
+
+ SVN_ERR(check_content(hunk, FALSE,
+ "",
+ pool));
+ return SVN_NO_ERROR;
+}
+
+/* Tests to parse non-valid git diffs. */
+static svn_error_t *
+test_bad_git_diff_headers(apr_pool_t *pool)
+{
+ apr_file_t *patch_file;
+ svn_patch_t *patch;
+ svn_hunk_t *hunk;
+ const char *fname = "test_bad_git_diff_header.patch";
+
+ SVN_ERR(create_patch_file(&patch_file, fname, bad_git_diff_header,
+ pool));
+
+ SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+ FALSE, /* reverse */
+ FALSE, /* ignore_whitespace */
+ pool, pool));
+ SVN_TEST_ASSERT(patch);
+ SVN_TEST_ASSERT(! strcmp(patch->old_filename, "iota"));
+ SVN_TEST_ASSERT(! strcmp(patch->new_filename, "iota.copied"));
+ SVN_TEST_ASSERT(patch->operation == svn_diff_op_copied);
+ SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+ hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
+
+ SVN_ERR(check_content(hunk, TRUE,
+ "This is the file 'iota'." NL,
+ pool));
+
+ SVN_ERR(check_content(hunk, FALSE,
+ "This is the file 'iota'." NL
+ "some more bytes to 'iota'" NL,
+ pool));
+
return SVN_NO_ERROR;
}
@@ -445,6 +605,7 @@ test_parse_property_diff(apr_pool_t *poo
{
apr_file_t *patch_file;
svn_patch_t *patch;
+ svn_prop_patch_t *prop_patch;
svn_hunk_t *hunk;
apr_array_header_t *hunks;
const char *fname = "test_parse_property_diff.patch";
@@ -459,44 +620,82 @@ test_parse_property_diff(apr_pool_t *poo
SVN_TEST_ASSERT(! strcmp(patch->old_filename, "iota"));
SVN_TEST_ASSERT(! strcmp(patch->new_filename, "iota"));
SVN_TEST_ASSERT(patch->hunks->nelts == 0);
- SVN_TEST_ASSERT(apr_hash_count(patch->property_hunks) == 3);
+ SVN_TEST_ASSERT(apr_hash_count(patch->prop_patches) == 3);
+
+ /* Check the deleted property */
+ prop_patch = apr_hash_get(patch->prop_patches, "prop_del",
+ APR_HASH_KEY_STRING);
+
+ SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_deleted);
+ hunks = prop_patch->hunks;
- /* Check the added property */
- hunks = apr_hash_get(patch->property_hunks, "prop_add", APR_HASH_KEY_STRING);
SVN_TEST_ASSERT(hunks->nelts == 1);
hunk = APR_ARRAY_IDX(hunks, 0 , svn_hunk_t *);
- SVN_ERR(check_content(hunk->original_text,
- "",
+ SVN_ERR(check_content(hunk, TRUE,
+ "value" NL,
pool));
- SVN_ERR(check_content(hunk->modified_text,
- "value" NL,
+ SVN_ERR(check_content(hunk, FALSE,
+ "",
pool));
- /* Check the deleted property */
- hunks = apr_hash_get(patch->property_hunks, "prop_del", APR_HASH_KEY_STRING);
+ /* Check the added property */
+ prop_patch = apr_hash_get(patch->prop_patches, "prop_add",
+ APR_HASH_KEY_STRING);
+
+ SVN_TEST_ASSERT(!strcmp("prop_add", prop_patch->name));
+ SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_added);
+ hunks = prop_patch->hunks;
+
SVN_TEST_ASSERT(hunks->nelts == 1);
hunk = APR_ARRAY_IDX(hunks, 0 , svn_hunk_t *);
- SVN_ERR(check_content(hunk->original_text,
- "value" NL,
+ SVN_ERR(check_content(hunk, TRUE,
+ "",
pool));
- SVN_ERR(check_content(hunk->modified_text,
- "",
+ SVN_ERR(check_content(hunk, FALSE,
+ "value" NL,
pool));
/* Check the modified property */
- hunks = apr_hash_get(patch->property_hunks, "prop_mod", APR_HASH_KEY_STRING);
- SVN_TEST_ASSERT(hunks->nelts == 1);
+ prop_patch = apr_hash_get(patch->prop_patches, "prop_mod",
+ APR_HASH_KEY_STRING);
+
+ SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_modified);
+ hunks = prop_patch->hunks;
+
+ SVN_TEST_ASSERT(hunks->nelts == 2);
hunk = APR_ARRAY_IDX(hunks, 0 , svn_hunk_t *);
- SVN_ERR(check_content(hunk->original_text,
+ SVN_ERR(check_content(hunk, TRUE,
+ "value" NL
+ "context" NL
+ "context" NL
+ "context" NL,
+ pool));
+
+ SVN_ERR(check_content(hunk, FALSE,
+ "new value" NL
+ "context" NL
+ "context" NL
+ "context" NL,
+ pool));
+
+ hunk = APR_ARRAY_IDX(hunks, 1 , svn_hunk_t *);
+
+ SVN_ERR(check_content(hunk, TRUE,
+ "context" NL
+ "context" NL
+ "context" NL
"value" NL,
pool));
- SVN_ERR(check_content(hunk->modified_text,
+ SVN_ERR(check_content(hunk, FALSE,
+ "context" NL
+ "context" NL
+ "context" NL
"new value" NL,
pool));
@@ -508,6 +707,7 @@ test_parse_property_and_text_diff(apr_po
{
apr_file_t *patch_file;
svn_patch_t *patch;
+ svn_prop_patch_t *prop_patch;
svn_hunk_t *hunk;
apr_array_header_t *hunks;
const char *fname = "test_parse_property_and_text_diff.patch";
@@ -523,36 +723,193 @@ test_parse_property_and_text_diff(apr_po
SVN_TEST_ASSERT(! strcmp(patch->old_filename, "iota"));
SVN_TEST_ASSERT(! strcmp(patch->new_filename, "iota"));
SVN_TEST_ASSERT(patch->hunks->nelts == 1);
- SVN_TEST_ASSERT(apr_hash_count(patch->property_hunks) == 1);
+ SVN_TEST_ASSERT(apr_hash_count(patch->prop_patches) == 1);
/* Check contents of text hunk */
hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_hunk_t *);
- SVN_ERR(check_content(hunk->original_text,
+ SVN_ERR(check_content(hunk, TRUE,
"This is the file 'iota'." NL,
pool));
- SVN_ERR(check_content(hunk->modified_text,
+ SVN_ERR(check_content(hunk, FALSE,
"This is the file 'iota'." NL
"some more bytes to 'iota'" NL,
pool));
/* Check the added property */
- hunks = apr_hash_get(patch->property_hunks, "prop_add", APR_HASH_KEY_STRING);
+ prop_patch = apr_hash_get(patch->prop_patches, "prop_add",
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_added);
+
+ hunks = prop_patch->hunks;
SVN_TEST_ASSERT(hunks->nelts == 1);
hunk = APR_ARRAY_IDX(hunks, 0 , svn_hunk_t *);
- SVN_ERR(check_content(hunk->original_text,
+ SVN_ERR(check_content(hunk, TRUE,
"",
pool));
- SVN_ERR(check_content(hunk->modified_text,
+ SVN_ERR(check_content(hunk, FALSE,
"value" NL,
pool));
return SVN_NO_ERROR;
}
+static svn_error_t *
+test_parse_diff_symbols_in_prop_unidiff(apr_pool_t *pool)
+{
+ svn_patch_t *patch;
+ apr_file_t *patch_file;
+ svn_prop_patch_t *prop_patch;
+ svn_hunk_t *hunk;
+ apr_array_header_t *hunks;
+ const char *fname = "test_parse_diff_symbols_in_prop_unidiff.patch";
+
+ SVN_ERR(create_patch_file(&patch_file, fname, diff_symbols_in_prop_unidiff,
+ pool));
+
+ SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+ FALSE, /* reverse */
+ FALSE, /* ignore_whitespace */
+ pool, pool));
+ SVN_TEST_ASSERT(patch);
+ SVN_TEST_ASSERT(! strcmp(patch->old_filename, "iota"));
+ SVN_TEST_ASSERT(! strcmp(patch->new_filename, "iota"));
+ SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+ SVN_TEST_ASSERT(apr_hash_count(patch->prop_patches) == 3);
+
+ /* Check the added property */
+ prop_patch = apr_hash_get(patch->prop_patches, "prop_add",
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_added);
+
+ hunks = prop_patch->hunks;
+ SVN_TEST_ASSERT(hunks->nelts == 1);
+ hunk = APR_ARRAY_IDX(hunks, 0 , svn_hunk_t *);
+
+ SVN_ERR(check_content(hunk, TRUE,
+ "",
+ pool));
+
+ SVN_ERR(check_content(hunk, FALSE,
+ "Added: bogus_prop" NL
+ "## -0,0 +20 ##" NL
+ "@@ -1,2 +0,0 @@" NL,
+ pool));
+
+ /* Check the deleted property */
+ prop_patch = apr_hash_get(patch->prop_patches, "prop_del",
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_deleted);
+
+ hunks = prop_patch->hunks;
+ SVN_TEST_ASSERT(hunks->nelts == 1);
+ hunk = APR_ARRAY_IDX(hunks, 0 , svn_hunk_t *);
+
+ SVN_ERR(check_content(hunk, TRUE,
+ "--- iota" NL
+ "+++ iota" NL,
+ pool));
+
+ SVN_ERR(check_content(hunk, FALSE,
+ "",
+ pool));
+
+ /* Check the modified property */
+ prop_patch = apr_hash_get(patch->prop_patches, "prop_mod",
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_modified);
+ hunks = prop_patch->hunks;
+ SVN_TEST_ASSERT(hunks->nelts == 2);
+ hunk = APR_ARRAY_IDX(hunks, 0 , svn_hunk_t *);
+
+ SVN_ERR(check_content(hunk, TRUE,
+ "## -1,2 +1,2 ##" NL
+ "## -1,5 -0,0 ##" NL
+ "@@ -1,5 -0,0 @@" NL
+ "Modified: prop_mod" NL,
+ pool));
+
+ SVN_ERR(check_content(hunk, FALSE,
+ "## -1,3 +1,3 ##" NL
+ "## -1,5 -0,0 ##" NL
+ "@@ -1,5 -0,0 @@" NL
+ "Modified: prop_mod" NL,
+ pool));
+
+ hunk = APR_ARRAY_IDX(hunks, 1 , svn_hunk_t *);
+
+ SVN_ERR(check_content(hunk, TRUE,
+ "context" NL
+ "context" NL
+ "context" NL
+ "## -0,0 +1 ##" NL,
+ pool));
+
+ SVN_ERR(check_content(hunk, FALSE,
+ "context" NL
+ "context" NL
+ "context" NL
+ "## -1,2 +1,4 ##" NL,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_git_diffs_with_spaces_diff(apr_pool_t *pool)
+{
+ apr_file_t *patch_file;
+ svn_patch_t *patch;
+ const char *fname = "test_git_diffs_with_spaces_diff.patch";
+
+ SVN_ERR(create_patch_file(&patch_file, fname, path_with_spaces_unidiff,
+ pool));
+
+ SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+ FALSE, /* reverse */
+ FALSE, /* ignore_whitespace */
+ pool, pool));
+ SVN_TEST_ASSERT(patch);
+ SVN_TEST_ASSERT(! strcmp(patch->old_filename, "path 1"));
+ SVN_TEST_ASSERT(! strcmp(patch->new_filename, "path 1"));
+ SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+ SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+ SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+ FALSE, /* reverse */
+ FALSE, /* ignore_whitespace */
+ pool, pool));
+ SVN_TEST_ASSERT(patch);
+ SVN_TEST_ASSERT(! strcmp(patch->old_filename, "path one 1"));
+ SVN_TEST_ASSERT(! strcmp(patch->new_filename, "path one 1"));
+ SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+ SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+ SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+ FALSE, /* reverse */
+ FALSE, /* ignore_whitespace */
+ pool, pool));
+ SVN_TEST_ASSERT(patch);
+ SVN_TEST_ASSERT(! strcmp(patch->old_filename, "dir/ b/path"));
+ SVN_TEST_ASSERT(! strcmp(patch->new_filename, "dir/ b/path"));
+ SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+ SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+ SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+ FALSE, /* reverse */
+ FALSE, /* ignore_whitespace */
+ pool, pool));
+ SVN_TEST_ASSERT(patch);
+ SVN_TEST_ASSERT(! strcmp(patch->old_filename, " b/path 1"));
+ SVN_TEST_ASSERT(! strcmp(patch->new_filename, " b/path 1"));
+ SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+ SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+ return SVN_NO_ERROR;
+}
/* ========================================================================== */
struct svn_test_descriptor_t test_funcs[] =
@@ -563,10 +920,16 @@ struct svn_test_descriptor_t test_funcs[
SVN_TEST_PASS2(test_parse_git_diff,
"test git unidiff parsing"),
SVN_TEST_PASS2(test_parse_git_tree_and_text_diff,
- "test git unidiff parsing of tree and text changes"),
+ "test git unidiff parsing of tree and text changes"),
+ SVN_TEST_XFAIL2(test_bad_git_diff_headers,
+ "test badly formatted git diff headers"),
SVN_TEST_PASS2(test_parse_property_diff,
"test property unidiff parsing"),
SVN_TEST_PASS2(test_parse_property_and_text_diff,
"test property and text unidiff parsing"),
+ SVN_TEST_PASS2(test_parse_diff_symbols_in_prop_unidiff,
+ "test property diffs with odd symbols"),
+ SVN_TEST_PASS2(test_git_diffs_with_spaces_diff,
+ "test git diffs with spaces in paths"),
SVN_TEST_NULL
};
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_subr/dirent_uri-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_subr/dirent_uri-test.c?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_subr/dirent_uri-test.c (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_subr/dirent_uri-test.c Wed Aug 11 16:43:22 2010
@@ -2792,6 +2792,73 @@ test_file_url_from_dirent(apr_pool_t *po
return SVN_NO_ERROR;
}
+static svn_error_t *
+test_dirent_is_under_root(apr_pool_t *pool)
+{
+ struct {
+ const char *base_path;
+ const char *path;
+ svn_boolean_t under_root;
+ const char *result;
+ } tests[] = {
+ { "/", "/base", FALSE},
+ { "/aa", "/aa/bb", FALSE},
+ { "/base", "/base2", FALSE},
+ { "/b", "bb", TRUE, "/b/bb"},
+ { "/b", "../bb", FALSE},
+ { "/b", "r/./bb", TRUE, "/b/r/bb"},
+ { "/b", "r/../bb", TRUE, "/b/bb"},
+ { "/b", "r/../../bb", FALSE},
+ { "/b", "./bb", TRUE, "/b/bb"},
+ { "/b", ".", TRUE, "/b"},
+ { "/b", "", TRUE, "/b"},
+ { "b", "b", TRUE, "b/b"},
+#ifdef SVN_USE_DOS_PATHS
+ { "C:/file", "a\\d", TRUE, "C:/file/a/d"},
+ { "C:/file", "aa\\..\\d", TRUE, "C:/file/d"},
+ { "C:/file", "aa\\..\\..\\d", FALSE},
+#else
+ { "C:/file", "a\\d", TRUE, "C:/file/a\\d"},
+ { "C:/file", "aa\\..\\d", TRUE, "C:/file/aa\\..\\d"},
+ { "C:/file", "aa\\..\\..\\d", TRUE, "C:/file/aa\\..\\..\\d"},
+#endif /* SVN_USE_DOS_PATHS */
+ };
+ int i;
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ svn_boolean_t under_root;
+ const char *result;
+
+ SVN_ERR(svn_dirent_is_under_root(&under_root,
+ &result,
+ tests[i].base_path,
+ tests[i].path,
+ pool));
+
+ if (under_root != tests[i].under_root)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_is_under_root(..\"%s\", \"%s\"..)"
+ " returned %s expected %s.",
+ tests[i].base_path,
+ tests[i].path,
+ under_root ? "TRUE" : "FALSE",
+ tests[i].under_root ? "TRUE" : "FALSE");
+
+ if (under_root
+ && strcmp(result, tests[i].result) != 0)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_is_under_root(..\"%s\", \"%s\"..)"
+ " found \"%s\" expected \"%s\".",
+ tests[i].base_path,
+ tests[i].path,
+ result,
+ tests[i].result);
+ }
+
+ return SVN_NO_ERROR;
+}
+
/* The test table. */
@@ -2890,5 +2957,7 @@ struct svn_test_descriptor_t test_funcs[
"test svn_uri_get_dirent_from_file_url errors"),
SVN_TEST_PASS2(test_file_url_from_dirent,
"test svn_uri_get_file_url_from_dirent"),
+ SVN_TEST_PASS2(test_dirent_is_under_root,
+ "test svn_dirent_is_under_root"),
SVN_TEST_NULL
};
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_subr/stream-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_subr/stream-test.c?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_subr/stream-test.c (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_subr/stream-test.c Wed Aug 11 16:43:22 2010
@@ -307,142 +307,6 @@ test_stream_range(apr_pool_t *pool)
return SVN_NO_ERROR;
}
-/* An implementation of svn_io_line_filter_cb_t */
-static svn_error_t *
-line_filter(svn_boolean_t *filtered, const char *line, void *baton,
- apr_pool_t *scratch_pool)
-{
- *filtered = strchr(line, '!') != NULL;
- return SVN_NO_ERROR;
-}
-
-static svn_error_t *
-test_stream_line_filter(apr_pool_t *pool)
-{
- static const char *lines[4] = {"Not filtered.", "Filtered!",
- "Not filtered either.", "End of the lines!"};
- svn_string_t *string;
- svn_stream_t *stream;
- svn_stringbuf_t *line;
- svn_boolean_t eof;
-
- string = svn_string_createf(pool, "%s\n%s\n%s\n%s", lines[0], lines[1],
- lines[2], lines[3]);
- stream = svn_stream_from_string(string, pool);
-
- svn_stream_set_line_filter_callback(stream, line_filter);
-
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_STRING_ASSERT(line->data, lines[0]);
- /* line[1] should be filtered */
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_STRING_ASSERT(line->data, lines[2]);
-
- /* The last line should also be filtered, and the resulting
- * stringbuf should be empty. */
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_ASSERT(eof && svn_stringbuf_isempty(line));
-
- return SVN_NO_ERROR;
-}
-
-/* An implementation of svn_io_line_transformer_cb_t */
-static svn_error_t *
-line_transformer(svn_stringbuf_t **buf, const char *line, void *baton,
- apr_pool_t *result_pool, apr_pool_t *scratch_pool)
-{
- int i, len = strlen(line);
- char *temp = apr_palloc(scratch_pool, len + 1 );
-
- for (i = 0; i < len; i++)
- {
- temp[i] = line[len - 1 - i];
- }
-
- temp[len] = '\0';
-
- *buf = svn_stringbuf_create(temp, result_pool);
-
- return SVN_NO_ERROR;
-}
-
-static svn_error_t *
-test_stream_line_transformer(apr_pool_t *pool)
-{
- static const char *lines[4] = {"gamma", "",
- "iota", "!"};
-
- static const char *inv_lines[4] = {"ammag", "",
- "atoi", "!"};
- svn_string_t *string;
- svn_stream_t *stream;
- svn_stringbuf_t *line;
- svn_boolean_t eof;
-
- string = svn_string_createf(pool, "%s\n%s\n%s\n%s", lines[0], lines[1],
- lines[2], lines[3]);
-
- stream = svn_stream_from_string(string, pool);
-
- svn_stream_set_line_transformer_callback(stream, line_transformer);
-
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_STRING_ASSERT(line->data, inv_lines[0]);
-
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_STRING_ASSERT(line->data, inv_lines[1]);
-
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_STRING_ASSERT(line->data, inv_lines[2]);
-
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_STRING_ASSERT(line->data, inv_lines[3]);
-
- /* We should have reached eof and the stringbuf should be emtpy. */
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_ASSERT(eof && svn_stringbuf_isempty(line));
-
- return SVN_NO_ERROR;
-}
-
-static svn_error_t *
-test_stream_line_filter_and_transformer(apr_pool_t *pool)
-{
- static const char *lines[4] = {"!gamma", "",
- "iota", "!"};
-
- static const char *inv_lines[4] = {"ammag", "",
- "atoi", "!"};
- svn_string_t *string;
- svn_stream_t *stream;
- svn_stringbuf_t *line;
- svn_boolean_t eof;
-
- string = svn_string_createf(pool, "%s\n%s\n%s\n%s", lines[0], lines[1],
- lines[2], lines[3]);
-
- stream = svn_stream_from_string(string, pool);
-
- svn_stream_set_line_filter_callback(stream, line_filter);
-
- svn_stream_set_line_transformer_callback(stream, line_transformer);
-
- /* Line one should be filtered. */
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_STRING_ASSERT(line->data, inv_lines[1]);
-
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_STRING_ASSERT(line->data, inv_lines[2]);
-
- /* The last line should also be filtered, and the resulting
- * stringbuf should be empty. */
- svn_stream_readline(stream, &line, "\n", &eof, pool);
- SVN_TEST_ASSERT(eof && svn_stringbuf_isempty(line));
-
- return SVN_NO_ERROR;
-
-}
-
static svn_error_t *
test_stream_tee(apr_pool_t *pool)
{
@@ -647,12 +511,6 @@ struct svn_test_descriptor_t test_funcs[
"test compressed streams"),
SVN_TEST_PASS2(test_stream_range,
"test streams reading from range of file"),
- SVN_TEST_PASS2(test_stream_line_filter,
- "test stream line filtering"),
- SVN_TEST_PASS2(test_stream_line_transformer,
- "test stream line transforming"),
- SVN_TEST_PASS2(test_stream_line_filter_and_transformer,
- "test stream line filtering and transforming"),
SVN_TEST_PASS2(test_stream_tee,
"test 'tee' streams"),
SVN_TEST_PASS2(test_stream_seek_file,
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/db-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/db-test.c?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/db-test.c (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/db-test.c Wed Aug 11 16:43:22 2010
@@ -321,9 +321,13 @@ create_fake_wc(const char *subdir, int f
svn_sqlite__db_t *sdb;
const char * const my_statements[] = {
statements[STMT_CREATE_SCHEMA],
+#ifdef SVN_WC__NODE_DATA
+ statements[STMT_CREATE_NODE_DATA],
+#endif
TESTING_DATA,
NULL
};
+ int i;
SVN_ERR(svn_io_make_dir_recursively(dirpath, scratch_pool));
svn_error_clear(svn_io_remove_file(dbpath, scratch_pool));
@@ -332,11 +336,8 @@ create_fake_wc(const char *subdir, int f
0, NULL,
scratch_pool, scratch_pool));
- /* Create the database's schema. */
- SVN_ERR(svn_sqlite__exec_statements(sdb, /* my_statements[] */ 0));
-
- /* Throw our extra data into the database. */
- SVN_ERR(svn_sqlite__exec_statements(sdb, /* my_statements[] */ 1));
+ for (i = 0; my_statements[i] != NULL; i++)
+ SVN_ERR(svn_sqlite__exec_statements(sdb, /* my_statements[] */ i));
return SVN_NO_ERROR;
}
@@ -681,7 +682,7 @@ test_inserting_nodes(apr_pool_t *pool)
props,
1, TIME_1a, AUTHOR_1,
children, svn_depth_infinity,
- NULL, NULL,
+ NULL, NULL, NULL,
pool));
/* Replace an incomplete node with a file node. */
@@ -692,7 +693,7 @@ test_inserting_nodes(apr_pool_t *pool)
props,
1, TIME_1a, AUTHOR_1,
checksum, 10,
- NULL, NULL,
+ NULL, NULL, NULL,
pool));
/* Create a new symlink node. */
@@ -703,7 +704,7 @@ test_inserting_nodes(apr_pool_t *pool)
props,
1, TIME_1a, AUTHOR_1,
"O-target",
- NULL, NULL,
+ NULL, NULL, NULL,
pool));
/* Replace an incomplete node with an absent file node. */
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/entries-compat.c
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/entries-compat.c?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/entries-compat.c (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/entries-compat.c Wed Aug 11 16:43:22 2010
@@ -337,6 +337,7 @@ make_one_db(const char *dirpath,
{
const char *dbpath = svn_dirent_join(dirpath, "wc.db", scratch_pool);
svn_sqlite__db_t *sdb;
+ int i;
/* Create fake-wc/SUBDIR/.svn/ for placing the metadata. */
SVN_ERR(svn_io_make_dir_recursively(dirpath, scratch_pool));
@@ -347,11 +348,8 @@ make_one_db(const char *dirpath,
0, NULL,
scratch_pool, scratch_pool));
- /* Create the database's schema. */
- SVN_ERR(svn_sqlite__exec_statements(sdb, /* my_statements[] */ 0));
-
- /* Throw our extra data into the database. */
- SVN_ERR(svn_sqlite__exec_statements(sdb, /* my_statements[] */ 1));
+ for (i = 0; my_statements[i] != NULL; i++)
+ SVN_ERR(svn_sqlite__exec_statements(sdb, /* my_statements[] */ i));
return SVN_NO_ERROR;
}
@@ -360,24 +358,33 @@ make_one_db(const char *dirpath,
static svn_error_t *
create_fake_wc(const char *subdir, int format, apr_pool_t *scratch_pool)
{
+ const char *root;
const char *dirpath;
const char * const my_statements[] = {
statements[STMT_CREATE_SCHEMA],
+#ifdef SVN_WC__NODE_DATA
+ statements[STMT_CREATE_NODE_DATA],
+#endif
TESTING_DATA,
NULL
};
const char * const M_statements[] = {
statements[STMT_CREATE_SCHEMA],
+#ifdef SVN_WC__NODE_DATA
+ statements[STMT_CREATE_NODE_DATA],
+#endif
M_TESTING_DATA,
NULL
};
- dirpath = svn_dirent_join_many(scratch_pool,
- "fake-wc", subdir, ".svn", NULL);
+ root = svn_dirent_join("fake-wc", subdir, scratch_pool);
+
+ SVN_ERR(svn_io_remove_dir2(root, TRUE, NULL, NULL, scratch_pool));
+
+ dirpath = svn_dirent_join(root, ".svn", scratch_pool);
SVN_ERR(make_one_db(dirpath, my_statements, scratch_pool));
- dirpath = svn_dirent_join_many(scratch_pool,
- "fake-wc", subdir, "M", ".svn", NULL);
+ dirpath = svn_dirent_join_many(scratch_pool, root, "M", ".svn", NULL);
SVN_ERR(make_one_db(dirpath, M_statements, scratch_pool));
return SVN_NO_ERROR;
@@ -524,6 +531,169 @@ test_stubs(apr_pool_t *pool)
return SVN_NO_ERROR;
}
+static svn_error_t *
+test_access_baton_like_locking(apr_pool_t *pool)
+{
+ svn_wc__db_t *db;
+ svn_wc_context_t *wc_ctx, *wc_ctx2;
+ const char *local_abspath;
+ const char *D, *D1, *D2, *D3, *D4;
+ svn_boolean_t locked_here, locked;
+ svn_error_t *err;
+ svn_wc_adm_access_t *adm_access, *subdir_access;
+
+#undef WC_NAME
+#define WC_NAME "test_access_batons"
+ SVN_ERR(create_open(&db, &local_abspath, WC_NAME, pool));
+
+ D = svn_dirent_join(local_abspath, "DD", pool);
+
+ D1 = svn_dirent_join(D, "DD", pool);
+ D2 = svn_dirent_join(D1, "DD", pool);
+ D3 = svn_dirent_join(D2, "DD", pool);
+ D4 = svn_dirent_join(D3, "DD", pool);
+
+ SVN_ERR(svn_io_make_dir_recursively(D4, pool));
+
+ /* Use the legacy interface */
+ SVN_ERR(svn_wc_adm_open3(&adm_access, NULL, local_abspath, TRUE, 0,
+ NULL, NULL, pool));
+ SVN_ERR(svn_wc_add3(D, adm_access, svn_depth_infinity, NULL,
+ SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_wc_adm_retrieve(&subdir_access, adm_access, D, pool));
+ SVN_ERR(svn_wc_add3(D1, subdir_access, svn_depth_infinity, NULL,
+ SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_wc_adm_retrieve(&subdir_access, adm_access, D1, pool));
+ SVN_ERR(svn_wc_add3(D2, subdir_access, svn_depth_infinity, NULL,
+ SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_wc_adm_retrieve(&subdir_access, adm_access, D2, pool));
+ SVN_ERR(svn_wc_add3(D3, subdir_access, svn_depth_infinity, NULL,
+ SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_wc_locked(&locked, D3, pool));
+ SVN_TEST_ASSERT(locked);
+ SVN_ERR(svn_wc_revert3(D, adm_access, -1, FALSE,
+ NULL, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_wc_locked(&locked, D3, pool));
+ SVN_TEST_ASSERT(!locked);
+ SVN_ERR(svn_wc_adm_close2(adm_access, pool));
+
+ SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, pool, pool));
+
+ /* Obtain a lock for the root, which is extended on each level */
+ SVN_ERR(svn_wc__db_wclock_obtain(wc_ctx->db, local_abspath, 0, FALSE, pool));
+ SVN_ERR(svn_wc_add4(wc_ctx, D, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+ NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_wc_add4(wc_ctx, D1, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+ NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_wc_add4(wc_ctx, D2, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+ NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_wc_add4(wc_ctx, D3, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+ NULL, NULL, NULL, NULL, pool));
+
+ SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, D3, pool));
+ SVN_TEST_ASSERT(locked_here && locked);
+
+ /* Test if the not added path is already locked */
+ SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, D4, pool));
+ SVN_TEST_ASSERT(!locked_here && !locked);
+
+ SVN_ERR(svn_wc_add4(wc_ctx, D4, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+ NULL, NULL, NULL, NULL, pool));
+
+ SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, D4, pool));
+ SVN_TEST_ASSERT(locked_here && locked);
+
+ SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, local_abspath, pool));
+ /* Should be unlocked */
+ SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, local_abspath, pool));
+ SVN_TEST_ASSERT(!locked_here && !locked);
+
+ /* Lock shouldn't be released */
+ SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, D, pool));
+ SVN_TEST_ASSERT(locked_here && locked);
+
+ SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D, pool));
+ SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D1, pool));
+ SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D2, pool));
+ SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D3, pool));
+
+ /* Try reobtaining lock on D3; should succeed */
+ SVN_ERR(svn_wc__db_wclock_obtain(wc_ctx->db, D3, 0, FALSE, pool));
+ SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D4, pool));
+
+
+ /* D3 should still be locked; try stealing in a different context */
+ SVN_ERR(svn_wc_context_create(&wc_ctx2, NULL, pool, pool));
+ SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx2, D3, pool));
+ SVN_TEST_ASSERT(!locked_here && locked);
+
+ err = svn_wc__db_wclock_obtain(wc_ctx2->db, D3, 0, FALSE, pool);
+
+ if (err && err->apr_err != SVN_ERR_WC_LOCKED)
+ return svn_error_return(err);
+ svn_error_clear(err);
+
+ SVN_TEST_ASSERT(err != NULL); /* Can't lock, as it is still locked */
+
+ err = svn_wc__db_wclock_release(wc_ctx2->db, D4, pool);
+ if (err && err->apr_err != SVN_ERR_WC_NOT_LOCKED)
+ return svn_error_return(err);
+ svn_error_clear(err);
+
+ SVN_TEST_ASSERT(err != NULL); /* Can't unlock, as it is not ours */
+
+ /* Now steal the lock */
+ SVN_ERR(svn_wc__db_wclock_obtain(wc_ctx2->db, D3, 0, TRUE, pool));
+
+ /* We should own the lock now */
+ SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx2, D3, pool));
+ SVN_TEST_ASSERT(locked_here && locked);
+
+ err = svn_wc__db_wclock_release(wc_ctx2->db, D4, pool);
+ if (err && err->apr_err != SVN_ERR_WC_NOT_LOCKED)
+ return svn_error_return(err);
+ svn_error_clear(err);
+
+ SVN_TEST_ASSERT(err != NULL); /* Can't unlock a not locked path */
+
+ /* Now create a separate working copy from the same repository directly
+ below this WC and test if our code really sees it as a separate wc,
+ for locking and normal operation */
+ {
+ const char *url, *repos_root_url, *repos_uuid;
+ const char *subdir = svn_dirent_join(local_abspath, "sub-wc", pool);
+
+ svn_boolean_t is_root;
+ SVN_ERR(svn_wc__node_get_url(&url, wc_ctx, local_abspath, pool, pool));
+ SVN_ERR(svn_wc__node_get_repos_info(&repos_root_url, &repos_uuid,
+ wc_ctx, local_abspath, FALSE, FALSE,
+ pool, pool));
+
+ SVN_ERR(svn_io_make_dir_recursively(subdir, pool));
+ SVN_ERR(svn_wc_ensure_adm3(subdir, repos_uuid,
+ svn_uri_join(url, "sub-wc", pool),
+ repos_root_url, 0, svn_depth_infinity,
+ pool));
+
+ SVN_ERR(svn_wc__check_wc_root(&is_root, NULL, NULL, wc_ctx->db, subdir,
+ pool));
+
+ SVN_TEST_ASSERT(is_root);
+
+ SVN_ERR(svn_wc__check_wc_root(&is_root, NULL, NULL, wc_ctx2->db, subdir,
+ pool));
+
+ /* This test was added to show a regression where the next check failed,
+ but the check above this succeeded */
+ SVN_TEST_ASSERT(is_root);
+
+ SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx2, subdir, pool));
+ SVN_TEST_ASSERT(!locked_here && !locked);
+ }
+
+ return SVN_NO_ERROR;
+}
+
struct svn_test_descriptor_t test_funcs[] =
{
@@ -532,5 +702,7 @@ struct svn_test_descriptor_t test_funcs[
"entries are allocated in access baton"),
SVN_TEST_PASS2(test_stubs,
"access baton mojo can return stubs"),
+ SVN_TEST_PASS2(test_access_baton_like_locking,
+ "access baton like locks must work with wc-ng"),
SVN_TEST_NULL
};
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/pristine-store-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/pristine-store-test.c?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/pristine-store-test.c (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/libsvn_wc/pristine-store-test.c Wed Aug 11 16:43:22 2010
@@ -162,7 +162,7 @@ pristine_write_read(const svn_test_opts_
svn_boolean_t present;
SVN_ERR(svn_wc__db_pristine_check(&present, db, wc_abspath, data_sha1,
- svn_wc__db_checkmode_usable, pool));
+ pool));
SVN_ERR_ASSERT(! present);
}
@@ -175,7 +175,7 @@ pristine_write_read(const svn_test_opts_
svn_boolean_t present;
SVN_ERR(svn_wc__db_pristine_check(&present, db, wc_abspath, data_sha1,
- svn_wc__db_checkmode_usable, pool));
+ pool));
SVN_ERR_ASSERT(present);
}
@@ -220,7 +220,7 @@ pristine_write_read(const svn_test_opts_
svn_boolean_t present;
SVN_ERR(svn_wc__db_pristine_check(&present, db, wc_abspath, data_sha1,
- svn_wc__db_checkmode_usable, pool));
+ pool));
SVN_ERR_ASSERT(! present);
}
@@ -249,15 +249,21 @@ pristine_get_translated(const svn_test_o
translation. Set some properties on it. */
{
svn_wc_context_t *wc_ctx;
+ const char *dirname = svn_dirent_dirname(versioned_abspath, pool);
SVN_ERR(svn_wc__context_create_with_db(&wc_ctx, NULL, db, pool));
SVN_ERR(svn_io_file_create(versioned_abspath, data, pool));
+
+ SVN_ERR(svn_wc__db_wclock_obtain(wc_ctx->db, dirname, 0, FALSE, pool));
+
SVN_ERR(svn_wc_add4(wc_ctx, versioned_abspath, svn_depth_empty,
NULL, SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL,
pool));
SVN_ERR(svn_wc_prop_set4(wc_ctx, versioned_abspath,
"svn:keywords", svn_string_create("Rev", pool),
FALSE, NULL, NULL, pool));
+
+ SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, dirname, pool));
}
/* Store a pristine text, and set DATA_SHA1 and DATA_MD5. */
Modified: subversion/branches/ignore-mergeinfo/subversion/tests/svn_test_main.c
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/subversion/tests/svn_test_main.c?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/subversion/tests/svn_test_main.c (original)
+++ subversion/branches/ignore-mergeinfo/subversion/tests/svn_test_main.c Wed Aug 11 16:43:22 2010
@@ -138,6 +138,7 @@ svn_test_add_dir_cleanup(const char *pat
{
const char *abspath;
svn_error_t *err = svn_path_get_absolute(&abspath, path, cleanup_pool);
+ svn_error_clear(err);
if (!err)
apr_pool_cleanup_register(cleanup_pool, abspath, cleanup_rmtree,
apr_pool_cleanup_null);
Modified: subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd (original)
+++ subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd Wed Aug 11 16:43:22 2010
@@ -23,6 +23,23 @@ SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDE
CALL ..\svn-config.cmd
IF ERRORLEVEL 1 EXIT /B 1
+svnversion . /1.6.x | find "S" > nul:
+IF ERRORLEVEL 1 (
+ ECHO --- Building 1.6.x: Skipping bindings ---
+ EXIT /B 0
+)
+
+PATH %PATH%;%TESTDIR%\bin
+SET result=0
+
+
+echo python win-tests.py -r -f fsfs --javahl "%TESTDIR%\tests"
+python win-tests.py -r -f fsfs --javahl "%TESTDIR%\tests"
+IF ERRORLEVEL 1 (
+ echo [python reported error %ERRORLEVEL%]
+ SET result=1
+)
+
IF EXIST "%TESTDIR%\swig" rmdir /s /q "%TESTDIR%\swig"
mkdir "%TESTDIR%\swig\py-release\libsvn"
mkdir "%TESTDIR%\swig\py-release\svn"
@@ -32,8 +49,12 @@ xcopy "release\subversion\bindings\swig\
xcopy "subversion\bindings\swig\python\*.py" "%TESTDIR%\swig\py-release\libsvn\*.py"
xcopy "subversion\bindings\swig\python\svn\*.py" "%TESTDIR%\swig\py-release\svn\*.py"
-PATH %PATH%;%TESTDIR%\bin
SET PYTHONPATH=%TESTDIR%\swig\py-release
python subversion\bindings\swig\python\tests\run_all.py
-IF ERRORLEVEL 1 EXIT /B 1
+IF ERRORLEVEL 1 (
+ echo [Python reported error %ERRORLEVEL%]
+ SET result=1
+)
+
+exit /b %result%
Modified: subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd (original)
+++ subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd Wed Aug 11 16:43:22 2010
@@ -23,5 +23,11 @@ SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDE
CALL ..\svn-config.cmd
IF ERRORLEVEL 1 EXIT /B 1
+svnversion . /1.6.x | find "S" > nul:
+IF ERRORLEVEL 1 (
+ ECHO --- Building 1.6.x: Skipping bindings ---
+ EXIT /B 0
+)
+
msbuild subversion_vcnet.sln /p:Configuration=Release /p:Platform=win32 /t:__JAVAHL__ /t:__SWIG_PYTHON__ /t:__SWIG_PERL__ /t:__JAVAHL_TESTS__
IF ERRORLEVEL 1 EXIT /B 1
Modified: subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd (original)
+++ subversion/branches/ignore-mergeinfo/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd Wed Aug 11 16:43:22 2010
@@ -51,9 +51,14 @@ POPD
taskkill /im svn.exe /f 2> nul:
taskkill /im svnadmin.exe /f 2> nul:
taskkill /im svnserve.exe /f 2> nul:
+taskkill /im svnrdump.exe /f 2> nul:
+taskkill /im svnsync.exe /f 2> nul:
taskkill /im httpd.exe /f 2> nul:
IF EXIST "%TESTDIR%\tests\subversion\tests\cmdline\httpd\" (
rmdir /s /q "%TESTDIR%\tests\subversion\tests\cmdline\httpd"
)
+del "%TESTDIR%\tests\*.log" 2> nul:
+
+
exit /B 0
Modified: subversion/branches/ignore-mergeinfo/tools/client-side/svn-viewspec.py
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/tools/client-side/svn-viewspec.py?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/tools/client-side/svn-viewspec.py (original)
+++ subversion/branches/ignore-mergeinfo/tools/client-side/svn-viewspec.py Wed Aug 11 16:43:22 2010
@@ -20,9 +20,10 @@
# ====================================================================
"""\
-Usage: 1. __SCRIPTNAME__ VIEWSPEC-FILE TARGET-DIR
- 2. __SCRIPTNAME__ VIEWSPEC-FILE --dump-tree
- 3. __SCRIPTNAME__ --help
+Usage: 1. __SCRIPTNAME__ checkout VIEWSPEC-FILE TARGET-DIR
+ 2. __SCRIPTNAME__ examine VIEWSPEC-FILE
+ 3. __SCRIPTNAME__ help
+ 4. __SCRIPTNAME__ help-format
VIEWSPEC-FILE is the path of a file whose contents describe a
Subversion sparse checkouts layout, or '-' if that description should
@@ -34,9 +35,14 @@ by this script as it checks out the spec
2. Parse VIEWSPEC-FILE and dump out a human-readable representation of
the tree described in the specification.
-
+
3. Show this usage message.
+4. Show information about the file format this program expects.
+
+"""
+
+FORMAT_HELP = """\
Viewspec File Format
====================
@@ -93,7 +99,7 @@ script in conjunction with 'svn cat' to
versioned viewspec file:
$ svn cat http://svn.example.com/specs/dev-spec.txt |
- __SCRIPTNAME__ - /path/to/target/directory
+ __SCRIPTNAME__ checkout - /path/to/target/directory
"""
@@ -294,32 +300,47 @@ def checkout_spec(viewspec, target_dir):
viewspec.tree,
target_dir)
+def usage_and_exit(errmsg=None):
+ stream = errmsg and sys.stderr or sys.stdout
+ msg = __doc__.replace("__SCRIPTNAME__", os.path.basename(sys.argv[0]))
+ stream.write(msg)
+ if errmsg:
+ stream.write("ERROR: %s\n" % (errmsg))
+ sys.exit(errmsg and 1 or 0)
+
def main():
- if len(sys.argv) < 3 or '--help' in sys.argv:
- msg = __doc__.replace("__SCRIPTNAME__", os.path.basename(sys.argv[0]))
- sys.stderr.write(msg)
- sys.exit(1)
- if sys.argv[1] == '-':
- fp = sys.stdin
- else:
- fp = open(sys.argv[1], 'r')
- if sys.argv[2] == '--dump-tree':
- target_dir = None
- else:
- target_dir = sys.argv[2]
-
- viewspec = parse_viewspec(fp)
- if target_dir is None:
- sys.stderr.write("Url: %s\n" % (viewspec.base_url))
+ argc = len(sys.argv)
+ if argc < 2:
+ usage_and_exit('Not enough arguments.')
+ subcommand = sys.argv[1]
+ if subcommand == 'help':
+ usage_and_exit()
+ elif subcommand == 'help-format':
+ msg = FORMAT_HELP.replace("__SCRIPTNAME__",
+ os.path.basename(sys.argv[0]))
+ sys.stdout.write(msg)
+ elif subcommand == 'examine':
+ if argc < 3:
+ usage_and_exit('No viewspec file specified.')
+ fp = (sys.argv[2] == '-') and sys.stdin or open(sys.argv[2], 'r')
+ viewspec = parse_viewspec(fp)
+ sys.stdout.write("Url: %s\n" % (viewspec.base_url))
revision = viewspec.revision
if revision != -1:
- sys.stderr.write("Revision: %s\n" % (revision))
+ sys.stdout.write("Revision: %s\n" % (revision))
else:
- sys.stderr.write("Revision: HEAD\n")
- sys.stderr.write("\n")
+ sys.stdout.write("Revision: HEAD\n")
+ sys.stdout.write("\n")
viewspec.tree.dump(True)
+ elif subcommand == 'checkout':
+ if argc < 3:
+ usage_and_exit('No viewspec file specified.')
+ if argc < 4:
+ usage_and_exit('No target directory specified.')
+ fp = (sys.argv[2] == '-') and sys.stdin or open(sys.argv[2], 'r')
+ checkout_spec(parse_viewspec(fp), sys.argv[3])
else:
- checkout_spec(viewspec, target_dir)
-
+ usage_and_exit('Unknown subcommand "%s".' % (subcommand))
+
if __name__ == "__main__":
main()
Modified: subversion/branches/ignore-mergeinfo/tools/dev/unix-build/Makefile.svn
URL: http://svn.apache.org/viewvc/subversion/branches/ignore-mergeinfo/tools/dev/unix-build/Makefile.svn?rev=984468&r1=984467&r2=984468&view=diff
==============================================================================
--- subversion/branches/ignore-mergeinfo/tools/dev/unix-build/Makefile.svn (original)
+++ subversion/branches/ignore-mergeinfo/tools/dev/unix-build/Makefile.svn Wed Aug 11 16:43:22 2010
@@ -2,31 +2,6 @@
#
# WARNING: This may or may not work on your system. This Makefile is
# an example, rather than a ready-made universal solution.
-#
-# This Makefile builds and installs Subversion, and many of its
-# dependencies, on UNIX-like systems, in the current working directory.
-# Indirect dependencies are not covered, e.g. you need OpenSSL installed
-# to get SSL support in neon and serf.
-#
-# The Makefile can also run Subversion's regression test suite via all
-# repository backends and RA methods. It generates the necessary configuration
-# files and starts svnserve and httpd daemons automatically on non-privileged
-# ports.
-#
-# Some version of Subversion is required to be in $PATH and will be used
-# to check out working copies.
-#
-# The default is to compile trunk.
-# Pass the branch you want to build in BRANCH, e.g.
-# $ make BRANCH="1.5.x"
-# You can also pass a tag to build:
-# $ make TAG="1.6.6"
-# And you can specify a working copy to use, in case you need more
-# than one working copy of the same branch:
-# $ make BRANCH="1.6.x" WC="1.6.x-test2"
-#
-# After the build, point your PATH to the Subversion build you want to use.
-# Note that this Makefile requires GNU make.
ENABLE_PYTHON_BINDINGS ?= yes
ENABLE_RUBY_BINDINGS ?= yes
@@ -35,6 +10,7 @@ ENABLE_JAVA_BINDINGS ?= no # they don't
USE_APR_ICONV ?= no # set to yes to use APR iconv instead of GNU iconv
PWD = $(shell pwd)
+UNAME = $(shell uname)
TAG ?= none
ifeq ($(TAG),none)
@@ -84,8 +60,8 @@ FETCH_CMD = wget -c
SUBVERSION_REPOS_URL = https://svn.apache.org/repos/asf/subversion
BDB_URL = http://ftp2.de.freebsd.org/pub/FreeBSD/distfiles/bdb/$(BDB_DIST)
APR_URL = http://svn.apache.org/repos/asf/apr/apr
-APR_ICONV_URL = ftp://ftp.fu-berlin.de/unix/www/apache/apr/$(APR_ICONV_DIST)
-GNU_ICONV_URL = ftp://ftp.fu-berlin.de/unix/gnu/libiconv/$(GNU_ICONV_DIST)
+APR_ICONV_URL = http://www.apache.org/dist/apr/$(APR_ICONV_DIST)
+GNU_ICONV_URL = http://ftp.gnu.org/pub/gnu/libiconv/$(GNU_ICONV_DIST)
APR_UTIL_URL = http://svn.apache.org/repos/asf/apr/apr-util
HTTPD_URL = http://archive.apache.org/dist/httpd/$(HTTPD_DIST)
NEON_URL = http://webdav.org/neon/$(NEON_DIST)
@@ -154,7 +130,7 @@ nuke:
yes) echo "You said $$ANSWER. I will continue."; \
echo rm -rf $(SRCDIR) $(OBJDIR) $(PREFIX); \
rm -rf $(SRCDIR) $(OBJDIR) $(PREFIX); \
- echo "Remember to reset the build!"; \
+ $(MAKE) reset; \
;; \
"") echo "You said no."; \
;; \
@@ -553,11 +529,22 @@ $(NEON_OBJDIR)/.retrieved: $(DISTDIR)/$(
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(NEON_DIST)
touch $@
+# OpenBSD does not have krb5-config in PATH, but the neon port has
+# a suitable replacement.
+ifeq ($(UNAME),OpenBSD)
+KRB5_CONFIG_PATH=/usr/ports/net/neon/files
+endif
+
# configure neon
$(NEON_OBJDIR)/.configured: $(NEON_OBJDIR)/.retrieved
cd $(NEON_SRCDIR) && ./autogen.sh
+ if [ -n "$(KRB5_CONFIG_PATH)" -a -d "$(KRB5_CONFIG_PATH)" ]; then \
+ cp $(KRB5_CONFIG_PATH)/krb5-config $(NEON_OBJDIR); \
+ chmod +x $(NEON_OBJDIR)/krb5-config; \
+ fi
cd $(NEON_OBJDIR) \
&& env CFLAGS="-g" $(NEON_SRCDIR)/configure \
+ PATH=$(NEON_OBJDIR):$$PATH \
--prefix=$(PREFIX)/neon \
--with-ssl \
--enable-shared