Posted to commits@subversion.apache.org by st...@apache.org on 2010/11/26 19:55:54 UTC

svn commit: r1039511 [5/5] - in /subversion/branches/performance: ./ build/ subversion/bindings/swig/perl/native/ subversion/bindings/swig/ruby/test/ subversion/include/ subversion/include/private/ subversion/libsvn_client/ subversion/libsvn_delta/ sub...

Modified: subversion/branches/performance/subversion/tests/cmdline/svnsync_tests_data/descend_into_replace.expected.dump
URL: http://svn.apache.org/viewvc/subversion/branches/performance/subversion/tests/cmdline/svnsync_tests_data/descend_into_replace.expected.dump?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/subversion/tests/cmdline/svnsync_tests_data/descend_into_replace.expected.dump (original)
+++ subversion/branches/performance/subversion/tests/cmdline/svnsync_tests_data/descend_into_replace.expected.dump Fri Nov 26 18:55:51 2010
@@ -233,3 +233,179 @@ PROPS-END
 This is the file 'mu'.
 
 
+Revision-number: 4
+Prop-content-length: 108
+Content-length: 108
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2010-11-22T15:40:03.023927Z
+K 7
+svn:log
+V 7
+log msg
+PROPS-END
+
+Node-path: trunk/H/Z
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: trunk/H/Z/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: trunk/H/Z/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: trunk/H/Z/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: trunk/H/Z/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: trunk/H/Z/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: trunk/H/Z/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: trunk/H/Z/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
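
A side note on reading the dump records above: for each node, Content-length is
just the property block plus the text, i.e. Prop-content-length plus
Text-content-length (10 + 23 = 33 for trunk/H/Z/mu, and 10 alone for the
property-only directory records).  A tiny illustrative check, not part of the
test data:

# Content-length bookkeeping as seen in the node records above; the helper
# itself is illustrative only.
def node_content_length(prop_content_length, text_content_length=0):
    return prop_content_length + text_content_length

assert node_content_length(10, 23) == 33   # e.g. the trunk/H/Z/mu record
assert node_content_length(10) == 10       # a directory record (PROPS-END only)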

Modified: subversion/branches/performance/subversion/tests/cmdline/svntest/actions.py
URL: http://svn.apache.org/viewvc/subversion/branches/performance/subversion/tests/cmdline/svntest/actions.py?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/subversion/tests/cmdline/svntest/actions.py (original)
+++ subversion/branches/performance/subversion/tests/cmdline/svntest/actions.py Fri Nov 26 18:55:51 2010
@@ -356,6 +356,13 @@ def load_repo(sbox, dumpfile_path = None
 
   return dump_str
 
+def expected_noop_update_output(rev):
+  """Return an ExpectedOutput object describing what we'd expect to
+  see from an update to revision REV that was effectively a no-op (no
+  server changes transmitted)."""
+  return verify.createExpectedOutput("Updating '.*' ...|At revision %d."
+                                     % (rev),
+                                     "no-op update")
 
 ######################################################################
 # Subversion Actions
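
As an illustration of the helper added above: the string it hands to
verify.createExpectedOutput accepts either the new "Updating '...' ..." header
or the classic "At revision N." line, so callers (update_tests.py below aliases
it as exp_noop_up_out) no longer hard-code one exact output line.  A standalone
sketch of just that alternation, using a hypothetical path name:

import re

# The pattern built by expected_noop_update_output(2); only the alternation
# is demonstrated here, not the svntest.verify matching machinery.
pattern = "Updating '.*' ...|At revision %d." % 2

assert re.match(pattern, "Updating 'wc_dir' ...")  # hypothetical target path
assert re.match(pattern, "At revision 2.")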

Modified: subversion/branches/performance/subversion/tests/cmdline/svntest/main.py
URL: http://svn.apache.org/viewvc/subversion/branches/performance/subversion/tests/cmdline/svntest/main.py?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/subversion/tests/cmdline/svntest/main.py (original)
+++ subversion/branches/performance/subversion/tests/cmdline/svntest/main.py Fri Nov 26 18:55:51 2010
@@ -1351,6 +1351,7 @@ def create_default_options():
 def _create_parser():
   """Return a parser for our test suite."""
   # set up the parser
+  _default_http_library = 'serf'
   usage = 'usage: %prog [options] [<test> ...]'
   parser = optparse.OptionParser(usage=usage)
   parser.add_option('-l', '--list', action='store_true', dest='list_tests',
@@ -1381,7 +1382,7 @@ def _create_parser():
   parser.add_option('--http-library', action='store',
                     help="Make svn use this DAV library (neon or serf) if " +
                          "it supports both, else assume it's using this " +
-                         "one; the default is neon")
+                         "one; the default is " + _default_http_library)
   parser.add_option('--server-minor-version', type='int', action='store',
                     help="Set the minor version for the server ('4', " +
                          "'5', or '6').")
@@ -1406,7 +1407,7 @@ def _create_parser():
   parser.set_defaults(
         server_minor_version=7,
         url=file_scheme_prefix + pathname2url(os.path.abspath(os.getcwd())),
-        http_library='serf')
+        http_library=_default_http_library)
 
   return parser
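
The hunk above is a small single-source-of-truth cleanup: the default DAV
library is named once in _default_http_library and reused both in the
--http-library help text and in set_defaults().  The same pattern outside the
test suite, as a standalone sketch (not svntest code):

import optparse

_default_http_library = 'serf'

parser = optparse.OptionParser(usage='usage: %prog [options]')
parser.add_option('--http-library', action='store',
                  help="DAV library to use; the default is "
                       + _default_http_library)
parser.set_defaults(http_library=_default_http_library)

options, args = parser.parse_args([])
assert options.http_library == 'serf'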
 

Modified: subversion/branches/performance/subversion/tests/cmdline/update_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/performance/subversion/tests/cmdline/update_tests.py?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/subversion/tests/cmdline/update_tests.py (original)
+++ subversion/branches/performance/subversion/tests/cmdline/update_tests.py Fri Nov 26 18:55:51 2010
@@ -39,6 +39,7 @@ Skip = svntest.testcase.Skip
 SkipUnless = svntest.testcase.SkipUnless
 XFail = svntest.testcase.XFail
 Item = svntest.wc.StateItem
+exp_noop_up_out = svntest.actions.expected_noop_update_output
 
 from svntest.main import SVN_PROP_MERGEINFO, server_has_mergeinfo
 
@@ -1168,11 +1169,13 @@ def another_hudson_problem(sbox):
   # as 'deleted' and should not alter gamma's entry.
 
   if not svntest.main.wc_is_singledb(wc_dir):
-    expected_output = ['D    '+G_path+'\n',
+    expected_output = ["Updating '%s' ...\n" % (G_path),
+                       'D    '+G_path+'\n',
                        'Updated to revision 3.\n',
                        ]
   else:
-    expected_output = ['Restored \'' + G_path + '\'\n',
+    expected_output = ["Updating '%s' ...\n" % (G_path),
+                       'Restored \'' + G_path + '\'\n',
                        'Restored \'' + G_path + os.path.sep + 'pi\'\n',
                        'Restored \'' + G_path + os.path.sep + 'rho\'\n',
                        'Restored \'' + G_path + os.path.sep + 'tau\'\n',
@@ -1226,9 +1229,9 @@ def update_deleted_targets(sbox):
                                         None, wc_dir)
 
   # Explicit update must not remove the 'deleted=true' entries
-  svntest.actions.run_and_verify_svn(None, ['At revision 2.\n'], [],
+  svntest.actions.run_and_verify_svn(None, exp_noop_up_out(2), [],
                                      'update', gamma_path)
-  svntest.actions.run_and_verify_svn(None, ['At revision 2.\n'], [],
+  svntest.actions.run_and_verify_svn(None, exp_noop_up_out(2), [],
                                      'update', F_path)
 
   # Update to r1 to restore items, since the parent directory is already
@@ -3350,7 +3353,7 @@ def mergeinfo_update_elision(sbox):
                                         expected_status, None, wc_dir)
 
   # Update A to get all paths to the same working revision.
-  svntest.actions.run_and_verify_svn(None, ["At revision 7.\n"], [],
+  svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [],
                                      'up', wc_dir)
 
   # Merge r6:7 into A/B_COPY/E
@@ -3394,7 +3397,7 @@ def mergeinfo_update_elision(sbox):
 
   # r8 - Commit the merge
   svntest.actions.run_and_verify_svn(None,
-                                     ["At revision 7.\n"],
+                                     exp_noop_up_out(7),
                                      [], 'update', wc_dir)
 
   expected_output = wc.State(wc_dir,
@@ -3792,7 +3795,8 @@ def update_accept_conflicts(sbox):
   # Just leave the conflicts alone, since run_and_verify_svn already uses
   # the --non-interactive option.
   svntest.actions.run_and_verify_svn(None,
-                                     ['C    %s\n' % (iota_path_backup,),
+                                     ["Updating '%s' ...\n" % (iota_path_backup),
+                                      'C    %s\n' % (iota_path_backup,),
                                       'Updated to revision 2.\n',
                                       'Summary of conflicts:\n',
                                       '  Text conflicts: 1\n'],
@@ -3802,7 +3806,8 @@ def update_accept_conflicts(sbox):
   # lambda: --accept=postpone
   # Just leave the conflicts alone.
   svntest.actions.run_and_verify_svn(None,
-                                     ['C    %s\n' % (lambda_path_backup,),
+                                     ["Updating '%s' ...\n" % (lambda_path_backup),
+                                      'C    %s\n' % (lambda_path_backup,),
                                       'Updated to revision 2.\n',
                                       'Summary of conflicts:\n',
                                       '  Text conflicts: 1\n'],
@@ -3813,7 +3818,8 @@ def update_accept_conflicts(sbox):
   # mu: --accept=base
   # Accept the pre-update base file.
   svntest.actions.run_and_verify_svn(None,
-                                     ['G    %s\n' % (mu_path_backup,),
+                                     ["Updating '%s' ...\n" % (mu_path_backup),
+                                      'G    %s\n' % (mu_path_backup,),
                                       'Updated to revision 2.\n'],
                                      [],
                                      'update', '--accept=base',
@@ -3822,7 +3828,8 @@ def update_accept_conflicts(sbox):
   # alpha: --accept=mine
   # Accept the user's working file.
   svntest.actions.run_and_verify_svn(None,
-                                     ['G    %s\n' % (alpha_path_backup,),
+                                     ["Updating '%s' ...\n" % (alpha_path_backup),
+                                      'G    %s\n' % (alpha_path_backup,),
                                       'Updated to revision 2.\n'],
                                      [],
                                      'update', '--accept=mine-full',
@@ -3831,7 +3838,8 @@ def update_accept_conflicts(sbox):
   # beta: --accept=theirs
   # Accept their file.
   svntest.actions.run_and_verify_svn(None,
-                                     ['G    %s\n' % (beta_path_backup,),
+                                     ["Updating '%s' ...\n" % (beta_path_backup),
+                                      'G    %s\n' % (beta_path_backup,),
                                       'Updated to revision 2.\n'],
                                      [],
                                      'update', '--accept=theirs-full',
@@ -3842,7 +3850,8 @@ def update_accept_conflicts(sbox):
   # conflicts in place, so expect a message on stderr, but expect
   # svn to exit with an exit code of 0.
   svntest.actions.run_and_verify_svn2(None,
-                                      ['G    %s\n' % (pi_path_backup,),
+                                      ["Updating '%s' ...\n" % (pi_path_backup),
+                                       'G    %s\n' % (pi_path_backup,),
                                        'Updated to revision 2.\n'],
                                       "system(.*) returned.*", 0,
                                       'update', '--accept=edit',
@@ -3851,7 +3860,8 @@ def update_accept_conflicts(sbox):
   # rho: --accept=launch
   # Run the external merge tool, it should leave conflict markers in place.
   svntest.actions.run_and_verify_svn(None,
-                                     ['C    %s\n' % (rho_path_backup,),
+                                     ["Updating '%s' ...\n" % (rho_path_backup),
+                                      'C    %s\n' % (rho_path_backup,),
                                       'Updated to revision 2.\n',
                                       'Summary of conflicts:\n',
                                       '  Text conflicts: 1\n'],

Modified: subversion/branches/performance/subversion/tests/cmdline/upgrade_tests.py
URL: http://svn.apache.org/viewvc/subversion/branches/performance/subversion/tests/cmdline/upgrade_tests.py?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/subversion/tests/cmdline/upgrade_tests.py (original)
+++ subversion/branches/performance/subversion/tests/cmdline/upgrade_tests.py Fri Nov 26 18:55:51 2010
@@ -698,17 +698,17 @@ def delete_in_copy_upgrade(sbox):
   replace_sbox_with_tarfile(sbox, 'delete-in-copy.tar.bz2')
 
   # Doesn't work, creates spurious base nodes for the copy
-  svntest.actions.run_and_verify_svn(None, expected_output, [],
+  svntest.actions.run_and_verify_svn(None, None, [],
                                      'upgrade', sbox.wc_dir)
 
   expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
   expected_status.add({
-      'A/B-copied'         : Item(status='A  ', copied='+', wc_rev='-'),
-      'A/B-copied/lambda'  : Item(status='   ', copied='+', wc_rev='-'),
-      'A/B-copied/E'       : Item(status='D  ', wc_rev='-'),
-      'A/B-copied/E/alpha' : Item(status='D  ', wc_rev='-'),
-      'A/B-copied/E/beta'  : Item(status='D  ', wc_rev='-'),
-      'A/B-copied/F'       : Item(status='   ', copied='+', wc_rev='-'),
+      'A/B-copied'         : Item(status='A ', copied='+', wc_rev='-'),
+      'A/B-copied/lambda'  : Item(status='  ', copied='+', wc_rev='-'),
+      'A/B-copied/E'       : Item(status='D ', wc_rev='?'),
+      'A/B-copied/E/alpha' : Item(status='D ', wc_rev='?'),
+      'A/B-copied/E/beta'  : Item(status='D ', wc_rev='?'),
+      'A/B-copied/F'       : Item(status='  ', copied='+', wc_rev='-'),
       })
   run_and_verify_status_no_server(sbox.wc_dir, expected_status)
 
@@ -728,14 +728,14 @@ test_list = [ None,
               # Upgrading from 1.4.0-1.4.5 with specific states fails
               # See issue #2530
               XFail(x3_1_4_0),
-              x3_1_4_6,
-              x3_1_6_12,
+              XFail(x3_1_4_6),
+              XFail(x3_1_6_12),
               missing_dirs,
               missing_dirs2,
               XFail(delete_and_keep_local),
               dirs_only_upgrade,
               upgrade_tree_conflict_data,
-              XFail(delete_in_copy_upgrade),
+              delete_in_copy_upgrade,
              ]
 
 

Modified: subversion/branches/performance/subversion/tests/cmdline/upgrade_tests_data/delete-in-copy.tar.bz2
URL: http://svn.apache.org/viewvc/subversion/branches/performance/subversion/tests/cmdline/upgrade_tests_data/delete-in-copy.tar.bz2?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
Binary files - no diff available.

Modified: subversion/branches/performance/subversion/tests/libsvn_subr/dirent_uri-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/performance/subversion/tests/libsvn_subr/dirent_uri-test.c?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/subversion/tests/libsvn_subr/dirent_uri-test.c (original)
+++ subversion/branches/performance/subversion/tests/libsvn_subr/dirent_uri-test.c Fri Nov 26 18:55:51 2010
@@ -1514,7 +1514,6 @@ test_relpath_is_ancestor(apr_pool_t *poo
   } tests[] = {
     { "foo",            "foo/bar",        TRUE},
     { "food/bar",       "foo/bar",        FALSE},
-    { "/",               "/foo",          TRUE},
     { "",                "foo",           TRUE},
     { "",                ".bar",          TRUE},
     { "foo/bar",         "foo",           FALSE},
@@ -1640,7 +1639,7 @@ test_dirent_skip_ancestor(apr_pool_t *po
       if (strcmp(tests[i].result, retval))
         return svn_error_createf(
              SVN_ERR_TEST_FAILED, NULL,
-             "test_dirent_skip_ancestor (%s, %s) returned %s instead of %s",
+             "svn_dirent_skip_ancestor (%s, %s) returned %s instead of %s",
              tests[i].path1, tests[i].path2, retval, tests[i].result);
     }
   return SVN_NO_ERROR;
@@ -1846,8 +1845,8 @@ test_relpath_get_longest_ancestor(apr_po
     {
       const char *retval;
 
-      retval = svn_uri_get_longest_ancestor(tests[i].path1, tests[i].path2,
-                                             pool);
+      retval = svn_relpath_get_longest_ancestor(tests[i].path1, tests[i].path2,
+                                                pool);
 
       if (strcmp(tests[i].result, retval))
         return svn_error_createf
@@ -2859,6 +2858,198 @@ test_dirent_is_under_root(apr_pool_t *po
   return SVN_NO_ERROR;
 }
 
+static svn_error_t *
+test_fspath_is_canonical(apr_pool_t *pool)
+{
+  struct {
+    const char *path;
+    svn_boolean_t canonical;
+  } tests[] = {
+    { "",                      FALSE },
+    { ".",                     FALSE },
+    { "/",                     TRUE },
+    { "/a",                    TRUE },
+    { "/a/",                   FALSE },
+    { "//a",                   FALSE },
+    { "/a/b",                  TRUE },
+    { "/a//b",                 FALSE },
+    { "\\",                    FALSE },
+    { "\\a",                   FALSE },
+    { "/\\a",                  TRUE },  /* a single component */
+    { "/a\\",                  TRUE },  /* a single component */
+    { "/a\\b",                 TRUE },  /* a single component */
+  };
+  int i;
+
+  for (i = 0; i < COUNT_OF(tests); i++)
+    {
+      svn_boolean_t canonical
+        = svn_fspath__is_canonical(tests[i].path);
+
+      if (tests[i].canonical != canonical)
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_fspath__is_canonical(\"%s\") returned "
+                                 "\"%s\" expected \"%s\"",
+                                 tests[i].path,
+                                 canonical ? "TRUE" : "FALSE",
+                                 tests[i].canonical ? "TRUE" : "FALSE");
+    }
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_fspath_join(apr_pool_t *pool)
+{
+  int i;
+
+  static const char * const joins[][3] = {
+    { "/",    "",     "/" },
+    { "/",    "d",    "/d" },
+    { "/",    "d/e",  "/d/e" },
+    { "/abc", "",     "/abc" },
+    { "/abc", "d",    "/abc/d" },
+    { "/abc", "d/e",  "/abc/d/e" },
+  };
+
+  for (i = 0; i < COUNT_OF(joins); i++ )
+    {
+      char *result = svn_fspath__join(joins[i][0], joins[i][1], pool);
+
+      SVN_TEST_STRING_ASSERT(result, joins[i][2]);
+    }
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_fspath_is_child(apr_pool_t *pool)
+{
+  int i, j;
+
+  static const char * const paths[] = {
+    "/",
+    "/f",
+    "/foo",
+    "/foo/bar",
+    "/foo/bars",
+    "/foo/bar/baz",
+    };
+
+  static const char * const
+    remainders[COUNT_OF(paths)][COUNT_OF(paths)] = {
+    { 0,  "f",  "foo",  "foo/bar",  "foo/bars", "foo/bar/baz" },
+    { 0,  0,    0,      0,          0,          0             },
+    { 0,  0,    0,      "bar",      "bars",     "bar/baz"     },
+    { 0,  0,    0,      0,          0,          "baz"         },
+    { 0,  0,    0,      0,          0,          0             },
+    { 0,  0,    0,      0,          0,          0             },
+  };
+
+  for (i = 0; i < COUNT_OF(paths); i++)
+    {
+      for (j = 0; j < COUNT_OF(paths); j++)
+        {
+          const char *remainder
+            = svn_fspath__is_child(paths[i], paths[j], pool);
+
+          SVN_TEST_STRING_ASSERT(remainder, remainders[i][j]);
+        }
+    }
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_fspath_dirname_basename_split(apr_pool_t *pool)
+{
+  int i;
+
+  static const struct {
+    const char *path;
+    const char *dirname;
+    const char *basename;
+  } tests[] = {
+    { "/", "/", "" },
+    { "/a", "/", "a" },
+    { "/abc", "/", "abc" },
+    { "/x/abc", "/x", "abc" },
+    { "/x/y/abc", "/x/y", "abc" },
+  };
+
+  for (i = 0; i < COUNT_OF(tests); i++)
+    {
+      const char *result_dirname, *result_basename;
+
+      result_dirname = svn_fspath__dirname(tests[i].path, pool);
+      SVN_TEST_STRING_ASSERT(result_dirname, tests[i].dirname);
+
+      result_basename = svn_fspath__basename(tests[i].path, pool);
+      SVN_TEST_STRING_ASSERT(result_basename, tests[i].basename);
+
+      svn_fspath__split(&result_dirname, &result_basename, tests[i].path,
+                        pool);
+      SVN_TEST_STRING_ASSERT(result_dirname, tests[i].dirname);
+      SVN_TEST_STRING_ASSERT(result_basename, tests[i].basename);
+    }
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_fspath_get_longest_ancestor(apr_pool_t *pool)
+{
+  int i;
+
+  /* Paths to test and their expected results.  Same as in
+   * test_relpath_get_longest_ancestor() but with '/' prefix. */
+  struct {
+    const char *path1;
+    const char *path2;
+    const char *result;
+  } tests[] = {
+    { "/foo",            "/foo/bar",         "/foo" },
+    { "/foo/bar",        "/foo/bar",         "/foo/bar" },
+    { "/",               "/foo",             "/" },
+    { "/",               "/foo",             "/" },
+    { "/",               "/.bar",            "/" },
+    { "/.bar",           "/",                "/" },
+    { "/foo/bar",        "/foo",             "/foo" },
+    { "/foo/bar",        "/foo",             "/foo" },
+    { "/rif",            "/raf",             "/" },
+    { "/foo",            "/bar",             "/" },
+    { "/foo",            "/foo/bar",         "/foo" },
+    { "/foo.",           "/foo./.bar",       "/foo." },
+    { "/",               "/",                "/" },
+    { "/http:/test",     "/http:/test",      "/http:/test" },
+    { "/http:/test",     "/http:/taste",     "/http:" },
+    { "/http:/test",     "/http:/test/foo",  "/http:/test" },
+    { "/http:/test",     "/file:/test/foo",  "/" },
+    { "/http:/test",     "/http:/testF",     "/http:" },
+    { "/file:/A/C",      "/file:/B/D",       "/file:" },
+    { "/file:/A/C",      "/file:/A/D",       "/file:/A" },
+    { "/X:/foo",         "/X:",              "/X:" },
+    { "/X:/folder1",     "/X:/folder2",      "/X:" },
+    { "/X:",             "/X:foo",           "/" },
+    { "/X:foo",          "/X:bar",           "/" },
+  };
+
+  for (i = 0; i < COUNT_OF(tests); i++)
+    {
+      const char *result;
+
+      result = svn_fspath__get_longest_ancestor(tests[i].path1, tests[i].path2,
+                                                pool);
+      SVN_TEST_STRING_ASSERT(tests[i].result, result);
+
+      /* changing the order of the paths should return the same result */
+      result = svn_fspath__get_longest_ancestor(tests[i].path2, tests[i].path1,
+                                                pool);
+      SVN_TEST_STRING_ASSERT(tests[i].result, result);
+    }
+  return SVN_NO_ERROR;
+}
+
 
 /* The test table.  */
 
@@ -2906,9 +3097,9 @@ struct svn_test_descriptor_t test_funcs[
     SVN_TEST_PASS2(test_dirent_split,
                    "test svn_dirent_split"),
     SVN_TEST_PASS2(test_relpath_split,
-                   "test test_relpath_split"),
+                   "test svn_relpath_split"),
     SVN_TEST_PASS2(test_uri_split,
-                   "test test_uri_split"),
+                   "test svn_uri_split"),
     SVN_TEST_PASS2(test_dirent_get_longest_ancestor,
                    "test svn_dirent_get_longest_ancestor"),
     SVN_TEST_PASS2(test_relpath_get_longest_ancestor,
@@ -2928,11 +3119,11 @@ struct svn_test_descriptor_t test_funcs[
     SVN_TEST_PASS2(test_uri_is_ancestor,
                    "test svn_uri_is_ancestor"),
     SVN_TEST_PASS2(test_dirent_skip_ancestor,
-                   "test test_dirent_skip_ancestor"),
+                   "test svn_dirent_skip_ancestor"),
     SVN_TEST_PASS2(test_relpath_skip_ancestor,
-                   "test test_relpath_skip_ancestor"),
+                   "test svn_relpath_skip_ancestor"),
     SVN_TEST_PASS2(test_uri_skip_ancestor,
-                   "test test_uri_skip_ancestor"),
+                   "test svn_uri_skip_ancestor"),
     SVN_TEST_PASS2(test_dirent_get_absolute,
                    "test svn_dirent_get_absolute"),
 #ifdef WIN32
@@ -2959,5 +3150,15 @@ struct svn_test_descriptor_t test_funcs[
                    "test svn_uri_get_file_url_from_dirent"),
     SVN_TEST_PASS2(test_dirent_is_under_root,
                    "test svn_dirent_is_under_root"),
+    SVN_TEST_PASS2(test_fspath_is_canonical,
+                   "test svn_fspath__is_canonical"),
+    SVN_TEST_PASS2(test_fspath_join,
+                   "test svn_fspath__join"),
+    SVN_TEST_PASS2(test_fspath_is_child,
+                   "test svn_fspath__is_child"),
+    SVN_TEST_PASS2(test_fspath_dirname_basename_split,
+                   "test svn_fspath__dirname/basename/split"),
+    SVN_TEST_PASS2(test_fspath_get_longest_ancestor,
+                   "test svn_fspath__get_longest_ancestor"),
     SVN_TEST_NULL
   };
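
For readers unfamiliar with the fspath helpers exercised above: an fspath is a
repository-absolute path that always starts with '/', and the joins[] table
pins down the join semantics.  A throwaway Python model of the behaviour that
table expects (not the actual C implementation):

def fspath_join(fspath, relpath):
    # Joining the empty relpath is a no-op, and exactly one '/' separates
    # the two parts; mirrors the expectations in the joins[] table above.
    if not relpath:
        return fspath
    if fspath == '/':
        return '/' + relpath
    return fspath + '/' + relpath

# The six cases from the table:
assert fspath_join('/', '') == '/'
assert fspath_join('/', 'd') == '/d'
assert fspath_join('/', 'd/e') == '/d/e'
assert fspath_join('/abc', '') == '/abc'
assert fspath_join('/abc', 'd') == '/abc/d'
assert fspath_join('/abc', 'd/e') == '/abc/d/e'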

Modified: subversion/branches/performance/subversion/tests/libsvn_wc/op-depth-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/performance/subversion/tests/libsvn_wc/op-depth-test.c?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/subversion/tests/libsvn_wc/op-depth-test.c (original)
+++ subversion/branches/performance/subversion/tests/libsvn_wc/op-depth-test.c Fri Nov 26 18:55:51 2010
@@ -21,6 +21,9 @@
  * ====================================================================
  */
 
+/* To avoid warnings... */
+#define SVN_DEPRECATED
+
 #include <apr_pools.h>
 #include <apr_general.h>
 
@@ -231,6 +234,19 @@ wc_resolved(wc_baton_t *b, const char *p
   return svn_client_resolved(wc_path(b, path), TRUE, ctx, b->pool);
 }
 
+static svn_error_t *
+wc_move(wc_baton_t *b, const char *src, const char *dst)
+{
+  svn_client_ctx_t *ctx;
+  apr_array_header_t *paths = apr_array_make(b->pool, 1,
+                                             sizeof(const char *));
+
+  SVN_ERR(svn_client_create_context(&ctx, b->pool));
+  APR_ARRAY_PUSH(paths, const char *) = wc_path(b, src);
+  return svn_client_move6(paths, wc_path(b, dst),
+                          FALSE, FALSE, NULL, NULL, NULL, ctx, b->pool);
+}
+
 /* Create the Greek tree on disk in the WC, and commit it. */
 static svn_error_t *
 add_and_commit_greek_tree(wc_baton_t *b)
@@ -322,12 +338,6 @@ typedef struct {
  * Append an error message to BATON->errors if they differ or are not both
  * present.
  *
- * If the FOUND row has field values that should have been elided
- * (because they match the parent row), then do so now.  We want to ignore
- * any such lack of elision, for the purposes of these tests, because the
- * method of copying in use (at the time this tweak is introduced) does
- * calculate these values itself, it simply copies from the source rows.
- *
  * Implements svn_hash_diff_func_t. */
 static svn_error_t *
 compare_nodes_rows(const void *key, apr_ssize_t klen,
@@ -337,37 +347,6 @@ compare_nodes_rows(const void *key, apr_
   comparison_baton_t *b = baton;
   nodes_row_t *expected = apr_hash_get(b->expected_hash, key, klen);
   nodes_row_t *found = apr_hash_get(b->found_hash, key, klen);
-  nodes_row_t elided;
-
-  /* If the FOUND row has field values that should have been elided
-   * (because they match the parent row), then do so now. */
-  if (found && found->op_depth > 0 && found->repo_relpath)
-    {
-      const char *parent_relpath, *name, *parent_key;
-      nodes_row_t *parent_found;
-
-      svn_relpath_split(&parent_relpath, &name, found->local_relpath,
-                        b->scratch_pool);
-      parent_key = apr_psprintf(b->scratch_pool, "%d %s",
-                                found->op_depth, parent_relpath);
-      parent_found = apr_hash_get(b->found_hash, parent_key,
-                                  APR_HASH_KEY_STRING);
-      if (parent_found && parent_found->op_depth > 0
-          && parent_found->repo_relpath
-          && found->op_depth == parent_found->op_depth
-          && found->repo_revnum == parent_found->repo_revnum
-          && strcmp(found->repo_relpath,
-                    svn_relpath_join(parent_found->repo_relpath, name,
-                                     b->scratch_pool)) == 0)
-        {
-          /* Iterating in hash order, which is arbitrary, so only make
-             changes in a local copy */
-          elided = *found;
-          found = &elided;
-          found->repo_relpath = NULL;
-          found->repo_revnum = SVN_INVALID_REVNUM;
-        }
-    }
 
   if (! expected)
     {
@@ -523,8 +502,8 @@ wc_wc_copies(wc_baton_t *b)
         /* base dir */
         { source_base_dir, "A/C/copy2", {
             { 3, "",                "normal",   1, source_base_dir },
-            { 3, "alpha",           "normal",   NO_COPY_FROM },
-            { 3, "beta",            "normal",   NO_COPY_FROM }
+            { 3, "alpha",           "normal",   1, "A/B/E/alpha" },
+            { 3, "beta",            "normal",   1, "A/B/E/beta" }
           } },
 
         /* added file */
@@ -546,18 +525,18 @@ wc_wc_copies(wc_baton_t *b)
         /* copied dir */
         { source_copied_dir, "A/C/copy6", {
             { 3, "",                "normal",   1, source_base_dir },
-            { 3, "alpha",           "normal",   NO_COPY_FROM },
-            { 3, "beta",            "normal",   NO_COPY_FROM }
+            { 3, "alpha",           "normal",   1, "A/B/E/alpha" },
+            { 3, "beta",            "normal",   1, "A/B/E/beta" }
           } },
 
         /* copied tree with everything in it */
         { source_everything, "A/C/copy7", {
             { 3, "",                "normal",   1, source_everything },
-            { 3, "lambda",          "normal",   NO_COPY_FROM },
-            { 3, "E",               "normal",   NO_COPY_FROM },
-            { 3, "E/alpha",         "normal",   NO_COPY_FROM },
-            { 3, "E/beta",          "normal",   NO_COPY_FROM },
-            { 3, "F",               "normal",   NO_COPY_FROM },
+            { 3, "lambda",          "normal",   1, "A/B/lambda" },
+            { 3, "E",               "normal",   1, "A/B/E" },
+            { 3, "E/alpha",         "normal",   1, "A/B/E/alpha" },
+            { 3, "E/beta",          "normal",   1, "A/B/E/beta" },
+            { 3, "F",               "normal",   1, "A/B/F" },
             /* Each add is an op_root */
             { 4, "file-added",      "normal",   NO_COPY_FROM },
             { 4, "D-added",         "normal",   NO_COPY_FROM },
@@ -565,16 +544,16 @@ wc_wc_copies(wc_baton_t *b)
             /* Each copied-copy subtree is an op_root */
             { 4, "lambda-copied",   "normal",   1, source_base_file },
             { 4, "E-copied",        "normal",   1, source_base_dir },
-            { 4, "E-copied/alpha",  "normal",   NO_COPY_FROM },
-            { 4, "E-copied/beta",   "normal",   NO_COPY_FROM }
+            { 4, "E-copied/alpha",  "normal",   1, "A/B/E/alpha" },
+            { 4, "E-copied/beta",   "normal",   1, "A/B/E/beta" }
           } },
 
         /* dir onto a schedule-delete file */
         { source_base_dir, "A/D/gamma", {
             { 0, "",                "normal",   1, "A/D/gamma" },
             { 3, "",                "normal",   1, source_base_dir },
-            { 3, "alpha",           "normal",   NO_COPY_FROM },
-            { 3, "beta",            "normal",   NO_COPY_FROM }
+            { 3, "alpha",           "normal",   1, "A/B/E/alpha" },
+            { 3, "beta",            "normal",   1, "A/B/E/beta" }
           } },
 
         /* file onto a schedule-delete dir */
@@ -645,8 +624,8 @@ repo_wc_copies(wc_baton_t *b)
         /* dir onto nothing */
         { "A/B/E", "A/C/copy2", {
             { 3, "",                "normal",       1, "A/B/E" },
-            { 3, "alpha",           "normal",       NO_COPY_FROM },
-            { 3, "beta",            "normal",       NO_COPY_FROM },
+            { 3, "alpha",           "normal",       1, "A/B/E/alpha" },
+            { 3, "beta",            "normal",       1, "A/B/E/beta" },
           } },
 
         /* file onto a schedule-delete file */
@@ -665,16 +644,16 @@ repo_wc_copies(wc_baton_t *b)
             { 3, "pi",              "base-deleted", NO_COPY_FROM },
             { 3, "rho",             "base-deleted", NO_COPY_FROM },
             { 3, "tau",             "base-deleted", NO_COPY_FROM },
-            { 3, "alpha",           "normal",       NO_COPY_FROM },
-            { 3, "beta",            "normal",       NO_COPY_FROM },
+            { 3, "alpha",           "normal",       1, "A/B/E/alpha" },
+            { 3, "beta",            "normal",       1, "A/B/E/beta" },
           } },
 
         /* dir onto a schedule-delete file */
         { "A/B/E", "A/D/gamma", {
             { 0, "",                "normal",       1, "A/D/gamma" },
             { 3, "",                "normal",       1, "A/B/E" },
-            { 3, "alpha",           "normal",       NO_COPY_FROM },
-            { 3, "beta",            "normal",       NO_COPY_FROM },
+            { 3, "alpha",           "normal",       1, "A/B/E/alpha" },
+            { 3, "beta",            "normal",       1, "A/B/E/beta" },
           } },
 
         /* file onto a schedule-delete dir */
@@ -960,8 +939,8 @@ test_delete_of_copies(const svn_test_opt
   {
     nodes_row_t rows[] = {
       { 2, "A/B-copied/E",       "not-present",       1, "A/B/E" },
-      { 2, "A/B-copied/E/alpha", "not-present",       NO_COPY_FROM},
-      { 2, "A/B-copied/E/beta",  "not-present",       NO_COPY_FROM},
+      { 2, "A/B-copied/E/alpha", "not-present",       1, "A/B/E/alpha" },
+      { 2, "A/B-copied/E/beta",  "not-present",       1, "A/B/E/beta" },
       { 0 }
     };
     SVN_ERR(check_db_rows(&b, "A/B-copied/E", rows));
@@ -971,12 +950,12 @@ test_delete_of_copies(const svn_test_opt
   {
     nodes_row_t rows[] = {
       { 2, "A/B-copied/E",       "not-present",       1, "A/B/E" },
-      { 2, "A/B-copied/E/alpha", "not-present",       NO_COPY_FROM},
-      { 2, "A/B-copied/E/beta",  "not-present",       NO_COPY_FROM},
+      { 2, "A/B-copied/E/alpha", "not-present",       1, "A/B/E/alpha" },
+      { 2, "A/B-copied/E/beta",  "not-present",       1, "A/B/E/beta" },
       { 3, "A/B-copied/E",       "normal",            1, "A/D/G" },
-      { 3, "A/B-copied/E/pi",    "normal",            NO_COPY_FROM},
-      { 3, "A/B-copied/E/rho",   "normal",            NO_COPY_FROM},
-      { 3, "A/B-copied/E/tau",   "normal",            NO_COPY_FROM},
+      { 3, "A/B-copied/E/pi",    "normal",            1, "A/D/G/pi" },
+      { 3, "A/B-copied/E/rho",   "normal",            1, "A/D/G/rho" },
+      { 3, "A/B-copied/E/tau",   "normal",            1, "A/D/G/tau" },
       { 0 }
     };
     SVN_ERR(check_db_rows(&b, "A/B-copied/E", rows));
@@ -987,12 +966,12 @@ test_delete_of_copies(const svn_test_opt
   {
     nodes_row_t rows[] = {
       { 2, "A/B-copied/E",       "not-present",       1, "A/B/E" },
-      { 2, "A/B-copied/E/alpha", "not-present",       NO_COPY_FROM},
-      { 2, "A/B-copied/E/beta",  "not-present",       NO_COPY_FROM},
+      { 2, "A/B-copied/E/alpha", "not-present",       1, "A/B/E/alpha" },
+      { 2, "A/B-copied/E/beta",  "not-present",       1, "A/B/E/beta" },
       { 3, "A/B-copied/E",       "normal",            1, "A/D/G" },
-      { 3, "A/B-copied/E/pi",    "normal",            NO_COPY_FROM},
-      { 3, "A/B-copied/E/rho",   "not-present",       NO_COPY_FROM},
-      { 3, "A/B-copied/E/tau",   "normal",            NO_COPY_FROM},
+      { 3, "A/B-copied/E/pi",    "normal",            1, "A/D/G/pi" },
+      { 3, "A/B-copied/E/rho",   "not-present",       1, "A/D/G/rho" },
+      { 3, "A/B-copied/E/tau",   "normal",            1, "A/D/G/tau" },
       { 0 }
     };
     SVN_ERR(check_db_rows(&b, "A/B-copied/E", rows));
@@ -1003,8 +982,8 @@ test_delete_of_copies(const svn_test_opt
   {
     nodes_row_t rows[] = {
       { 2, "A/B-copied/E",       "not-present",       1, "A/B/E" },
-      { 2, "A/B-copied/E/alpha", "not-present",       NO_COPY_FROM},
-      { 2, "A/B-copied/E/beta",  "not-present",       NO_COPY_FROM},
+      { 2, "A/B-copied/E/alpha", "not-present",       1, "A/B/E/alpha" },
+      { 2, "A/B-copied/E/beta",  "not-present",       1, "A/B/E/beta" },
       { 0 }
     };
     SVN_ERR(check_db_rows(&b, "A/B-copied/E", rows));
@@ -1227,7 +1206,7 @@ insert_dirs(wc_baton_t *b,
   return SVN_NO_ERROR;
 }
 
-static int count_rows(nodes_row_t *rows)
+static apr_int64_t count_rows(nodes_row_t *rows)
 {
   nodes_row_t *first = rows;
   while(rows->local_relpath)
@@ -1245,7 +1224,8 @@ base_dir_insert_remove(wc_baton_t *b,
   nodes_row_t *after;
   const char *dir_abspath = svn_path_join(b->wc_abspath, local_relpath,
                                           b->pool);
-  int i, num_before = count_rows(before), num_added = count_rows(added);
+  int i;
+  apr_int64_t num_before = count_rows(before), num_added = count_rows(added);
 
   SVN_ERR(insert_dirs(b, before));
 
@@ -1598,7 +1578,7 @@ test_temp_op_make_copy(const svn_test_op
 
   b.pool = pool;
   SVN_ERR(svn_test__create_repos_and_wc(&b.repos_url, &b.wc_abspath,
-                                        "base_dir_insert_remove", opts, pool));
+                                        "temp_op_make_copy", opts, pool));
   SVN_ERR(svn_wc_context_create(&b.wc_ctx, NULL, pool, pool));
 
   {
@@ -1628,9 +1608,9 @@ test_temp_op_make_copy(const svn_test_op
       { 2, "A/B/C", "base-deleted", NO_COPY_FROM },
       { 3, "A/B/C", "normal",       NO_COPY_FROM },
       { 2, "A/F",   "normal",       1, "S2" },
-      { 2, "A/F/G", "normal",       NO_COPY_FROM },
-      { 2, "A/F/H", "not-present",  NO_COPY_FROM },
-      { 2, "A/F/E", "base-deleted", NO_COPY_FROM },
+      { 2, "A/F/G", "normal",       1, "S2/G" },
+      { 2, "A/F/H", "not-present",  1, "S2/H" },
+      { 2, "A/F/E", "base-deleted", 2, "A/F/E" },
       { 0 }
     };
     /*  /           norm        -
@@ -1656,19 +1636,19 @@ test_temp_op_make_copy(const svn_test_op
       { 0, "A/X",   "normal",       2, "A/X" },
       { 0, "A/X/Y", "incomplete",   2, "A/X/Y" },
       { 1, "A",     "normal",       2, "A" },
-      { 1, "A/B",   "base-deleted", NO_COPY_FROM },
-      { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
-      { 1, "A/F",   "base-deleted", NO_COPY_FROM },
-      { 1, "A/F/G", "base-deleted", NO_COPY_FROM },
-      { 1, "A/F/H", "base-deleted", NO_COPY_FROM },
-      { 1, "A/F/E", "base-deleted", NO_COPY_FROM },
-      { 1, "A/X",   "normal",       NO_COPY_FROM },
-      { 1, "A/X/Y", "incomplete",   NO_COPY_FROM },
+      { 1, "A/B",   "base-deleted", 2, "A/B" },
+      { 1, "A/B/C", "base-deleted", 2, "A/B/C" },
+      { 1, "A/F",   "base-deleted", 2, "A/F" },
+      { 1, "A/F/G", "base-deleted", 2, "A/F/G" },
+      { 1, "A/F/H", "base-deleted", 2, "A/F/H" },
+      { 1, "A/F/E", "base-deleted", 2, "A/F/E" },
+      { 1, "A/X",   "normal",       2, "A/X" },
+      { 1, "A/X/Y", "incomplete",   2, "A/X/Y" },
       { 2, "A/B",   "normal",       NO_COPY_FROM },
       { 3, "A/B/C", "normal",       NO_COPY_FROM },
       { 2, "A/F",   "normal",       1, "S2" },
-      { 2, "A/F/G", "normal",       NO_COPY_FROM },
-      { 2, "A/F/H", "not-present",  NO_COPY_FROM },
+      { 2, "A/F/G", "normal",       1, "S2/G" },
+      { 2, "A/F/H", "not-present",  1, "S2/H" },
       { 0 }
     };
 
@@ -1678,6 +1658,55 @@ test_temp_op_make_copy(const svn_test_op
   return SVN_NO_ERROR;
 }
 
+static svn_error_t *
+test_wc_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  wc_baton_t b;
+
+  b.pool = pool;
+  SVN_ERR(svn_test__create_repos_and_wc(&b.repos_url, &b.wc_abspath,
+                                        "wc_move", opts, pool));
+  SVN_ERR(svn_wc_context_create(&b.wc_ctx, NULL, pool, pool));
+  SVN_ERR(wc_mkdir(&b, "A"));
+  SVN_ERR(wc_mkdir(&b, "A/B"));
+  SVN_ERR(wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(wc_commit(&b, ""));
+  SVN_ERR(wc_update(&b, "", 1));
+
+  SVN_ERR(wc_move(&b, "A/B/C", "A/B/C-move"));
+  {
+    nodes_row_t rows[] = {
+      { 0, "",           "normal",       1, "" },
+      { 0, "A",          "normal",       1, "A" },
+      { 0, "A/B",        "normal",       1, "A/B" },
+      { 0, "A/B/C",      "normal",       1, "A/B/C" },
+      { 3, "A/B/C",      "base-deleted", NO_COPY_FROM },
+      { 3, "A/B/C-move", "normal",       1, "A/B/C" },
+      { 0 }
+    };
+    SVN_ERR(check_db_rows(&b, "", rows));
+  }
+
+  SVN_ERR(wc_move(&b, "A/B", "A/B-move"));
+  {
+    nodes_row_t rows[] = {
+      { 0, "",                "normal",       1, "" },
+      { 0, "A",               "normal",       1, "A" },
+      { 0, "A/B",             "normal",       1, "A/B" },
+      { 0, "A/B/C",           "normal",       1, "A/B/C" },
+      { 2, "A/B",             "base-deleted", NO_COPY_FROM },
+      { 2, "A/B/C",           "base-deleted", NO_COPY_FROM },
+      { 2, "A/B-move",        "normal",       1, "A/B" },
+      { 2, "A/B-move/C",      "not-present",  1, "A/B/C" },
+      { 3, "A/B-move/C-move", "normal",       1, "A/B/C" },
+      { 0 }
+    };
+    SVN_ERR(check_db_rows(&b, "", rows));
+  }
+
+  return SVN_NO_ERROR;
+}
+
 /* ---------------------------------------------------------------------- */
 /* The list of test functions */
 
@@ -1711,11 +1740,13 @@ struct svn_test_descriptor_t test_funcs[
     SVN_TEST_OPTS_WIMP(test_adds_change_kind,
                        "test_adds_change_kind",
                        "needs op_depth"),
-    SVN_TEST_OPTS_WIMP(test_base_dir_insert_remove,
-                       "test_base_dir_insert_remove",
-                       "needs op_depth"),
+    SVN_TEST_OPTS_PASS(test_base_dir_insert_remove,
+                       "test_base_dir_insert_remove"),
     SVN_TEST_OPTS_WIMP(test_temp_op_make_copy,
                        "test_temp_op_make_copy",
                        "needs op_depth"),
+    SVN_TEST_OPTS_WIMP(test_wc_move,
+                       "test_wc_move",
+                       "needs op_depth"),
     SVN_TEST_NULL
   };
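
A note on reading the nodes_row_t tables in the new test_wc_move test: the
first column is the op_depth, and for rows written by an operation it appears
to equal the number of path components of the operation root (0 for BASE
rows), e.g. 'A/B-move' -> 2 and the nested 'A/B-move/C-move' -> 3.  A small,
purely illustrative check of that reading against the rows above:

def op_depth(op_root_relpath):
    # 0 for the BASE tree, otherwise the component count of the op-root.
    return 0 if op_root_relpath == '' else op_root_relpath.count('/') + 1

assert op_depth('') == 0                  # the { 0, ... } BASE rows
assert op_depth('A/B-move') == 2          # rows written by the outer move
assert op_depth('A/B-move/C-move') == 3   # the nested move's op-root row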

Modified: subversion/branches/performance/tools/dev/unix-build/Makefile.svn
URL: http://svn.apache.org/viewvc/subversion/branches/performance/tools/dev/unix-build/Makefile.svn?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/tools/dev/unix-build/Makefile.svn (original)
+++ subversion/branches/performance/tools/dev/unix-build/Makefile.svn Fri Nov 26 18:55:51 2010
@@ -29,8 +29,8 @@ DISTDIR		= $(PWD)/distfiles
 SRCDIR		= $(PWD)/src
 OBJDIR		= $(PWD)/objdir
 
-BDB_MAJOR_VER	= 4.8
-BDB_VER		= $(BDB_MAJOR_VER).30
+BDB_MAJOR_VER	= 4.7
+BDB_VER		= $(BDB_MAJOR_VER).25
 APR_VER		= 1.4.2
 APR_ICONV_VER	= 1.2.1
 GNU_ICONV_VER	= 1.13.1
@@ -38,7 +38,7 @@ APR_UTIL_VER	= 1.3.9
 HTTPD_VER	= 2.2.15
 NEON_VER	= 0.29.3
 SERF_VER	= 0.7.x
-SERF_OLD_VER	= 0.6.x
+SERF_OLD_VER	= 0.3.1
 CYRUS_SASL_VER	= 2.1.23
 SQLITE_VER	= 3.6.23.1
 
@@ -70,7 +70,7 @@ HTTPD_URL	= http://archive.apache.org/di
 NEON_URL	= http://webdav.org/neon/$(NEON_DIST)
 #SERF_URL	= http://serf.googlecode.com/files/$(SERF_DIST)
 SERF_URL	= http://serf.googlecode.com/svn/branches/$(SERF_VER)
-SERF_OLD_URL	= http://serf.googlecode.com/svn/branches/$(SERF_OLD_VER)
+SERF_OLD_URL	= http://serf.googlecode.com/svn/tags/$(SERF_OLD_VER)
 SQLITE_URL	= http://www.sqlite.org/$(SQLITE_DIST)
 CYRUS_SASL_URL	= ftp://ftp.andrew.cmu.edu/pub/cyrus-mail/$(CYRUS_SASL_DIST)
 
@@ -117,12 +117,12 @@ all: dirs-create bdb-install apr-install
 
 # Use these to start a build from the beginning.
 reset: dirs-reset bdb-reset apr-reset iconv-reset apr-util-reset \
-	httpd-reset neon-reset serf-reset sqlite-reset cyrus-sasl-reset \
-	svn-reset
+	httpd-reset neon-reset serf-reset serf-old-reset sqlite-reset \
+	cyrus-sasl-reset svn-reset
 
 # Use to save disc space.
 clean: bdb-clean apr-clean iconv-clean apr-util-clean httpd-clean \
-	neon-clean serf-clean cyrus-sasl-clean svn-clean
+	neon-clean serf-clean serf-old-clean cyrus-sasl-clean svn-clean
 
 # Nukes everything (including installed binaries!)
 # Use this to start ALL OVER AGAIN! Use with caution!
@@ -548,7 +548,7 @@ endif
 # configure neon
 $(NEON_OBJDIR)/.configured: $(NEON_OBJDIR)/.retrieved
 	cd $(NEON_SRCDIR) && ./autogen.sh
-	if [ -n "$(KRB5_CONFIG_PATH)" -a -d "$(KRB5_CONFIG_PATH)" ]; then \
+	if [ -n "$(KRB5_CONFIG_PATH)" ] && [ -d "$(KRB5_CONFIG_PATH)" ]; then \
 		cp $(KRB5_CONFIG_PATH)/krb5-config $(NEON_OBJDIR); \
 		chmod +x $(NEON_OBJDIR)/krb5-config; \
 	fi
@@ -720,7 +720,7 @@ cyrus-sasl-reset:
 		rm -f $(CYRUS_SASL_OBJDIR)/$(f);)
 
 cyrus-sasl-clean:
-	-(cd $(CYRUS_SASL_OBJDIR) && make clean)
+	-(cd $(CYRUS_SASL_OBJDIR) && make distclean)
 
 # fetch distfile for cyrus-sasl
 $(DISTDIR)/$(CYRUS_SASL_DIST):
@@ -736,12 +736,21 @@ $(CYRUS_SASL_OBJDIR)/.retrieved: $(DISTD
 		> $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c.patched
 	mv $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c.patched \
 		$(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c
+ifeq ($(UNAME),OpenBSD)
+	# Fixes GSSAPI support on OpenBSD, which hasn't got libroken:
+	for f in `grep -l -R -- -lroken $(CYRUS_SASL_SRCDIR)`; do \
+		sed -e 's/-lroken//g' < $$f > $$f.tmp && \
+		mv $$f.tmp $$f; \
+	done
+	chmod +x $(CYRUS_SASL_SRCDIR)/configure
+endif
 	touch $@
 
 # configure cyrus-sasl
 $(CYRUS_SASL_OBJDIR)/.configured: $(CYRUS_SASL_OBJDIR)/.retrieved
 	cd $(CYRUS_SASL_OBJDIR) \
-		&& env CFLAGS="-g" $(CYRUS_SASL_SRCDIR)/configure \
+		&& env CFLAGS="-g" CPPFLAGS="-I/usr/include/kerberosV" \
+		$(CYRUS_SASL_SRCDIR)/configure \
 		--with-dbpath=$(PREFIX)/cyrus-sasl/etc/sasldb2 \
 		--with-plugindir=$(PREFIX)/cyrus-sasl/lib/sasl2 \
 		--with-configdir=$(PREFIX)/cyrus-sasl/lib/sasl2 \
@@ -801,7 +810,7 @@ $(SVN_OBJDIR)/.retrieved:
 		branchdir="$(BRANCH)"; \
 		co="co"; \
 	fi; \
-	if [ ! -d $(SVN_WC) ]; then \
+	if [ ! -d $(SVN_WC) ] && [ ! -h $(SVN_WC) ]; then \
 		svn $${co} $(SUBVERSION_REPOS_URL)/$${branchdir} \
 			$(SVN_WC); \
 	fi
@@ -810,19 +819,33 @@ $(SVN_OBJDIR)/.retrieved:
 ifeq ($(BRANCH_MAJOR),1.5)
 BDB_FLAG=$(PREFIX)/bdb
 SERF_FLAG=--with-serf="$(PREFIX)/serf-old"
+MOD_DAV_SVN=modules/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/mod_authz_svn.so
 else ifeq ($(BRANCH_MAJOR),1.4)
 BDB_FLAG=$(PREFIX)/bdb
+MOD_DAV_SVN=modules/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/mod_authz_svn.so
 else ifeq ($(BRANCH_MAJOR),1.3)
 BDB_FLAG=$(PREFIX)/bdb
+MOD_DAV_SVN=modules/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/mod_authz_svn.so
 else ifeq ($(BRANCH_MAJOR),1.2)
 BDB_FLAG=$(PREFIX)/bdb
+MOD_DAV_SVN=modules/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/mod_authz_svn.so
 else ifeq ($(BRANCH_MAJOR),1.1)
 BDB_FLAG=$(PREFIX)/bdb
+MOD_DAV_SVN=modules/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/mod_authz_svn.so
 else ifeq ($(BRANCH_MAJOR),1.0)
 BDB_FLAG=$(PREFIX)/bdb
+MOD_DAV_SVN=modules/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/mod_authz_svn.so
 else
 BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
 SERF_FLAG=--with-serf="$(PREFIX)/serf"
+MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
 endif
 
 ifeq ($(ENABLE_JAVA_BINDINGS),yes)
@@ -925,8 +948,8 @@ httpd-conf:
 	echo "# httpd config for make check" > $(HTTPD_CHECK_CONF)
 	echo "ServerRoot \"$(PREFIX)/httpd\"" >> $(HTTPD_CHECK_CONF)
 	echo "Listen localhost:$(HTTPD_CHECK_PORT)" >> $(HTTPD_CHECK_CONF)
-	echo "LoadModule dav_svn_module modules/svn-$(WC)/mod_dav_svn.so" >> $(HTTPD_CHECK_CONF)
-	echo "LoadModule authz_svn_module modules/svn-$(WC)/mod_authz_svn.so" >> $(HTTPD_CHECK_CONF)
+	echo "LoadModule dav_svn_module $(MOD_DAV_SVN)" >> $(HTTPD_CHECK_CONF)
+	echo "LoadModule authz_svn_module $(MOD_AUTHZ_SVN)" >> $(HTTPD_CHECK_CONF)
 	echo "DocumentRoot "$(PREFIX)/httpd/htdocs"" >> $(HTTPD_CHECK_CONF)
 	echo "# These two Locations are used for 'make check'" >> $(HTTPD_CHECK_CONF)
 	echo "<Directory />" >> $(HTTPD_CHECK_CONF)
@@ -965,6 +988,10 @@ httpd-conf:
 # We need this to make sure some targets below pick up the right libraries
 LD_LIBRARY_PATH=$(PREFIX)/apr/lib:$(PREFIX)/iconv/lib:$(PREFIX)/bdb/lib:$(PREFIX)/neon/lib:$(PREFIX)/serf/lib:$(PREFIX)/sqlite/lib:$(PREFIX)/cyrus-sasl/lib:$(PREFIX)/iconv/lib:$(PREFIX)/svn-$(WC)/lib
 
+.PHONY: libpath
+libpath:
+	@echo export LD_LIBRARY_PATH=$(LD_LIBRARY_PATH):$$LD_LIBRARY_PATH
+
 .PHONY: start-svnserve stop-svnserve start-httpd stop-httpd
 
 HTTPD_CMD = env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
@@ -973,7 +1000,7 @@ HTTPD_CMD = env LD_LIBRARY_PATH=$(LD_LIB
 HTTPD_START_CMD = $(HTTPD_CMD) -k start
 HTTPD_STOP_CMD = $(HTTPD_CMD) -k stop
 
-SVNSERVE_START_CMD = $(svn_builddir)/subversion/svnserve/svnserve \
+SVNSERVE_START_CMD = $(SVN_PREFIX)/bin/svnserve \
 			--listen-host 127.0.0.1 \
 			--pid-file $(PWD)/svnserve-$(WC).pid \
 			-d -r $(svn_builddir)/subversion/tests/cmdline
@@ -998,45 +1025,62 @@ stop-svnserve:
 
 # run tests in parallel
 PARALLEL=PARALLEL=1 CLEANUP=1
+TEST_LOGS=tests.log fails.log
 
 svn-check-neon: httpd-conf $(SVN_OBJDIR)/.compiled $(SVN_OBJDIR)/.bindings-compiled
 	$(HTTPD_START_CMD)
-	cd $(svn_builddir) && make check FS_TYPE=fsfs \
+	-cd $(svn_builddir) && make check FS_TYPE=fsfs \
 	       BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=neon \
 	       $(PARALLEL)
-	ln -f $(svn_builddir)/tests.log $(svn_builddir)/tests.log.ra_neon-fsfs
-	cd $(svn_builddir) && make check FS_TYPE=bdb \
+	-$(foreach log, $(TEST_LOGS), \
+		test -f $(svn_builddir)/$(log) && cp -f $(svn_builddir)/$(log) \
+			$(svn_builddir)/$(log).$@-fsfs;)
+	-cd $(svn_builddir) && make check FS_TYPE=bdb \
 	       BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=neon \
 	       $(PARALLEL)
-	ln -f $(svn_builddir)/tests.log $(svn_builddir)/tests.log.ra_neon-bdb
+	-$(foreach log, $(TEST_LOGS), \
+		test -f $(svn_builddir)/$(log) && cp -f $(svn_builddir)/$(log) \
+			$(svn_builddir)/$(log).$@-bdb;)
 	$(HTTPD_STOP_CMD)
 
 svn-check-serf: httpd-conf $(SVN_OBJDIR)/.compiled $(SVN_OBJDIR)/.bindings-compiled
 	$(HTTPD_START_CMD)
-	cd $(svn_builddir) && make check FS_TYPE=fsfs \
+	-cd $(svn_builddir) && make check FS_TYPE=fsfs \
 	       BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=serf \
 	       $(PARALLEL)
-	ln -f $(svn_builddir)/tests.log $(svn_builddir)/tests.log.ra_serf-fsfs
-	cd $(svn_builddir) && make check FS_TYPE=bdb \
+	-$(foreach log, $(TEST_LOGS), \
+		test -f $(svn_builddir)/$(log) && cp -f $(svn_builddir)/$(log) \
+			$(svn_builddir)/$(log).$@-fsfs;)
+	-cd $(svn_builddir) && make check FS_TYPE=bdb \
 	       BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=serf \
 	       $(PARALLEL)
-	ln -f $(svn_builddir)/tests.log $(svn_builddir)/tests.log.ra_serf-bdb
+	-$(foreach log, $(TEST_LOGS), \
+		test -f $(svn_builddir)/$(log) && cp -f $(svn_builddir)/$(log) \
+			$(svn_builddir)/$(log).$@-bdb;)
 	$(HTTPD_STOP_CMD)
 
 svn-check-local:
-	cd $(svn_builddir) && make check FS_TYPE=fsfs $(PARALLEL)
-	ln -f $(svn_builddir)/tests.log $(svn_builddir)/tests.log.ra_local-fsfs
-	cd $(svn_builddir) && make check FS_TYPE=bdb $(PARALLEL)
-	ln -f $(svn_builddir)/tests.log $(svn_builddir)/tests.log.ra_local-bdb
+	-cd $(svn_builddir) && make check FS_TYPE=fsfs $(PARALLEL)
+	-$(foreach log, $(TEST_LOGS), \
+		test -f $(svn_builddir)/$(log) && cp -f $(svn_builddir)/$(log) \
+			$(svn_builddir)/$(log).$@-fsfs;)
+	-cd $(svn_builddir) && make check FS_TYPE=bdb $(PARALLEL)
+	-$(foreach log, $(TEST_LOGS), \
+		test -f $(svn_builddir)/$(log) && cp -f $(svn_builddir)/$(log) \
+			$(svn_builddir)/$(log).$@-bdb;)
 
 svn-check-svn:
 	$(SVNSERVE_START_CMD)
-	cd $(svn_builddir) && make check FS_TYPE=fsfs BASE_URL=svn://127.0.0.1 \
-		 $(PARALLEL)
-	ln -f $(svn_builddir)/tests.log $(svn_builddir)/tests.log.ra_svn-fsfs
-	cd $(svn_builddir) && make check FS_TYPE=bdb BASE_URL=svn://127.0.0.1 \
-		 $(PARALLEL)
-	ln -f $(svn_builddir)/tests.log $(svn_builddir)/tests.log.ra_svn-bdb
+	-cd $(svn_builddir) && make check FS_TYPE=fsfs \
+		BASE_URL=svn://127.0.0.1 $(PARALLEL)
+	-$(foreach log, $(TEST_LOGS), \
+		test -f $(svn_builddir)/$(log) && cp -f $(svn_builddir)/$(log) \
+			$(svn_builddir)/$(log).$@-fsfs;)
+	-cd $(svn_builddir) && make check FS_TYPE=bdb \
+		BASE_URL=svn://127.0.0.1 $(PARALLEL)
+	-$(foreach log, $(TEST_LOGS), \
+		test -f $(svn_builddir)/$(log) && cp -f $(svn_builddir)/$(log) \
+			$(svn_builddir)/$(log).$@-bdb;)
 	$(SVNSERVE_STOP_CMD)
 
 .PHONY: svn-check-swig-pl svn-check-swig-py svn-check-swig-rb svn-check-javahl
@@ -1049,20 +1093,40 @@ RUBYLIB=$(SVN_PREFIX)/lib/ruby/site_ruby
 	grep ^svn_cv_ruby_sitedir_libsuffix $(svn_builddir)/config.log | \
 	cut -d'=' -f2)
 svn-check-swig-pl:
-	cd $(svn_builddir) && \
-		env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) make check-swig-pl
+	-if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \
+		(cd $(svn_builddir) && \
+			env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+			make check-swig-pl 2>&1) | \
+				tee $(svn_builddir)/tests.log.bindings.pl; \
+	fi
+
 svn-check-swig-py:
-	cd $(svn_builddir) && \
-		env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) make check-swig-py
+	-if [ $(ENABLE_PYTHON_BINDINGS) = yes ]; then \
+		(cd $(svn_builddir) && \
+			env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+			make check-swig-py 2>&1) | \
+				tee $(svn_builddir)/tests.log.bindings.py; \
+	fi
+
+# We add the svn prefix to PATH here because the ruby tests
+# attempt to start an svnserve binary found in PATH.
 svn-check-swig-rb:
-	cd $(svn_builddir)/subversion/bindings/swig/ruby/test && \
-		env RUBYLIB=$(RUBYLIB) \
-		LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
-		ruby run-test.rb --verbose=verbose
+	-if [ $(ENABLE_RUBY_BINDINGS) = yes ]; then \
+		(cd $(svn_builddir)/subversion/bindings/swig/ruby/test && \
+			env RUBYLIB=$(RUBYLIB) \
+			LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+			PATH=$(SVN_PREFIX)/bin:$$PATH \
+			ruby run-test.rb --verbose=normal 2>&1) | \
+				tee $(svn_builddir)/tests.log.bindings.rb; \
+	fi
+
 svn-check-javahl:
-	cd $(svn_builddir) && \
-		env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
-		make check-javahl
+	-if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
+		(cd $(svn_builddir) && \
+			env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+			make check-javahl 2>&1) | \
+				tee $(svn_builddir)/tests.log.bindings.javahl; \
+	fi
 
 svn-check: svn-check-local svn-check-svn svn-check-neon svn-check-serf svn-check-bindings
 
@@ -1097,7 +1161,7 @@ endif
 	@echo "openssl:    `openssl version | cut -d' ' -f2`"
 	@echo "swig:       `swig -version | grep Version | cut -d' ' -f3`"
 	@echo "python:     `python --version 2>&1 | cut -d' ' -f2`"
-	@echo "perl:       `perl -version | grep ^This | cut -d' ' -f4 | sed -e 's/^v//'`"
+	@echo "perl:       `eval \`perl -V:version\`; echo $$version`"
 	@echo "ruby:       `ruby --version | cut -d' ' -f2`"
 ifeq ($(ENABLE_JAVA_BINDINGS),yes)
 	@echo "java:       `java -version 2>&1 | grep version | cut -d' ' -f3  | sed -e 's/\"//g'`"

Modified: subversion/branches/performance/tools/dist/collect_sigs.py
URL: http://svn.apache.org/viewvc/subversion/branches/performance/tools/dist/collect_sigs.py?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/tools/dist/collect_sigs.py (original)
+++ subversion/branches/performance/tools/dist/collect_sigs.py Fri Nov 26 18:55:51 2010
@@ -1,14 +1,131 @@
 #!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# A script intended to be useful in helping to collect signatures for a
+# release.  This is pretty rough, and patches are welcome to improve it.
+#
+# Some thoughts about future improvement:
+#  * Display of per-file and per-release statistics
+#  * Make use of the python-gnupg package (http://code.google.com/p/python-gnupg/)
+#  * Post to IRC when a new signature is collected
+#    - Since we don't want to have a long running bot, perhaps we could
+#      also patch wayita to accept and then echo a privmsg?
+#
+
+import sys, os
+import sqlite3
+
+def make_config():
+  'Output a blank config file'
+
+  if os.path.exists('config.py'):
+    print "'config.py' already exists!"
+    sys.exit(1)
+
+  conf = open('config.py', 'w')
+  conf.write("version = ''\n")
+  conf.write("sigdir = ''\n")
+  conf.write("filesdir = ''\n")
+  conf.close()
+
+  print "'config.py' generated"
+
+def make_db():
+  'Initialize a blank database'
+
+  db = sqlite3.connect('sigs.db')
+  db.execute('''
+    CREATE TABLE signatures (
+      keyid TEXT, filename TEXT, signature BLOB,
+      UNIQUE(keyid,filename)
+    );
+''');
+
+# This function is web-facing
+def generate_asc_files(target_dir='.'):
+  fds = {}
+  def _open(filename):
+    if not fds.has_key(filename):
+      fd = open(os.path.join(target_dir, filename + '.asc'), 'w')
+      fds[filename] = fd
+    return fds[filename]
+
+  db = sqlite3.connect(os.path.join(target_dir, 'sigs.db'))
+  curs = db.cursor()
+  curs.execute('SELECT filename, signature FROM signatures;')
+  for filename, signature in curs:
+    fd = _open(filename)
+    fd.write(signature + "\n")
+
+  for fd in fds.values():
+    fd.flush()
+    fd.close()
+
+actions = {
+    'make_config' : make_config,
+    'make_db' : make_db,
+    'make_asc' : generate_asc_files,
+}
+
+
+if __name__ == '__main__':
+  if len(sys.argv) > 1:
+    if sys.argv[1] in actions:
+      actions[sys.argv[1]]()
+      sys.exit(0)
+
+
+# Stuff below this line is the web-facing side
+# ======================================================================
+
 
 import cgi
 import cgitb
 cgitb.enable()
 
-import sys, os, string, subprocess, re
+import string, subprocess, re
+
+try:
+  sys.path.append(os.path.dirname(sys.argv[0]))
+  import config
+except:
+  print 'Content-type: text/plain'
+  print
+  print 'Cannot find config file'
+  sys.exit(1)
 
-version = '1.6.13'
 r = re.compile('\[GNUPG\:\] GOODSIG (\w*) (.*)')
 
+def files():
+  for f in os.listdir(config.filesdir):
+    if config.version in f and (f.endswith('.tar.gz') or f.endswith('.zip') or f.endswith('.tar.bz2')):
+      yield f
+
+def ordinal(N):
+  try:
+    return [None, 'first', 'second', 'third', 'fourth', 'fifth', 'sixth'][N]
+  except:
+    # Huh?  We only have six files to sign.
+    return "%dth" % N
+
 shell_content = '''
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
@@ -17,43 +134,67 @@ shell_content = '''
 <title>Signature collection for Subversion $version</title>
 </head>
 <body style="font-size: 14pt; text-align: justify;
-  background-color: #f0f0f0; padding: 0 5%">
-<p>This page is used to collect signatures for the proposed release of
-Apache Subversion $version.</p>
+  background-color: #f0f0f0; padding: 0 5%%">
+<p>This page is used to collect <a href="%s/list">signatures</a> for the
+proposed release of Apache Subversion $version.</p>
 $content
 </body>
 </html>
-'''
+''' % os.getenv('SCRIPT_NAME')
 
-def default_page():
-  c = '''
-<form method="post">
-File: <select name="filename">
-%s
-</select>
-<br/>
-<p>Paste signature in the area below:<br/>
-<textarea name="signature" rows="10" cols="80"></textarea>
+signature_area = '''
+<hr/>
+<form method="post" action="%s">
+<p>Paste one or more signatures in the area below:<br/>
+<textarea name="signatures" rows="20" cols="80"></textarea>
 </p>
 <input type="submit" value="Submit" />
+<p>Any text not between the <tt>BEGIN PGP SIGNATURE</tt>
+and <tt>END PGP SIGNATURE</tt> lines will be ignored.</p>
 </form>
+<hr/>
+''' % os.getenv('SCRIPT_NAME')
+ 
+
+
+def split(sigs):
+  lines = []
+  for line in sigs.split('\n'):
+    if lines or '--BEGIN' in line:
+      lines.append(line)
+    if '--END' in line:
+      yield "\n".join(lines) + "\n"
+      lines = []
+
+def list_signatures():
+  db = sqlite3.connect(os.path.join(config.sigdir, 'sigs.db'))
+  template = '''
+<hr/>
+<p>The following signature files are available:</p>
+<p>%s</p>
 '''
 
-  contents = [f for f in os.listdir('.')
-              if f.endswith('.tar.gz') or f.endswith('.zip')
-                                       or f.endswith('.tar.bz2')]
-  contents.sort()
+  lines = ""
+  curs = db.cursor()
+  curs.execute('''SELECT filename, COUNT(*) FROM signatures
+                  GROUP BY filename ORDER BY filename''')
+  for filename, count in curs:
+    lines += '<a href="%s/%s.asc">%s.asc</a>: %d signature%s<br/>\n' \
+             % (os.getenv('SCRIPT_NAME'), filename, filename,
+                count, ['s', ''][count == 1])
+  return (template % lines) + signature_area
+
+def save_valid_sig(db, filename, keyid, signature):
+  db.execute('INSERT OR REPLACE INTO signatures VALUES (?,?,?);',
+             (keyid, filename, buffer(signature)))
+  db.commit()
 
-  options = ''
-  for f in contents:
-    options = options + '<option value="%s">%s</option>\n' % (f, f)
+  generate_asc_files(config.sigdir)
 
-  return c % options
-
-
-def verify_sig(signature, filename):
+def verify_sig_for_file(signature, filename):
   args = ['gpg', '--logger-fd', '1', '--no-tty',
-          '--status-fd', '2', '--verify', '-', filename]
+          '--status-fd', '2', '--verify', '-',
+          os.path.join(config.filesdir, filename)]
 
   gpg = subprocess.Popen(args,
                          stdin=subprocess.PIPE,
@@ -74,13 +215,28 @@ def verify_sig(signature, filename):
   for line in lines:
     match = r.search(line)
     if match:
-      keyid = match.group(1)[-8:]
+      keyid = match.group(1)
       user = match.group(2)
 
-  return (True, (keyid, user))
-
+  return (True, (filename, keyid, user))
 
-def process_sig(signature, filename):
+def verify_sig(signature):
+  all_failures = ""
+  for filename in files():
+    (verified, result) = verify_sig_for_file(signature, filename)
+    if verified:
+      return (verified, result)
+    else:
+      all_failures += "%s:\n[[[\n%s]]]\n\n" % (filename, result)
+  return (False, all_failures)
+
+def process_sigs(signatures):
+  success = '''
+  <p style="color: green;">All %d signatures verified!</p>
+'''
+  failure = '''
+  <p style="color: red;">%d of %d signatures failed to verify; details below.</p>
+'''
   c_verified = '''
   <p style="color: green;">The signature is verified!</p>
   <p>Filename: <code>%s</code></p>
@@ -93,35 +249,92 @@ def process_sig(signature, filename):
 '''
   c_unverified = '''
   <p style="color: red;">The signature was not able to be verified!</p>
-  <p>Filename: <code>%s</code></p>
+  <p>Signature: <pre>%s</pre></p>
   <p>Reason:</p><pre>%s</pre>
   <p>Please talk to the release manager if this is in error.</p>
 '''
 
-  (verified, result) = verify_sig(signature, filename)
-
-  if verified:
-    return c_verified % (filename, result[0], result[1])
+  outcomes = []
+  N_sigs = 0
+  N_verified = 0
+  retval = ''
+
+  # Verify
+  db = sqlite3.connect(os.path.join(config.sigdir, 'sigs.db'))
+  for signature in split(signatures):
+    N_sigs += 1
+    (verified, result) = verify_sig(signature)
+    outcomes.append((verified, result, signature))
+
+    if verified:
+      (filename, keyid, user) = result
+      save_valid_sig(db, filename, keyid, signature)
+      N_verified += 1
+
+  # Output header
+  if N_verified == N_sigs:
+    retval += success % N_sigs
   else:
-    return c_unverified % (filename, result)
+    retval += failure % (N_sigs-N_verified, N_sigs)
 
+  # Output details
+  N = 0
+  for outcome in outcomes:
+    N += 1
+    (verified, result, signature) = outcome
+    retval += "<h1>Results for the %s signature</h1>" % ordinal(N)
+    if verified:
+      (filename, keyid, user) = result
+      retval += c_verified % (filename, keyid[-8:], user)
+    else:
+      retval += c_unverified % (signature, result)
+
+  return retval + signature_area
+
+
+def cat_signatures(basename):
+  # strip '.asc' extension
+  assert basename[:-4] in files()
+
+  # cat
+  ascfile = os.path.join(config.sigdir, basename)
+  if os.path.exists(ascfile):
+    return (open(ascfile, 'r').read())
 
-def main():
-  print "Content-Type: text/html"
+def print_content_type(mimetype):
+  print "Content-Type: " + mimetype
   print
 
+def main():
   form = cgi.FieldStorage()
-  if 'signature' not in form:
-    content = default_page()
-  else:
-    content = process_sig(form['signature'].value, form['filename'].value)
+  pathinfo = os.getenv('PATH_INFO')
+
+  # default value, to be changed below
+  content = signature_area
+
+  if 'signatures' in form:
+    content = process_sigs(form['signatures'].value)
+
+  elif pathinfo and pathinfo[1:]:
+    basename = pathinfo.split('/')[-1]
+
+    if basename == 'list':
+      content = list_signatures()
+
+    elif basename[:-4] in files():
+      # early exit; bypass 'content' entirely
+      print_content_type('text/plain')
+      print cat_signatures(basename)
+      return
 
   # These are "global" values, not specific to our action.
   mapping = {
-      'version' : version,
+      'version' : config.version,
       'content' : content,
     }
 
+  print_content_type('text/html')
+
   template = string.Template(shell_content)
   print template.safe_substitute(mapping)
 

Modified: subversion/branches/performance/tools/dist/dist.sh
URL: http://svn.apache.org/viewvc/subversion/branches/performance/tools/dist/dist.sh?rev=1039511&r1=1039510&r2=1039511&view=diff
==============================================================================
--- subversion/branches/performance/tools/dist/dist.sh (original)
+++ subversion/branches/performance/tools/dist/dist.sh Fri Nov 26 18:55:51 2010
@@ -229,6 +229,10 @@ rm -f "$DISTPATH/STATUS"
 # (See http://svn.haxx.se/dev/archive-2009-04/0166.shtml for discussion.)
 rm -rf "$DISTPATH/contrib"
 
+# Remove notes/ from our distribution tarball.  It's large, but largely
+# blue-sky and out-of-date, and of questionable use to end users.
+rm -rf "$DISTPATH/notes"
+
 # Remove packages/ from the tarball.
 # (See http://svn.haxx.se/dev/archive-2009-12/0205.shtml)
 rm -rf "$DISTPATH/packages"