Posted to commits@subversion.apache.org by rh...@apache.org on 2015/12/12 12:15:19 UTC

svn commit: r1719652 [2/2] - in /subversion/branches/ra-git: ./ subversion/include/ subversion/include/private/ subversion/libsvn_client/ subversion/libsvn_delta/ subversion/libsvn_diff/ subversion/libsvn_fs_fs/ subversion/libsvn_fs_x/ subversion/libsv...

Modified: subversion/branches/ra-git/subversion/tests/libsvn_subr/io-test.c
URL: http://svn.apache.org/viewvc/subversion/branches/ra-git/subversion/tests/libsvn_subr/io-test.c?rev=1719652&r1=1719651&r2=1719652&view=diff
==============================================================================
--- subversion/branches/ra-git/subversion/tests/libsvn_subr/io-test.c (original)
+++ subversion/branches/ra-git/subversion/tests/libsvn_subr/io-test.c Sat Dec 12 11:15:19 2015
@@ -123,7 +123,7 @@ static struct test_file_definition_t tes
 
 static svn_error_t *
 create_test_file(struct test_file_definition_t* definition,
-                 const char *testname,
+                 const char *test_dir,
                  apr_pool_t *pool,
                  apr_pool_t *scratch_pool)
 {
@@ -132,7 +132,6 @@ create_test_file(struct test_file_defini
   apr_off_t midpos = definition->size / 2;
   svn_error_t *err = NULL;
   int i;
-  const char *test_dir = apr_pstrcat(pool, TEST_DIR_PREFIX, testname, NULL);
 
   if (definition->size < 5)
     SVN_ERR_ASSERT(strlen(definition->data) >= (apr_size_t)definition->size);
@@ -184,31 +183,13 @@ create_comparison_candidates(struct test
                              const char *testname,
                              apr_pool_t *pool)
 {
-  svn_node_kind_t kind;
   apr_pool_t *iterpool = svn_pool_create(pool);
   struct test_file_definition_t *candidate;
   svn_error_t *err = SVN_NO_ERROR;
   apr_size_t count = 0;
-  const char *test_dir = apr_pstrcat(pool, TEST_DIR_PREFIX,
-                                     testname, NULL);
+  const char *test_dir;
 
-  /* If there's already a directory named io-test-temp, delete it.
-     Doing things this way means that repositories stick around after
-     a failure for postmortem analysis, but also that tests can be
-     re-run without cleaning out the repositories created by prior
-     runs.  */
-  SVN_ERR(svn_io_check_path(test_dir, &kind, pool));
-
-  if (kind == svn_node_dir)
-    SVN_ERR(svn_io_remove_dir2(test_dir, TRUE, NULL, NULL, pool));
-  else if (kind != svn_node_none)
-    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
-                             "There is already a file named '%s'",
-                             test_dir);
-
-  SVN_ERR(svn_io_dir_make(test_dir, APR_OS_DEFAULT, pool));
-
-  svn_test_add_dir_cleanup(test_dir);
+  SVN_ERR(svn_test_make_sandbox_dir(&test_dir, testname, pool));
 
   for (candidate = test_file_definitions_template;
        candidate->name != NULL;
@@ -220,7 +201,7 @@ create_comparison_candidates(struct test
   for (candidate = *definitions; candidate->name != NULL; candidate += 1)
     {
       svn_pool_clear(iterpool);
-      err = create_test_file(candidate, testname, pool, iterpool);
+      err = create_test_file(candidate, test_dir, pool, iterpool);
       if (err)
         break;
     }
@@ -518,10 +499,7 @@ read_length_line_shouldnt_loop(apr_pool_
   apr_size_t buffer_limit = sizeof(buffer);
   apr_file_t *f;
 
-  SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "read_length_tmp", pool));
-  SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
-  SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
-  svn_test_add_dir_cleanup(tmp_dir);
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "read_length_tmp", pool));
 
   SVN_ERR(svn_io_write_unique(&tmp_file, tmp_dir, "1234\r\n", 6,
                               svn_io_file_del_on_pool_cleanup, pool));
@@ -535,6 +513,167 @@ read_length_line_shouldnt_loop(apr_pool_
   return SVN_NO_ERROR;
 }
 
+static svn_error_t *
+test_read_length_line(apr_pool_t *pool)
+{
+  const char *tmp_dir;
+  const char *tmp_file;
+  char buffer[80];
+  apr_size_t buffer_limit;
+  apr_file_t *f;
+  svn_error_t *err;
+
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_read_length_line",
+                                    pool));
+
+  /* Test 1: Read empty file. */
+  tmp_file = svn_dirent_join(tmp_dir, "empty", pool);
+  SVN_ERR(svn_io_file_create(tmp_file, "", pool));
+
+  SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+                           APR_OS_DEFAULT, pool));
+  buffer_limit = sizeof(buffer);
+  err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+  SVN_TEST_ASSERT_ERROR(err, APR_EOF);
+
+  SVN_ERR(svn_io_file_close(f, pool));
+
+  /* Test 2: Read empty line. */
+  tmp_file = svn_dirent_join(tmp_dir, "empty-line", pool);
+  SVN_ERR(svn_io_file_create(tmp_file, "\n", pool));
+
+  SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+                           APR_OS_DEFAULT, pool));
+  buffer_limit = sizeof(buffer);
+  err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+  SVN_ERR(err);
+  SVN_TEST_ASSERT(buffer_limit == 0);
+  SVN_TEST_STRING_ASSERT(buffer, "");
+  SVN_ERR(svn_io_file_close(f, pool));
+
+  /* Test 3: Read two lines. */
+  tmp_file = svn_dirent_join(tmp_dir, "lines", pool);
+  SVN_ERR(svn_io_file_create(tmp_file, "first\nsecond\n", pool));
+
+  SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+                           APR_OS_DEFAULT, pool));
+
+  buffer_limit = sizeof(buffer);
+  err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+  SVN_ERR(err);
+  SVN_TEST_ASSERT(buffer_limit == 5);
+  SVN_TEST_STRING_ASSERT(buffer, "first");
+
+  buffer_limit = sizeof(buffer);
+  err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+  SVN_ERR(err);
+  SVN_TEST_ASSERT(buffer_limit == 6);
+  SVN_TEST_STRING_ASSERT(buffer, "second");
+
+  buffer_limit = sizeof(buffer);
+  err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+  SVN_TEST_ASSERT_ERROR(err, APR_EOF);
+
+  SVN_ERR(svn_io_file_close(f, pool));
+
+  /* Test 4: Content without end-of-line. */
+  tmp_file = svn_dirent_join(tmp_dir, "no-eol", pool);
+  SVN_ERR(svn_io_file_create(tmp_file, "text", pool));
+
+  SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+                           APR_OS_DEFAULT, pool));
+
+  buffer_limit = sizeof(buffer);
+  err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+  SVN_TEST_ASSERT_ERROR(err, APR_EOF);
+
+  SVN_ERR(svn_io_file_close(f, pool));
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_file_readline(apr_pool_t *pool)
+{
+  const char *tmp_dir;
+  const char *tmp_file;
+  svn_stringbuf_t *buf;
+  apr_file_t *f;
+  svn_error_t *err;
+  const char *eol;
+  svn_boolean_t eof;
+  apr_off_t pos;
+
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_file_readline",
+                                    pool));
+
+  tmp_file = svn_dirent_join(tmp_dir, "foo", pool);
+
+  SVN_ERR(svn_io_file_create(tmp_file, "CR\rLF\nCRLF\r\nno-eol", pool));
+
+  SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+                           APR_OS_DEFAULT, pool));
+  err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+  SVN_ERR(err);
+  SVN_TEST_STRING_ASSERT(buf->data, "CR");
+  SVN_TEST_STRING_ASSERT(eol, "\r");
+  SVN_TEST_ASSERT(!eof);
+
+  /* Check that APR file reports correct offset. See r1719196 for why it's
+     important. */
+  SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+  SVN_TEST_INT_ASSERT(pos, 3);
+
+  err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+  SVN_ERR(err);
+  SVN_TEST_STRING_ASSERT(buf->data, "LF");
+  SVN_TEST_STRING_ASSERT(eol, "\n");
+  SVN_TEST_ASSERT(!eof);
+
+  /* Check that APR file reports correct offset. See r1719196 for why it's
+     important. */
+  SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+  SVN_TEST_INT_ASSERT(pos, 6);
+
+  err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+  SVN_ERR(err);
+  SVN_TEST_STRING_ASSERT(buf->data, "CRLF");
+  SVN_TEST_STRING_ASSERT(eol, "\r\n");
+  SVN_TEST_ASSERT(!eof);
+
+  /* Check that APR file reports correct offset. See r1719196 for why it's
+     important. */
+  SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+  SVN_TEST_INT_ASSERT(pos, 12);
+
+  err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+  SVN_ERR(err);
+  SVN_TEST_STRING_ASSERT(buf->data, "no-eol");
+  SVN_TEST_STRING_ASSERT(eol, NULL);
+  SVN_TEST_ASSERT(eof);
+
+  /* Check that APR file reports correct offset. See r1719196 for why it's
+     important. */
+  SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+  SVN_TEST_INT_ASSERT(pos, 18);
+
+  /* Further reads still return EOF. */
+  err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+  SVN_ERR(err);
+  SVN_TEST_STRING_ASSERT(buf->data, "");
+  SVN_TEST_STRING_ASSERT(eol, NULL);
+  SVN_TEST_ASSERT(eof);
+
+  /* Check that APR file reports correct offset. See r1719196 for why it's
+     important. */
+  SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+  SVN_TEST_INT_ASSERT(pos, 18);
+
+  SVN_ERR(svn_io_file_close(f, pool));
+
+  return SVN_NO_ERROR;
+}
+
 /* Move the read pointer in FILE to absolute position OFFSET and align
  * the read buffer to multiples of BLOCK_SIZE.  BUFFERED is set only if
  * FILE actually uses a read buffer.  Use POOL for allocations.
@@ -562,8 +701,7 @@ aligned_seek(apr_file_t *file,
     }
 
   /* we must be at the desired offset */
-  current = 0;
-  SVN_ERR(svn_io_file_seek(file, APR_CUR, &current, pool));
+  SVN_ERR(svn_io_file_get_offset(&current, file, pool));
   SVN_TEST_ASSERT(current == (apr_off_t)offset);
 
   return SVN_NO_ERROR;
@@ -639,11 +777,7 @@ aligned_seek_test(apr_pool_t *pool)
   const apr_size_t file_size = 100000;
 
   /* create a temp folder & schedule it for automatic cleanup */
-
-  SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "aligned_seek_tmp", pool));
-  SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
-  SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
-  svn_test_add_dir_cleanup(tmp_dir);
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "aligned_seek_tmp", pool));
 
   /* create a temp file with known contents */
 
@@ -690,10 +824,7 @@ ignore_enoent(apr_pool_t *pool)
   apr_file_t *file;
 
   /* Create an empty directory. */
-  SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "ignore_enoent", pool));
-  SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
-  SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
-  svn_test_add_dir_cleanup(tmp_dir);
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "ignore_enoent", pool));
 
   /* Path does not exist. */
   path = svn_dirent_join(tmp_dir, "not-present", pool);
@@ -750,11 +881,9 @@ test_install_stream_to_longpath(apr_pool
   int i;
 
   /* Create an empty directory. */
-  SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "test_install_stream_to_longpath",
-                                  pool));
-  SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
-  SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
-  svn_test_add_dir_cleanup(tmp_dir);
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir,
+                                    "test_install_stream_to_longpath",
+                                    pool));
 
   deep_dir = tmp_dir;
 
@@ -792,11 +921,9 @@ test_install_stream_over_readonly_file(a
   svn_stringbuf_t *actual_content;
 
   /* Create an empty directory. */
-  SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "test_install_stream_over_readonly_file",
-                                  pool));
-  SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
-  SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
-  svn_test_add_dir_cleanup(tmp_dir);
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir,
+                                    "test_install_stream_over_readonly_file",
+                                    pool));
 
   final_abspath = svn_dirent_join(tmp_dir, "stream1", pool);
 
@@ -829,10 +956,7 @@ test_file_size_get(apr_pool_t *pool)
   svn_filesize_t filesize;
 
   /* Create an empty directory. */
-  SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "test_file_size_get", pool));
-  SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
-  SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
-  svn_test_add_dir_cleanup(tmp_dir);
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_file_size_get", pool));
 
   /* Path does not exist. */
   path = svn_dirent_join(tmp_dir, "file", pool);
@@ -871,10 +995,7 @@ test_file_rename2(apr_pool_t *pool)
   svn_node_kind_t actual_kind;
 
   /* Create an empty directory. */
-  SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "test_file_rename2", pool));
-  SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
-  SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
-  svn_test_add_dir_cleanup(tmp_dir);
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_file_rename2", pool));
 
   foo_path = svn_dirent_join(tmp_dir, "foo", pool);
   bar_path = svn_dirent_join(tmp_dir, "bar", pool);
@@ -948,6 +1069,10 @@ static struct svn_test_descriptor_t test
                    "test svn_io_file_size_get"),
     SVN_TEST_PASS2(test_file_rename2,
                    "test svn_io_file_rename2"),
+    SVN_TEST_PASS2(test_read_length_line,
+                   "test svn_io_read_length_line()"),
+    SVN_TEST_PASS2(test_file_readline,
+                   "test svn_io_file_readline()"),
     SVN_TEST_NULL
   };
 

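For context, a minimal sketch (not part of this commit) of the read loop that the
new test_file_readline() above exercises. It reuses only the calls visible in the
hunk; the print_lines() helper name and the header list are illustrative assumptions:

[[[
#include <stdio.h>
#include "svn_error.h"
#include "svn_io.h"
#include "svn_string.h"

/* Read PATH with svn_io_file_readline() until EOF is reported, printing
 * each returned line and its detected EOL ("CR", "LF", "CRLF", or none
 * when the final line has no end-of-line marker). */
static svn_error_t *
print_lines(const char *path, apr_pool_t *pool)
{
  apr_file_t *f;
  svn_boolean_t eof = FALSE;

  SVN_ERR(svn_io_file_open(&f, path, APR_READ | APR_BUFFERED,
                           APR_OS_DEFAULT, pool));
  while (!eof)
    {
      svn_stringbuf_t *buf;
      const char *eol;

      SVN_ERR(svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX,
                                   pool, pool));
      printf("'%s' (eol: %s)\n", buf->data,
             eol == NULL ? "none"
               : (eol[0] == '\n' ? "LF"
                  : (eol[1] == '\n' ? "CRLF" : "CR")));
    }
  return svn_io_file_close(f, pool);
}
]]]
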
Modified: subversion/branches/ra-git/subversion/tests/svn_test.h
URL: http://svn.apache.org/viewvc/subversion/branches/ra-git/subversion/tests/svn_test.h?rev=1719652&r1=1719651&r2=1719652&view=diff
==============================================================================
--- subversion/branches/ra-git/subversion/tests/svn_test.h (original)
+++ subversion/branches/ra-git/subversion/tests/svn_test.h Sat Dec 12 11:15:19 2015
@@ -347,6 +347,12 @@ svn_error_t *
 svn_test__init_auth_baton(svn_auth_baton_t **baton,
                           apr_pool_t *result_pool);
 
+/* Create a temp folder named SB_NAME for the test, schedule it for automatic
+ * cleanup, and return its path in *SB_DIR_P. Uses POOL for all allocations. */
+svn_error_t *
+svn_test_make_sandbox_dir(const char **sb_dir_p,
+                          const char *sb_name,
+                          apr_pool_t *pool);
 
 /*
  * Test predicates

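A minimal usage sketch for the helper declared above, mirroring the pattern the
io-test.c callers now follow (the test name "my-test" and the file name "data"
are placeholders, not part of the commit):

[[[
#include "svn_dirent_uri.h"
#include "svn_io.h"
#include "svn_test.h"

static svn_error_t *
my_test(apr_pool_t *pool)
{
  const char *tmp_dir;
  const char *path;

  /* Removes any sandbox left over from a previous run, recreates it,
     and schedules it for cleanup when the test harness exits. */
  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "my-test", pool));

  path = svn_dirent_join(tmp_dir, "data", pool);
  SVN_ERR(svn_io_file_create(path, "contents\n", pool));

  return SVN_NO_ERROR;
}
]]]
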
Modified: subversion/branches/ra-git/subversion/tests/svn_test_main.c
URL: http://svn.apache.org/viewvc/subversion/branches/ra-git/subversion/tests/svn_test_main.c?rev=1719652&r1=1719651&r2=1719652&view=diff
==============================================================================
--- subversion/branches/ra-git/subversion/tests/svn_test_main.c (original)
+++ subversion/branches/ra-git/subversion/tests/svn_test_main.c Sat Dec 12 11:15:19 2015
@@ -761,6 +761,23 @@ svn_test__init_auth_baton(svn_auth_baton
   return SVN_NO_ERROR;
 }
 
+svn_error_t *
+svn_test_make_sandbox_dir(const char **sb_dir_p,
+                          const char *sb_name,
+                          apr_pool_t *pool)
+{
+  const char *sb_dir;
+
+  sb_dir = svn_test_data_path(sb_name, pool);
+  SVN_ERR(svn_io_remove_dir2(sb_dir, TRUE, NULL, NULL, pool));
+  SVN_ERR(svn_io_make_dir_recursively(sb_dir, pool));
+  svn_test_add_dir_cleanup(sb_dir);
+
+  *sb_dir_p = sb_dir;
+
+  return SVN_NO_ERROR;
+}
+
 /* Standard svn test program */
 int
 svn_test_main(int argc, const char *argv[], int max_threads,

Modified: subversion/branches/ra-git/tools/dev/svnmover/ra.c
URL: http://svn.apache.org/viewvc/subversion/branches/ra-git/tools/dev/svnmover/ra.c?rev=1719652&r1=1719651&r2=1719652&view=diff
==============================================================================
--- subversion/branches/ra-git/tools/dev/svnmover/ra.c (original)
+++ subversion/branches/ra-git/tools/dev/svnmover/ra.c Sat Dec 12 11:15:19 2015
@@ -299,11 +299,18 @@ branch_get_mutable_state(svn_branch__txn
   for (i = 0; i < branches->nelts; i++)
     {
       svn_branch__state_t *b = APR_ARRAY_IDX(branches, i, void *);
+      svn_branch__history_t *history
+        = svn_branch__history_create_empty(result_pool);
 
-      b->predecessor
+      /* Set each branch's parent to the branch in the base rev */
+      svn_branch__rev_bid_t *parent
         = svn_branch__rev_bid_create(base_revision,
                                      svn_branch__get_id(b, scratch_pool),
                                      result_pool);
+
+      svn_hash_sets(history->parents,
+                    apr_pstrdup(result_pool, b->bid), parent);
+      SVN_ERR(svn_branch__state_set_history(b, history, scratch_pool));
     }
 
   *txn_p = txn;

Modified: subversion/branches/ra-git/tools/dev/svnmover/svnmover.c
URL: http://svn.apache.org/viewvc/subversion/branches/ra-git/tools/dev/svnmover/svnmover.c?rev=1719652&r1=1719651&r2=1719652&view=diff
==============================================================================
--- subversion/branches/ra-git/tools/dev/svnmover/svnmover.c (original)
+++ subversion/branches/ra-git/tools/dev/svnmover/svnmover.c Sat Dec 12 11:15:19 2015
@@ -539,31 +539,115 @@ rev_bid_str(const svn_branch__rev_bid_t
   return apr_psprintf(result_pool, "r%ld.%s", rev_bid->rev, rev_bid->bid);
 }
 
+/*  */
+static const char *
+list_parents(svn_branch__history_t *history,
+             apr_pool_t *result_pool)
+{
+  const char *result = "";
+  apr_hash_index_t *hi;
+
+  for (hi = apr_hash_first(result_pool, history->parents);
+       hi; hi = apr_hash_next(hi))
+    {
+      svn_branch__rev_bid_t *parent = apr_hash_this_val(hi);
+      const char *parent_str = rev_bid_str(parent, result_pool);
+
+      result = apr_psprintf(result_pool, "%s%s%s",
+                            result, result[0] ? ", " : "", parent_str);
+    }
+  return result;
+}
+
+/* Return a string representation of HISTORY.
+ */
+static const char *
+history_str(svn_branch__history_t *history,
+            apr_pool_t *result_pool)
+{
+  const char *result
+    = list_parents(history, result_pool);
+
+  return apr_psprintf(result_pool, "parents={%s}", result);
+}
+
+/*
+ */
+static svn_error_t *
+svn_branch__history_add_parent(svn_branch__history_t *history,
+                               svn_revnum_t rev,
+                               const char *branch_id,
+                               apr_pool_t *scratch_pool)
+{
+  apr_pool_t *pool = apr_hash_pool_get(history->parents);
+  svn_branch__rev_bid_t *new_parent;
+
+  new_parent = svn_branch__rev_bid_create(rev, branch_id, pool);
+  svn_hash_sets(history->parents, apr_pstrdup(pool, branch_id), new_parent);
+  return SVN_NO_ERROR;
+}
+
 /* Set *DIFFERENCE_P to some sort of indication of the difference between
- * MERGE_HISTORY1 and MERGE_HISTORY2, or to null if there is no difference.
+ * HISTORY1 and HISTORY2, or to null if there is no difference.
  *
  * Inputs may be null.
  */
 static svn_error_t *
-merge_history_diff(const char **difference_p,
-                   svn_branch__rev_bid_t *merge_history1,
-                   svn_branch__rev_bid_t *merge_history2,
-                   apr_pool_t *result_pool)
-{
-  *difference_p = NULL;
-  if ((merge_history1 || merge_history2)
-      && !(merge_history1 && merge_history2
-           && svn_branch__rev_bid_equal(merge_history1, merge_history2)))
+history_diff(const char **difference_p,
+             svn_branch__history_t *history1,
+             svn_branch__history_t *history2,
+             apr_pool_t *result_pool,
+             apr_pool_t *scratch_pool)
+{
+  apr_hash_t *combined;
+  apr_hash_index_t *hi;
+  svn_boolean_t different = FALSE;
+
+  if (! history1)
+    history1 = svn_branch__history_create_empty(scratch_pool);
+  if (! history2)
+    history2 = svn_branch__history_create_empty(scratch_pool);
+  combined = hash_overlay(history1->parents,
+                          history2->parents);
+
+  for (hi = apr_hash_first(scratch_pool, combined);
+       hi; hi = apr_hash_next(hi))
+    {
+      const char *bid = apr_hash_this_key(hi);
+      svn_branch__rev_bid_t *parent1 = svn_hash_gets(history1->parents, bid);
+      svn_branch__rev_bid_t *parent2 = svn_hash_gets(history2->parents, bid);
+
+      if (!(parent1 && parent2
+            && svn_branch__rev_bid_equal(parent1, parent2)))
+        {
+          different = TRUE;
+          break;
+        }
+    }
+  if (different)
     {
       *difference_p = apr_psprintf(result_pool, "%s -> %s",
-                                   rev_bid_str(merge_history1, result_pool),
-                                   rev_bid_str(merge_history2, result_pool));
+                                   history_str(history1, scratch_pool),
+                                   history_str(history2, scratch_pool));
+    }
+  else
+    {
+      *difference_p = NULL;
     }
   return SVN_NO_ERROR;
 }
 
 /* Set *IS_CHANGED to true if EDIT_TXN differs from its base txn, else to
  * false.
+ *
+ * Notice only a difference in content: branches deleted or added, or branch
+ * contents different. Ignore any differences in branch history metadata.
+ *
+ * ### At least we must ignore the "this branch" parent changing from
+ *     old-revision to new-revision. However we should probably notice
+ *     if a merge parent is added (which means we want to make a commit
+ *     recording this merge, even if no content changed), and perhaps
+ *     other cases.
  */
 static svn_error_t *
 txn_is_changed(svn_branch__txn_t *edit_txn,
@@ -602,9 +686,6 @@ txn_is_changed(svn_branch__txn_t *edit_t
       svn_branch__state_t *base_branch
         = svn_branch__txn_get_branch_by_id(base_txn, edit_branch->bid,
                                            scratch_pool);
-      svn_branch__rev_bid_t *edit_branch_merge_history;
-      svn_branch__rev_bid_t *base_branch_merge_history;
-      const char *merge_history_difference;
       svn_element__tree_t *edit_branch_elements, *base_branch_elements;
       apr_hash_t *diff;
 
@@ -614,20 +695,29 @@ txn_is_changed(svn_branch__txn_t *edit_t
           return SVN_NO_ERROR;
         }
 
-      /* Compare merge histories */
-      SVN_ERR(svn_branch__state_get_merge_ancestor(
-                edit_branch, &edit_branch_merge_history, scratch_pool));
-      SVN_ERR(svn_branch__state_get_merge_ancestor(
-                base_branch, &base_branch_merge_history, scratch_pool));
-      SVN_ERR(merge_history_diff(&merge_history_difference,
-                                 edit_branch_merge_history,
-                                 base_branch_merge_history,
-                                 scratch_pool));
-      if (merge_history_difference)
+#if 0
+      /* Compare histories */
+      /* ### No, don't. Ignore any differences in branch history metadata. */
+      {
+      svn_branch__history_t *edit_branch_history;
+      svn_branch__history_t *base_branch_history;
+      const char *history_difference;
+
+      SVN_ERR(svn_branch__state_get_history(edit_branch, &edit_branch_history,
+                                            scratch_pool));
+      SVN_ERR(svn_branch__state_get_history(base_branch, &base_branch_history,
+                                            scratch_pool));
+      SVN_ERR(history_diff(&history_difference,
+                           edit_branch_history,
+                           base_branch_history,
+                           scratch_pool, scratch_pool));
+      if (history_difference)
         {
           *is_changed = TRUE;
           return SVN_NO_ERROR;
         }
+      }
+#endif
 
       /* Compare elements */
       SVN_ERR(svn_branch__state_get_elements(edit_branch, &edit_branch_elements,
@@ -726,8 +816,11 @@ get_union_of_subbranches(apr_hash_t **al
                                     svn_branch__root_eid(right_branch),
                                     result_pool));
   all_subbranches
-    = left_branch ? hash_overlay(s_left->subbranches, s_right->subbranches)
-                  : s_right->subbranches;
+    = (s_left && s_right) ? hash_overlay(s_left->subbranches,
+                                         s_right->subbranches)
+        : s_left ? s_left->subbranches
+        : s_right ? s_right->subbranches
+        : apr_hash_make(result_pool);
 
   *all_subbranches_p = all_subbranches;
   return SVN_NO_ERROR;
@@ -765,6 +858,31 @@ svn_branch__replay(svn_branch__txn_t *ed
          element where it was attached */
     }
 
+  /* Replay any change in history */
+  /* ### Actually, here we just set the output history to the right-hand-side
+     history if that differs from left-hand-side.
+     This doesn't seem right, in general. It's OK if we're just copying
+     a txn into a fresh txn, as for example we do during commit. */
+  {
+    svn_branch__history_t *left_history = NULL;
+    svn_branch__history_t *right_history = NULL;
+    const char *history_difference;
+
+    if (left_branch)
+      SVN_ERR(svn_branch__state_get_history(left_branch, &left_history,
+                                            scratch_pool));
+    if (right_branch)
+      SVN_ERR(svn_branch__state_get_history(right_branch, &right_history,
+                                            scratch_pool));
+    SVN_ERR(history_diff(&history_difference, left_history, right_history,
+                         scratch_pool, scratch_pool));
+    if (history_difference)
+      {
+        SVN_ERR(svn_branch__state_set_history(edit_branch, right_history,
+                                              scratch_pool));
+      }
+  }
+
   /* Replay its subbranches, recursively.
      (If we're deleting the current branch, we don't also need to
      explicitly delete its subbranches... do we?) */
@@ -797,7 +915,6 @@ svn_branch__replay(svn_branch__txn_t *ed
                 = svn_branch__id_nest(edit_branch->bid, this_eid, scratch_pool);
 
               SVN_ERR(svn_branch__txn_open_branch(edit_txn, &edit_subbranch,
-                                                  right_subbranch->predecessor,
                                                   new_branch_id,
                                                   svn_branch__root_eid(right_subbranch),
                                                   scratch_pool, scratch_pool));
@@ -813,27 +930,6 @@ svn_branch__replay(svn_branch__txn_t *ed
         }
     }
 
-  /* Replay any change in merge history */
-  {
-    svn_branch__rev_bid_t *left_merge_history = NULL;
-    svn_branch__rev_bid_t *right_merge_history = NULL;
-    const char *merge_history_difference;
-
-    if (left_branch)
-      SVN_ERR(svn_branch__state_get_merge_ancestor(
-                left_branch, &left_merge_history, scratch_pool));
-    if (right_branch)
-      SVN_ERR(svn_branch__state_get_merge_ancestor(
-                right_branch, &right_merge_history, scratch_pool));
-    SVN_ERR(merge_history_diff(&merge_history_difference,
-              left_merge_history, right_merge_history, scratch_pool));
-    if (merge_history_difference)
-      {
-        SVN_ERR(svn_branch__state_add_merge_ancestor(
-                  edit_branch, right_merge_history, scratch_pool));
-      }
-  }
-
   return SVN_NO_ERROR;
 }
 
@@ -993,13 +1089,17 @@ update_wc_base_r(svnmover_wc_t *wc,
             {
               const char *new_branch_id
                 = svn_branch__id_nest(base_branch->bid, eid, scratch_pool);
+              svn_branch__history_t *history;
 
               SVN_ERR(svn_branch__txn_open_branch(base_branch->txn,
                                                   &base_subbranch,
-                                                  work_subbranch->predecessor,
                                                   new_branch_id,
                                                   svn_branch__root_eid(work_subbranch),
                                                   scratch_pool, scratch_pool));
+              SVN_ERR(svn_branch__state_get_history(
+                        work_subbranch, &history, scratch_pool));
+              SVN_ERR(svn_branch__state_set_history(
+                        base_subbranch, history, scratch_pool));
             }
           SVN_ERR(update_wc_base_r(wc, base_subbranch, work_subbranch,
                                    new_rev, scratch_pool));
@@ -1785,7 +1885,7 @@ do_merge(svnmover_wc_t *wc,
          svn_branch__el_rev_id_t *yca,
          apr_pool_t *scratch_pool)
 {
-  svn_branch__rev_bid_t *new_ancestor;
+  svn_branch__history_t *history;
 
   if (src->eid != tgt->eid || src->eid != yca->eid)
     {
@@ -1799,15 +1899,15 @@ do_merge(svnmover_wc_t *wc,
                                 src, tgt, yca,
                                 wc->pool, scratch_pool));
 
-  /* Update the merge history */
+  /* Update the history */
+  SVN_ERR(svn_branch__state_get_history(tgt->branch, &history, scratch_pool));
   /* ### Assume this was a complete merge -- i.e. all changes up to YCA were
-     previously merged, so now SRC is a new ancestor. */
-  new_ancestor = svn_branch__rev_bid_create(src->rev, src->branch->bid,
-                                              scratch_pool);
-  SVN_ERR(svn_branch__state_add_merge_ancestor(wc->working->branch, new_ancestor,
-                                               scratch_pool));
-  svnmover_notify_v(_("--- recorded merge ancestor as: %ld.%s"),
-                    new_ancestor->rev, new_ancestor->bid);
+     previously merged, so now SRC is a new parent. */
+  SVN_ERR(svn_branch__history_add_parent(history, src->rev, src->branch->bid,
+                                         scratch_pool));
+  SVN_ERR(svn_branch__state_set_history(tgt->branch, history, scratch_pool));
+  svnmover_notify_v(_("--- recorded merge parent as: %ld.%s"),
+                    src->rev, src->branch->bid);
 
   if (svnmover_any_conflicts(wc->conflicts))
     {
@@ -1827,8 +1927,10 @@ do_auto_merge(svnmover_wc_t *wc,
 {
   svn_branch__rev_bid_t *yca;
 
-  SVN_ERR(svn_branch__state_get_merge_ancestor(tgt->branch, &yca,
-                                               scratch_pool));
+  /* Find the Youngest Common Ancestor.
+     ### TODO */
+  yca = NULL;
+
   if (yca)
     {
       svn_branch__repos_t *repos = wc->working->branch->txn->repos;
@@ -1854,6 +1956,48 @@ do_auto_merge(svnmover_wc_t *wc,
   return SVN_NO_ERROR;
 }
 
+/* Show the history metadata of BRANCH, and recursively of each of its
+ * subbranches, prefixing each line of output with PREFIX.
+ *
+ * BRANCH may be null, in which case do nothing.
+ */
+static svn_error_t *
+show_history_r(svn_branch__state_t *branch,
+               const char *prefix,
+               apr_pool_t *scratch_pool)
+{
+  svn_branch__history_t *history = NULL;
+  svn_branch__subtree_t *subtree = NULL;
+  apr_hash_index_t *hi;
+
+  if (! branch)
+    return SVN_NO_ERROR;
+
+  SVN_ERR(svn_branch__state_get_history(branch, &history, scratch_pool));
+  svnmover_notify("%s%s: %s", prefix,
+                  branch->bid, history_str(history, scratch_pool));
+
+  /* recurse into each subbranch */
+  SVN_ERR(svn_branch__get_subtree(branch, &subtree,
+                                  svn_branch__root_eid(branch),
+                                  scratch_pool));
+  for (hi = apr_hash_first(scratch_pool, subtree->subbranches);
+       hi; hi = apr_hash_next(hi))
+    {
+      int e = svn_eid__hash_this_key(hi);
+      svn_branch__state_t *subbranch = NULL;
+
+      SVN_ERR(svn_branch__get_subbranch_at_eid(branch, &subbranch, e,
+                                               scratch_pool));
+      if (subbranch)
+        {
+          SVN_ERR(show_history_r(subbranch, prefix, scratch_pool));
+        }
+    }
+  return SVN_NO_ERROR;
+}
+
 /*  */
 typedef struct diff_item_t
 {
@@ -2063,28 +2207,43 @@ svn_branch__diff_func_t(svn_branch__subt
  * Recurse into sub-branches.
  */
 static svn_error_t *
-subtree_diff_r(svn_branch__subtree_t *left,
-               const char *left_bid,
-               const char *left_rrpath,
-               svn_branch__subtree_t *right,
-               const char *right_bid,
-               const char *right_rrpath,
+subtree_diff_r(svn_branch__state_t *left_branch,
+               int left_root_eid,
+               svn_branch__state_t *right_branch,
+               int right_root_eid,
                svn_branch__diff_func_t diff_func,
                const char *prefix,
                apr_pool_t *scratch_pool)
 {
+  svn_branch__subtree_t *left = NULL;
+  svn_branch__subtree_t *right = NULL;
   const char *left_str
-    = left ? apr_psprintf(scratch_pool, "%s:e%d at /%s",
-                          left_bid, left->tree->root_eid, left_rrpath)
-           : NULL;
+    = left_branch
+        ? apr_psprintf(scratch_pool, "%s:e%d at /%s",
+                       left_branch->bid, left_root_eid,
+                       svn_branch__get_root_rrpath(left_branch, scratch_pool))
+        : NULL;
   const char *right_str
-    = right ? apr_psprintf(scratch_pool, "%s:e%d at /%s",
-                           right_bid, right->tree->root_eid, right_rrpath)
+    = right_branch
+        ? apr_psprintf(scratch_pool, "%s:e%d at /%s",
+                       right_branch->bid, right_root_eid,
+                       svn_branch__get_root_rrpath(right_branch, scratch_pool))
             : NULL;
   const char *header;
   apr_hash_t *subbranches_l, *subbranches_r, *subbranches_all;
   apr_hash_index_t *hi;
 
+  if (left_branch)
+    {
+      SVN_ERR(svn_branch__get_subtree(left_branch, &left, left_root_eid,
+                                      scratch_pool));
+    }
+  if (right_branch)
+    {
+      SVN_ERR(svn_branch__get_subtree(right_branch, &right, right_root_eid,
+                                      scratch_pool));
+    }
+
   if (!left)
     {
       header = apr_psprintf(scratch_pool,
@@ -2113,7 +2272,7 @@ subtree_diff_r(svn_branch__subtree_t *le
                      scratch_pool, "--- diff branch %s : %s",
                      left_str, right_str);
         }
-      SVN_ERR(diff_func(left, left_bid, right, right_bid,
+      SVN_ERR(diff_func(left, left_branch->bid, right, right_branch->bid,
                         prefix, header,
                         scratch_pool));
     }
@@ -2127,41 +2286,30 @@ subtree_diff_r(svn_branch__subtree_t *le
        hi; hi = apr_hash_next(hi))
     {
       int e = svn_eid__hash_this_key(hi);
-      svn_branch__subtree_t *sub_left = NULL, *sub_right = NULL;
-      const char *sub_left_bid = NULL, *sub_right_bid = NULL;
-      const char *sub_left_rrpath = NULL, *sub_right_rrpath = NULL;
+      svn_branch__state_t *left_subbranch = NULL, *right_subbranch = NULL;
+      int left_subbranch_eid = -1, right_subbranch_eid = -1;
 
       /* recurse */
-      if (left)
+      if (left_branch)
         {
-          sub_left = svn_branch__subtree_get_subbranch_at_eid(left, e,
-                                                              scratch_pool);
-          if (sub_left)
+          SVN_ERR(svn_branch__get_subbranch_at_eid(left_branch, &left_subbranch, e,
+                                                   scratch_pool));
+          if (left_subbranch)
             {
-              const char *relpath
-                = svn_element__tree_get_path_by_eid(left->tree, e, scratch_pool);
-
-              sub_left_bid = svn_branch__id_nest(left_bid, e, scratch_pool);
-              sub_left_rrpath = svn_relpath_join(left_rrpath, relpath,
-                                                 scratch_pool);
+              left_subbranch_eid = svn_branch__root_eid(left_subbranch);
             }
         }
-      if (right)
+      if (right_branch)
         {
-          sub_right = svn_branch__subtree_get_subbranch_at_eid(right, e,
-                                                               scratch_pool);
-          if (sub_right)
+          SVN_ERR(svn_branch__get_subbranch_at_eid(right_branch, &right_subbranch, e,
+                                                   scratch_pool));
+          if (right_subbranch)
             {
-              const char *relpath
-                = svn_element__tree_get_path_by_eid(right->tree, e, scratch_pool);
-
-              sub_right_bid = svn_branch__id_nest(right_bid, e, scratch_pool);
-              sub_right_rrpath = svn_relpath_join(right_rrpath, relpath,
-                                                  scratch_pool);
+              right_subbranch_eid = svn_branch__root_eid(right_subbranch);
             }
         }
-      SVN_ERR(subtree_diff_r(sub_left, sub_left_bid, sub_left_rrpath,
-                             sub_right, sub_right_bid, sub_right_rrpath,
+      SVN_ERR(subtree_diff_r(left_subbranch, left_subbranch_eid,
+                             right_subbranch, right_subbranch_eid,
                              diff_func, prefix, scratch_pool));
     }
   return SVN_NO_ERROR;
@@ -2178,34 +2326,8 @@ branch_diff_r(svn_branch__el_rev_id_t *l
               const char *prefix,
               apr_pool_t *scratch_pool)
 {
-  svn_branch__rev_bid_t *merge_history1, *merge_history2;
-  const char *merge_history_difference;
-  svn_branch__subtree_t *s_left;
-  svn_branch__subtree_t *s_right;
-
-  /* ### This should be done for each branch, e.g. in subtree_diff_r(). */
-  /* ### This notification should start with a '--- diff branch ...' line. */
-  SVN_ERR(svn_branch__state_get_merge_ancestor(left->branch, &merge_history1,
-                                               scratch_pool));
-  SVN_ERR(svn_branch__state_get_merge_ancestor(right->branch, &merge_history2,
-                                               scratch_pool));
-  SVN_ERR(merge_history_diff(&merge_history_difference,
-                             merge_history1, merge_history2, scratch_pool));
-  if (merge_history_difference)
-    svnmover_notify("%s--- merge history is different: %s", prefix,
-                    merge_history_difference);
-
-  SVN_ERR(svn_branch__get_subtree(left->branch, &s_left, left->eid,
-                                  scratch_pool));
-  SVN_ERR(svn_branch__get_subtree(right->branch, &s_right, right->eid,
-                                  scratch_pool));
-
-  SVN_ERR(subtree_diff_r(s_left,
-                         svn_branch__get_id(left->branch, scratch_pool),
-                         svn_branch__get_root_rrpath(left->branch, scratch_pool),
-                         s_right,
-                         svn_branch__get_id(right->branch, scratch_pool),
-                         svn_branch__get_root_rrpath(right->branch, scratch_pool),
+  SVN_ERR(subtree_diff_r(left->branch, left->eid,
+                         right->branch, right->eid,
                          diff_func, prefix, scratch_pool));
   return SVN_NO_ERROR;
 }
@@ -2387,8 +2509,40 @@ do_cat(svn_branch__el_rev_id_t *file_el_
   return SVN_NO_ERROR;
 }
 
+/* Find the main parent of branch-state BRANCH. That means:
+ *   - the only parent (in the case of straight history or branching), else
+ *   - the parent with the same branch id (in the case of normal merging), else
+ *   - none (in the case of a new unrelated branch, or a new branch formed
+ *     by merging two or more other branches).
+ */
+static svn_error_t *
+find_branch_main_parent(svn_branch__state_t *branch,
+                        svn_branch__rev_bid_t **predecessor_p,
+                        apr_pool_t *result_pool)
+{
+  svn_branch__history_t *history;
+  svn_branch__rev_bid_t *our_own_history;
+  svn_branch__rev_bid_t *predecessor = NULL;
+
+  SVN_ERR(svn_branch__state_get_history(branch, &history, result_pool));
+  if (apr_hash_count(history->parents) == 1)
+    {
+      apr_hash_index_t *hi = apr_hash_first(result_pool, history->parents);
+
+      predecessor = apr_hash_this_val(hi);
+    }
+  else if ((our_own_history = svn_hash_gets(history->parents, branch->bid)))
+    {
+      predecessor = our_own_history;
+    }
+
+  if (predecessor_p)
+    *predecessor_p = predecessor;
+  return SVN_NO_ERROR;
+}
+
 /* Set *NEW_EL_REV_P to the location where OLD_EL_REV was in the previous
- * revision. Branching is followed.
+ * revision. Follow the "main line" of any branching in its history.
  *
  * If the same EID...
  */
@@ -2398,24 +2552,17 @@ svn_branch__find_predecessor_el_rev(svn_
                                     apr_pool_t *result_pool)
 {
   const svn_branch__repos_t *repos = old_el_rev->branch->txn->repos;
-  svn_branch__rev_bid_t *predecessor = old_el_rev->branch->predecessor;
+  svn_branch__rev_bid_t *predecessor;
   svn_branch__state_t *branch;
 
+  SVN_ERR(find_branch_main_parent(old_el_rev->branch,
+                                  &predecessor, result_pool));
   if (! predecessor)
     {
       *new_el_rev_p = NULL;
       return SVN_NO_ERROR;
     }
 
-  /* A predecessor can point at another branch within the same revision.
-     We don't want that result, so iterate until we find another revision. */
-  while (predecessor->rev == old_el_rev->rev)
-    {
-      branch = svn_branch__txn_get_branch_by_id(
-                 old_el_rev->branch->txn, predecessor->bid, result_pool);
-      predecessor = branch->predecessor;
-    }
-
   SVN_ERR(svn_branch__repos_get_branch_by_id(&branch,
                                              repos, predecessor->rev,
                                              predecessor->bid, result_pool));
@@ -2444,6 +2591,8 @@ do_log(svn_branch__el_rev_id_t *left,
 
       svnmover_notify(SVN_CL__LOG_SEP_STRING "r%ld | ...",
                       right->rev);
+      svnmover_notify("History:");
+      SVN_ERR(show_history_r(right->branch, "   ", scratch_pool));
       svnmover_notify("Changed elements:");
       SVN_ERR(branch_diff_r(el_rev_left, right,
                             show_subtree_diff, "   ",
@@ -2484,8 +2633,8 @@ do_mkbranch(const char **new_branch_id_p
   new_branch_id = svn_branch__id_nest(outer_branch_id, new_outer_eid,
                                       scratch_pool);
   SVN_ERR(svn_branch__txn_open_branch(txn, &new_branch,
-                                      NULL /*predecessor*/, new_branch_id,
-                                      new_inner_eid, scratch_pool, scratch_pool));
+                                      new_branch_id, new_inner_eid,
+                                      scratch_pool, scratch_pool));
   SVN_ERR(svn_branch__state_alter_one(new_branch, new_inner_eid,
                                       -1, "", payload, scratch_pool));
 
@@ -2524,6 +2673,7 @@ do_branch(svn_branch__state_t **new_bran
   int to_outer_eid;
   const char *new_branch_id;
   svn_branch__state_t *new_branch;
+  svn_branch__history_t *history;
   const char *to_path
     = branch_peid_name_to_path(to_outer_branch,
                                to_outer_parent_eid, new_name, scratch_pool);
@@ -2536,6 +2686,10 @@ do_branch(svn_branch__state_t **new_bran
   SVN_ERR(svn_branch__txn_branch(txn, &new_branch,
                                  from, new_branch_id,
                                  result_pool, scratch_pool));
+  history = svn_branch__history_create_empty(scratch_pool);
+  SVN_ERR(svn_branch__history_add_parent(history, from->rev, from->bid,
+                                         scratch_pool));
+  SVN_ERR(svn_branch__state_set_history(new_branch, history, scratch_pool));
   SVN_ERR(svn_branch__state_alter_one(to_outer_branch, to_outer_eid,
                                       to_outer_parent_eid, new_name,
                                       svn_element__payload_create_subbranch(
@@ -3072,6 +3226,45 @@ do_migrate(svnmover_wc_t *wc,
   return SVN_NO_ERROR;
 }
 
+static svn_error_t *
+show_branch_history(svn_branch__state_t *branch,
+                    apr_pool_t *scratch_pool)
+{
+  svn_branch__history_t *history;
+  svn_branch__rev_bid_t *main_parent;
+  apr_hash_index_t *hi;
+
+  SVN_ERR(svn_branch__state_get_history(branch, &history, scratch_pool));
+
+  SVN_ERR(find_branch_main_parent(branch, &main_parent, scratch_pool));
+  if (main_parent)
+    {
+      if (strcmp(main_parent->bid, branch->bid) == 0)
+        {
+          svnmover_notify("  main parent: r%ld.%s",
+                          main_parent->rev, main_parent->bid);
+        }
+      else
+        {
+          svnmover_notify("  main parent (branched from): r%ld.%s",
+                          main_parent->rev, main_parent->bid);
+        }
+    }
+  for (hi = apr_hash_first(scratch_pool, history->parents);
+       hi; hi = apr_hash_next(hi))
+    {
+      svn_branch__rev_bid_t *parent = apr_hash_this_val(hi);
+
+      if (! svn_branch__rev_bid_equal(parent, main_parent))
+        {
+          svnmover_notify("  other parent (complete merge): r%ld.%s",
+                          parent->rev, parent->bid);
+        }
+    }
+
+  return SVN_NO_ERROR;
+}
+
 /* Show info about element E.
  *
  * TODO: Show different info for a repo element versus a WC element.
@@ -3260,14 +3453,11 @@ execute(svnmover_wc_t *wc,
           {
             svn_boolean_t is_modified;
             svn_revnum_t base_rev_min, base_rev_max;
-            svn_branch__rev_bid_t *merge_ancestor;
 
             SVN_ERR(txn_is_changed(wc->working->branch->txn, &is_modified,
                                    iterpool));
             SVN_ERR(svnmover_wc_get_base_revs(wc, &base_rev_min, &base_rev_max,
                                               iterpool));
-            SVN_ERR(svn_branch__state_get_merge_ancestor(
-                      wc->working->branch, &merge_ancestor, iterpool));
 
             svnmover_notify("Repository Root: %s", wc->repos_root_url);
             if (base_rev_min == base_rev_max)
@@ -3276,16 +3466,9 @@ execute(svnmover_wc_t *wc,
               svnmover_notify("Base Revisions: %ld to %ld",
                               base_rev_min, base_rev_max);
             svnmover_notify("Base Branch:    %s", wc->base->branch->bid);
-            SVN_ERR(svn_branch__state_get_merge_ancestor(
-                      wc->base->branch, &merge_ancestor, iterpool));
-            if (merge_ancestor)
-              svnmover_notify("  merge ancestor: %ld.%s",
-                              merge_ancestor->rev, merge_ancestor->bid);
             svnmover_notify("Working Branch: %s", wc->working->branch->bid);
+            SVN_ERR(show_branch_history(wc->working->branch, iterpool));
             svnmover_notify("Modified:       %s", is_modified ? "yes" : "no");
-            if (merge_ancestor)
-              svnmover_notify("  merge ancestor: %ld.%s",
-                              merge_ancestor->rev, merge_ancestor->bid);
           }
           break;
 

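The history_diff() logic above boils down to: overlay the two "parents" hashes to
get the union of branch ids, then report a difference as soon as any id maps to
different parents or has a parent on only one side. Below is a small standalone
sketch (not svnmover code) of that overlay-and-compare idea, using plain APR
hashes and strings in place of svnmover's hash_overlay() helper and
svn_branch__rev_bid_t values; the example data is made up:

[[[
#include <stdio.h>
#include <string.h>
#include <apr_general.h>
#include <apr_hash.h>
#include <apr_pools.h>

int main(void)
{
  apr_pool_t *pool;
  apr_hash_t *parents1, *parents2, *combined;
  apr_hash_index_t *hi;
  int different = 0;

  apr_initialize();
  apr_pool_create(&pool, NULL);

  /* branch id -> "rREV.BRANCH" parent, one hash per history */
  parents1 = apr_hash_make(pool);
  parents2 = apr_hash_make(pool);
  apr_hash_set(parents1, "B1", APR_HASH_KEY_STRING, "r10.B1");
  apr_hash_set(parents2, "B1", APR_HASH_KEY_STRING, "r10.B1");
  apr_hash_set(parents2, "B2", APR_HASH_KEY_STRING, "r12.B2"); /* merge parent */

  /* Union of the keys of both hashes. */
  combined = apr_hash_overlay(pool, parents1, parents2);

  /* The histories differ if any branch id maps to different parents,
     or has a parent on only one side. */
  for (hi = apr_hash_first(pool, combined); hi; hi = apr_hash_next(hi))
    {
      const char *bid = apr_hash_this_key(hi);
      const char *p1 = apr_hash_get(parents1, bid, APR_HASH_KEY_STRING);
      const char *p2 = apr_hash_get(parents2, bid, APR_HASH_KEY_STRING);

      if (!(p1 && p2 && strcmp(p1, p2) == 0))
        {
          different = 1;
          break;
        }
    }

  printf("histories %s\n", different ? "differ" : "are equal");

  apr_pool_destroy(pool);
  apr_terminate();
  return 0;
}
]]]
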
Modified: subversion/branches/ra-git/tools/dist/security/_gnupg.py
URL: http://svn.apache.org/viewvc/subversion/branches/ra-git/tools/dist/security/_gnupg.py?rev=1719652&r1=1719651&r2=1719652&view=diff
==============================================================================
--- subversion/branches/ra-git/tools/dist/security/_gnupg.py (original)
+++ subversion/branches/ra-git/tools/dist/security/_gnupg.py Sat Dec 12 11:15:19 2015
@@ -1,29 +1,3 @@
-# Copyright (c) 2008-2011 by Vinay Sajip.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-#     * Redistributions of source code must retain the above copyright notice,
-#       this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above copyright notice,
-#       this list of conditions and the following disclaimer in the documentation
-#       and/or other materials provided with the distribution.
-#     * The name(s) of the copyright holder(s) may not be used to endorse or
-#       promote products derived from this software without specific prior
-#       written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) "AS IS" AND ANY EXPRESS OR
-# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-# EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
-# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
 """ A wrapper for the 'gpg' command::
 
 Portions of this module are derived from A.M. Kuchling's well-designed
@@ -53,14 +27,14 @@ Vinay Sajip to make use of the subproces
 and so does not work on Windows). Renamed to gnupg.py to avoid confusion with
 the previous versions.
 
-Modifications Copyright (C) 2008-2011 Vinay Sajip. All rights reserved.
+Modifications Copyright (C) 2008-2014 Vinay Sajip. All rights reserved.
 
 A unittest harness (test_gnupg.py) has also been added.
 """
-import locale
 
+__version__ = "0.3.8.dev0"
 __author__ = "Vinay Sajip"
-__date__  = "$02-Sep-2011 13:18:12$"
+__date__  = "$07-Dec-2014 18:46:17$"
 
 try:
     from io import StringIO
@@ -71,12 +45,20 @@ import codecs
 import locale
 import logging
 import os
+import re
 import socket
 from subprocess import Popen
 from subprocess import PIPE
 import sys
 import threading
 
+STARTUPINFO = None
+if os.name == 'nt':
+    try:
+        from subprocess import STARTUPINFO, STARTF_USESHOWWINDOW, SW_HIDE
+    except ImportError:
+        STARTUPINFO = None
+
 try:
     import logging.NullHandler as NullHandler
 except ImportError:
@@ -86,13 +68,61 @@ except ImportError:
 try:
     unicode
     _py3k = False
+    string_types = basestring
+    text_type = unicode
 except NameError:
     _py3k = True
+    string_types = str
+    text_type = str
 
 logger = logging.getLogger(__name__)
 if not logger.handlers:
     logger.addHandler(NullHandler())
 
+# We use the test below because it works for Jython as well as CPython
+if os.path.__name__ == 'ntpath':
+    # On Windows, we don't need shell quoting, other than worrying about
+    # paths with spaces in them.
+    def shell_quote(s):
+        return '"%s"' % s
+else:
+    # Section copied from sarge
+
+    # This regex determines which shell input needs quoting
+    # because it may be unsafe
+    UNSAFE = re.compile(r'[^\w%+,./:=@-]')
+
+    def shell_quote(s):
+        """
+        Quote text so that it is safe for Posix command shells.
+
+        For example, "*.py" would be converted to "'*.py'". If the text is
+        considered safe it is returned unquoted.
+
+        :param s: The value to quote
+        :type s: str (or unicode on 2.x)
+        :return: A safe version of the input, from the point of view of Posix
+                 command shells
+        :rtype: The passed-in type
+        """
+        if not isinstance(s, string_types):
+            raise TypeError('Expected string type, got %s' % type(s))
+        if not s:
+            result = "''"
+        elif not UNSAFE.search(s):
+            result = s
+        else:
+            result = "'%s'" % s.replace("'", r"'\''")
+        return result
+
+    # end of sarge code
+
+# Now that we use shell=False, we shouldn't need to quote arguments.
+# Use no_quote instead of shell_quote to remind us of where quoting
+# was needed.
+def no_quote(s):
+    return s
+
 def _copy_data(instream, outstream):
     # Copy one stream to another
     sent = 0
@@ -102,7 +132,7 @@ def _copy_data(instream, outstream):
         enc = 'ascii'
     while True:
         data = instream.read(1024)
-        if len(data) == 0:
+        if not data:
             break
         sent += len(data)
         logger.debug("sending chunk (%d): %r", sent, data[:256])
@@ -132,34 +162,58 @@ def _write_passphrase(stream, passphrase
     passphrase = '%s\n' % passphrase
     passphrase = passphrase.encode(encoding)
     stream.write(passphrase)
-    logger.debug("Wrote passphrase: %r", passphrase)
+    logger.debug('Wrote passphrase')
 
 def _is_sequence(instance):
-    return isinstance(instance,list) or isinstance(instance,tuple)
+    return isinstance(instance, (list, tuple, set, frozenset))
 
-def _make_binary_stream(s, encoding):
+def _make_memory_stream(s):
     try:
-        if _py3k:
-            if isinstance(s, str):
-                s = s.encode(encoding)
-        else:
-            if type(s) is not str:
-                s = s.encode(encoding)
         from io import BytesIO
         rv = BytesIO(s)
     except ImportError:
         rv = StringIO(s)
     return rv
 
+def _make_binary_stream(s, encoding):
+    if _py3k:
+        if isinstance(s, str):
+            s = s.encode(encoding)
+    else:
+        if type(s) is not str:
+            s = s.encode(encoding)
+    return _make_memory_stream(s)
+
 class Verify(object):
     "Handle status messages for --verify"
 
+    TRUST_UNDEFINED = 0
+    TRUST_NEVER = 1
+    TRUST_MARGINAL = 2
+    TRUST_FULLY = 3
+    TRUST_ULTIMATE = 4
+
+    TRUST_LEVELS = {
+        "TRUST_UNDEFINED" : TRUST_UNDEFINED,
+        "TRUST_NEVER" : TRUST_NEVER,
+        "TRUST_MARGINAL" : TRUST_MARGINAL,
+        "TRUST_FULLY" : TRUST_FULLY,
+        "TRUST_ULTIMATE" : TRUST_ULTIMATE,
+    }
+
     def __init__(self, gpg):
         self.gpg = gpg
         self.valid = False
         self.fingerprint = self.creation_date = self.timestamp = None
         self.signature_id = self.key_id = None
         self.username = None
+        self.key_status = None
+        self.status = None
+        self.pubkey_fingerprint = None
+        self.expire_timestamp = None
+        self.sig_timestamp = None
+        self.trust_text = None
+        self.trust_level = None
 
     def __nonzero__(self):
         return self.valid
@@ -167,14 +221,31 @@ class Verify(object):
     __bool__ = __nonzero__
 
     def handle_status(self, key, value):
-        if key in ("TRUST_UNDEFINED", "TRUST_NEVER", "TRUST_MARGINAL",
-                   "TRUST_FULLY", "TRUST_ULTIMATE", "RSA_OR_IDEA", "NODATA",
-                   "IMPORT_RES", "PLAINTEXT", "PLAINTEXT_LENGTH"):
+        if key in self.TRUST_LEVELS:
+            self.trust_text = key
+            self.trust_level = self.TRUST_LEVELS[key]
+        elif key in ("RSA_OR_IDEA", "NODATA", "IMPORT_RES", "PLAINTEXT",
+                     "PLAINTEXT_LENGTH", "POLICY_URL", "DECRYPTION_INFO",
+                     "DECRYPTION_OKAY", "INV_SGNR", "FILE_START", "FILE_ERROR",
+                     "FILE_DONE", "PKA_TRUST_GOOD", "PKA_TRUST_BAD", "BADMDC",
+                     "GOODMDC", "NO_SGNR", "NOTATION_NAME", "NOTATION_DATA",
+                     "PROGRESS", "PINENTRY_LAUNCHED", "NEWSIG"):
             pass
         elif key == "BADSIG":
             self.valid = False
             self.status = 'signature bad'
             self.key_id, self.username = value.split(None, 1)
+        elif key == "ERRSIG":
+            self.valid = False
+            (self.key_id,
+             algo, hash_algo,
+             cls,
+             self.timestamp) = value.split()[:5]
+            self.status = 'signature error'
+        elif key == "EXPSIG":
+            self.valid = False
+            self.status = 'signature expired'
+            self.key_id, self.username = value.split(None, 1)
         elif key == "GOODSIG":
             self.valid = True
             self.status = 'signature good'
@@ -190,28 +261,33 @@ class Verify(object):
         elif key == "SIG_ID":
             (self.signature_id,
              self.creation_date, self.timestamp) = value.split()
-        elif key == "ERRSIG":
+        elif key == "DECRYPTION_FAILED":
             self.valid = False
-            (self.key_id,
-             algo, hash_algo,
-             cls,
-             self.timestamp) = value.split()[:5]
-            self.status = 'signature error'
+            self.key_id = value
+            self.status = 'decryption failed'
         elif key == "NO_PUBKEY":
             self.valid = False
             self.key_id = value
             self.status = 'no public key'
-        elif key in ("KEYEXPIRED", "SIGEXPIRED"):
+        elif key in ("KEYEXPIRED", "SIGEXPIRED", "KEYREVOKED"):
             # these are useless in verify, since they are spit out for any
             # pub/subkeys on the key, not just the one doing the signing.
             # if we want to check for signatures with expired key,
-            # the relevant flag is EXPKEYSIG.
+            # the relevant flag is EXPKEYSIG or REVKEYSIG.
             pass
         elif key in ("EXPKEYSIG", "REVKEYSIG"):
             # signed with expired or revoked key
             self.valid = False
             self.key_id = value.split()[0]
-            self.status = (('%s %s') % (key[:3], key[3:])).lower()
+            if key == "EXPKEYSIG":
+                self.key_status = 'signing key has expired'
+            else:
+                self.key_status = 'signing key was revoked'
+            self.status = self.key_status
+        elif key == "UNEXPECTED":
+            self.valid = False
+            self.key_id = value
+            self.status = 'unexpected data'
         else:
             raise ValueError("Unknown status message: %r" % key)
 
@@ -280,8 +356,8 @@ class ImportResult(object):
                 'problem': reason, 'text': self.problem_reason[reason]})
         elif key == "IMPORT_RES":
             import_res = value.split()
-            for i in range(len(self.counts)):
-                setattr(self, self.counts[i], int(import_res[i]))
+            for i, count in enumerate(self.counts):
+                setattr(self, count, int(import_res[i]))
         elif key == "KEYEXPIRED":
             self.results.append({'fingerprint': None,
                 'problem': '0', 'text': 'Key expired'})
@@ -293,12 +369,68 @@ class ImportResult(object):
 
     def summary(self):
         l = []
-        l.append('%d imported'%self.imported)
+        l.append('%d imported' % self.imported)
         if self.not_imported:
-            l.append('%d not imported'%self.not_imported)
+            l.append('%d not imported' % self.not_imported)
         return ', '.join(l)
 
-class ListKeys(list):
+ESCAPE_PATTERN = re.compile(r'\\x([0-9a-f][0-9a-f])', re.I)
+BASIC_ESCAPES = {
+    r'\n': '\n',
+    r'\r': '\r',
+    r'\f': '\f',
+    r'\v': '\v',
+    r'\b': '\b',
+    r'\0': '\0',
+}
+
+class SendResult(object):
+    def __init__(self, gpg):
+        self.gpg = gpg
+
+    def handle_status(self, key, value):
+        logger.debug('SendResult: %s: %s', key, value)
+
+class SearchKeys(list):
+    ''' Handle status messages for --search-keys.
+
+        Handle pub and uid (relating the latter to the former).
+
+        Don't care about the rest
+    '''
+
+    UID_INDEX = 1
+    FIELDS = 'type keyid algo length date expires'.split()
+
+    def __init__(self, gpg):
+        self.gpg = gpg
+        self.curkey = None
+        self.fingerprints = []
+        self.uids = []
+
+    def get_fields(self, args):
+        result = {}
+        for i, var in enumerate(self.FIELDS):
+            result[var] = args[i]
+        result['uids'] = []
+        return result
+
+    def pub(self, args):
+        self.curkey = curkey = self.get_fields(args)
+        self.append(curkey)
+
+    def uid(self, args):
+        uid = args[self.UID_INDEX]
+        uid = ESCAPE_PATTERN.sub(lambda m: chr(int(m.group(1), 16)), uid)
+        for k, v in BASIC_ESCAPES.items():
+            uid = uid.replace(k, v)
+        self.curkey['uids'].append(uid)
+        self.uids.append(uid)
+
+    def handle_status(self, key, value):
+        pass
+
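A minimal, self-contained sketch of how a uid field from the colon listing is unescaped by the code above (the sample uid is made up):

    import re

    ESCAPE_PATTERN = re.compile(r'\\x([0-9a-f][0-9a-f])', re.I)
    BASIC_ESCAPES = {r'\n': '\n', r'\r': '\r', r'\f': '\f',
                     r'\v': '\v', r'\b': '\b', r'\0': '\0'}

    uid = r'Fran\xe7ois Tester <user@example.org>'
    # replace \xHH hex escapes with the corresponding character ...
    uid = ESCAPE_PATTERN.sub(lambda m: chr(int(m.group(1), 16)), uid)
    # ... then expand the simple backslash escapes
    for k, v in BASIC_ESCAPES.items():
        uid = uid.replace(k, v)
    # uid is now 'François Tester <user@example.org>'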
+class ListKeys(SearchKeys):
     ''' Handle status messages for --list-keys.
 
         Handle pub and uid (relating the latter to the former).
@@ -307,7 +439,6 @@ class ListKeys(list):
 
         crt = X.509 certificate
         crs = X.509 certificate and private key available
-        sub = subkey (secondary key)
         ssb = secret subkey (secondary key)
         uat = user attribute (same as user id except for field 10).
         sig = signature
@@ -316,24 +447,17 @@ class ListKeys(list):
         grp = reserved for gpgsm
         rvk = revocation key
     '''
-    def __init__(self, gpg):
-        self.gpg = gpg
-        self.curkey = None
-        self.fingerprints = []
-        self.uids = []
+
+    UID_INDEX = 9
+    FIELDS = 'type trust length algo keyid date expires dummy ownertrust uid'.split()
 
     def key(self, args):
-        vars = ("""
-            type trust length algo keyid date expires dummy ownertrust uid
-        """).split()
-        self.curkey = {}
-        for i in range(len(vars)):
-            self.curkey[vars[i]] = args[i]
-        self.curkey['uids'] = []
-        if self.curkey['uid']:
-            self.curkey['uids'].append(self.curkey['uid'])
-        del self.curkey['uid']
-        self.append(self.curkey)
+        self.curkey = curkey = self.get_fields(args)
+        if curkey['uid']:
+            curkey['uids'].append(curkey['uid'])
+        del curkey['uid']
+        curkey['subkeys'] = []
+        self.append(curkey)
 
     pub = sec = key
 
@@ -341,14 +465,34 @@ class ListKeys(list):
         self.curkey['fingerprint'] = args[9]
         self.fingerprints.append(args[9])
 
-    def uid(self, args):
-        self.curkey['uids'].append(args[9])
-        self.uids.append(args[9])
+    def sub(self, args):
+        subkey = [args[4], args[11]]
+        self.curkey['subkeys'].append(subkey)
+
+
+class ScanKeys(ListKeys):
+    ''' Handle status messages for --with-fingerprint.'''
+
+    def sub(self, args):
+        # --with-fingerprint --with-colons somehow outputs fewer colons for
+        # sub entries, so use the last value args[-1] instead of args[11]
+        subkey = [args[4], args[-1]]
+        self.curkey['subkeys'].append(subkey)
 
-    def handle_status(self, key, value):
-        pass
+class TextHandler(object):
+    def _as_text(self):
+        return self.data.decode(self.gpg.encoding, self.gpg.decode_errors)
+
+    if _py3k:
+        __str__ = _as_text
+    else:
+        __unicode__ = _as_text
+
+        def __str__(self):
+            return self.data
 
-class Crypt(Verify):
+
+class Crypt(Verify, TextHandler):
     "Handle status messages for --encrypt and --decrypt"
     def __init__(self, gpg):
         Verify.__init__(self, gpg)
@@ -362,18 +506,17 @@ class Crypt(Verify):
 
     __bool__ = __nonzero__
 
-    def __str__(self):
-        return self.data.decode(self.gpg.encoding, self.gpg.decode_errors)
-
     def handle_status(self, key, value):
         if key in ("ENC_TO", "USERID_HINT", "GOODMDC", "END_DECRYPTION",
-                   "BEGIN_SIGNING", "NO_SECKEY", "ERROR", "NODATA"):
+                   "BEGIN_SIGNING", "NO_SECKEY", "ERROR", "NODATA", "PROGRESS",
+                   "CARDCTRL", "BADMDC", "SC_OP_FAILURE", "SC_OP_SUCCESS",
+                   "PINENTRY_LAUNCHED"):
             # in the case of ERROR, this is because a more specific error
             # message will have come first
             pass
         elif key in ("NEED_PASSPHRASE", "BAD_PASSPHRASE", "GOOD_PASSPHRASE",
                      "MISSING_PASSPHRASE", "DECRYPTION_FAILED",
-                     "KEY_NOT_CREATED"):
+                     "KEY_NOT_CREATED", "NEED_PASSPHRASE_PIN"):
             self.status = key.replace("_", " ").lower()
         elif key == "NEED_PASSPHRASE_SYM":
             self.status = 'need symmetric passphrase'
@@ -415,7 +558,8 @@ class GenKey(object):
         return self.fingerprint or ''
 
     def handle_status(self, key, value):
-        if key in ("PROGRESS", "GOOD_PASSPHRASE", "NODATA"):
+        if key in ("PROGRESS", "GOOD_PASSPHRASE", "NODATA", "KEY_NOT_CREATED",
+                   "PINENTRY_LAUNCHED"):
             pass
         elif key == "KEY_CREATED":
             (self.type,self.fingerprint) = value.split()
@@ -434,7 +578,7 @@ class DeleteResult(object):
     problem_reason = {
         '1': 'No such key',
         '2': 'Must delete secret key first',
-        '3': 'Ambigious specification',
+        '3': 'Ambiguous specification',
     }
 
     def handle_status(self, key, value):
@@ -444,11 +588,18 @@ class DeleteResult(object):
         else:
             raise ValueError("Unknown status message: %r" % key)
 
-class Sign(object):
+    def __nonzero__(self):
+        return self.status == 'ok'
+
+    __bool__ = __nonzero__
+
+
+class Sign(TextHandler):
     "Handle status messages for --sign"
     def __init__(self, gpg):
         self.gpg = gpg
         self.type = None
+        self.hash_algo = None
         self.fingerprint = None
 
     def __nonzero__(self):
@@ -456,21 +607,26 @@ class Sign(object):
 
     __bool__ = __nonzero__
 
-    def __str__(self):
-        return self.data.decode(self.gpg.encoding, self.gpg.decode_errors)
-
     def handle_status(self, key, value):
         if key in ("USERID_HINT", "NEED_PASSPHRASE", "BAD_PASSPHRASE",
-                   "GOOD_PASSPHRASE", "BEGIN_SIGNING"):
+                   "GOOD_PASSPHRASE", "BEGIN_SIGNING", "CARDCTRL", "INV_SGNR",
+                   "NO_SGNR", "MISSING_PASSPHRASE", "NEED_PASSPHRASE_PIN",
+                   "SC_OP_FAILURE", "SC_OP_SUCCESS", "PROGRESS"):
             pass
+        elif key in ("KEYEXPIRED", "SIGEXPIRED"):
+            self.status = 'key expired'
+        elif key == "KEYREVOKED":
+            self.status = 'key revoked'
         elif key == "SIG_CREATED":
             (self.type,
-             algo, hashalgo, cls,
+             algo, self.hash_algo, cls,
              self.timestamp, self.fingerprint
              ) = value.split()
         else:
             raise ValueError("Unknown status message: %r" % key)
 
+VERSION_RE = re.compile(r'gpg \(GnuPG\) (\d+(\.\d+)*)'.encode('ascii'), re.I)
+HEX_DIGITS_RE = re.compile(r'[0-9a-f]+$', re.I)
 
 class GPG(object):
 
@@ -481,31 +637,54 @@ class GPG(object):
         'delete': DeleteResult,
         'generate': GenKey,
         'import': ImportResult,
+        'send': SendResult,
         'list': ListKeys,
+        'scan': ScanKeys,
+        'search': SearchKeys,
         'sign': Sign,
         'verify': Verify,
     }
 
     "Encapsulate access to the gpg executable"
     def __init__(self, gpgbinary='gpg', gnupghome=None, verbose=False,
-                 use_agent=False, keyring=None):
+                 use_agent=False, keyring=None, options=None,
+                 secret_keyring=None):
         """Initialize a GPG process wrapper.  Options are:
 
         gpgbinary -- full pathname for GPG binary.
 
         gnupghome -- full pathname to where we can find the public and
         private keyrings.  Default is whatever gpg defaults to.
-        keyring -- name of alternative keyring file to use. If specified,
-        the default keyring is not used.
+        keyring -- name of alternative keyring file to use, or list of such
+        keyrings. If specified, the default keyring is not used.
+        options -- a list of additional options to pass to the GPG binary.
+        secret_keyring -- name of alternative secret keyring file to use, or
+        list of such keyrings.
         """
         self.gpgbinary = gpgbinary
         self.gnupghome = gnupghome
+        if keyring:
+            # Allow passing a string or another iterable. Make it uniformly
+            # a list of keyring filenames
+            if isinstance(keyring, string_types):
+                keyring = [keyring]
         self.keyring = keyring
+        if secret_keyring:
+            # Allow passing a string or another iterable. Make it uniformly
+            # a list of keyring filenames
+            if isinstance(secret_keyring, string_types):
+                secret_keyring = [secret_keyring]
+        self.secret_keyring = secret_keyring
         self.verbose = verbose
         self.use_agent = use_agent
-        self.encoding = locale.getpreferredencoding()
-        if self.encoding is None: # This happens on Jython!
-            self.encoding = sys.stdin.encoding
+        if isinstance(options, str):
+            options = [options]
+        self.options = options
+        # Changed in 0.3.7 to use Latin-1 encoding rather than
+        # locale.getpreferredencoding falling back to sys.stdin.encoding
+        # falling back to utf-8, because gpg itself uses latin-1 as the default
+        # encoding.
+        self.encoding = 'latin-1'
         if gnupghome and not os.path.isdir(self.gnupghome):
             os.makedirs(self.gnupghome,0x1C0)
         p = self._open_subprocess(["--version"])
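An illustrative construction using the new parameters might look like the following sketch (paths and option values are made up; keyring and secret_keyring accept either a single filename or a list):

    gpg = GPG(gnupghome='/home/user/.gnupg-test',
              keyring=['pubring-a.gpg', 'pubring-b.gpg'],
              secret_keyring='secring-a.gpg',
              options=['--trust-model', 'always'])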
@@ -514,25 +693,54 @@ class GPG(object):
         if p.returncode != 0:
             raise ValueError("Error invoking gpg: %s: %s" % (p.returncode,
                                                              result.stderr))
+        m = VERSION_RE.match(result.data)
+        if not m:
+            self.version = None
+        else:
+            dot = '.'.encode('ascii')
+            self.version = tuple([int(s) for s in m.groups()[0].split(dot)])
 
-    def _open_subprocess(self, args, passphrase=False):
-        # Internal method: open a pipe to a GPG subprocess and return
-        # the file objects for communicating with it.
-        cmd = [self.gpgbinary, '--status-fd 2 --no-tty']
+    def make_args(self, args, passphrase):
+        """
+        Make a list of command line elements for GPG. The value of ``args``
+        will be appended. The ``passphrase`` argument needs to be True if
+        a passphrase will be sent to GPG, else False.
+        """
+        cmd = [self.gpgbinary, '--status-fd', '2', '--no-tty']
         if self.gnupghome:
-            cmd.append('--homedir "%s" ' % self.gnupghome)
+            cmd.extend(['--homedir', no_quote(self.gnupghome)])
         if self.keyring:
-            cmd.append('--no-default-keyring --keyring "%s" ' % self.keyring)
+            cmd.append('--no-default-keyring')
+            for fn in self.keyring:
+                cmd.extend(['--keyring', no_quote(fn)])
+        if self.secret_keyring:
+            for fn in self.secret_keyring:
+                cmd.extend(['--secret-keyring', no_quote(fn)])
         if passphrase:
-            cmd.append('--batch --passphrase-fd 0')
+            cmd.extend(['--batch', '--passphrase-fd', '0'])
         if self.use_agent:
             cmd.append('--use-agent')
+        if self.options:
+            cmd.extend(self.options)
         cmd.extend(args)
-        cmd = ' '.join(cmd)
+        return cmd
+
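A minimal sketch of the resulting command list, assuming no_quote leaves an ordinary path unchanged and using a made-up homedir:

    gpg = GPG(gpgbinary='gpg', gnupghome='/home/user/.gnupg-test')
    cmd = gpg.make_args(['--list-keys'], passphrase=False)
    # cmd == ['gpg', '--status-fd', '2', '--no-tty',
    #         '--homedir', '/home/user/.gnupg-test', '--list-keys']
    # Each element is a separate argv entry; the subprocess below is started
    # with shell=False, so no shell quoting is needed.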
+    def _open_subprocess(self, args, passphrase=False):
+        # Internal method: open a pipe to a GPG subprocess and return
+        # the file objects for communicating with it.
+        cmd = self.make_args(args, passphrase)
         if self.verbose:
-            print(cmd)
+            pcmd = ' '.join(cmd)
+            print(pcmd)
         logger.debug("%s", cmd)
-        return Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
+        if not STARTUPINFO:
+            si = None
+        else:
+            si = STARTUPINFO()
+            si.dwFlags = STARTF_USESHOWWINDOW
+            si.wShowWindow = SW_HIDE
+        return Popen(cmd, shell=False, stdin=PIPE, stdout=PIPE, stderr=PIPE,
+                     startupinfo=si)
 
     def _read_response(self, stream, result):
         # Internal method: reads all the stderr output from GPG, taking notice
@@ -609,7 +817,7 @@ class GPG(object):
         stderr.close()
         stdout.close()
 
-    def _handle_io(self, args, file, result, passphrase=None, binary=False):
+    def _handle_io(self, args, fileobj, result, passphrase=None, binary=False):
         "Handle a call to GPG - pass input data, collect output data"
         # Handle a basic data call - pass data to GPG, handle the output
         # including status information. Garbage In, Garbage Out :)
@@ -620,7 +828,7 @@ class GPG(object):
             stdin = p.stdin
         if passphrase:
             _write_passphrase(stdin, passphrase, self.encoding)
-        writer = _threaded_copy_data(file, stdin)
+        writer = _threaded_copy_data(fileobj, stdin)
         self._collect_output(p, result, writer, stdin)
         return result
 
@@ -634,8 +842,15 @@ class GPG(object):
         f.close()
         return result
 
+    def set_output_without_confirmation(self, args, output):
+        "If writing to a file which exists, avoid a confirmation message."
+        if os.path.exists(output):
+            # We need to avoid an overwrite confirmation message
+            args.extend(['--batch', '--yes'])
+        args.extend(['--output', output])
+
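For instance (the output path is made up), '--batch --yes' is only added when the target file already exists:

    args = ['--decrypt']
    gpg.set_output_without_confirmation(args, '/tmp/plain.txt')
    # if /tmp/plain.txt already exists:
    #     args == ['--decrypt', '--batch', '--yes', '--output', '/tmp/plain.txt']
    # otherwise:
    #     args == ['--decrypt', '--output', '/tmp/plain.txt']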
     def sign_file(self, file, keyid=None, passphrase=None, clearsign=True,
-                  detach=False, binary=False):
+                  detach=False, binary=False, output=None):
         """sign file"""
         logger.debug("sign_file: %s", file)
         if binary:
@@ -649,7 +864,10 @@ class GPG(object):
         elif clearsign:
             args.append("--clearsign")
         if keyid:
-            args.append('--default-key "%s"' % keyid)
+            args.extend(['--default-key', no_quote(keyid)])
+        if output:  # write the output to a file with the specified name
+            self.set_output_without_confirmation(args, output)
+
         result = self.result_map['sign'](self)
         #We could use _handle_io here except for the fact that if the
         #passphrase is bad, gpg bails and you can't write the message.
@@ -701,8 +919,8 @@ class GPG(object):
             logger.debug('Wrote to temp file: %r', s)
             os.write(fd, s)
             os.close(fd)
-            args.append(fn)
-            args.append('"%s"' % data_filename)
+            args.append(no_quote(fn))
+            args.append(no_quote(data_filename))
             try:
                 p = self._open_subprocess(args)
                 self._collect_output(p, result, stdin=p.stdin)
@@ -710,6 +928,15 @@ class GPG(object):
                 os.unlink(fn)
         return result
 
+    def verify_data(self, sig_filename, data):
+        "Verify the signature in sig_filename against data in memory"
+        logger.debug('verify_data: %r, %r ...', sig_filename, data[:16])
+        result = self.result_map['verify'](self)
+        args = ['--verify', no_quote(sig_filename), '-']
+        stream = _make_memory_stream(data)
+        self._handle_io(args, stream, result, binary=True)
+        return result
+
     #
     # KEY MANAGEMENT
     #
@@ -773,7 +1000,8 @@ class GPG(object):
         >>> import shutil
         >>> shutil.rmtree("keys")
         >>> gpg = GPG(gnupghome="keys")
-        >>> result = gpg.recv_keys('pgp.mit.edu', '3FF0DB166A7476EA')
+        >>> os.chmod('keys', 0x1C0)
+        >>> result = gpg.recv_keys('keyserver.ubuntu.com', '92905378')
         >>> assert result
 
         """
@@ -781,33 +1009,60 @@ class GPG(object):
         logger.debug('recv_keys: %r', keyids)
         data = _make_binary_stream("", self.encoding)
         #data = ""
-        args = ['--keyserver', keyserver, '--recv-keys']
-        args.extend(keyids)
+        args = ['--keyserver', no_quote(keyserver), '--recv-keys']
+        args.extend([no_quote(k) for k in keyids])
         self._handle_io(args, data, result, binary=True)
         logger.debug('recv_keys result: %r', result.__dict__)
         data.close()
         return result
 
+    def send_keys(self, keyserver, *keyids):
+        """Send a key to a keyserver.
+
+        Note: it's not practical to test this function without sending
+        arbitrary data to live keyservers.
+        """
+        result = self.result_map['send'](self)
+        logger.debug('send_keys: %r', keyids)
+        data = _make_binary_stream('', self.encoding)
+        #data = ""
+        args = ['--keyserver', no_quote(keyserver), '--send-keys']
+        args.extend([no_quote(k) for k in keyids])
+        self._handle_io(args, data, result, binary=True)
+        logger.debug('send_keys result: %r', result.__dict__)
+        data.close()
+        return result
+
     def delete_keys(self, fingerprints, secret=False):
         which='key'
         if secret:
             which='secret-key'
         if _is_sequence(fingerprints):
-            fingerprints = ' '.join(fingerprints)
-        args = ['--batch --delete-%s "%s"' % (which, fingerprints)]
+            fingerprints = [no_quote(s) for s in fingerprints]
+        else:
+            fingerprints = [no_quote(fingerprints)]
+        args = ['--batch', '--delete-%s' % which]
+        args.extend(fingerprints)
         result = self.result_map['delete'](self)
         p = self._open_subprocess(args)
         self._collect_output(p, result, stdin=p.stdin)
         return result
 
-    def export_keys(self, keyids, secret=False):
+    def export_keys(self, keyids, secret=False, armor=True, minimal=False):
         "export the indicated keys. 'keyid' is anything gpg accepts"
         which=''
         if secret:
             which='-secret-key'
         if _is_sequence(keyids):
-            keyids = ' '.join(['"%s"' % k for k in keyids])
-        args = ["--armor --export%s %s" % (which, keyids)]
+            keyids = [no_quote(k) for k in keyids]
+        else:
+            keyids = [no_quote(keyids)]
+        args = ['--export%s' % which]
+        if armor:
+            args.append('--armor')
+        if minimal:
+            args.extend(['--export-options', 'export-minimal'])
+        args.extend(keyids)
         p = self._open_subprocess(args)
         # gpg --export produces no status-fd output; stdout will be
         # empty in case of failure
@@ -817,6 +1072,27 @@ class GPG(object):
         logger.debug('export_keys result: %r', result.data)
         return result.data.decode(self.encoding, self.decode_errors)
 
+    def _get_list_output(self, p, kind):
+        # Get the response information
+        result = self.result_map[kind](self)
+        self._collect_output(p, result, stdin=p.stdin)
+        lines = result.data.decode(self.encoding,
+                                   self.decode_errors).splitlines()
+        valid_keywords = 'pub uid sec fpr sub'.split()
+        for line in lines:
+            if self.verbose:
+                print(line)
+            logger.debug("line: %r", line.rstrip())
+            if not line:
+                break
+            L = line.strip().split(':')
+            if not L:
+                continue
+            keyword = L[0]
+            if keyword in valid_keywords:
+                getattr(result, keyword)(L)
+        return result
+
     def list_keys(self, secret=False):
         """ list the keys currently in the keyring
 
@@ -837,25 +1113,58 @@ class GPG(object):
         which='keys'
         if secret:
             which='secret-keys'
-        args = "--list-%s --fixed-list-mode --fingerprint --with-colons" % (which,)
-        args = [args]
+        args = ["--list-%s" % which, "--fixed-list-mode", "--fingerprint",
+                "--with-colons"]
+        p = self._open_subprocess(args)
+        return self._get_list_output(p, 'list')
+
+    def scan_keys(self, filename):
+        """
+        List details of an ASCII-armored or binary key file
+        without first importing it to the local keyring.
+
+        The function achieves this by running:
+        $ gpg --with-fingerprint --with-colons filename
+        """
+        args = ['--with-fingerprint', '--with-colons']
+        args.append(no_quote(filename))
         p = self._open_subprocess(args)
+        return self._get_list_output(p, 'scan')
+
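A rough usage sketch (the filename is made up); each entry is a dict built by ScanKeys above, with the colon-listing fields plus 'uids' and 'subkeys':

    keys = gpg.scan_keys('exported-key.asc')
    for key in keys:
        print(key['keyid'], key['uids'], key['subkeys'])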
+    def search_keys(self, query, keyserver='pgp.mit.edu'):
+        """ search keyserver by query (using --search-keys option)
 
-        # there might be some status thingumy here I should handle... (amk)
-        # ...nope, unless you care about expired sigs or keys (stevegt)
+        >>> import shutil
+        >>> shutil.rmtree('keys')
+        >>> gpg = GPG(gnupghome='keys')
+        >>> os.chmod('keys', 0x1C0)
+        >>> result = gpg.search_keys('<vi...@hotmail.com>')
+        >>> assert result, 'Failed using default keyserver'
+        >>> keyserver = 'keyserver.ubuntu.com'
+        >>> result = gpg.search_keys('<vi...@hotmail.com>', keyserver)
+        >>> assert result, 'Failed using keyserver.ubuntu.com'
+
+        """
+        query = query.strip()
+        if HEX_DIGITS_RE.match(query):
+            query = '0x' + query
+        args = ['--fixed-list-mode', '--fingerprint', '--with-colons',
+                '--keyserver', no_quote(keyserver), '--search-keys',
+                no_quote(query)]
+        p = self._open_subprocess(args)
 
         # Get the response information
-        result = self.result_map['list'](self)
+        result = self.result_map['search'](self)
         self._collect_output(p, result, stdin=p.stdin)
         lines = result.data.decode(self.encoding,
                                    self.decode_errors).splitlines()
-        valid_keywords = 'pub uid sec fpr'.split()
+        valid_keywords = ['pub', 'uid']
         for line in lines:
             if self.verbose:
                 print(line)
-            logger.debug("line: %r", line.rstrip())
-            if not line:
-                break
+            logger.debug('line: %r', line.rstrip())
+            if not line:    # sometimes get blank lines on Windows
+                continue
             L = line.strip().split(':')
             if not L:
                 continue
@@ -876,7 +1185,7 @@ class GPG(object):
         >>> assert not result
 
         """
-        args = ["--gen-key --batch"]
+        args = ["--gen-key", "--batch"]
         result = self.result_map['generate'](self)
         f = _make_binary_stream(input, self.encoding)
         self._handle_io(args, f, result, binary=True)
@@ -890,15 +1199,13 @@ class GPG(object):
         parms = {}
         for key, val in list(kwargs.items()):
             key = key.replace('_','-').title()
-            parms[key] = val
+            if str(val).strip():    # skip empty strings
+                parms[key] = val
         parms.setdefault('Key-Type','RSA')
-        parms.setdefault('Key-Length',1024)
+        parms.setdefault('Key-Length',2048)
         parms.setdefault('Name-Real', "Autogenerated Key")
-        parms.setdefault('Name-Comment', "Generated by gnupg.py")
-        try:
-            logname = os.environ['LOGNAME']
-        except KeyError:
-            logname = os.environ['USERNAME']
+        logname = (os.environ.get('LOGNAME') or os.environ.get('USERNAME') or
+                   'unspecified')
         hostname = socket.gethostname()
         parms.setdefault('Name-Email', "%s@%s" % (logname.replace(' ', '_'),
                                                   hostname))
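An illustrative call (the email address and passphrase are made up; gen_key_input and gen_key are assumed to be the parameter-builder and the '--gen-key --batch' wrapper shown in the hunks above); omitted parameters fall back to Key-Type RSA and the new Key-Length default of 2048:

    key_input = gpg.gen_key_input(name_email='user@example.org',
                                  passphrase='secret phrase')
    result = gpg.gen_key(key_input)   # assumed wrapper around '--gen-key --batch'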
@@ -939,23 +1246,30 @@ class GPG(object):
         "Encrypt the message read from the file-like object 'file'"
         args = ['--encrypt']
         if symmetric:
+            # can't be False or None - could be True or a cipher algo value
+            # such as AES256
             args = ['--symmetric']
+            if symmetric is not True:
+                args.extend(['--cipher-algo', no_quote(symmetric)])
+            # else use the default, currently CAST5
         else:
-            args = ['--encrypt']
+            if not recipients:
+                raise ValueError('No recipients specified with asymmetric '
+                                 'encryption')
             if not _is_sequence(recipients):
                 recipients = (recipients,)
             for recipient in recipients:
-                args.append('--recipient "%s"' % recipient)
-        if armor:   # create ascii-armored output - set to False for binary output
+                args.extend(['--recipient', no_quote(recipient)])
+        if armor:   # create ascii-armored output - False for binary output
             args.append('--armor')
         if output:  # write the output to a file with the specified name
-            if os.path.exists(output):
-                os.remove(output) # to avoid overwrite confirmation message
-            args.append('--output "%s"' % output)
-        if sign:
-            args.append('--sign --default-key "%s"' % sign)
+            self.set_output_without_confirmation(args, output)
+        if sign is True:
+            args.append('--sign')
+        elif sign:
+            args.extend(['--sign', '--default-key', no_quote(sign)])
         if always_trust:
-            args.append("--always-trust")
+            args.append('--always-trust')
         result = self.result_map['crypt'](self)
         self._handle_io(args, file, result, passphrase=passphrase, binary=True)
         logger.debug('encrypt result: %r', result.data)
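A brief sketch of the new symmetric handling (the data and passphrase are made up, and encrypt is assumed to forward keyword arguments to encrypt_file): passing a cipher name selects it via '--cipher-algo', while symmetric=True keeps the default (currently CAST5):

    crypt = gpg.encrypt('some secret text', None,
                        symmetric='AES256', passphrase='secret phrase')
    # recipients may be None here because --symmetric replaces --encrypt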
@@ -983,9 +1297,6 @@ class GPG(object):
         'hello'
         >>> result = gpg.encrypt("hello again",print1)
         >>> message = str(result)
-        >>> result = gpg.decrypt(message)
-        >>> result.status == 'need passphrase'
-        True
         >>> result = gpg.decrypt(message,passphrase='bar')
         >>> result.status in ('decryption failed', 'bad passphrase')
         True
@@ -995,9 +1306,6 @@ class GPG(object):
         True
         >>> str(result)
         'hello again'
-        >>> result = gpg.encrypt("signed hello",print2,sign=print1)
-        >>> result.status == 'need passphrase'
-        True
         >>> result = gpg.encrypt("signed hello",print2,sign=print1,passphrase='foo')
         >>> result.status == 'encryption ok'
         True
@@ -1023,9 +1331,7 @@ class GPG(object):
                      output=None):
         args = ["--decrypt"]
         if output:  # write the output to a file with the specified name
-            if os.path.exists(output):
-                os.remove(output) # to avoid overwrite confirmation message
-            args.append('--output "%s"' % output)
+            self.set_output_without_confirmation(args, output)
         if always_trust:
             args.append("--always-trust")
         result = self.result_map['crypt'](self)