Posted to commits@subversion.apache.org by br...@apache.org on 2012/08/16 12:18:03 UTC

svn commit: r1373783 [49/50] - in /subversion/branches/compressed-pristines: ./ build/ build/ac-macros/ build/generator/ build/generator/templates/ build/win32/ contrib/client-side/emacs/ contrib/client-side/svn-push/ contrib/client-side/svnmerge/ cont...

Modified: subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/benchmark.py
URL: http://svn.apache.org/viewvc/subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/benchmark.py?rev=1373783&r1=1373782&r2=1373783&view=diff
==============================================================================
--- subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/benchmark.py (original)
+++ subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/benchmark.py Thu Aug 16 10:17:48 2012
@@ -17,42 +17,139 @@
 # specific language governing permissions and limitations
 # under the License.
 
-"""
-usage: benchmark.py run <run_file> <levels> <spread> [N]
-       benchmark.py show <run_file>
-       benchmark.py compare <run_file1> <run_file2>
-       benchmark.py combine <new_file> <run_file1> <run_file2> ...
+"""Usage: benchmark.py run|list|compare|show|chart ...
 
-Test data is written to run_file.
-If a run_file exists, data is added to it.
+RUN BENCHMARKS
+
+  benchmark.py run <branch>@<revision>,<levels>x<spread> [N] [options]
+
+Test data is added to an SQLite database created automatically, by default
+'benchmark.db' in the current working directory. To specify a different path,
+use option -f <path_to_db>.
+
+<branch> is a label for the svn branch you're testing, e.g. "1.7.x".
+<revision> is the last-changed revision of the above branch.
 <levels> is the number of directory levels to create
 <spread> is the number of child trees spreading off each dir level
 If <N> is provided, the run is repeated N times.
-"""
+
+<branch> and <revision> are simply used for later reference. You
+should enter labels matching the selected --svn-bin-dir.
+
+<levels> and <spread> control the way the tested working copy is structured:
+  <levels>: number of directory levels to create.
+  <spread>: number of files and subdirectories created in each dir.
+
+
+LIST WHAT IS ON RECORD
+
+  benchmark.py list [ <branch>@<rev>,<levels>x<spread> ]
+
+Find entries in the database for the given constraints. Any arguments can
+be omitted. (To select only a rev, start with a '@', like '@123'; to select
+only spread, start with an 'x', like "x100".)
+
+Omit all args to get a listing of all available distinct entries.
+
+
+COMPARE TIMINGS
+
+  benchmark.py compare B@R,LxS B@R,LxS
+
+Compare two kinds of timings (in text mode). Each B@R,LxS selects
+timings from branch, revision, WC-levels and -spread by the same labels as
+previously given for a 'run' call. Any elements can be omitted. For example:
+  benchmark.py compare 1.7.0 trunk@1349903
+    Compare the total timings of all combined '1.7.0' branch runs to
+    all combined runs of 'trunk'-at-revision-1349903.
+  benchmark.py compare 1.7.0,5x5 trunk@1349903,5x5
+    Same as above, but only compare the working copy types with 5 levels
+    and a spread of 5.
+
+
+SHOW TIMINGS
+
+  benchmark.py show <branch>@<rev>,<levels>x<spread>
+
+Print out a summary of the timings selected from the given constraints.
+Any arguments can be omitted (like for the 'list' command).
+
+
+GENERATE CHARTS
+
+  benchmark.py chart compare B@R,LxS B@R,LxS [ B@R,LxS ... ]
+
+Produce a bar chart that compares any number of sets of timings. Timing sets
+are supplied by B@R,LxS arguments (i.e. <branch>@<rev>,<levels>x<spread> as
+provided for a 'run' call), where any number of elements may be omitted. The
+less constraints you supply, the more timings are included (try it out with
+the 'list' command). The first set is taken as a reference point for 100% and
++0 seconds. Each following dataset produces a set of labeled bar charts.
+So, at least two constraint arguments must be provided.
+
+Use the -c option to limit charts to specific command names.
+
+
+EXAMPLES
+
+# Run 3 benchmarks on svn 1.7.0. Timings are saved in benchmark.db.
+# Provide label '1.7.0' and its Last-Changed-Rev for later reference.
+# (You may also set your $PATH instead of using --svn-bin-dir.)
+./benchmark.py run --svn-bin-dir ~/svn-prefix/1.7.0/bin 1.7.0@1181106,5x5 3
+
+# Record 3 benchmark runs on trunk, again naming its Last-Changed-Rev.
+./benchmark.py run --svn-bin-dir ~/svn-prefix/trunk/bin trunk@1352725,5x5 3
+
+# Work with the results of the above two runs
+./benchmark.py list
+./benchmark.py compare 1.7.0 trunk
+./benchmark.py show 1.7.0 trunk
+./benchmark.py chart compare 1.7.0 trunk
+./benchmark.py chart compare 1.7.0 trunk -c "update,commit,TOTAL RUN"
+
+# Rebuild r1352598, run it and chart improvements since 1.7.0.
+svn up -r1352598 ~/src/trunk
+make -C ~/src/trunk dist-clean install
+export PATH="$HOME/svn-prefix/trunk/bin:$PATH"
+which svn
+./benchmark.py run trunk@1352598,5x5 3
+./benchmark.py chart compare 1.7.0 trunk@1352598 trunk@1352725 -o chart.svg
+
+
+GLOBAL OPTIONS"""
 
 import os
-import sys
+import time
+import datetime
+import sqlite3
+import optparse
 import tempfile
 import subprocess
-import datetime
 import random
 import shutil
-import cPickle
-import optparse
 import stat
+import string
 
+IGNORE_COMMANDS = ('--version', )
 TOTAL_RUN = 'TOTAL RUN'
 
-timings = None
+j = os.path.join
 
-def run_cmd(cmd, stdin=None, shell=False):
+def time_str():
+  return time.strftime('%Y-%m-%d %H:%M:%S');
 
-  if shell:
-    printable_cmd = 'CMD: ' + cmd
-  else:
-    printable_cmd = 'CMD: ' + ' '.join(cmd)
+def timedelta_to_seconds(td):
+  return ( float(td.seconds)
+           + float(td.microseconds) / (10**6)
+           + td.days * 24 * 60 * 60 )
+
+def run_cmd(cmd, stdin=None, shell=False, verbose=False):
   if options.verbose:
-    print printable_cmd
+    if shell:
+      printable_cmd = cmd
+    else:
+      printable_cmd = ' '.join(cmd)
+    print 'CMD:', printable_cmd
 
   if stdin:
     stdin_arg = subprocess.PIPE
@@ -66,584 +163,1029 @@ def run_cmd(cmd, stdin=None, shell=False
                        shell=shell)
   stdout,stderr = p.communicate(input=stdin)
 
-  if options.verbose:
+  if verbose:
     if (stdout):
       print "STDOUT: [[[\n%s]]]" % ''.join(stdout)
   if (stderr):
     print "STDERR: [[[\n%s]]]" % ''.join(stderr)
 
-  return stdout,stderr
+  return stdout, stderr
 
-def timedelta_to_seconds(td):
-  return ( float(td.seconds)
-           + float(td.microseconds) / (10**6)
-           + td.days * 24 * 60 * 60 )
 
+_next_unique_basename_count = 0
 
-class Timings:
+def next_unique_basename(prefix):
+  global _next_unique_basename_count
+  _next_unique_basename_count += 1
+  return '_'.join((prefix, str(_next_unique_basename_count)))
+
+
+si_units = [
+    (1000 ** 5, 'P'),
+    (1000 ** 4, 'T'), 
+    (1000 ** 3, 'G'), 
+    (1000 ** 2, 'M'), 
+    (1000 ** 1, 'K'),
+    (1000 ** 0, ''),
+    ]
+def n_label(n):
+    """(stolen from hurry.filesize)"""
+    for factor, suffix in si_units:
+        if n >= factor:
+            break
+    amount = int(n/factor)
+    if isinstance(suffix, tuple):
+        singular, multiple = suffix
+        if amount == 1:
+            suffix = singular
+        else:
+            suffix = multiple
+    return str(amount) + suffix
+
+
+def split_arg_once(l_r, sep):
+  if not l_r:
+    return (None, None)
+  if sep in l_r:
+    l, r = l_r.split(sep)
+  else:
+    l = l_r
+    r = None
+  if not l:
+    l = None
+  if not r:
+    r = None
+  return (l, r)
+
+RUN_KIND_SEPARATORS=('@', ',', 'x')
+
+class RunKind:
+  def __init__(self, b_r_l_s):
+    b_r, l_s = split_arg_once(b_r_l_s, RUN_KIND_SEPARATORS[1])
+    self.branch, self.revision = split_arg_once(b_r, RUN_KIND_SEPARATORS[0])
+    self.levels, self.spread = split_arg_once(l_s, RUN_KIND_SEPARATORS[2])
+    if self.levels: self.levels = int(self.levels)
+    if self.spread: self.spread = int(self.spread)
+
+    label_parts = []
+    if self.branch:
+      label_parts.append(self.branch)
+    if self.revision:
+      label_parts.append(RUN_KIND_SEPARATORS[0])
+      label_parts.append(self.revision)
+    if self.levels or self.spread:
+      label_parts.append(RUN_KIND_SEPARATORS[1])
+      if self.levels:
+        label_parts.append(str(self.levels))
+      if self.spread:
+        label_parts.append(RUN_KIND_SEPARATORS[2])
+        label_parts.append(str(self.spread))
+    self.label = ''.join(label_parts)
+
+  def args(self):
+    return (self.branch, self.revision, self.levels, self.spread)
+
+
+PATHNAME_VALID_CHARS = "-_.,@%s%s" % (string.ascii_letters, string.digits)
+def filesystem_safe_string(s):
+  return ''.join(c for c in s if c in PATHNAME_VALID_CHARS)
+
+def do_div(ref, val):
+  if ref:
+    return float(val) / float(ref)
+  else:
+    return 0.0
 
-  def __init__(self, *ignore_svn_cmds):
-    self.timings = {}
-    self.current_name = None
+def do_diff(ref, val):
+  return float(val) - float(ref)
+
+
+# ------------------------- database -------------------------
+
+class TimingsDb:
+  def __init__(self, db_path):
+    self.db_path = db_path;
+    self.conn = sqlite3.connect(db_path)
+    self.ensure_tables_created()
+
+  def ensure_tables_created(self):
+    c = self.conn.cursor()
+
+    c.execute("""SELECT name FROM sqlite_master WHERE type='table' AND
+              name='batch'""")
+    if c.fetchone():
+      # exists
+      return
+
+    print 'Creating database tables.'
+    c.executescript('''
+        CREATE TABLE batch (
+          batch_id INTEGER PRIMARY KEY AUTOINCREMENT,
+          started TEXT,
+          ended TEXT
+        );
+
+        CREATE TABLE run_kind (
+          run_kind_id INTEGER PRIMARY KEY AUTOINCREMENT,
+          branch TEXT NOT NULL,
+          revision TEXT NOT NULL,
+          wc_levels INTEGER,
+          wc_spread INTEGER,
+          UNIQUE(branch, revision, wc_levels, wc_spread)
+        );
+
+        CREATE TABLE run (
+          run_id INTEGER PRIMARY KEY AUTOINCREMENT,
+          batch_id INTEGER NOT NULL REFERENCES batch(batch_id),
+          run_kind_id INTEGER NOT NULL REFERENCES run_kind(run_kind_id),
+          started TEXT,
+          ended TEXT,
+          aborted INTEGER
+        );
+
+        CREATE TABLE timings (
+          run_id INTEGER NOT NULL REFERENCES run(run_id),
+          command TEXT NOT NULL,
+          sequence INTEGER,
+          timing REAL
+        );'''
+      )
+    self.conn.commit()
+    c.close();
+
+
+class Batch:
+  def __init__(self, db):
+    self.db = db
+    self.started = time_str()
+    c = db.conn.cursor()
+    c.execute("INSERT INTO batch (started) values (?)", (self.started,))
+    db.conn.commit()
+    self.id = c.lastrowid
+    c.close()
+
+  def done(self):
+    conn = self.db.conn
+    c = conn.cursor()
+    c.execute("""
+        UPDATE batch
+        SET ended = ?
+        WHERE batch_id = ?""",
+        (time_str(), self.id))
+    conn.commit()
+    c.close()
+
+class Run:
+  def __init__(self, batch, run_kind):
+    self.batch = batch
+    conn = self.batch.db.conn
+    c = conn.cursor()
+
+    c.execute("""
+        SELECT run_kind_id FROM run_kind
+        WHERE branch = ?
+          AND revision = ?
+          AND wc_levels = ?
+          AND wc_spread = ?""",
+        run_kind.args())
+    kind_ids = c.fetchone()
+    if kind_ids:
+      kind_id = kind_ids[0]
+    else:
+      c.execute("""
+          INSERT INTO run_kind (branch, revision, wc_levels, wc_spread)
+          VALUES (?, ?, ?, ?)""",
+          run_kind.args())
+      conn.commit()
+      kind_id = c.lastrowid
+
+    self.started = time_str()
+    
+    c.execute("""
+        INSERT INTO run
+          (batch_id, run_kind_id, started)
+        VALUES
+          (?, ?, ?)""",
+        (self.batch.id, kind_id, self.started))
+    conn.commit()
+    self.id = c.lastrowid
+    c.close();
     self.tic_at = None
-    self.ignore = ignore_svn_cmds
-    self.name = None
+    self.current_command = None
+    self.timings = []
 
-  def tic(self, name):
-    if name in self.ignore:
+  def tic(self, command):
+    if command in IGNORE_COMMANDS:
       return
     self.toc()
-    self.current_name = name
+    self.current_command = command
     self.tic_at = datetime.datetime.now()
 
   def toc(self):
-    if self.current_name and self.tic_at:
+    if self.current_command and self.tic_at:
       toc_at = datetime.datetime.now()
-      self.submit_timing(self.current_name,
+      self.remember_timing(self.current_command,
                          timedelta_to_seconds(toc_at - self.tic_at))
-    self.current_name = None
+    self.current_command = None
     self.tic_at = None
 
-  def submit_timing(self, name, seconds):
-    times = self.timings.get(name)
-    if not times:
-      times = []
-      self.timings[name] = times
-    times.append(seconds)
-
-  def min_max_avg(self, name):
-    ttimings = self.timings.get(name)
-    return ( min(ttimings),
-             max(ttimings),
-             reduce(lambda x,y: x + y, ttimings) / len(ttimings) )
-
-  def summary(self):
-    s = []
-    if self.name:
-      s.append('Timings for %s' % self.name)
-    s.append('    N   min     max     avg    operation  (unit is seconds)')
-
-    names = sorted(self.timings.keys())
-
-    for name in names:
-      timings = self.timings.get(name)
-      if not name or not timings: continue
+  def remember_timing(self, command, seconds):
+    self.timings.append((command, seconds))
 
-      tmin, tmax, tavg = self.min_max_avg(name)
-
-      s.append('%5d %7.2f %7.2f %7.2f  %s' % (
-                 len(timings),
-                 tmin,
-                 tmax,
-                 tavg,
-                 name))
-
-    return '\n'.join(s)
-
-
-  def compare_to(self, other, verbose):
-    def do_div(a, b):
-      if b:
-        return float(a) / float(b)
+  def submit_timings(self):
+    conn = self.batch.db.conn
+    c = conn.cursor()
+    print 'submitting...'
+
+    c.executemany("""
+      INSERT INTO timings
+        (run_id, command, sequence, timing)
+      VALUES
+        (?, ?, ?, ?)""",
+      [(self.id, t[0], (i + 1), t[1]) for i,t in enumerate(self.timings)])
+
+    conn.commit()
+    c.close()
+
+  def done(self, aborted=False):
+    conn = self.batch.db.conn
+    c = conn.cursor()
+    c.execute("""
+        UPDATE run
+        SET ended = ?, aborted = ?
+        WHERE run_id = ?""",
+        (time_str(), aborted, self.id))
+    conn.commit()
+    c.close()
+
+
+class TimingQuery:
+  def __init__(self, db, run_kind):
+    self.cursor = db.conn.cursor()
+    self.constraints = []
+    self.values = []
+    self.timings = None
+    self.FROM_WHERE = """
+         FROM batch AS b,
+              timings AS t,
+              run AS r,
+              run_kind as k
+         WHERE
+              t.run_id = r.run_id
+              AND k.run_kind_id = r.run_kind_id
+              AND b.batch_id = r.batch_id
+              AND r.aborted = 0
+         """
+    self.append_constraint('k', 'branch', run_kind.branch)
+    self.append_constraint('k', 'revision', run_kind.revision)
+    self.append_constraint('k', 'wc_levels', run_kind.levels)
+    self.append_constraint('k', 'wc_spread', run_kind.spread)
+    self.label = run_kind.label
+
+  def append_constraint(self, table, name, val):
+    if val:
+      self.constraints.append('AND %s.%s = ?' % (table, name))
+      self.values.append(val)
+
+  def remove_last_constraint(self):
+    del self.constraints[-1]
+    del self.values[-1]
+
+  def get_sorted_X(self, x, n=1):
+    query = ['SELECT DISTINCT %s' % x,
+             self.FROM_WHERE ]
+    query.extend(self.constraints)
+    query.append('ORDER BY %s' % x)
+    c = db.conn.cursor()
+    try:
+      #print ' '.join(query)
+      c.execute(' '.join(query), self.values)
+      if n == 1:
+        return [tpl[0] for tpl in c.fetchall()]
       else:
-        return 0.0
-
-    def do_diff(a, b):
-      return float(a) - float(b)
-
-    selfname = self.name
-    if not selfname:
-      selfname = 'unnamed'
-    othername = other.name
-    if not othername:
-      othername = 'the other'
-
-    selftotal = self.min_max_avg(TOTAL_RUN)[2]
-    othertotal = other.min_max_avg(TOTAL_RUN)[2]
-
-    s = ['COMPARE %s to %s' % (othername, selfname)]
-
-    if TOTAL_RUN in self.timings and TOTAL_RUN in other.timings:
-      s.append('  %s timings: %5.1f seconds avg for %s'
-               % (TOTAL_RUN, othertotal, othername))
-      s.append('  %s          %5.1f seconds avg for %s'
-               % (' ' * len(TOTAL_RUN), selftotal, selfname))
-
-
-    if not verbose:
-      s.append('      avg         operation')
-    else:
-      s.append('      min              max              avg         operation')
+        return c.fetchall()
+    finally:
+      c.close()
 
-    names = sorted(self.timings.keys())
+  def get_sorted_command_names(self):
+    return self.get_sorted_X('t.command')
 
-    for name in names:
-      if not name in other.timings:
-        continue
+  def get_sorted_branches(self):
+    return self.get_sorted_X('k.branch')
 
+  def get_sorted_revisions(self):
+    return self.get_sorted_X('k.revision')
+
+  def get_sorted_levels_spread(self):
+    return self.get_sorted_X('k.wc_levels,k.wc_spread', n = 2)
+
+  def count_runs_batches(self):
+    query = ["""SELECT
+                  count(DISTINCT r.run_id),
+                  count(DISTINCT b.batch_id)""",
+             self.FROM_WHERE ]
+    query.extend(self.constraints)
+    c = db.conn.cursor()
+    try:
+      #print ' '.join(query)
+      c.execute(' '.join(query), self.values)
+      return c.fetchone()
+    finally:
+      c.close()
 
-      min_me, max_me, avg_me = self.min_max_avg(name)
-      min_other, max_other, avg_other = other.min_max_avg(name)
+  def get_command_timings(self, command):
+    query = ["""SELECT
+                  count(t.timing),
+                  min(t.timing),
+                  max(t.timing),
+                  avg(t.timing)""",
+             self.FROM_WHERE ]
+    self.append_constraint('t', 'command', command)
+    try:
+      query.extend(self.constraints)
+      c = db.conn.cursor()
+      try:
+        c.execute(' '.join(query), self.values)
+        return c.fetchone()
+      finally:
+        c.close()
+    finally:
+      self.remove_last_constraint()
 
-      avg_str = '%7.2f|%+7.3f' % (do_div(avg_me, avg_other),
-                                  do_diff(avg_me, avg_other))
+  def get_timings(self):
+    if self.timings:
+      return self.timings
+    self.timings = {}
+    for command_name in self.get_sorted_command_names():
+      self.timings[command_name] = self.get_command_timings(command_name)
+    return self.timings
+      
 
-      if not verbose:
-        s.append('%-16s  %s' % (avg_str, name))
-      else:
-        min_str = '%7.2f|%+7.3f' % (do_div(min_me, min_other),
-                                    do_diff(min_me, min_other))
-        max_str = '%7.2f|%+7.3f' % (do_div(max_me, max_other),
-                                    do_diff(max_me, max_other))
-
-        s.append('%-16s %-16s %-16s  %s' % (min_str, max_str, avg_str, name))
-
-    s.extend([
-      '(legend: "1.23|+0.45" means: slower by factor 1.23 and by 0.45 seconds;',
-      ' factor < 1 and difference < 0 means \'%s\' is faster than \'%s\')'
-      % (self.name, othername)])
-
-    return '\n'.join(s)
-
-
-  def add(self, other):
-    for name, other_times in other.timings.items():
-      my_times = self.timings.get(name)
-      if not my_times:
-        my_times = []
-        self.timings[name] = my_times
-      my_times.extend(other_times)
+# ------------------------------------------------------------ run tests
 
 
+def perform_run(batch, run_kind,
+                svn_bin, svnadmin_bin, verbose):
 
+  run = Run(batch, run_kind)
 
-j = os.path.join
+  def create_tree(in_dir, _levels, _spread):
+    try:
+      os.mkdir(in_dir)
+    except:
+      pass
+
+    for i in range(_spread):
+      # files
+      fn = j(in_dir, next_unique_basename('file'))
+      f = open(fn, 'w')
+      f.write('This is %s\n' % fn)
+      f.close()
+
+      # dirs
+      if (_levels > 1):
+        dn = j(in_dir, next_unique_basename('dir'))
+        create_tree(dn, _levels - 1, _spread)
+
+  def svn(*args):
+    name = args[0]
+
+    cmd = [ svn_bin ]
+    cmd.extend( list(args) )
+    if verbose:
+      print 'svn cmd:', ' '.join(cmd)
+
+    stdin = None
+    if stdin:
+      stdin_arg = subprocess.PIPE
+    else:
+      stdin_arg = None
 
-_create_count = 0
+    run.tic(name)
+    try:
+      p = subprocess.Popen(cmd,
+                           stdin=stdin_arg,
+                           stdout=subprocess.PIPE,
+                           stderr=subprocess.PIPE,
+                           shell=False)
+      stdout,stderr = p.communicate(input=stdin)
+    except OSError:
+      stdout = stderr = None
+    finally:
+      run.toc()
 
-def next_name(prefix):
-  global _create_count
-  _create_count += 1
-  return '_'.join((prefix, str(_create_count)))
+    if verbose:
+      if (stdout):
+        print "STDOUT: [[[\n%s]]]" % ''.join(stdout)
+      if (stderr):
+        print "STDERR: [[[\n%s]]]" % ''.join(stderr)
 
-def create_tree(in_dir, levels, spread=5):
-  try:
-    os.mkdir(in_dir)
-  except:
-    pass
-
-  for i in range(spread):
-    # files
-    fn = j(in_dir, next_name('file'))
-    f = open(fn, 'w')
-    f.write('This is %s\n' % fn)
-    f.close()
+    return stdout,stderr
 
-    # dirs
-    if (levels > 1):
-      dn = j(in_dir, next_name('dir'))
-      create_tree(dn, levels - 1, spread)
 
+  def add(*args):
+    return svn('add', *args)
 
-def svn(*args):
-  name = args[0]
+  def ci(*args):
+    return svn('commit', '-mm', *args)
 
-  ### options comes from the global namespace; it should be passed
-  cmd = [options.svn] + list(args)
-  if options.verbose:
-    print 'svn cmd:', ' '.join(cmd)
+  def up(*args):
+    return svn('update', *args)
 
-  stdin = None
-  if stdin:
-    stdin_arg = subprocess.PIPE
-  else:
-    stdin_arg = None
+  def st(*args):
+    return svn('status', *args)
 
-  ### timings comes from the global namespace; it should be passed
-  timings.tic(name)
-  try:
-    p = subprocess.Popen(cmd,
-                         stdin=stdin_arg,
-                         stdout=subprocess.PIPE,
-                         stderr=subprocess.PIPE,
-                         shell=False)
-    stdout,stderr = p.communicate(input=stdin)
-  except OSError:
-    stdout = stderr = None
-  finally:
-    timings.toc()
+  def info(*args):
+    return svn('info', *args)
 
-  if options.verbose:
-    if (stdout):
-      print "STDOUT: [[[\n%s]]]" % ''.join(stdout)
-    if (stderr):
-      print "STDERR: [[[\n%s]]]" % ''.join(stderr)
+  _chars = [chr(x) for x in range(ord('a'), ord('z') +1)]
 
-  return stdout,stderr
+  def randstr(len=8):
+    return ''.join( [random.choice(_chars) for i in range(len)] )
 
+  def _copy(path):
+    dest = next_unique_basename(path + '_copied')
+    svn('copy', path, dest)
 
-def add(*args):
-  return svn('add', *args)
+  def _move(path):
+    dest = path + '_moved'
+    svn('move', path, dest)
 
-def ci(*args):
-  return svn('commit', '-mm', *args)
+  def _propmod(path):
+    so, se = svn('proplist', path)
+    propnames = [line.strip() for line in so.strip().split('\n')[1:]]
 
-def up(*args):
-  return svn('update', *args)
+    # modify?
+    if len(propnames):
+      svn('ps', propnames[len(propnames) / 2], randstr(), path)
 
-def st(*args):
-  return svn('status', *args)
+    # del?
+    if len(propnames) > 1:
+      svn('propdel', propnames[len(propnames) / 2], path)
 
-def info(*args):
-  return svn('info', *args)
+  def _propadd(path):
+    # set a new one.
+    svn('propset', randstr(), randstr(), path)
 
-_chars = [chr(x) for x in range(ord('a'), ord('z') +1)]
+  def _mod(path):
+    if os.path.isdir(path):
+      _propmod(path)
+      return
 
-def randstr(len=8):
-  return ''.join( [random.choice(_chars) for i in range(len)] )
+    f = open(path, 'a')
+    f.write('\n%s\n' % randstr())
+    f.close()
 
-def _copy(path):
-  dest = next_name(path + '_copied')
-  svn('copy', path, dest)
+  def _add(path):
+    if os.path.isfile(path):
+      return _mod(path)
+
+    if random.choice((True, False)):
+      # create a dir
+      svn('mkdir', j(path, next_unique_basename('new_dir')))
+    else:
+      # create a file
+      new_path = j(path, next_unique_basename('new_file'))
+      f = open(new_path, 'w')
+      f.write(randstr())
+      f.close()
+      svn('add', new_path)
+
+  def _del(path):
+    svn('delete', path)
+
+  _mod_funcs = (_mod, _add, _propmod, _propadd, )#_copy,) # _move, _del)
+
+  def modify_tree(in_dir, fraction):
+    child_names = os.listdir(in_dir)
+    for child_name in child_names:
+      if child_name[0] == '.':
+        continue
+      if random.random() < fraction:
+        path = j(in_dir, child_name)
+        random.choice(_mod_funcs)(path)
 
-def _move(path):
-  dest = path + '_moved'
-  svn('move', path, dest)
+    for child_name in child_names:
+      if child_name[0] == '.': continue
+      path = j(in_dir, child_name)
+      if os.path.isdir(path):
+        modify_tree(path, fraction)
 
-def _propmod(path):
-  so, se = svn('proplist', path)
-  propnames = [line.strip() for line in so.strip().split('\n')[1:]]
+  def propadd_tree(in_dir, fraction):
+    for child_name in os.listdir(in_dir):
+      if child_name[0] == '.': continue
+      path = j(in_dir, child_name)
+      if random.random() < fraction:
+        _propadd(path)
+      if os.path.isdir(path):
+        propadd_tree(path, fraction)
+
+
+  def rmtree_onerror(func, path, exc_info):
+    """Error handler for ``shutil.rmtree``.
+
+    If the error is due to an access error (read only file)
+    it attempts to add write permission and then retries.
+
+    If the error is for another reason it re-raises the error.
+
+    Usage : ``shutil.rmtree(path, onerror=onerror)``
+    """
+    if not os.access(path, os.W_OK):
+      # Is the error an access error ?
+      os.chmod(path, stat.S_IWUSR)
+      func(path)
+    else:
+      raise
 
-  # modify?
-  if len(propnames):
-    svn('ps', propnames[len(propnames) / 2], randstr(), path)
+  base = tempfile.mkdtemp()
 
-  # del?
-  if len(propnames) > 1:
-    svn('propdel', propnames[len(propnames) / 2], path)
+  # ensure identical modifications for every run
+  random.seed(0)
 
+  aborted = True
 
-def _propadd(path):
-  # set a new one.
-  svn('propset', randstr(), randstr(), path)
+  try:
+    repos = j(base, 'repos')
+    repos = repos.replace('\\', '/')
+    wc = j(base, 'wc')
+    wc2 = j(base, 'wc2')
 
+    if repos.startswith('/'):
+      file_url = 'file://%s' % repos
+    else:
+      file_url = 'file:///%s' % repos
 
-def _mod(path):
-  if os.path.isdir(path):
-    return _propmod(path)
+    print '\nRunning svn benchmark in', base
+    print 'dir levels: %s; new files and dirs per leaf: %s' %(
+          run_kind.levels, run_kind.spread)
 
-  f = open(path, 'a')
-  f.write('\n%s\n' % randstr())
-  f.close()
+    started = datetime.datetime.now()
 
-def _add(path):
-  if os.path.isfile(path):
-    return _mod(path)
+    try:
+      run_cmd([svnadmin_bin, 'create', repos])
+      svn('checkout', file_url, wc)
 
-  if random.choice((True, False)):
-    # create a dir
-    svn('mkdir', j(path, next_name('new_dir')))
-  else:
-    # create a file
-    new_path = j(path, next_name('new_file'))
-    f = open(new_path, 'w')
-    f.write(randstr())
-    f.close()
-    svn('add', new_path)
+      trunk = j(wc, 'trunk')
+      create_tree(trunk, run_kind.levels, run_kind.spread)
+      add(trunk)
+      st(wc)
+      ci(wc)
+      up(wc)
+      propadd_tree(trunk, 0.05)
+      ci(wc)
+      up(wc)
+      st(wc)
+      info('-R', wc)
+
+      trunk_url = file_url + '/trunk'
+      branch_url = file_url + '/branch'
+
+      svn('copy', '-mm', trunk_url, branch_url)
+      st(wc)
+
+      up(wc)
+      st(wc)
+      info('-R', wc)
+
+      svn('checkout', trunk_url, wc2)
+      st(wc2)
+      modify_tree(wc2, 0.5)
+      st(wc2)
+      ci(wc2)
+      up(wc2)
+      up(wc)
+
+      svn('switch', branch_url, wc2)
+      modify_tree(wc2, 0.5)
+      st(wc2)
+      info('-R', wc2)
+      ci(wc2)
+      up(wc2)
+      up(wc)
+
+      modify_tree(trunk, 0.5)
+      st(wc)
+      ci(wc)
+      up(wc2)
+      up(wc)
+
+      svn('merge', '--accept=postpone', trunk_url, wc2)
+      st(wc2)
+      info('-R', wc2)
+      svn('resolve', '--accept=mine-conflict', wc2)
+      st(wc2)
+      svn('resolved', '-R', wc2)
+      st(wc2)
+      info('-R', wc2)
+      ci(wc2)
+      up(wc2)
+      up(wc)
+
+      svn('merge', '--accept=postpone', '--reintegrate', branch_url, trunk)
+      st(wc)
+      svn('resolve', '--accept=mine-conflict', wc)
+      st(wc)
+      svn('resolved', '-R', wc)
+      st(wc)
+      ci(wc)
+      up(wc2)
+      up(wc)
+
+      svn('delete', j(wc, 'branch'))
+      ci(wc)
+      up(wc)
 
-def _del(path):
-  svn('delete', path)
+      aborted = False
 
-_mod_funcs = (_mod, _add, _propmod, _propadd, )#_copy,) # _move, _del)
+    finally:
+      stopped = datetime.datetime.now()
+      print '\nDone with svn benchmark in', (stopped - started)
 
-def modify_tree(in_dir, fraction):
-  child_names = os.listdir(in_dir)
-  for child_name in child_names:
-    if child_name[0] == '.':
-      continue
-    if random.random() < fraction:
-      path = j(in_dir, child_name)
-      random.choice(_mod_funcs)(path)
+      run.remember_timing(TOTAL_RUN,
+                        timedelta_to_seconds(stopped - started))
+  finally:
+    run.done(aborted)
+    run.submit_timings()
+    shutil.rmtree(base, onerror=rmtree_onerror)
 
-  for child_name in child_names:
-    if child_name[0] == '.': continue
-    path = j(in_dir, child_name)
-    if os.path.isdir(path):
-      modify_tree(path, fraction)
+  return aborted
 
-def propadd_tree(in_dir, fraction):
-  for child_name in os.listdir(in_dir):
-    if child_name[0] == '.': continue
-    path = j(in_dir, child_name)
-    if random.random() < fraction:
-      _propadd(path)
-    if os.path.isdir(path):
-      propadd_tree(path, fraction)
 
+# ---------------------------------------------------------------------
 
-def rmtree_onerror(func, path, exc_info):
-  """Error handler for ``shutil.rmtree``.
+    
+def cmdline_run(db, options, run_kind_str, N=1):
+  run_kind = RunKind(run_kind_str)
+  N = int(N)
 
-  If the error is due to an access error (read only file)
-  it attempts to add write permission and then retries.
+  print 'Hi, going to run a Subversion benchmark series of %d runs...' % N
+  print 'Label is %s' % run_kind.label
 
-  If the error is for another reason it re-raises the error.
+  # can we run the svn binaries?
+  svn_bin = j(options.svn_bin_dir, 'svn')
+  svnadmin_bin = j(options.svn_bin_dir, 'svnadmin')
+
+  for b in (svn_bin, svnadmin_bin):
+    so,se = run_cmd([b, '--version'])
+    if not so:
+      print "Can't run", b
+      exit(1)
 
-  Usage : ``shutil.rmtree(path, onerror=onerror)``
-  """
-  if not os.access(path, os.W_OK):
-    # Is the error an access error ?
-    os.chmod(path, stat.S_IWUSR)
-    func(path)
-  else:
-    raise
+    print ', '.join([s.strip() for s in so.split('\n')[:2]])
 
+  batch = Batch(db)
 
-def run(levels, spread, N):
   for i in range(N):
-    base = tempfile.mkdtemp()
+    print 'Run %d of %d' % (i + 1, N)
+    perform_run(batch, run_kind,
+                svn_bin, svnadmin_bin, options.verbose)
+
+  batch.done()
+
+
+def cmdline_list(db, options, run_kind_str=None):
+  run_kind = RunKind(run_kind_str)
+
+  constraints = []
+  def add_if_not_none(name, val):
+    if val:
+      constraints.append('  %s = %s' % (name, val))
+  add_if_not_none('branch', run_kind.branch)
+  add_if_not_none('revision', run_kind.revision)
+  add_if_not_none('levels', run_kind.levels)
+  add_if_not_none('spread', run_kind.spread)
+  if constraints:
+    print 'For\n', '\n'.join(constraints)
+  print 'I found:'
+
+  d = TimingQuery(db, run_kind)
+  
+  cmd_names = d.get_sorted_command_names()
+  if cmd_names:
+    print '\n%d command names:\n ' % len(cmd_names), '\n  '.join(cmd_names)
+
+  branches = d.get_sorted_branches()
+  if branches and (len(branches) > 1 or branches[0] != run_kind.branch):
+    print '\n%d branches:\n ' % len(branches), '\n  '.join(branches)
+
+  revisions = d.get_sorted_revisions()
+  if revisions and (len(revisions) > 1 or revisions[0] != run_kind.revision):
+    print '\n%d revisions:\n ' % len(revisions), '\n  '.join(revisions)
+
+  levels_spread = d.get_sorted_levels_spread()
+  if levels_spread and (
+       len(levels_spread) > 1
+       or levels_spread[0] != (run_kind.levels, run_kind.spread)):
+    print '\n%d kinds of levels x spread:\n ' % len(levels_spread), '\n  '.join(
+            [ ('%dx%d' % (l, s)) for l,s in levels_spread ])
+
+  print "\n%d runs in %d batches.\n" % (d.count_runs_batches())
+
+
+def cmdline_show(db, options, *run_kind_strings):
+  for run_kind_str in run_kind_strings:
+    run_kind = RunKind(run_kind_str)
 
-    # ensure identical modifications for every run
-    random.seed(0)
+    q = TimingQuery(db, run_kind)
+    timings = q.get_timings()
 
-    try:
-      repos = j(base, 'repos')
-      repos = repos.replace('\\', '/')
-      wc = j(base, 'wc')
-      wc2 = j(base, 'wc2')
-
-      if repos.startswith('/'):
-        file_url = 'file://%s' % repos
-      else:
-        file_url = 'file:///%s' % repos
-
-      so, se = svn('--version')
-      if not so:
-        ### options comes from the global namespace; it should be passed
-        print "Can't find svn at", options.svn
-        exit(1)
-      version = ', '.join([s.strip() for s in so.split('\n')[:2]])
-
-      print '\nRunning svn benchmark in', base
-      print 'dir levels: %s; new files and dirs per leaf: %s; run %d of %d' %(
-            levels, spread, i + 1, N)
-
-      print version
-      started = datetime.datetime.now()
-
-      try:
-        run_cmd(['svnadmin', 'create', repos])
-        svn('checkout', file_url, wc)
-
-        trunk = j(wc, 'trunk')
-        create_tree(trunk, levels, spread)
-        add(trunk)
-        st(wc)
-        ci(wc)
-        up(wc)
-        propadd_tree(trunk, 0.5)
-        ci(wc)
-        up(wc)
-        st(wc)
-        info('-R', wc)
-
-        trunk_url = file_url + '/trunk'
-        branch_url = file_url + '/branch'
-
-        svn('copy', '-mm', trunk_url, branch_url)
-        st(wc)
-
-        up(wc)
-        st(wc)
-        info('-R', wc)
-
-        svn('checkout', trunk_url, wc2)
-        st(wc2)
-        modify_tree(wc2, 0.5)
-        st(wc2)
-        ci(wc2)
-        up(wc2)
-        up(wc)
-
-        svn('switch', branch_url, wc2)
-        modify_tree(wc2, 0.5)
-        st(wc2)
-        info('-R', wc2)
-        ci(wc2)
-        up(wc2)
-        up(wc)
-
-        modify_tree(trunk, 0.5)
-        st(wc)
-        ci(wc)
-        up(wc2)
-        up(wc)
-
-        svn('merge', '--accept=postpone', trunk_url, wc2)
-        st(wc2)
-        info('-R', wc2)
-        svn('resolve', '--accept=mine-conflict', wc2)
-        st(wc2)
-        svn('resolved', '-R', wc2)
-        st(wc2)
-        info('-R', wc2)
-        ci(wc2)
-        up(wc2)
-        up(wc)
-
-        svn('merge', '--accept=postpone', '--reintegrate', branch_url, trunk)
-        st(wc)
-        svn('resolve', '--accept=mine-conflict', wc)
-        st(wc)
-        svn('resolved', '-R', wc)
-        st(wc)
-        ci(wc)
-        up(wc2)
-        up(wc)
-
-        svn('delete', j(wc, 'branch'))
-        ci(wc)
-        up(wc2)
-        up(wc)
+    s = []
+    s.append('Timings for %s' % run_kind.label)
+    s.append('   N    min     max     avg   operation  (unit is seconds)')
 
+    for command_name in q.get_sorted_command_names():
+      if options.command_names and command_name not in options.command_names:
+        continue
+      n, tmin, tmax, tavg = timings[command_name]
 
-      finally:
-        stopped = datetime.datetime.now()
-        print '\nDone with svn benchmark in', (stopped - started)
+      s.append('%4s %7.2f %7.2f %7.2f  %s' % (
+                 n_label(n),
+                 tmin,
+                 tmax,
+                 tavg,
+                 command_name))
 
-        ### timings comes from the global namespace; it should be passed
-        timings.submit_timing(TOTAL_RUN,
-                              timedelta_to_seconds(stopped - started))
-
-        # rename ps to prop mod
-        if timings.timings.get('ps'):
-          has = timings.timings.get('prop mod')
-          if not has:
-            has = []
-            timings.timings['prop mod'] = has
-          has.extend( timings.timings['ps'] )
-          del timings.timings['ps']
+    print '\n'.join(s)
 
-        print timings.summary()
-    finally:
-      shutil.rmtree(base, onerror=rmtree_onerror)
 
+def cmdline_compare(db, options, left_str, right_str):
+  left_kind = RunKind(left_str)
+  right_kind = RunKind(right_str)
 
-def read_from_file(file_path):
-  f = open(file_path, 'rb')
-  try:
-    instance = cPickle.load(f)
-    instance.name = os.path.basename(file_path)
-  finally:
-    f.close()
-  return instance
+  leftq = TimingQuery(db, left_kind)
+  left = leftq.get_timings()
+  if not left:
+    print "No timings for", left_kind.label
+    exit(1)
 
+  rightq = TimingQuery(db, right_kind)
+  right = rightq.get_timings()
+  if not right:
+    print "No timings for", right_kind.label
+    exit(1)
 
-def write_to_file(file_path, instance):
-  f = open(file_path, 'wb')
-  cPickle.dump(instance, f)
-  f.close()
+  label = 'Compare %s to %s' % (left_kind.label, right_kind.label)
 
-def cmd_compare(path1, path2):
-  t1 = read_from_file(path1)
-  t2 = read_from_file(path2)
+  s = [label]
 
-  if options.verbose:
-    print t1.summary()
-    print '---'
-    print t2.summary()
-    print '---'
-  print t2.compare_to(t1, options.verbose)
-
-def cmd_combine(dest, *paths):
-  total = Timings('--version');
-
-  for path in paths:
-    t = read_from_file(path)
-    total.add(t)
-
-  print total.summary()
-  write_to_file(dest, total)
-
-def cmd_run(timings_path, levels, spread, N=1):
-  levels = int(levels)
-  spread = int(spread)
-  N = int(N)
-
-  print '\n\nHi, going to run a Subversion benchmark series of %d runs...' % N
-
-  ### UGH! should pass to run()
-  ### neels: Today I contemplated doing that, but at the end of the day
-  ###        it merely blows up the code without much benefit. If this
-  ###        ever becomes part of an imported python package, call again.
-  global timings
-
-  if os.path.isfile(timings_path):
-    print 'Going to add results to existing file', timings_path
-    timings = read_from_file(timings_path)
+  verbose = options.verbose
+  if not verbose:
+    s.append('       N        avg         operation')
   else:
-    print 'Going to write results to new file', timings_path
-    timings = Timings('--version')
-
-  run(levels, spread, N)
+    s.append('       N        min              max              avg         operation')
 
-  write_to_file(timings_path, timings)
+  command_names = [name for name in leftq.get_sorted_command_names()
+                   if name in right]
+  if options.command_names:
+    command_names = [name for name in command_names
+                     if name in options.command_names]
+
+  for command_name in command_names:
+    left_N, left_min, left_max, left_avg = left[command_name]
+    right_N, right_min, right_max, right_avg = right[command_name]
+
+    N_str = '%s/%s' % (n_label(left_N), n_label(right_N))
+    avg_str = '%7.2f|%+7.3f' % (do_div(left_avg, right_avg),
+                                do_diff(left_avg, right_avg))
 
-def cmd_show(*paths):
-  for timings_path in paths:
-    timings = read_from_file(timings_path)
-    print '---\n%s' % timings_path
-    print timings.summary()
+    if not verbose:
+      s.append('%9s %-16s  %s' % (N_str, avg_str, command_name))
+    else:
+      min_str = '%7.2f|%+7.3f' % (do_div(left_min, right_min),
+                                  do_diff(left_min, right_min))
+      max_str = '%7.2f|%+7.3f' % (do_div(left_max, right_max),
+                                  do_diff(left_max, right_max))
+
+      s.append('%9s %-16s %-16s %-16s  %s' % (N_str, min_str, max_str, avg_str,
+                                          command_name))
+
+  s.extend([
+    '(legend: "1.23|+0.45" means: slower by factor 1.23 and by 0.45 seconds;',
+    ' factor < 1 and seconds < 0 means \'%s\' is faster.'
+    % right_kind.label,
+    ' "2/3" means: \'%s\' has 2 timings on record, the other has 3.)'
+    % left_kind.label
+    ])
+
+
+  print '\n'.join(s)
+
+
+# ------------------------------------------------------- charts
+
+def cmdline_chart_compare(db, options, *args):
+  import numpy as np
+  import matplotlib.pyplot as plt
+
+  labels = []
+  timing_sets = []
+  command_names = None
+
+  for arg in args:
+    run_kind = RunKind(arg)
+    query = TimingQuery(db, run_kind)
+    timings = query.get_timings()
+    if not timings:
+      print "No timings for", run_kind.label
+      exit(1)
+    labels.append(run_kind.label)
+    timing_sets.append(timings)
 
+    if command_names:
+      for i in range(len(command_names)):
+        if not command_names[i] in timings:
+          del command_names[i]
+    else:
+      command_names = query.get_sorted_command_names()
 
-def usage():
-  print __doc__
+  if options.command_names:
+    command_names = [name for name in command_names
+                     if name in options.command_names]
+
+  chart_path = options.chart_path
+  if not chart_path:
+    chart_path = 'compare_' + '_'.join(
+      [ filesystem_safe_string(l) for l in labels ]
+      ) + '.svg'
+                  
+  print '\nwriting chart file:', chart_path
+
+  N = len(command_names)
+  M = len(timing_sets) - 1
+
+  ind = np.arange(N)  # the x locations for the groups
+  width = 1. / (1.2 + M)     # the width of the bars
+  dist = 0.15
+
+  fig = plt.figure(figsize=(0.33*N*M,12))
+  plot1 = fig.add_subplot(211)
+  plot2 = fig.add_subplot(212)
+
+  # invisible lines that make sure the scale doesn't get minuscule
+  plot1.axhline(y=101, color='white', linewidth=0.01)
+  plot1.axhline(y=95.0, color='white', linewidth=0.01)
+  plot2.axhline(y=0.1, color='white', linewidth=0.01)
+  plot2.axhline(y=-0.5, color='white', linewidth=0.01)
+
+  reference = timing_sets[0]
+
+  ofs = 0
+
+  for label_i in range(1, len(labels)):
+    timings = timing_sets[label_i]
+    divs = []
+    diffs = []
+    divs_color = []
+    deviations = []
+    for command_name in command_names:
+      ref_N, ref_min, ref_max, ref_avg = reference[command_name]
+      this_N, this_min, this_max, this_avg = timings[command_name]
+
+      val = 100. * (do_div(ref_avg, this_avg) - 1.0)
+      if val < 0:
+        col = '#55dd55'
+      else:
+        col = '#dd5555'
+      divs.append(val)
+      divs_color.append(col)
+      diffs.append( do_diff(ref_avg, this_avg) )
+      deviations.append(this_max / this_min)
+
+    rects = plot1.bar(ind + ofs, divs, width * (1.0 - dist),
+                      color=divs_color, bottom=100.0, edgecolor='none')
+
+    for i in range(len(rects)):
+      x = rects[i].get_x() + width / 2.2
+      div = divs[i]
+      label = labels[label_i]
+
+      plot1.text(x, 100.,
+                 ' %+5.1f%% %s' % (div,label),
+                 ha='center', va='top', size='small',
+                 rotation=-90, family='monospace')
+
+    rects = plot2.bar(ind + ofs, diffs, width * 0.9,
+                   color=divs_color, bottom=0.0, edgecolor='none')
+
+    for i in range(len(rects)):
+      x = rects[i].get_x() + width / 2.2
+      diff = diffs[i]
+      label = labels[label_i]
+
+      plot2.text(x, 0.,
+                 ' %+5.2fs %s' % (diff,label),
+                 ha='center', va='top', size='small',
+                 rotation=-90, family='monospace')
+
+    ofs += width
+
+  plot1.set_title('Speed change compared to %s [%%]' % labels[0])
+  plot1.set_xticks(ind + (width / 2.))
+  plot1.set_xticklabels(command_names, rotation=-55,
+                        horizontalalignment='left',
+                        size='x-small', weight='bold')
+  plot1.axhline(y=100.0, color='#555555', linewidth=0.2)
+  plot2.set_title('[seconds]')
+  plot2.set_xticks(ind + (width / 2.))
+  plot2.set_xticklabels(command_names, rotation=-55,
+                        horizontalalignment='left',
+                        size='medium', weight='bold')
+  plot2.axhline(y=0.0, color='#555555', linewidth=0.2)
+
+  margin = 1.5/(N*M)
+  fig.subplots_adjust(bottom=0.1, top=0.97,
+                      left=margin,
+                      right=1.0-(margin / 2.))
+
+  #plot1.legend( (rects1[0], rects2[0]), (left_label, right_label) )
+
+  #plt.show()
+  plt.savefig(chart_path)
+
+# ------------------------------------------------------------ main
+
+
+# Custom option formatter, keeping newlines in the description.
+# adapted from:
+# http://groups.google.com/group/comp.lang.python/msg/09f28e26af0699b1
+import textwrap
+class IndentedHelpFormatterWithNL(optparse.IndentedHelpFormatter):
+  def format_description(self, description):
+    if not description: return ""
+    desc_width = self.width - self.current_indent
+    indent = " "*self.current_indent
+    bits = description.split('\n')
+    formatted_bits = [
+      textwrap.fill(bit,
+        desc_width,
+        initial_indent=indent,
+        subsequent_indent=indent)
+      for bit in bits]
+    result = "\n".join(formatted_bits) + "\n"
+    return result 
 
 if __name__ == '__main__':
-  parser = optparse.OptionParser()
+  parser = optparse.OptionParser(formatter=IndentedHelpFormatterWithNL())
   # -h is automatically added.
   ### should probably expand the help for that. and see about -?
   parser.add_option('-v', '--verbose', action='store_true', dest='verbose',
                     help='Verbose operation')
-  parser.add_option('--svn', action='store', dest='svn', default='svn',
-                    help='Specify Subversion executable to use')
+  parser.add_option('-b', '--svn-bin-dir', action='store', dest='svn_bin_dir',
+                    default='',
+                    help='Specify directory to find Subversion binaries in')
+  parser.add_option('-f', '--db-path', action='store', dest='db_path',
+                    default='benchmark.db',
+                    help='Specify path to SQLite database file')
+  parser.add_option('-o', '--chart-path', action='store', dest='chart_path',
+                    help='Supply a path for chart output.')
+  parser.add_option('-c', '--command-names', action='store',
+                    dest='command_names',
+                    help='Comma separated list of command names to limit to.')
+
+  parser.set_description(__doc__)
+  parser.set_usage('')
 
-  ### should start passing this, but for now: make it global
-  global options
 
   options, args = parser.parse_args()
 
+  def usage(msg=None):
+    parser.print_help()
+    if msg:
+      print
+      print msg
+    exit(1)
+
   # there should be at least one arg left: the sub-command
   if not args:
-    usage()
-    exit(1)
+    usage('No command argument supplied.')
 
   cmd = args[0]
   del args[0]
 
-  if cmd == 'compare':
-    if len(args) != 2:
-      usage()
-      exit(1)
-    cmd_compare(*args)
+  db = TimingsDb(options.db_path)
 
-  elif cmd == 'combine':
-    if len(args) < 3:
+  if cmd == 'run':
+    if len(args) < 1 or len(args) > 2:
       usage()
-      exit(1)
-    cmd_combine(*args)
+    cmdline_run(db, options, *args)
 
-  elif cmd == 'run':
-    if len(args) < 3 or len(args) > 4:
+  elif cmd == 'compare':
+    if len(args) < 2:
       usage()
-      exit(1)
-    cmd_run(*args)
+    cmdline_compare(db, options, *args)
+
+  elif cmd == 'list':
+    cmdline_list(db, options, *args)
 
   elif cmd == 'show':
-    if not args:
+    cmdline_show(db, options, *args)
+
+  elif cmd == 'chart':
+    if 'compare'.startswith(args[0]):
+      cmdline_chart_compare(db, options, *args[1:])
+    else:
       usage()
-      exit(1)
-    cmd_show(*args)
 
   else:
-    usage()
+    usage('Unknown command argument: %s' % cmd)
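
For quick sanity checks, the recorded timings can also be read back without
benchmark.py. Below is a minimal sketch, assuming the default 'benchmark.db'
path and the run_kind/run/timings tables created by TimingsDb above; the
helper names and the example label are purely illustrative and are not part
of benchmark.py. It accepts the same <branch>@<rev>,<levels>x<spread>
constraint string and prints per-command averages, roughly what the 'show'
command does.

# read_timings.py -- illustrative sketch only, not part of this commit.
import sqlite3

def parse_run_kind(s):
  # Split 'branch@rev,levelsxspread' into its parts; any part may be empty.
  b_r, _, l_s = s.partition(',')
  branch, _, revision = b_r.partition('@')
  levels, _, spread = l_s.partition('x')
  return (branch or None, revision or None,
          int(levels) if levels else None,
          int(spread) if spread else None)

def average_timings(db_path, run_kind_str):
  branch, revision, levels, spread = parse_run_kind(run_kind_str)
  constraints, values = [], []
  for column, value in (('k.branch', branch), ('k.revision', revision),
                        ('k.wc_levels', levels), ('k.wc_spread', spread)):
    if value is not None:
      constraints.append('AND %s = ?' % column)
      values.append(value)
  conn = sqlite3.connect(db_path)
  try:
    # Same join and aborted-run filter as TimingQuery above.
    c = conn.execute("""
        SELECT t.command, count(t.timing), avg(t.timing)
        FROM timings AS t, run AS r, run_kind AS k
        WHERE t.run_id = r.run_id
          AND k.run_kind_id = r.run_kind_id
          AND r.aborted = 0
          %s
        GROUP BY t.command
        ORDER BY t.command""" % ' '.join(constraints), values)
    return c.fetchall()
  finally:
    conn.close()

if __name__ == '__main__':
  # The label is only an example; use whatever 'benchmark.py list' reports.
  for command, n, avg in average_timings('benchmark.db', 'trunk@1352725,5x5'):
    print '%4d %7.2f  %s' % (n, avg, command)
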

Modified: subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/run
URL: http://svn.apache.org/viewvc/subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/run?rev=1373783&r1=1373782&r2=1373783&view=diff
==============================================================================
--- subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/run (original)
+++ subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/run Thu Aug 16 10:17:48 2012
@@ -17,31 +17,53 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# Where are the svn binaries you want to benchmark?
-SVN_A_NAME="1.7.x"
-SVN_A="$HOME/pat/bench/prefix/bin/svn"
+# debug? Just uncomment.
+#SVNBENCH_DEBUG=DEBUG_
+if [ -n "$SVNBENCH_DEBUG" ]; then
+  SVNBENCH_DEBUG="DEBUG_"
+fi
+
+# Subversion bin-dir used for maintenance of working copies
+SVN_STABLE="$HOME/pat/stable/prefix/bin/"
+
+# Where to find the svn binaries you want to benchmark, what are their labels
+# and Last Changed Revisions?
+# side A
+SVN_A_NAME="1.7.0"
+SVN_A="$HOME/pat/bench/prefix/bin"
+SVN_A_REV="$("$SVN_STABLE"/svnversion -c "$HOME/pat/bench/src" | sed 's/.*://')"
+
+# side B
 SVN_B_NAME="trunk"
-SVN_B="$HOME/pat/trunk/prefix/bin/svn"
+SVN_B="$HOME/pat/trunk/prefix/bin"
+SVN_B_REV="$("$SVN_STABLE"/svnversion -c "$HOME/pat/trunk/src" | sed 's/.*://')"
 
-benchmark="$PWD/benchmark.py"
+echo "$SVN_A_NAME@$SVN_A_REV vs. $SVN_B_NAME@$SVN_B_REV"
 
-parent="$(date +"%Y%m%d-%H%M%S")"
-inital_workdir="$PWD"
-mkdir "$parent"
-cd "$parent"
-pwd
+# benchmark script and parameters...
+benchmark="$PWD/benchmark.py"
 
+db="$PWD/${SVNBENCH_DEBUG}benchmark.db"
 
 batch(){
   levels="$1"
   spread="$2"
   N="$3"
-  pre="${levels}x${spread}_"
-  "$benchmark" "--svn=$SVN_A" run "${pre}$SVN_A_NAME" $levels $spread $N >/dev/null
-  "$benchmark" "--svn=$SVN_B" run "${pre}$SVN_B_NAME" $levels $spread $N >/dev/null
+
+  # SVN_A is a fixed tag, currently 1.7.0. For each call, run this once.
+  # It will be called again and again for each trunk build being tested,
+  # so we don't really need to run it $N times every time.
+  N_for_A=1
+  "$benchmark" "--db-path=$db" "--svn-bin-dir=$SVN_A" \
+      run "$SVN_A_NAME@$SVN_A_REV,${levels}x$spread" "$N_for_A" >/dev/null
+
+  # SVN_B is a branch, i.e. the moving target, benchmarked at a specific
+  # point in history each time this script is called. Run this $N times.
+  "$benchmark" "--db-path=$db" "--svn-bin-dir=$SVN_B" \
+      run "$SVN_B_NAME@$SVN_B_REV,${levels}x$spread" $N >/dev/null
 }
 
-N=6
+N=3
 al=5
 as=5
 bl=100
@@ -49,15 +71,16 @@ bs=1
 cl=1
 cs=100
 
-##DEBUG
-#N=1
-#al=1
-#as=1
-#bl=2
-#bs=1
-#cl=1
-#cs=2
-##DEBUG
+if [ -n "$SVNBENCH_DEBUG" ]; then
+  echo "DEBUG"
+  N=1
+  al=1
+  as=1
+  bl=2
+  bs=1
+  cl=1
+  cs=2
+fi
 
 
 {
@@ -65,22 +88,24 @@ started="$(date)"
 echo "Started at $started"
 
 echo "
-*Disclaimer:* this tests only file://-URL access on a GNU/Linux VM.
+*Disclaimer* - This tests only file://-URL access on a GNU/Linux VM.
 This is intended to measure changes in performance of the local working
 copy layer, *only*. These results are *not* generally true for everyone."
 
-batch $al $as $N
-batch $bl $bs $N
-batch $cl $cs $N
-
-"$benchmark" combine "total_$SVN_A_NAME" *x*"_$SVN_A_NAME" >/dev/null
-"$benchmark" combine "total_$SVN_B_NAME" *x*"_$SVN_B_NAME" >/dev/null
+if [ -z "$SVNBENCH_SUMMARY_ONLY" ]; then
+  batch $al $as $N
+  batch $bl $bs $N
+  batch $cl $cs $N
+else
+  echo "(not running benchmarks, just printing results on record.)"
+fi
 
 echo ""
 echo "Averaged-total results across all runs:"
 echo "---------------------------------------"
 echo ""
-"$benchmark" compare "total_$SVN_A_NAME" "total_$SVN_B_NAME"
+"$benchmark" "--db-path=$db" \
+    compare "$SVN_A_NAME" "$SVN_B_NAME@$SVN_B_REV"
 
 echo ""
 echo ""
@@ -88,8 +113,9 @@ echo "Above totals split into separate <
 echo "----------------------------------------------------------------"
 echo ""
 
-for pre in "${al}x${as}_" "${bl}x${bs}_" "${cl}x${cs}_"; do
-  "$benchmark" compare "${pre}$SVN_A_NAME" "${pre}$SVN_B_NAME"
+for lvlspr in "${al}x${as}" "${bl}x${bs}" "${cl}x${cs}"; do
+  "$benchmark" "--db-path=$db" \
+      compare "$SVN_A_NAME,$lvlspr" "$SVN_B_NAME@$SVN_B_REV,$lvlspr"
   echo ""
 done
 
@@ -99,8 +125,13 @@ echo "More detail:"
 echo "------------"
 echo ""
 
-for pre in "${al}x${as}_" "${bl}x${bs}_" "${cl}x${cs}_" "total_"; do
-  "$benchmark" compare -v "${pre}$SVN_A_NAME" "${pre}$SVN_B_NAME"
+for lvlspr in "${al}x${as}" "${bl}x${bs}" "${cl}x${cs}" "" ; do
+  "$benchmark" "--db-path=$db" show "$SVN_A_NAME,$lvlspr"
+  echo --
+  "$benchmark" "--db-path=$db" show "$SVN_B_NAME@$SVN_B_REV,$lvlspr"
+  echo --
+  "$benchmark" "--db-path=$db" \
+      compare -v "$SVN_A_NAME,$lvlspr" "$SVN_B_NAME@$SVN_B_REV,$lvlspr"
   echo ""
   echo ""
 done
@@ -111,7 +142,3 @@ echo "       done at $(date)"
 pwd
 } 2>&1 | tee results.txt
 
-cd "$inital_workdir"
-if [ -f "$parent/total_trunk" ]; then
-  rm -rf "$parent"
-fi
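
The driver script above records both sides into one shared benchmark.db and
can be steered with two environment variables. A minimal usage sketch,
assuming the script is saved as 'run' in the current directory (the variable
names come from the script itself; the invocations are illustrative):

# Full series: three working-copy shapes; $N runs of the trunk build and
# one run of the stable tag per shape, results appended to benchmark.db.
./run

# Tiny trees and a single run per shape; results go to DEBUG_benchmark.db.
SVNBENCH_DEBUG=1 ./run

# Don't run new benchmarks; just re-print comparisons of what is on record.
SVNBENCH_SUMMARY_ONLY=1 ./run
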

Modified: subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/run.bat
URL: http://svn.apache.org/viewvc/subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/run.bat?rev=1373783&r1=1373782&r2=1373783&view=diff
==============================================================================
--- subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/run.bat (original)
+++ subversion/branches/compressed-pristines/tools/dev/benchmarks/suite1/run.bat Thu Aug 16 10:17:48 2012
@@ -16,6 +16,10 @@
 :: under the License.
 
 @ECHO OFF
+
+ECHO.THIS SCRIPT IS CURRENTLY OUTDATED.
+GOTO :EOF
+
 SETLOCAL EnableDelayedExpansion
 
 :: Where are the svn binaries you want to benchmark?

Propchange: subversion/branches/compressed-pristines/tools/dev/gdb-py/svndbg/__init__.py
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: subversion/branches/compressed-pristines/tools/dev/gdb-py/svndbg/printers.py
URL: http://svn.apache.org/viewvc/subversion/branches/compressed-pristines/tools/dev/gdb-py/svndbg/printers.py?rev=1373783&r1=1373782&r2=1373783&view=diff
==============================================================================
--- subversion/branches/compressed-pristines/tools/dev/gdb-py/svndbg/printers.py (original)
+++ subversion/branches/compressed-pristines/tools/dev/gdb-py/svndbg/printers.py Thu Aug 16 10:17:48 2012
@@ -28,38 +28,92 @@ from gdb.printing import RegexpCollectio
 
 
 class TypedefRegexCollectionPrettyPrinter(RegexpCollectionPrettyPrinter):
-    """Class for implementing a collection of regular-expression based
-       pretty-printers, matching on the type name at the point of use, such
-       as (but not necessarily) a 'typedef' name, ignoring 'const' or
-       'volatile' qualifiers.
-
-       This is modeled on RegexpCollectionPrettyPrinter, which (in GDB 7.3)
-       matches on the base type's tag name and can't match a pointer type or
-       any other type that doesn't have a tag name."""
+    """Class for implementing a collection of pretty-printers, matching the
+       type name to a regular expression.
+
+       A pretty-printer in this collection will be used if the type of the
+       value to be printed matches the printer's regular expression, or if
+       the value is a pointer to and/or typedef to a type name that matches
+       its regular expression.  The variations are tried in this order:
+
+         1. the type name as known to the debugger (could be a 'typedef');
+         2. the type after stripping off any number of layers of 'typedef';
+         3. if it is a pointer, the pointed-to type;
+         4. if it is a pointer, the pointed-to type minus some 'typedef's.
+
+       In all cases, ignore 'const' and 'volatile' qualifiers.  When
+       matching the pointed-to type, dereference the value or use 'None' if
+       the value was a null pointer.
+
+       This class is modeled on RegexpCollectionPrettyPrinter, which (in GDB
+       7.3) matches on the base type's tag name and can't match a pointer
+       type or any other type that doesn't have a tag name.
+    """
 
     def __init__(self, name):
         super(TypedefRegexCollectionPrettyPrinter, self).__init__(name)
 
     def __call__(self, val):
-        """Lookup the pretty-printer for the provided value."""
+        """Find and return an instantiation of a printer for VAL.
+        """
 
-        # Get the type name, without 'const' or 'volatile' qualifiers.
-        typename = str(val.type.unqualified())
-        if not typename:
-            return None
-
-        # Iterate over table of type regexps to find an enabled printer for
-        # that type.  Return an instantiation of the printer if found.
-        for printer in self.subprinters:
-            if printer.enabled and printer.compiled_re.search(typename):
-                return printer.gen_printer(val)
+        def lookup_type(type, val):
+            """Return the first printer whose regular expression matches the
+               name (tag name for struct/union/enum types) of TYPE, ignoring
+               any 'const' or 'volatile' qualifiers.
+
+               VAL is a gdb.Value, or may be None to indicate a dereferenced
+               null pointer.  TYPE is the associated gdb.Type.
+            """
+            if type.code in [gdb.TYPE_CODE_STRUCT, gdb.TYPE_CODE_UNION,
+                             gdb.TYPE_CODE_ENUM]:
+                typename = type.tag
+            else:
+                typename = str(type.unqualified())
+            for printer in self.subprinters:
+                if printer.enabled and printer.compiled_re.search(typename):
+                    return printer.gen_printer(val)
+
+        def lookup_type_or_alias(type, val):
+            """Return the first printer matching TYPE, or else if TYPE is a
+               typedef then the first printer matching the aliased type.
+
+               VAL is a gdb.Value, or may be None to indicate a dereferenced
+               null pointer.  TYPE is the associated gdb.Type.
+            """
+            # First, look for a printer for the given (but unqualified) type.
+            printer = lookup_type(type, val)
+            if printer:
+                return printer
+
+            # If it's a typedef, look for a printer for the aliased type ...
+            while type.code == gdb.TYPE_CODE_TYPEDEF:
+                type = type.target()
+                printer = lookup_type(type, val)
+                if printer:
+                    return printer
+
+        # First, look for a printer for the given (but unqualified) type, or
+        # its aliased type if it's a typedef.
+        printer = lookup_type_or_alias(val.type, val)
+        if printer:
+            return printer
+
+        # If it's a pointer, look for a printer for the pointed-to type.
+        if val.type.code == gdb.TYPE_CODE_PTR:
+            type = val.type.target()
+            printer = lookup_type_or_alias(
+                          type, val and val.dereference() or None)
+            if printer:
+                return printer
 
-        # Cannot find a pretty printer.  Return None.
+        # Cannot find a matching pretty printer in this collection.
         return None
 
 class InferiorFunction:
     """A class whose instances are callable functions on the inferior
-       process."""
+       process.
+    """
     def __init__(self, function_name):
         self.function_name = function_name
         self.func = None
@@ -71,7 +125,8 @@ class InferiorFunction:
 
 def children_as_map(children_iterator):
     """Convert an iteration of (key, value) pairs into the form required for
-       a pretty-printer 'children' method when the display-hint is 'map'."""
+       a pretty-printer 'children' method when the display-hint is 'map'.
+    """
     for k, v in children_iterator:
         yield 'key', k
         yield 'val', v
@@ -95,9 +150,10 @@ svn__apr_hash_index_val = InferiorFuncti
 
 def children_of_apr_hash(hash_p, value_type=None):
     """Iterate over an 'apr_hash_t *' GDB value, in the way required for a
-       pretty-printer 'children' method when the display-hint is 'array'.
+       pretty-printer 'children' method when the display-hint is 'map'.
        Cast the value pointers to VALUE_TYPE, or return values as '...' if
-       VALUE_TYPE is None."""
+       VALUE_TYPE is None.
+    """
     hi = apr_hash_first(0, hash_p)
     while (hi):
         k = svn__apr_hash_index_key(hi).reinterpret_cast(cstringType)
@@ -115,11 +171,17 @@ def children_of_apr_hash(hash_p, value_t
 class AprHashPrinter:
     """for 'apr_hash_t' of 'char *' keys and unknown values"""
     def __init__(self, val):
-        self.hash_p = val.address
+        if val:
+            self.hash_p = val.address
+        else:
+            self.hash_p = val
 
     def to_string(self):
         """Return a string to be displayed before children are displayed, or
-           return None if we don't want any such."""
+           return None if we don't want any such.
+        """
+        if not self.hash_p:
+            return 'NULL'
         return 'hash of ' + str(apr_hash_count(self.hash_p)) + ' items'
 
     def children(self):
@@ -130,20 +192,15 @@ class AprHashPrinter:
     def display_hint(self):
         return 'map'
 
-class PtrAprHashPrinter(AprHashPrinter):
-    """for pointer to 'apr_hash_t' of 'char *' keys and unknown values"""
-    def __init__(self, val):
-        self.hash_p = val
-
-    def to_string(self):
-        if not self.hash_p:
-            return 'NULL'
-        return AprHashPrinter.to_string(self)
-
-    def children(self):
-        if not self.hash_p:
-            return []
-        return AprHashPrinter.children(self)
+def children_of_apr_array(array, value_type):
+    """Iterate over an 'apr_array_header_t' GDB value, in the way required for
+       a pretty-printer 'children' method when the display-hint is 'array'.
+       Cast the values to VALUE_TYPE.
+    """
+    nelts = int(array['nelts'])
+    elts = array['elts'].reinterpret_cast(value_type.pointer())
+    for i in range(nelts):
+        yield str(i), elts[i]
 
 class AprArrayPrinter:
     """for 'apr_array_header_t' of unknown elements"""
@@ -163,122 +220,195 @@ class AprArrayPrinter:
     def display_hint(self):
         return 'array'
 
-class PtrAprArrayPrinter(AprArrayPrinter):
-    """for pointer to 'apr_array_header_t' of unknown elements"""
+########################################################################
+
+# Pretty-printing for Subversion libsvn_subr types.
+
+class SvnBooleanPrinter:
+    """for svn_boolean_t"""
     def __init__(self, val):
-        if not val:
-            self.array = None
-        else:
-            self.array = val.dereference()
+        self.val = val
 
     def to_string(self):
-        if not self.array:
-            return 'NULL'
-        return AprArrayPrinter.to_string(self)
+        if self.val is None:
+            return '(NULL)'
+        if self.val:
+            return 'TRUE'
+        else:
+            return 'FALSE'
 
-    def children(self):
-        if not self.array:
-            return []
-        return AprArrayPrinter.children(self)
+class SvnStringPrinter:
+    """for svn_string_t"""
+    def __init__(self, val):
+        self.val = val
 
+    def to_string(self):
+        if not self.val:
+            return 'NULL'
 
-########################################################################
+        data = self.val['data']
+        len = int(self.val['len'])
+        return data.string(length=len)
 
-# Pretty-printing for Subversion library types.
+    def display_hint(self):
+        if self.val:
+            return 'string'
 
-class SvnStringPrinter:
+class SvnMergeRangePrinter:
+    """for svn_merge_range_t"""
     def __init__(self, val):
         self.val = val
 
     def to_string(self):
-        # Make sure string * works, too
-        val = self.val
+        if not self.val:
+            return 'NULL'
 
-        ptr = val['data']
-        len = val['len']
+        r = self.val
+        start = int(r['start'])
+        end = int(r['end'])
+        if start >= 0 and start < end:
+            if start + 1 == end:
+                rs = str(end)
+            else:
+                rs = str(start + 1) + '-' + str(end)
+        elif end >= 0 and end < start:
+            if start == end + 1:
+                rs = '-' + str(start)
+            else:
+                rs = str(start) + '-' + str(end + 1)
+        else:
+            rs = '(INVALID: s=%d, e=%d)' % (start, end)
+        if not r['inheritable']:
+            rs += '*'
+        return rs
 
-        return "length: " + str(int(len)) + "; contents: '" + ptr.string(length=len) + "'"
+    def display_hint(self):
+        if self.val:
+            return 'string'
+
+class SvnRangelistPrinter:
+    """for svn_rangelist_t"""
+    def __init__(self, val):
+        self.array = val
+        self.svn_merge_range_t = gdb.lookup_type('svn_merge_range_t')
+
+    def to_string(self):
+        if not self.array:
+            return 'NULL'
+
+        s = ''
+        for key, val in children_of_apr_array(self.array,
+                                              self.svn_merge_range_t.pointer()):
+            if s:
+                s += ','
+            s += SvnMergeRangePrinter(val).to_string()
+        return s
 
     def display_hint(self):
-        return 'string'
+        if self.array:
+            return 'string'
+
+class SvnMergeinfoPrinter:
+    """for svn_mergeinfo_t"""
+    def __init__(self, val):
+        self.hash_p = val
+        self.svn_rangelist_t = gdb.lookup_type('svn_rangelist_t')
+
+    def to_string(self):
+        if self.hash_p == 0:
+            return 'NULL'
+
+        s = ''
+        for key, val in children_of_apr_hash(self.hash_p,
+                                             self.svn_rangelist_t.pointer()):
+            if s:
+                s += '; '
+            s += key + ':' + SvnRangelistPrinter(val).to_string()
+        return '{ ' + s + ' }'
 
 class SvnMergeinfoCatalogPrinter:
     """for svn_mergeinfo_catalog_t"""
     def __init__(self, val):
         self.hash_p = val
+        self.svn_mergeinfo_t = gdb.lookup_type('svn_mergeinfo_t')
 
     def to_string(self):
         if self.hash_p == 0:
             return 'NULL'
-        return 'mergeinfo catalog of ' + str(apr_hash_count(self.hash_p)) + ' items'
 
-    def children(self):
-        if self.hash_p == 0:
-            return None
-        mergeinfoType = gdb.lookup_type('svn_mergeinfo_t')
-        return children_as_map(children_of_apr_hash(self.hash_p, mergeinfoType))
+        s = ''
+        for key, val in children_of_apr_hash(self.hash_p,
+                                             self.svn_mergeinfo_t):
+            if s:
+                s += ',\n  '
+            s += "'" + key + "': " + SvnMergeinfoPrinter(val).to_string()
+        return '{ ' + s + ' }'
+
+########################################################################
+
+# Pretty-printing for Subversion libsvn_client types.
+
+class SvnPathrevPrinter:
+    """for svn_client__pathrev_t"""
+    def __init__(self, val):
+        self.val = val
+
+    def to_string(self):
+        if not self.val:
+            return 'NULL'
+
+        rev = int(self.val['rev'])
+        url = self.val['url'].string()
+        repos_root_url = self.val['repos_root_url'].string()
+        relpath = url[len(repos_root_url):]
+        return "%s@%d" % (relpath, rev)
 
     def display_hint(self):
-        return 'map'
+        if self.val:
+            return 'string'
 
 
 ########################################################################
 
 libapr_printer = None
-libapr_printer2 = None
 libsvn_printer = None
-libsvn_printer2 = None
 
 def build_libsvn_printers():
     """Construct the pretty-printer objects."""
 
-    global libapr_printer, libapr_printer2, libsvn_printer, libsvn_printer2
+    global libapr_printer, libsvn_printer
 
-    # These sub-printers match a struct's (or union)'s tag name,
-    # after stripping typedefs, references and const/volatile qualifiers.
-    libapr_printer = RegexpCollectionPrettyPrinter("libapr")
+    libapr_printer = TypedefRegexCollectionPrettyPrinter("libapr")
     libapr_printer.add_printer('apr_hash_t', r'^apr_hash_t$',
                                AprHashPrinter)
     libapr_printer.add_printer('apr_array_header_t', r'^apr_array_header_t$',
                                AprArrayPrinter)
 
-    # These sub-printers match a type name at the point of use,
-    # after stripping const/volatile qualifiers.
-    #
-    # TODO: The "apr_foo_t *" entries are in this collection merely because
-    #       the collection above can't match them, but ideally we'd fix that
-    #       matching and move these entries to there so that they get used
-    #       for any typedef that doesn't have its own specific pretty-printer
-    #       registered.
-    libapr_printer2 = TypedefRegexCollectionPrettyPrinter("libapr2")
-    libapr_printer2.add_printer('apr_hash_t *', r'^apr_hash_t \*$',
-                                PtrAprHashPrinter)
-    libapr_printer2.add_printer('apr_array_header_t *', r'^apr_array_header_t \*$',
-                                PtrAprArrayPrinter)
-
-    # These sub-printers match a struct's (or union)'s tag name,
-    # after stripping typedefs, references and const/volatile qualifiers.
-    libsvn_printer = RegexpCollectionPrettyPrinter("libsvn")
+    libsvn_printer = TypedefRegexCollectionPrettyPrinter("libsvn")
+    libsvn_printer.add_printer('svn_boolean_t', r'^svn_boolean_t$',
+                               SvnBooleanPrinter)
     libsvn_printer.add_printer('svn_string_t', r'^svn_string_t$',
                                SvnStringPrinter)
-
-    # These sub-printers match a type name at the point of use,
-    # after stripping const/volatile qualifiers.
-    libsvn_printer2 = TypedefRegexCollectionPrettyPrinter("libsvn2")
-    libsvn_printer2.add_printer('svn_mergeinfo_catalog_t', r'^svn_mergeinfo_catalog_t$',
-                                SvnMergeinfoCatalogPrinter)
+    libsvn_printer.add_printer('svn_client__pathrev_t', r'^svn_client__pathrev_t$',
+                               SvnPathrevPrinter)
+    libsvn_printer.add_printer('svn_merge_range_t', r'^svn_merge_range_t$',
+                               SvnMergeRangePrinter)
+    libsvn_printer.add_printer('svn_rangelist_t', r'^svn_rangelist_t$',
+                               SvnRangelistPrinter)
+    libsvn_printer.add_printer('svn_mergeinfo_t', r'^svn_mergeinfo_t$',
+                               SvnMergeinfoPrinter)
+    libsvn_printer.add_printer('svn_mergeinfo_catalog_t', r'^svn_mergeinfo_catalog_t$',
+                               SvnMergeinfoCatalogPrinter)
 
 
 def register_libsvn_printers(obj):
     """Register the pretty-printers for the object file OBJ."""
 
-    global libapr_printer, libapr_printer2, libsvn_printer, libsvn_printer2
+    global libapr_printer, libsvn_printer
 
     # Printers registered later take precedence.
     gdb.printing.register_pretty_printer(obj, libapr_printer)
-    gdb.printing.register_pretty_printer(obj, libapr_printer2)
     gdb.printing.register_pretty_printer(obj, libsvn_printer)
-    gdb.printing.register_pretty_printer(obj, libsvn_printer2)
 
 
 # Construct the pretty-printer objects, once, at GDB start-up time when this

Propchange: subversion/branches/compressed-pristines/tools/dev/gdb-py/svndbg/printers.py
------------------------------------------------------------------------------
    svn:eol-style = native
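
With the typedef- and pointer-aware lookup in TypedefRegexCollectionPrettyPrinter, the separate Ptr* printer classes and the second "...2" collections are gone; each printer now also copes with a NULL or pointer value directly. The new SvnMergeRangePrinter renders an svn_merge_range_t in the usual mergeinfo range notation (the printed range runs from start+1 to end, and a trailing '*' marks a non-inheritable range). Below is a standalone sketch of those formatting rules, runnable without GDB; the function name is ours, only the logic mirrors the printer added above:

    def format_merge_range(start, end, inheritable=True):
        """Mirror of SvnMergeRangePrinter.to_string() from the diff above."""
        if 0 <= start < end:                      # forward range
            rs = str(end) if start + 1 == end else '%d-%d' % (start + 1, end)
        elif 0 <= end < start:                    # reverse range
            rs = '-%d' % start if start == end + 1 else '%d-%d' % (start, end + 1)
        else:
            rs = '(INVALID: s=%d, e=%d)' % (start, end)
        if not inheritable:
            rs += '*'                             # non-inheritable marker
        return rs

    # format_merge_range(4, 9)                    -> '5-9'
    # format_merge_range(9, 4)                    -> '9-5'
    # format_merge_range(4, 5, inheritable=False) -> '5*'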

Modified: subversion/branches/compressed-pristines/tools/dev/gen-py-errors.py
URL: http://svn.apache.org/viewvc/subversion/branches/compressed-pristines/tools/dev/gen-py-errors.py?rev=1373783&r1=1373782&r2=1373783&view=diff
==============================================================================
--- subversion/branches/compressed-pristines/tools/dev/gen-py-errors.py (original)
+++ subversion/branches/compressed-pristines/tools/dev/gen-py-errors.py Thu Aug 16 10:17:48 2012
@@ -23,44 +23,87 @@
 # ====================================================================
 #
 #
-#  Meant to be run from the root of a Subversion working copy.  If anybody
-#  wants to do some path magic to improve that use, feel free.
-
-import sys, os
-sys.path.append(os.path.join('subversion', 'bindings', 'swig',
-                             'python', 'tests'))
-
+# Locates svn_error_codes.h based on its relative location to this script.
+#
+# Generates to STDOUT. Typically, redirect this into svntest/err.py
+#
 
-import setup_path
+import sys
+import os
+import re
 
-header = '''#!/usr/bin/env python
+HEADER = '''#!/usr/bin/env python
 ### This file automatically generated by tools/dev/gen-py-errors.py,
 ### which see for more information
 ###
 ### It is versioned for convenience.
-
 '''
 
+# Established by svn 1.0. May as well hard-code these.
+APR_OS_START_ERROR = 20000
+APR_OS_START_USERERR = APR_OS_START_ERROR + 50000 * 2
+SVN_ERR_CATEGORY_SIZE = 5000
+
+RE_CAT_NAME = re.compile(r'SVN_ERR_([A-Z_]+)_CATEG')
+RE_CAT_VALUE = re.compile(r'\d+')
+
+RE_DEF_NAME = re.compile(r'SVN_ERRDEF\(([A-Z0-9_]+)')
+RE_DEF_VALUE = re.compile(r'SVN_ERR_([A-Z_]+)_CATEG[^0-9]*([0-9]+)')
+
+
+def write_output(codes):
+  print HEADER
+
+  for name, value in codes:
+    # skip SVN_ERR_ on the name
+    print '%s = %d' % (name[8:], value)
+
+
+def main(codes_fname):
+  categ = { }
+  codes = [ ]
+
+  f = open(codes_fname)
+
+  # Parse all the category start values
+  while True:
+    line = f.next()
+    m = RE_CAT_NAME.search(line)
+    if m:
+      name = m.group(1)
+      m = RE_CAT_VALUE.search(f.next())
+      assert m
+      value = int(m.group(0))
+      categ[name] = APR_OS_START_USERERR + value * SVN_ERR_CATEGORY_SIZE
+
+    elif line.strip() == 'SVN_ERROR_START':
+      break
+
+  # Parse each of the error values
+  while True:
+    line = f.next()
+    m = RE_DEF_NAME.search(line)
+    if m:
+      name = m.group(1)
+      line = f.next()
+      m = RE_DEF_VALUE.search(line)
+      if not m:
+        # SVN_ERR_WC_NOT_DIRECTORY is defined as equal to NOT_WORKING_COPY
+        # rather than relative to SVN_ERR_WC_CATEGORY_START
+        #print 'SKIP:', line
+        continue
+      cat = m.group(1)
+      value = int(m.group(2))
+      codes.append((name, categ[cat] + value))
 
-def write_output(errs, filename):
-  out = open(filename, 'w')
-  out.write(header)
-
-  for name, val in errs:
-    out.write('%s = %d\n' % (name, val))
-
-  out.close()
-
-
-def main(output_filename):
-  import core
+    elif line.strip() == 'SVN_ERROR_END':
+      break
 
-  errs = [e for e in dir(core.svn.core) if e.startswith('SVN_ERR_')]
-  codes = []
-  for e in errs:
-    codes.append((e[8:], getattr(core.svn.core, e)))
-  write_output(codes, output_filename)
+  write_output(sorted(codes))
 
 
 if __name__ == '__main__':
-  main(os.path.join('subversion', 'tests', 'cmdline', 'svntest', 'err.py'))
+  this_dir = os.path.dirname(os.path.abspath(__file__))
+  codes_fname = os.path.join(this_dir, os.path.pardir, os.path.pardir,
+                             'subversion', 'include', 'svn_error_codes.h')
+  main(codes_fname)
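
The rewritten generator no longer needs the SWIG bindings: it locates svn_error_codes.h relative to the script, parses the category start offsets and per-error offsets with the regexes above, and computes each numeric code as the category start plus the offset. A small arithmetic sketch of that computation (the category index and error offset below are made-up illustration values, not taken from svn_error_codes.h):

    APR_OS_START_ERROR = 20000
    APR_OS_START_USERERR = APR_OS_START_ERROR + 50000 * 2   # 120000
    SVN_ERR_CATEGORY_SIZE = 5000

    category_index = 3   # hypothetical SVN_ERR_*_CATEGORY_START index
    error_offset = 7     # hypothetical offset within that category

    category_start = APR_OS_START_USERERR + category_index * SVN_ERR_CATEGORY_SIZE
    print category_start + error_offset   # -> 135007

The script writes to stdout; as its header comment says, the output is typically redirected into subversion/tests/cmdline/svntest/err.py.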

Propchange: subversion/branches/compressed-pristines/tools/dev/histogram.py
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: subversion/branches/compressed-pristines/tools/dev/mergegraph/mergegraph.py
URL: http://svn.apache.org/viewvc/subversion/branches/compressed-pristines/tools/dev/mergegraph/mergegraph.py?rev=1373783&r1=1373782&r2=1373783&view=diff
==============================================================================
--- subversion/branches/compressed-pristines/tools/dev/mergegraph/mergegraph.py (original)
+++ subversion/branches/compressed-pristines/tools/dev/mergegraph/mergegraph.py Thu Aug 16 10:17:48 2012
@@ -65,7 +65,8 @@ from pydot import Node, Edge
 
 def mergeinfo_to_node_list(mi):
   """Convert a mergeinfo string such as '/foo:1,3-5*' into a list of
-     node names such as ['foo1', 'foo3', 'foo4', 'foo5']."""
+     node names such as ['foo1', 'foo3', 'foo4', 'foo5'].
+  """
   ### Doesn't yet strip the leading slash.
   l = []
   if mi:
@@ -89,7 +90,8 @@ def mergeinfo_to_node_list(mi):
 
 class MergeGraph(pydot.Graph):
   """Base class, not intended for direct use.  Use MergeDot for the main
-     graph and MergeSubgraph for a subgraph."""
+     graph and MergeSubgraph for a subgraph.
+  """
 
   def mk_origin_node(graph, name, label):
     """Add a node to the graph"""
@@ -169,13 +171,15 @@ class MergeGraph(pydot.Graph):
 
   def add_annotation(graph, node, label, color='lightblue'):
     """Add a graph node that serves as an annotation to a normal node.
-       More than one annotation can be added to the same normal node."""
+       More than one annotation can be added to the same normal node.
+    """
     subg_name = node + '_annotations'
 
     def get_subgraph(graph, name):
       """Equivalent to pydot.Graph.get_subgraph() when there is no more than
          one subgraph of the given name, but working around a bug in
-         pydot.Graph.get_subgraph()."""
+         pydot.Graph.get_subgraph().
+      """
       for subg in graph.get_subgraph_list():
         if subg.get_name() == name:
           return subg
@@ -298,7 +302,12 @@ class MergeDot(MergeGraph, pydot.Dot):
     """Save this merge graph to the given file format. If filename is None,
        construct a filename from the basename of the original file (as passed
        to the constructor and then stored in graph.basename) and the suffix
-       according to the given format."""
+       according to the given format.
+    """
     if not filename:
       filename = graph.basename + '.' + format
-    pydot.Dot.write(graph, filename, format=format)
+    if format == 'sh':
+      import save_as_sh
+      save_as_sh.write_sh_file(graph, filename)
+    else:
+      pydot.Dot.write(graph, filename, format=format)
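
MergeDot.save() now dispatches on the requested format: 'sh' is handed to the new save_as_sh.write_sh_file() helper, while every other format still goes through pydot.Dot.write(). A hedged usage sketch, assuming the script runs from tools/dev/mergegraph/ so the module imports as 'mergegraph'; the constructor argument is an assumption for illustration, only the save() dispatch comes from the change above:

    from mergegraph import MergeDot

    graph = MergeDot('scenario.txt')   # hypothetical scenario description file
    graph.save(format='png')           # rendered by pydot.Dot.write()
    graph.save(format='sh')            # written by save_as_sh.write_sh_file()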

Modified: subversion/branches/compressed-pristines/tools/dev/svnraisetreeconflict/main.c
URL: http://svn.apache.org/viewvc/subversion/branches/compressed-pristines/tools/dev/svnraisetreeconflict/main.c?rev=1373783&r1=1373782&r2=1373783&view=diff
==============================================================================
--- subversion/branches/compressed-pristines/tools/dev/svnraisetreeconflict/main.c (original)
+++ subversion/branches/compressed-pristines/tools/dev/svnraisetreeconflict/main.c Thu Aug 16 10:17:48 2012
@@ -67,8 +67,8 @@
 static svn_error_t *
 version(apr_pool_t *pool)
 {
-  return svn_opt_print_help3(NULL, "svnraisetreeconflict", TRUE, FALSE, NULL,
-                             NULL, NULL, NULL, NULL, NULL, pool);
+  return svn_opt_print_help4(NULL, "svnraisetreeconflict", TRUE, FALSE, FALSE,
+                             NULL, NULL, NULL, NULL, NULL, NULL, pool);
 }
 
 static void
@@ -308,8 +308,8 @@ check_lib_versions(void)
       { "svn_wc",     svn_wc_version },
       { NULL, NULL }
     };
-
   SVN_VERSION_DEFINE(my_version);
+
   return svn_ver_check_list(&my_version, checklist);
 }
 
@@ -365,10 +365,8 @@ main(int argc, const char *argv[])
       if (APR_STATUS_IS_EOF(status))
         break;
       if (status != APR_SUCCESS)
-        {
-          usage(pool);
-          return EXIT_FAILURE;
-        }
+        usage(pool);  /* this will exit() */
+
       switch (opt)
         {
         case 'h':
@@ -379,8 +377,7 @@ main(int argc, const char *argv[])
           exit(0);
           break;
         default:
-          usage(pool);
-          return EXIT_FAILURE;
+          usage(pool);  /* this will exit() */
         }
     }
 
@@ -396,10 +393,7 @@ main(int argc, const char *argv[])
     }
 
   if (remaining_argv->nelts < 1)
-    {
-      usage(pool);
-      return EXIT_FAILURE;
-    }
+    usage(pool);  /* this will exit() */
 
   /* Do the main task */
   SVNRAISETC_INT_ERR(raise_tree_conflict(remaining_argv->nelts,

Modified: subversion/branches/compressed-pristines/tools/dev/unix-build/Makefile.svn
URL: http://svn.apache.org/viewvc/subversion/branches/compressed-pristines/tools/dev/unix-build/Makefile.svn?rev=1373783&r1=1373782&r2=1373783&view=diff
==============================================================================
--- subversion/branches/compressed-pristines/tools/dev/unix-build/Makefile.svn (original)
+++ subversion/branches/compressed-pristines/tools/dev/unix-build/Makefile.svn Thu Aug 16 10:17:48 2012
@@ -30,7 +30,12 @@
 #   |______________________________________________________________|
 
 ENABLE_PERL_BINDINGS ?= yes
-ENABLE_JAVA_BINDINGS ?= no # they don't build with thread-less APR...
+THREADING ?= yes
+ifeq ($(THREADING),yes)
+ENABLE_JAVA_BINDINGS ?= yes
+else
+ENABLE_JAVA_BINDINGS ?= no
+endif
 USE_APR_ICONV ?= no # set to yes to use APR iconv instead of GNU iconv
 PARALLEL ?= 1
 CLEANUP ?= 1
@@ -60,20 +65,21 @@ OBJDIR		= $(PWD)/objdir
 
 BDB_MAJOR_VER	= 4.7
 BDB_VER		= $(BDB_MAJOR_VER).25
-APR_VER		= 1.4.5
+APR_VER		= 1.4.6
 APR_ICONV_VER	= 1.2.1
 GNU_ICONV_VER	= 1.14
-APR_UTIL_VER	= 1.3.12
+APR_UTIL_VER	= 1.4.1
 HTTPD_VER	= 2.2.22
 NEON_VER	= 0.29.6
-SERF_VER	= 1.0.3
+SERF_VER	= 1.1.0
 SERF_OLD_VER	= 0.3.1
 CYRUS_SASL_VER	= 2.1.25
-SQLITE_VER	= 3071100
+SQLITE_VER	= 3071201
 LIBMAGIC_VER	= 5.11
 RUBY_VER	= 1.8.7-p358
 BZ2_VER	= 1.0.6
-PYTHON_VER	= 2.7.2
+PYTHON_VER	= 2.7.3
+JUNIT_VER	= 4.10
 
 BDB_DIST	= db-$(BDB_VER).tar.gz
 APR_ICONV_DIST	= apr-iconv-$(APR_ICONV_VER).tar.gz
@@ -86,6 +92,7 @@ LIBMAGIC_DIST	= file-$(LIBMAGIC_VER).tar
 RUBY_DIST	= ruby-$(RUBY_VER).tar.gz
 BZ2_DIST	= bzip2-$(BZ2_VER).tar.gz
 PYTHON_DIST	= Python-$(PYTHON_VER).tgz
+JUNIT_DIST	= junit-${JUNIT_VER}.jar
 
 DISTFILES	= $(DISTDIR)/$(NEON_DIST) \
 		$(DISTDIR)/$(SERF_DIST) \
@@ -97,7 +104,8 @@ DISTFILES	= $(DISTDIR)/$(NEON_DIST) \
 		$(DISTDIR)/$(LIBMAGIC_DIST) \
 		$(DISTDIR)/$(RUBY_DIST) \
 		$(DISTDIR)/$(BZ2_DIST) \
-		$(DISTDIR)/$(PYTHON_DIST)
+		$(DISTDIR)/$(PYTHON_DIST) \
+		$(DISTDIR)/$(JUNIT_DIST)
 
 FETCH_CMD	= wget -c
 
@@ -117,7 +125,9 @@ CYRUS_SASL_URL	= ftp://ftp.andrew.cmu.ed
 LIBMAGIC_URL	= ftp://ftp.astron.com/pub/file/$(LIBMAGIC_DIST)
 RUBY_URL	= http://ftp.ruby-lang.org/pub/ruby/1.8/$(RUBY_DIST)
 BZ2_URL		= http://bzip.org/$(BZ2_VER)/$(BZ2_DIST)
-PYTHON_URL	= http://python.org/ftp/python/2.7.2/$(PYTHON_DIST)
+PYTHON_URL	= http://python.org/ftp/python/$(PYTHON_VER)/$(PYTHON_DIST)
+JUNIT_URL	= http://cloud.github.com/downloads/KentBeck/junit/$(JUNIT_DIST)
+
 
 BDB_SRCDIR	= $(SRCDIR)/db-$(BDB_VER)
 APR_SRCDIR	= $(SRCDIR)/apr-$(APR_VER)
@@ -292,7 +302,7 @@ $(APR_OBJDIR)/.retrieved:
 	fi
 	touch $@
 
-ifdef THREADING
+ifeq ($(THREADING),yes)
 THREADS_FLAG=--enable-threads
 else
 THREADS_FLAG=--disable-threads
@@ -304,11 +314,6 @@ endif
 
 # configure apr
 $(APR_OBJDIR)/.configured: $(APR_OBJDIR)/.retrieved
-	cp $(APR_SRCDIR)/build/apr_hints.m4 \
-		$(APR_SRCDIR)/build/apr_hints.m4.orig
-	cat $(APR_SRCDIR)/build/apr_hints.m4.orig \
-		| sed -e '/^.*APR_ADDTO(CPPFLAGS, \[-D_POSIX_THREADS\]).*$$/d' \
-			> $(APR_SRCDIR)/build/apr_hints.m4
 	cd $(APR_SRCDIR) && ./buildconf
 	cd $(APR_OBJDIR) \
 		&& env CFLAGS="-O0 -g $(PROFILE_CFLAGS)" GREP="`which grep`" \
@@ -619,9 +624,19 @@ $(DISTDIR)/$(NEON_DIST):
 	cd $(DISTDIR) && $(FETCH_CMD) $(NEON_URL)
 
 # retrieve neon
+NEON_SVN_URL=http://svn.webdav.org/repos/projects/neon/trunk
 $(NEON_OBJDIR)/.retrieved: $(DISTDIR)/$(NEON_DIST)
 	[ -d $(NEON_OBJDIR) ] || mkdir -p $(NEON_OBJDIR)
 	tar -C $(SRCDIR) -zxf $(DISTDIR)/$(NEON_DIST)
+	# fix build with OpenSSL lacking SSLv2 support:
+	cd $(NEON_SRCDIR)/src && svn diff -c 1865 \
+		$(NEON_SVN_URL)/src/ne_openssl.c | patch -p0
+	cd $(NEON_SRCDIR)/src && svn diff -c 1872 \
+		$(NEON_SVN_URL)/src/ne_openssl.c | patch -p0
+	cd $(NEON_SRCDIR)/src && svn diff -c 1865 \
+		$(NEON_SVN_URL)/src/ne_ssl.h | patch -p0
+	cd $(NEON_SRCDIR)/src && svn diff -c 1865 \
+		$(NEON_SVN_URL)/src/ne_session.c | patch -p0
 	touch $@
 
 # OpenBSD does not have krb5-config in PATH, but the neon port has
@@ -779,7 +794,7 @@ $(SQLITE_OBJDIR)/.retrieved: $(DISTDIR)/
 	tar -C $(SRCDIR) -zxf $(DISTDIR)/$(SQLITE_DIST)
 	touch $@
 
-ifdef THREADING
+ifeq ($(THREADING),yes)
 THREADSAFE_FLAG=--enable-threadsafe
 else
 THREADSAFE_FLAG=--disable-threadsafe
@@ -1076,6 +1091,14 @@ $(PYTHON_OBJDIR)/.installed: $(PYTHON_OB
 
 
 #######################################################################
+# junit
+#######################################################################
+
+# fetch distfile for junit
+$(DISTDIR)/$(JUNIT_DIST):
+	cd $(DISTDIR) && $(FETCH_CMD) $(JUNIT_URL)
+
+#######################################################################
 # svn
 #######################################################################
 
@@ -1119,12 +1142,20 @@ $(SVN_OBJDIR)/.retrieved:
 	fi
 	touch $@
 
-ifeq ($(BRANCH_MAJOR),1.6)
+ifeq ($(BRANCH_MAJOR),1.7)
+BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
+SERF_FLAG=--with-serf="$(PREFIX)/serf"
+MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
+LIBMAGIC_FLAG=--with-libmagic=$(PREFIX)/libmagic
+NEON_FLAG=--with-neon="$(PREFIX)/neon"
+else ifeq ($(BRANCH_MAJOR),1.6)
 BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
 SERF_FLAG=--with-serf="$(PREFIX)/serf"
 MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
 MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
 W_NO_SYSTEM_HEADERS=-Wno-system-headers
+NEON_FLAG=--with-neon="$(PREFIX)/neon"
 else ifeq ($(BRANCH_MAJOR),1.5)
 BDB_FLAG=$(PREFIX)/bdb
 SERF_FLAG=--with-serf="$(PREFIX)/serf-old"
@@ -1132,37 +1163,8 @@ MOD_DAV_SVN=modules/mod_dav_svn.so
 MOD_AUTHZ_SVN=modules/mod_authz_svn.so
 DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check
 W_NO_SYSTEM_HEADERS=-Wno-system-headers
-else ifeq ($(BRANCH_MAJOR),1.4)
-BDB_FLAG=$(PREFIX)/bdb
-MOD_DAV_SVN=modules/mod_dav_svn.so
-MOD_AUTHZ_SVN=modules/mod_authz_svn.so
-DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check
-W_NO_SYSTEM_HEADERS=-Wno-system-headers
-else ifeq ($(BRANCH_MAJOR),1.3)
-BDB_FLAG=$(PREFIX)/bdb
-MOD_DAV_SVN=modules/mod_dav_svn.so
-MOD_AUTHZ_SVN=modules/mod_authz_svn.so
-DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check
-W_NO_SYSTEM_HEADERS=-Wno-system-headers
-else ifeq ($(BRANCH_MAJOR),1.2)
-BDB_FLAG=$(PREFIX)/bdb
-MOD_DAV_SVN=modules/mod_dav_svn.so
-MOD_AUTHZ_SVN=modules/mod_authz_svn.so
-DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check
-W_NO_SYSTEM_HEADERS=-Wno-system-headers
-else ifeq ($(BRANCH_MAJOR),1.1)
-BDB_FLAG=$(PREFIX)/bdb
-MOD_DAV_SVN=modules/mod_dav_svn.so
-MOD_AUTHZ_SVN=modules/mod_authz_svn.so
-DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check
-W_NO_SYSTEM_HEADERS=-Wno-system-headers
-else ifeq ($(BRANCH_MAJOR),1.0)
-BDB_FLAG=$(PREFIX)/bdb
-MOD_DAV_SVN=modules/mod_dav_svn.so
-MOD_AUTHZ_SVN=modules/mod_authz_svn.so
-DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check
-W_NO_SYSTEM_HEADERS=-Wno-system-headers
-else
+NEON_FLAG=--with-neon="$(PREFIX)/neon"
+else # 1.8
 BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
 SERF_FLAG=--with-serf="$(PREFIX)/serf"
 MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
@@ -1172,9 +1174,9 @@ endif
 
 ifeq ($(ENABLE_JAVA_BINDINGS),yes)
 	JAVAHL_FLAG=--enable-javahl=yes --with-jdk --with-jikes=no \
-		--with-junit=$(PWD)/junit.jar
+		--with-junit=$(DISTDIR)/$(JUNIT_DIST)
 else
-	JAVAHL_FLAG=--enable-javahl=no
+	JAVAHL_FLAG=--with-jdk=no
 endif
 
 ifdef PROFILE
@@ -1186,13 +1188,7 @@ SVN_WITH_SASL=--with-sasl="$(PREFIX)/cyr
 endif
 
 # configure svn
-$(SVN_OBJDIR)/.configured: $(SVN_OBJDIR)/.retrieved
-	@if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
-		if [ ! -e $(PWD)/junit.jar ]; then \
-			echo "Please provide $(PWD)/junit.jar"; \
-			exit 1; \
-		fi; \
-	fi
+$(SVN_OBJDIR)/.configured: $(SVN_OBJDIR)/.retrieved $(DISTDIR)/$(JUNIT_DIST)
 	cd $(SVN_SRCDIR) && ./autogen.sh
 	cd $(svn_builddir) && \
 		env LDFLAGS="-L$(PREFIX)/neon/lib -L$(PREFIX)/apr/lib" \
@@ -1205,7 +1201,7 @@ $(SVN_OBJDIR)/.configured: $(SVN_OBJDIR)
 			--prefix="$(SVN_PREFIX)" \
 			--with-apr="$(PREFIX)/apr" \
 			--with-apr-util="$(PREFIX)/apr" \
-			--with-neon="$(PREFIX)/neon" \
+			$(NEON_FLAG) \
 			$(SVN_WITH_HTTPD) \
 			$(SVN_WITH_SASL) \
 			$(SERF_FLAG) \
@@ -1423,10 +1419,16 @@ svn-check-prepare-ramdisk:
 			mkdir -p "$(RAMDISK)/tmp"; \
 	fi
 
+ifndef NEON_FLAG
+svn-check-neon:
+	@echo Neon is not supported by this build of Subversion, skipping tests
+	@true
+else
 svn-check-neon: $(HTTPD_CHECK_CONF) $(SVN_OBJDIR)/.compiled $(SVN_OBJDIR)/.bindings-compiled svn-check-prepare-ramdisk
 	$(HTTPD_START_CMD)
 	$(call do_check,BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=neon)
 	$(HTTPD_STOP_CMD)
+endif
 
 svn-check-serf: $(HTTPD_CHECK_CONF) $(SVN_OBJDIR)/.compiled $(SVN_OBJDIR)/.bindings-compiled svn-check-prepare-ramdisk
 	$(HTTPD_START_CMD)
@@ -1478,7 +1480,7 @@ svn-check-javahl:
 	-if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
 		(cd $(svn_builddir) && \
 			env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
-			make check-javahl 2>&1) | \
+			make check-all-javahl 2>&1) | \
 				tee $(svn_builddir)/tests.log.bindings.javahl; \
 	fi