You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@subversion.apache.org by da...@apache.org on 2012/03/30 15:55:31 UTC
svn commit: r1307424 [11/11] - in /subversion/branches/revprop-packing: ./
build/ac-macros/ notes/ notes/directory-index/ notes/wc-ng/
subversion/bindings/javahl/ subversion/bindings/swig/python/svn/
subversion/bindings/swig/python/tests/ subversion/bi...
Modified: subversion/branches/revprop-packing/tools/dist/release.py
URL: http://svn.apache.org/viewvc/subversion/branches/revprop-packing/tools/dist/release.py?rev=1307424&r1=1307423&r2=1307424&view=diff
==============================================================================
--- subversion/branches/revprop-packing/tools/dist/release.py (original)
+++ subversion/branches/revprop-packing/tools/dist/release.py Fri Mar 30 13:55:26 2012
@@ -39,6 +39,7 @@ import os
import re
import sys
import glob
+import fnmatch
import shutil
import urllib2
import hashlib
@@ -62,15 +63,31 @@ except ImportError:
import ezt
-# Our required / recommended versions
-autoconf_ver = '2.68'
-libtool_ver = '2.4'
-swig_ver = '2.0.4'
+# Our required / recommended release tool versions by release branch
+tool_versions = {
+ 'trunk' : {
+ 'autoconf' : '2.68',
+ 'libtool' : '2.4',
+ 'swig' : '2.0.4',
+ },
+ '1.7' : {
+ 'autoconf' : '2.68',
+ 'libtool' : '2.4',
+ 'swig' : '2.0.4',
+ },
+ '1.6' : {
+ 'autoconf' : '2.64',
+ 'libtool' : '1.5.26',
+ 'swig' : '1.3.36',
+ },
+}
# Some constants
repos = 'http://svn.apache.org/repos/asf/subversion'
-people_host = 'minotaur.apache.org'
-people_dist_dir = '/www/www.apache.org/dist/subversion'
+dist_repos = 'https://dist.apache.org/repos/dist'
+dist_dev_url = dist_repos + '/dev/subversion'
+dist_release_url = dist_repos + '/release/subversion'
+extns = ['zip', 'tar.gz', 'tar.bz2']
#----------------------------------------------------------------------
@@ -88,6 +105,7 @@ class Version(object):
self.pre = 'nightly'
self.pre_num = None
self.base = 'nightly'
+ self.branch = 'trunk'
return
match = self.regex.search(ver_str)
@@ -107,6 +125,7 @@ class Version(object):
self.pre_num = None
self.base = '%d.%d.%d' % (self.major, self.minor, self.patch)
+ self.branch = '%d.%d' % (self.major, self.minor)
def is_prerelease(self):
return self.pre != None
@@ -189,10 +208,6 @@ def download_file(url, target):
target_file = open(target, 'w')
target_file.write(response.read())
-def assert_people():
- if os.uname()[1] != people_host:
- raise RuntimeError('Not running on expected host "%s"' % people_host)
-
#----------------------------------------------------------------------
# Cleaning up the environment
@@ -255,10 +270,11 @@ class RollDep(object):
class AutoconfDep(RollDep):
- def __init__(self, base_dir, use_existing, verbose):
+ def __init__(self, base_dir, use_existing, verbose, autoconf_ver):
RollDep.__init__(self, base_dir, use_existing, verbose)
self.label = 'autoconf'
self._filebase = 'autoconf-' + autoconf_ver
+ self._autoconf_ver = autoconf_ver
self._url = 'http://ftp.gnu.org/gnu/autoconf/%s.tar.gz' % self._filebase
def have_usable(self):
@@ -266,7 +282,7 @@ class AutoconfDep(RollDep):
if not output: return False
version = output[0].split()[-1:][0]
- return version == autoconf_ver
+ return version == self._autoconf_ver
def use_system(self):
if not self._use_existing: return False
@@ -274,18 +290,18 @@ class AutoconfDep(RollDep):
class LibtoolDep(RollDep):
- def __init__(self, base_dir, use_existing, verbose):
+ def __init__(self, base_dir, use_existing, verbose, libtool_ver):
RollDep.__init__(self, base_dir, use_existing, verbose)
self.label = 'libtool'
self._filebase = 'libtool-' + libtool_ver
+ self._libtool_ver = libtool_ver
self._url = 'http://ftp.gnu.org/gnu/libtool/%s.tar.gz' % self._filebase
def have_usable(self):
output = self._test_version(['libtool', '--version'])
if not output: return False
- version = output[0].split()[-1:][0]
- return version == libtool_ver
+ return self._libtool_ver in output[0]
def use_system(self):
# We unconditionally return False here, to avoid using a borked
@@ -294,10 +310,11 @@ class LibtoolDep(RollDep):
class SwigDep(RollDep):
- def __init__(self, base_dir, use_existing, verbose, sf_mirror):
+ def __init__(self, base_dir, use_existing, verbose, swig_ver, sf_mirror):
RollDep.__init__(self, base_dir, use_existing, verbose)
self.label = 'swig'
self._filebase = 'swig-' + swig_ver
+ self._swig_ver = swig_ver
self._url = 'http://sourceforge.net/projects/swig/files/swig/%(swig)s/%(swig)s.tar.gz/download?use_mirror=%(sf_mirror)s' % \
{ 'swig' : self._filebase,
'sf_mirror' : sf_mirror }
@@ -308,7 +325,7 @@ class SwigDep(RollDep):
if not output: return False
version = output[1].split()[-1:][0]
- return version == swig_ver
+ return version == self._swig_ver
def use_system(self):
if not self._use_existing: return False
@@ -326,9 +343,12 @@ def build_env(args):
if not args.use_existing:
raise
- autoconf = AutoconfDep(args.base_dir, args.use_existing, args.verbose)
- libtool = LibtoolDep(args.base_dir, args.use_existing, args.verbose)
+ autoconf = AutoconfDep(args.base_dir, args.use_existing, args.verbose,
+ tool_versions[args.version.branch]['autoconf'])
+ libtool = LibtoolDep(args.base_dir, args.use_existing, args.verbose,
+ tool_versions[args.version.branch]['libtool'])
swig = SwigDep(args.base_dir, args.use_existing, args.verbose,
+ tool_versions[args.version.branch]['swig'],
args.sf_mirror)
# iterate over our rolling deps, and build them if needed
@@ -342,54 +362,37 @@ def build_env(args):
#----------------------------------------------------------------------
# Create release artifacts
-def fetch_changes(repos, branch, revision):
- changes_peg_url = '%s/%s/CHANGES@%d' % (repos, branch, revision)
- proc = subprocess.Popen(['svn', 'cat', changes_peg_url],
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- (stdout, stderr) = proc.communicate()
- proc.wait()
- return stdout.split('\n')
-
-
def compare_changes(repos, branch, revision):
- # Compare trunk's version of CHANGES with that of the branch,
- # ignoring any lines in trunk's version precede what *should*
- # match the contents of the branch's version. (This allows us to
- # continue adding new stuff at the top of trunk's CHANGES that
- # might relate to the *next* major release line.)
- branch_CHANGES = fetch_changes(repos, branch, revision)
- trunk_CHANGES = fetch_changes(repos, 'trunk', revision)
- try:
- first_matching_line = trunk_CHANGES.index(branch_CHANGES[0])
- except ValueError:
- raise RuntimeError('CHANGES not synced between trunk and branch')
-
- trunk_CHANGES = trunk_CHANGES[first_matching_line:]
- saw_diff = False
- import difflib
- for diff_line in difflib.unified_diff(trunk_CHANGES, branch_CHANGES):
- saw_diff = True
- logging.debug('%s', diff_line)
- if saw_diff:
- raise RuntimeError('CHANGES not synced between trunk and branch')
-
+ mergeinfo_cmd = ['svn', 'mergeinfo', '--show-revs=eligible',
+ repos + '/trunk/CHANGES',
+ repos + '/' + branch + '/' + 'CHANGES']
+ proc = subprocess.Popen(mergeinfo_cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ rc = proc.wait()
+ if stderr:
+ raise RuntimeError('svn mergeinfo failed: %s' % stderr)
+ if stdout:
+ raise RuntimeError('CHANGES has unmerged revisions: %s' % stdout)
def roll_tarballs(args):
'Create the release artifacts.'
- extns = ['zip', 'tar.gz', 'tar.bz2']
if args.branch:
branch = args.branch
else:
- branch = 'branches/' + args.version.base[:-1] + 'x'
+ branch = 'branches/%d.%d.x' % (args.version.major, args.version.minor)
logging.info('Rolling release %s from branch %s@%d' % (args.version,
branch, args.revnum))
# Ensure we've got the appropriate rolling dependencies available
- autoconf = AutoconfDep(args.base_dir, False, args.verbose)
- libtool = LibtoolDep(args.base_dir, False, args.verbose)
- swig = SwigDep(args.base_dir, False, args.verbose, None)
+ autoconf = AutoconfDep(args.base_dir, False, args.verbose,
+ tool_versions[args.version.branch]['autoconf'])
+ libtool = LibtoolDep(args.base_dir, False, args.verbose,
+ tool_versions[args.version.branch]['libtool'])
+ swig = SwigDep(args.base_dir, False, args.verbose,
+ tool_versions[args.version.branch]['swig'], None)
for dep in [autoconf, libtool, swig]:
if not dep.have_usable():
@@ -440,44 +443,47 @@ def roll_tarballs(args):
# And we're done!
-
#----------------------------------------------------------------------
-# Post the candidate release artifacts
+# Sign the candidate release artifacts
+
+def sign_candidates(args):
+ 'Sign candidate artifacts in the dist development directory.'
+
+ def sign_file(filename):
+ asc_file = open(filename + '.asc', 'a')
+ logging.info("Signing %s" % filename)
+ proc = subprocess.Popen(['gpg', '-ba', '-o', '-', filename],
+ stdout=asc_file)
+ proc.wait()
+ asc_file.close()
-def post_candidates(args):
- 'Post the generated tarballs to web-accessible directory.'
if args.target:
target = args.target
else:
- target = os.path.join(os.getenv('HOME'), 'public_html', 'svn',
- str(args.version))
-
- logging.info('Moving tarballs to %s' % target)
- if os.path.exists(target):
- shutil.rmtree(target)
- shutil.copytree(get_deploydir(args.base_dir), target)
+ target = get_deploydir(args.base_dir)
- data = { 'version' : str(args.version),
- 'revnum' : args.revnum,
- }
+ for e in extns:
+ filename = os.path.join(target, 'subversion-%s.%s' % (args.version, e))
+ sign_file(filename)
+ if args.version.major >= 1 and args.version.minor <= 6:
+ filename = os.path.join(target,
+ 'subversion-deps-%s.%s' % (args.version, e))
+ sign_file(filename)
- # Choose the right template text
- if args.version.is_prerelease():
- if args.version.pre == 'nightly':
- template_filename = 'nightly-candidates.ezt'
- else:
- template_filename = 'rc-candidates.ezt'
- else:
- template_filename = 'stable-candidates.ezt'
- template = ezt.Template()
- template.parse(get_tmplfile(template_filename).read())
- template.generate(open(os.path.join(target, 'HEADER.html'), 'w'), data)
+#----------------------------------------------------------------------
+# Post the candidate release artifacts
- template = ezt.Template()
- template.parse(get_tmplfile('htaccess.ezt').read())
- template.generate(open(os.path.join(target, '.htaccess'), 'w'), data)
+def post_candidates(args):
+ 'Post candidate artifacts to the dist development directory.'
+ logging.info('Importing tarballs to %s' % dist_dev_url)
+ proc = subprocess.Popen(['svn', 'import', '-m',
+ 'Add %s candidate release artifacts'
+ % args.version.base,
+ get_deploydir(args.base_dir), dist_dev_url])
+ (stdout, stderr) = proc.communicate()
+ proc.wait()
#----------------------------------------------------------------------
# Clean dist
@@ -485,30 +491,41 @@ def post_candidates(args):
def clean_dist(args):
'Clean the distribution directory of all but the most recent artifacts.'
- regex = re.compile('subversion-(\d+).(\d+).(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?')
-
- if not args.dist_dir:
- assert_people()
- args.dist_dir = people_dist_dir
+ proc = subprocess.Popen(['svn', 'list', dist_release_url],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ proc.wait()
+ if stderr:
+ raise RuntimeError(stderr)
- logging.info('Cleaning dist dir \'%s\'' % args.dist_dir)
+ filenames = stdout.split('\n')
+ tar_gz_archives = []
+ for entry in filenames:
+ if fnmatch.fnmatch(entry, 'subversion-*.tar.gz'):
+ tar_gz_archives.append(entry)
- filenames = glob.glob(os.path.join(args.dist_dir, 'subversion-*.tar.gz'))
versions = []
- for filename in filenames:
- versions.append(Version(filename))
+ for archive in tar_gz_archives:
+ versions.append(Version(archive))
+ svnmucc_cmd = ['svnmucc', '-m', 'Remove old Subversion releases.\n' +
+ 'They are still available at ' +
+ 'http://archive.apache.org/dist/subversion/']
for k, g in itertools.groupby(sorted(versions),
lambda x: (x.major, x.minor)):
releases = list(g)
logging.info("Saving release '%s'", releases[-1])
for r in releases[:-1]:
- for filename in glob.glob(os.path.join(args.dist_dir,
- 'subversion-%s.*' % r)):
+ for filename in filenames:
+ if fnmatch.fnmatch(filename, 'subversion-%s.*' % r):
logging.info("Removing '%s'" % filename)
- os.remove(filename)
+ svnmucc_cmd += ['rm', dist_release_url + '/' + filename]
+ # don't redirect stdout/stderr since svnmucc might ask for a password
+ proc = subprocess.Popen(svnmucc_cmd)
+ proc.wait()
#----------------------------------------------------------------------
# Move to dist
@@ -516,23 +533,29 @@ def clean_dist(args):
def move_to_dist(args):
'Move candidate artifacts to the distribution directory.'
- if not args.dist_dir:
- assert_people()
- args.dist_dir = people_dist_dir
-
- if args.target:
- target = args.target
- else:
- target = os.path.join(os.getenv('HOME'), 'public_html', 'svn',
- str(args.version))
+ proc = subprocess.Popen(['svn', 'list', dist_dev_url],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ proc.wait()
+ if stderr:
+ raise RuntimeError(stderr)
- logging.info('Moving %s to dist dir \'%s\'' % (str(args.version),
- args.dist_dir) )
- filenames = glob.glob(os.path.join(target,
- 'subversion-%s.*' % str(args.version)))
+ filenames = []
+ for entry in stdout.split('\n'):
+ if fnmatch.fnmatch(entry, 'subversion-%s.*' % str(args.version)):
+ filenames.append(entry)
+ svnmucc_cmd = ['svnmucc', '-m',
+ 'Publish Subversion-%s.' % str(args.version)]
+ svnmucc_cmd += ['rm', dist_dev_url + '/' + 'svn_version.h.dist']
for filename in filenames:
- shutil.copy(filename, args.dist_dir)
+ svnmucc_cmd += ['mv', dist_dev_url + '/' + filename,
+ dist_release_url + '/' + filename]
+ # don't redirect stdout/stderr since svnmucc might ask for a password
+ logging.info('Moving release artifacts to %s' % dist_release_url)
+ proc = subprocess.Popen(svnmucc_cmd)
+ proc.wait()
#----------------------------------------------------------------------
# Write announcements
@@ -541,7 +564,7 @@ def write_news(args):
'Write text for the Subversion website.'
data = { 'date' : datetime.date.today().strftime('%Y%m%d'),
'date_pres' : datetime.date.today().strftime('%Y-%m-%d'),
- 'major-minor' : args.version.base[:3],
+ 'major-minor' : '%d.%d' % (args.version.major, args.version.minor),
'version' : str(args.version),
'version_base' : args.version.base,
}
@@ -556,7 +579,7 @@ def write_news(args):
template.generate(sys.stdout, data)
-def get_sha1info(args):
+def get_sha1info(args, replace=False):
'Return a list of sha1 info for the release'
sha1s = glob.glob(os.path.join(get_deploydir(args.base_dir), '*.sha1'))
@@ -566,7 +589,13 @@ def get_sha1info(args):
sha1info = []
for s in sha1s:
i = info()
- i.filename = os.path.basename(s)[:-5]
+ # strip ".sha1"
+ fname = os.path.basename(s)[:-5]
+ if replace:
+ # replace the version number with the [version] reference
+ i.filename = Version.regex.sub('[version]', fname)
+ else:
+ i.filename = fname
i.sha1 = open(s, 'r').read()
sha1info.append(i)
@@ -580,7 +609,8 @@ def write_announcement(args):
data = { 'version' : str(args.version),
'sha1info' : sha1info,
'siginfo' : open('getsigs-output', 'r').read(),
- 'major-minor' : args.version.base[:3],
+ 'major-minor' : '%d.%d' % (args.version.major,
+ args.version.minor),
'major-minor-patch' : args.version.base,
}
@@ -596,7 +626,7 @@ def write_announcement(args):
def write_downloads(args):
'Output the download section of the website.'
- sha1info = get_sha1info(args)
+ sha1info = get_sha1info(args, replace=True)
data = { 'version' : str(args.version),
'fileinfo' : sha1info,
@@ -625,15 +655,16 @@ def check_sigs(args):
if args.target:
target = args.target
else:
- target = os.path.join(os.getenv('HOME'), 'public_html', 'svn',
- str(args.version))
+ target = get_deploydir(args.base_dir)
good_sigs = {}
- for filename in glob.glob(os.path.join(target, 'subversion-*.asc')):
+ glob_pattern = os.path.join(target, 'subversion*-%s*.asc' % args.version)
+ for filename in glob.glob(glob_pattern):
text = open(filename).read()
keys = text.split(key_start)
+ logging.info("Checking %d sig(s) in %s" % (len(keys[1:]), filename))
for key in keys[1:]:
fd, fn = tempfile.mkstemp()
os.write(fd, key_start + key)
@@ -689,6 +720,8 @@ def main():
 help='''Download release prerequisites, including autoconf,
libtool, and swig.''')
subparser.set_defaults(func=build_env)
+ subparser.add_argument('version', type=Version,
+ help='''The release label, such as '1.7.0-alpha1'.''')
subparser.add_argument('--sf-mirror', default='softlayer',
help='''The mirror to use for downloading files from
SourceForge. If in the EU, you may want to use
@@ -708,25 +741,28 @@ def main():
subparser.add_argument('--branch',
help='''The branch to base the release on.''')
+ # Setup the parser for the sign-candidates subcommand
+ subparser = subparsers.add_parser('sign-candidates',
+ help='''Sign the release artifacts.''')
+ subparser.set_defaults(func=sign_candidates)
+ subparser.add_argument('version', type=Version,
+ help='''The release label, such as '1.7.0-alpha1'.''')
+ subparser.add_argument('--target',
+ help='''The full path to the directory containing
+ release artifacts.''')
+
# Setup the parser for the post-candidates subcommand
subparser = subparsers.add_parser('post-candidates',
- help='''Build the website to host the candidate tarballs.
- The default location is somewhere in ~/public_html.
- ''')
+ help='''Commit candidates to the release development area
+ of the dist.apache.org repository.''')
subparser.set_defaults(func=post_candidates)
subparser.add_argument('version', type=Version,
help='''The release label, such as '1.7.0-alpha1'.''')
- subparser.add_argument('revnum', type=int,
- help='''The revision number to base the release on.''')
- subparser.add_argument('--target',
- help='''The full path to the destination.''')
# The clean-dist subcommand
subparser = subparsers.add_parser('clean-dist',
help='''Clean the distribution directory (and mirrors) of
- all but the most recent MAJOR.MINOR release. If no
- dist-dir is given, this command will assume it is
- running on people.apache.org.''')
+ all but the most recent MAJOR.MINOR release.''')
subparser.set_defaults(func=clean_dist)
subparser.add_argument('--dist-dir',
help='''The directory to clean.''')
@@ -734,17 +770,11 @@ def main():
# The move-to-dist subcommand
subparser = subparsers.add_parser('move-to-dist',
 help='''Move candidates and signatures from the temporary
- post location to the permanent distribution
- directory. If no dist-dir is given, this command
- will assume it is running on people.apache.org.''')
+ release dev location to the permanent distribution
+ directory.''')
subparser.set_defaults(func=move_to_dist)
subparser.add_argument('version', type=Version,
help='''The release label, such as '1.7.0-alpha1'.''')
- subparser.add_argument('--dist-dir',
- help='''The directory to clean.''')
- subparser.add_argument('--target',
- help='''The full path to the destination used in
- 'post-candiates'..''')
# The write-news subcommand
subparser = subparsers.add_parser('write-news',
@@ -776,8 +806,8 @@ def main():
subparser.add_argument('version', type=Version,
help='''The release label, such as '1.7.0-alpha1'.''')
subparser.add_argument('--target',
- help='''The full path to the destination used in
- 'post-candiates'..''')
+ help='''The full path to the directory containing
+ release artifacts.''')
# A meta-target
subparser = subparsers.add_parser('clean',
Modified: subversion/branches/revprop-packing/tools/server-side/svnpubsub/README.txt
URL: http://svn.apache.org/viewvc/subversion/branches/revprop-packing/tools/server-side/svnpubsub/README.txt?rev=1307424&r1=1307423&r2=1307424&view=diff
==============================================================================
--- subversion/branches/revprop-packing/tools/server-side/svnpubsub/README.txt (original)
+++ subversion/branches/revprop-packing/tools/server-side/svnpubsub/README.txt Fri Mar 30 13:55:26 2012
@@ -1 +1,16 @@
### write a README
+
+
+TODO:
+- bulk update at startup time to avoid backlog warnings
+- switch to host:port format in config file
+- fold BDEC into Daemon
+- fold WorkingCopy._get_match() into __init__
+- remove wc_ready(). assume all WorkingCopy instances are usable.
+ place the instances into .watch at creation. the .update_applies()
+ just returns if the wc is disabled (eg. could not find wc dir)
+- figure out way to avoid the ASF-specific PRODUCTION_RE_FILTER
+ (a base path exclusion list should work for the ASF)
+- add support for SIGHUP to reread the config and reinitialize working copies
+- joes will write documentation for svnpubsub as these items become fulfilled
+- make LOGLEVEL configurable
Modified: subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub
URL: http://svn.apache.org/viewvc/subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub?rev=1307424&r1=1307423&r2=1307424&view=diff
==============================================================================
--- subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub (original)
+++ subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub Fri Mar 30 13:55:26 2012
@@ -18,22 +18,21 @@ load_rc_config $name
svnwcsub_enable=${svnwcsub_enable-"NO"}
svnwcsub_user=${svnwcsub_user-"svnwc"}
svnwcsub_group=${svnwcsub_group-"svnwc"}
-svnwcsub_reactor=${svnwcsub_reactor-"poll"}
svnwcsub_pidfile=${svnwcsub_pidfile-"/var/run/svnwcsub/svnwcsub.pub"}
-svnwcsub_program=${svnwcsub_program-"/usr/local/bin/twistd"}
svnwcsub_env="PYTHON_EGG_CACHE"
svnwcsub_cmd_int=${svnwcsub_cmd_int-"python"}
+svnwcsub_config=${svnwcsub_config-"/etc/svnwcsub.conf"}
+svnwcsub_logfile=${svnwcsub_logfile-"/var/log/svnwcsub/svnwcsub.log"}
pidfile="${svnwcsub_pidfile}"
export PYTHON_EGG_CACHE="/var/run/svnwcsub"
-command="/usr/local/bin/twistd"
+command="/usr/local/svnpubsub/svnwcsub.py"
command_interpreter="/usr/local/bin/${svnwcsub_cmd_int}"
-command_args="-y /usr/local/svnpubsub/svnwcsub.tac \
- --logfile=/var/log/svnwcsub.log \
- --pidfile=${pidfile} \
- --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
- --umask=002 -r${svnwcsub_reactor}"
+command_args="--daemon \
+ --logfile=${svnwcsub_logfile} \
+ --pidfile=${pidfile} \
+ --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
+ --umask=002 ${svnwcsub_config}"
run_rc_command "$1"
-
Modified: subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub.debian
URL: http://svn.apache.org/viewvc/subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub.debian?rev=1307424&r1=1307423&r2=1307424&view=diff
==============================================================================
--- subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub.debian (original)
+++ subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub.debian Fri Mar 30 13:55:26 2012
@@ -14,21 +14,23 @@
svnwcsub_user=${svnwcsub_user-"svnwc"}
svnwcsub_group=${svnwcsub_group-"svnwc"}
-svnwcsub_reactor=${svnwcsub_reactor-"poll"}
svnwcsub_pidfile=${svnwcsub_pidfile-"/var/run/svnwcsub.pid"}
+svnwcsub_config=${svnwcsub_config-"/etc/svnwcsub.conf"}
+svnwcsub_logfile=${svnwcsub_logfile-"/var/bwlog/svnwcsub/svnwcsub.log"}
pidfile="${svnwcsub_pidfile}"
-TWSITD_CMD="/usr/bin/twistd -y /opt/svnpubsub/svnwcsub.tac \
- --logfile=/var/bwlog/svnpubsub/svnwcsub.log \
- --pidfile=${pidfile} \
- --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
- -r${svnwcsub_reactor}"
+SVNWCSUB_CMD="/opt/svnpubsub/svnwcsub.py \
+ --daemon \
+ --logfile=${svnwcsub_logfile} \
+ --pidfile=${pidfile} \
+ --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
+ ${svnwcsub_config} "
RETVAL=0
start() {
echo "Starting SvnWcSub Server: "
- $TWSITD_CMD
+ $SVNWCSUB_CMD
RETVAL=$?
[ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
return $RETVAL
Modified: subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub.solaris
URL: http://svn.apache.org/viewvc/subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub.solaris?rev=1307424&r1=1307423&r2=1307424&view=diff
==============================================================================
--- subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub.solaris (original)
+++ subversion/branches/revprop-packing/tools/server-side/svnpubsub/rc.d/svnwcsub.solaris Fri Mar 30 13:55:26 2012
@@ -5,22 +5,24 @@
svnwcsub_user=${svnwcsub_user-"svnwc"}
svnwcsub_group=${svnwcsub_group-"other"}
-svnwcsub_reactor=${svnwcsub_reactor-"poll"}
svnwcsub_pidfile=${svnwcsub_pidfile-"/var/run/svnwcsub/svnwcsub.pid"}
+svnwcsub_config=${svnwcsub_config-"/etc/svnwcsub.conf"}
+svnwcsub_logfile=${svnwcsub_logfile-"/x1/log/svnwcsub/svnwcsub.log"}
pidfile="${svnwcsub_pidfile}"
-TWSITD_CMD="/opt/python/2.6.2/bin/twistd -y /usr/local/svnpubsub/svnwcsub.tac \
- --logfile=/x1/log/svnwcsub.log \
- --pidfile=${pidfile} \
- --umask=002 \
- --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
- -r${svnwcsub_reactor}"
+SVNWCSUB_CMD="/usr/local/svnpubsub/svnwcsub.py \
+ --daemon \
+ --logfile=${svnwcsub_logfile} \
+ --pidfile=${pidfile} \
+ --umask=002 \
+ --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
+ ${svnwcsub_config}"
RETVAL=0
start() {
echo "Starting SvnWcSub Server: "
- $TWSITD_CMD
+ $SVNWCSUB_CMD
RETVAL=$?
[ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
return $RETVAL
Modified: subversion/branches/revprop-packing/tools/server-side/svnpubsub/svnpubsub/client.py
URL: http://svn.apache.org/viewvc/subversion/branches/revprop-packing/tools/server-side/svnpubsub/svnpubsub/client.py?rev=1307424&r1=1307423&r2=1307424&view=diff
==============================================================================
--- subversion/branches/revprop-packing/tools/server-side/svnpubsub/svnpubsub/client.py (original)
+++ subversion/branches/revprop-packing/tools/server-side/svnpubsub/svnpubsub/client.py Fri Mar 30 13:55:26 2012
@@ -74,7 +74,12 @@ class Client(asynchat.async_chat):
self.skipping_headers = True
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
- self.connect((host, port))
+ try:
+ self.connect((host, port))
+ except:
+ self.handle_error()
+ return
+
### should we allow for repository restrictions?
self.push('GET /commits/xml HTTP/1.0\r\n\r\n')
@@ -117,7 +122,7 @@ class XMLStreamHandler(xml.sax.handler.C
def startElement(self, name, attrs):
if name == 'commit':
- self.rev = Revision(attrs['repository'], attrs['revision'])
+ self.rev = Revision(attrs['repository'], int(attrs['revision']))
# No other elements to worry about.
def characters(self, data):
Modified: subversion/branches/revprop-packing/tools/server-side/svnpubsub/svnwcsub.py
URL: http://svn.apache.org/viewvc/subversion/branches/revprop-packing/tools/server-side/svnpubsub/svnwcsub.py?rev=1307424&r1=1307423&r2=1307424&view=diff
==============================================================================
--- subversion/branches/revprop-packing/tools/server-side/svnpubsub/svnwcsub.py (original)
+++ subversion/branches/revprop-packing/tools/server-side/svnpubsub/svnwcsub.py Fri Mar 30 13:55:26 2012
@@ -39,23 +39,19 @@ import time
import logging.handlers
import Queue
import optparse
+import functools
+import urlparse
-from twisted.internet import reactor, task, threads
-from twisted.internet.utils import getProcessOutput
-from twisted.application import internet
-from twisted.web.client import HTTPClientFactory, HTTPPageDownloader
-from urlparse import urlparse
-from xml.sax import handler, make_parser
-from twisted.internet import protocol
-
+import daemonize
+import svnpubsub.client
# check_output() is only available in Python 2.7. Allow us to run with
# earlier versions
try:
check_output = subprocess.check_output
except AttributeError:
- def check_output(args): # note: we don't use anything beyond args
- pipe = subprocess.Popen(args, stdout=subprocess.PIPE)
+ def check_output(args, env): # note: we only use these two args
+ pipe = subprocess.Popen(args, stdout=subprocess.PIPE, env=env)
output, _ = pipe.communicate()
if pipe.returncode:
raise subprocess.CalledProcessError(pipe.returncode, args)
@@ -65,10 +61,10 @@ except AttributeError:
### note: this runs synchronously. within the current Twisted environment,
### it is called from ._get_match() which is run on a thread so it won't
### block the Twisted main loop.
-def svn_info(svnbin, path):
+def svn_info(svnbin, env, path):
"Run 'svn info' on the target path, returning a dict of info data."
args = [svnbin, "info", "--non-interactive", "--", path]
- output = check_output(args).strip()
+ output = check_output(args, env=env).strip()
info = { }
for line in output.split('\n'):
idx = line.index(':')
@@ -78,20 +74,14 @@ def svn_info(svnbin, path):
class WorkingCopy(object):
def __init__(self, bdec, path, url):
- self.bdec = bdec
self.path = path
self.url = url
- self.repos = None
- self.match = None
- d = threads.deferToThread(self._get_match)
- d.addCallback(self._set_match)
-
- def _set_match(self, value):
- self.match = str(value[0])
- self.url = value[1]
- self.repos = value[2]
- self.uuid = value[3]
- self.bdec.wc_ready(self)
+
+ try:
+ self.match, self.uuid = self._get_match(bdec.svnbin, bdec.env)
+ bdec.wc_ready(self)
+ except:
+ logging.exception('problem with working copy: %s', path)
def update_applies(self, uuid, path):
if self.uuid != uuid:
@@ -114,181 +104,44 @@ class WorkingCopy(object):
return True
return False
- def _get_match(self):
+ def _get_match(self, svnbin, env):
### quick little hack to auto-checkout missing working copies
if not os.path.isdir(self.path):
logging.info("autopopulate %s from %s" % (self.path, self.url))
- subprocess.check_call([self.bdec.svnbin, 'co', '-q',
+ subprocess.check_call([svnbin, 'co', '-q',
'--non-interactive',
- '--config-dir',
- '/home/svnwc/.subversion',
- '--', self.url, self.path])
+ '--', self.url, self.path],
+ env=env)
# Fetch the info for matching dirs_changed against this WC
- info = svn_info(self.bdec.svnbin, self.path)
+ info = svn_info(svnbin, env, self.path)
+ root = info['Repository Root']
url = info['URL']
- repos = info['Repository Root']
+ relpath = url[len(root):] # also has leading '/'
uuid = info['Repository UUID']
- relpath = url[len(repos):] # also has leading '/'
- return [relpath, url, repos, uuid]
-
-
-class HTTPStream(HTTPClientFactory):
- protocol = HTTPPageDownloader
-
- def __init__(self, url):
- self.url = url
- HTTPClientFactory.__init__(self, url, method="GET", agent="SvnWcSub/0.1.0")
-
- def pageStart(self, partial):
- pass
-
- def pagePart(self, data):
- pass
-
- def pageEnd(self):
- pass
-
-class Revision:
- def __init__(self, repos, rev):
- self.repos = repos
- self.rev = rev
- self.dirs_changed = []
-
-class StreamHandler(handler.ContentHandler):
- def __init__(self, stream, bdec):
- handler.ContentHandler.__init__(self)
- self.stream = stream
- self.bdec = bdec
- self.rev = None
- self.text_value = None
-
- def startElement(self, name, attrs):
- #print "start element: %s" % (name)
- """
- <commit revision="7">
- <dirs_changed><path>/</path></dirs_changed>
- </commit>
- """
- if name == "commit":
- self.rev = Revision(attrs['repository'], int(attrs['revision']))
- elif name == "stillalive":
- self.bdec.stillalive(self.stream)
- def characters(self, data):
- if self.text_value is not None:
- self.text_value = self.text_value + data
- else:
- self.text_value = data
+ return str(relpath), uuid
- def endElement(self, name):
- #print "end element: %s" % (name)
- if name == "commit":
- self.bdec.commit(self.stream, self.rev)
- self.rev = None
- if name == "path" and self.text_value is not None and self.rev is not None:
- self.rev.dirs_changed.append(self.text_value.strip())
- self.text_value = None
-
-
-class XMLHTTPStream(HTTPStream):
- def __init__(self, url, bdec):
- HTTPStream.__init__(self, url)
- self.alive = 0
- self.bdec = bdec
- self.parser = make_parser(['xml.sax.expatreader'])
- self.handler = StreamHandler(self, bdec)
- self.parser.setContentHandler(self.handler)
-
- def pageStart(self, parital):
- self.bdec.pageStart(self)
-
- def pagePart(self, data):
- self.parser.feed(data)
-
- def pageEnd(self):
- self.bdec.pageEnd(self)
-
-def connectTo(url, bdec):
- u = urlparse(url)
- port = u.port
- if not port:
- port = 80
- s = XMLHTTPStream(url, bdec)
- if bdec.service:
- conn = internet.TCPClient(u.hostname, u.port, s)
- conn.setServiceParent(bdec.service)
- else:
- conn = reactor.connectTCP(u.hostname, u.port, s)
- return [s, conn]
-
-CHECKBEAT_TIME = 60
PRODUCTION_RE_FILTER = re.compile("/websites/production/[^/]+/")
class BigDoEverythingClasss(object):
- def __init__(self, config, service = None):
- self.urls = [s.strip() for s in config.get_value('streams').split()]
+ def __init__(self, config):
self.svnbin = config.get_value('svnbin')
self.env = config.get_env()
+ self.tracking = config.get_track()
self.worker = BackgroundWorker(self.svnbin, self.env)
- self.service = service
- self.failures = 0
- self.alive = time.time()
- self.checker = task.LoopingCall(self._checkalive)
- self.transports = {}
- self.streams = {}
- for u in self.urls:
- self._restartStream(u)
- self.watch = []
- for path, url in config.get_track().items():
+ self.watch = [ ]
+
+ self.hostports = [ ]
+ ### switch from URLs in the config to just host:port pairs
+ for url in config.get_value('streams').split():
+ parsed = urlparse.urlparse(url.strip())
+ self.hostports.append((parsed.hostname, parsed.port))
+
+ def start(self):
+ for path, url in self.tracking.items():
# working copies auto-register with the BDEC when they are ready.
WorkingCopy(self, path, url)
- self.checker.start(CHECKBEAT_TIME)
-
- def pageStart(self, stream):
- logging.info("Stream %s Connection Established" % (stream.url))
- self.failures = 0
-
- def pageEnd(self, stream):
- logging.info("Stream %s Connection Dead" % (stream.url))
- self.streamDead(stream.url)
-
- def _restartStream(self, url):
- (self.streams[url], self.transports[url]) = connectTo(url, self)
- self.streams[url].deferred.addBoth(self.streamDead, url)
- self.streams[url].alive = time.time()
-
- def _checkalive(self):
- n = time.time()
- for k in self.streams.keys():
- s = self.streams[k]
- if n - s.alive > CHECKBEAT_TIME:
- logging.info("Stream %s is dead, reconnecting" % (s.url))
- #self.transports[s.url].disconnect()
- self.streamDead(self, s.url)
-
-# d=filter(lambda x:x not in self.streams.keys(), self.urls)
-# for u in d:
-# self._restartStream(u)
-
- def stillalive(self, stream):
- stream.alive = time.time()
-
- def streamDead(self, url, result=None):
- s = self.streams.get(url)
- if not s:
- logging.info("Stream %s is messed up" % (url))
- return
- BACKOFF_SECS = 5
- BACKOFF_MAX = 60
- #self.checker.stop()
-
- self.streams[url] = None
- self.transports[url] = None
- self.failures += 1
- backoff = min(self.failures * BACKOFF_SECS, BACKOFF_MAX)
- logging.info("Stream disconnected, trying again in %d seconds.... %s" % (backoff, s.url))
- reactor.callLater(backoff, self._restartStream, url)
def wc_ready(self, wc):
# called when a working copy object has its basic info/url,
@@ -302,8 +155,10 @@ class BigDoEverythingClasss(object):
return "/" + path
return os.path.abspath(path)
- def commit(self, stream, rev):
- logging.info("COMMIT r%d (%d paths) via %s" % (rev.rev, len(rev.dirs_changed), stream.url))
+ def commit(self, host, port, rev):
+ logging.info("COMMIT r%d (%d paths) from %s:%d"
+ % (rev.rev, len(rev.dirs_changed), host, port))
+
paths = map(self._normalize_path, rev.dirs_changed)
if len(paths):
pre = os.path.commonprefix(paths)
@@ -317,7 +172,7 @@ class BigDoEverythingClasss(object):
break
#print "Common Prefix: %s" % (pre)
- wcs = [wc for wc in self.watch if wc.update_applies(rev.repos, pre)]
+ wcs = [wc for wc in self.watch if wc.update_applies(rev.uuid, pre)]
logging.info("Updating %d WC for r%d" % (len(wcs), rev.rev))
for wc in wcs:
self.worker.add_work(OP_UPDATE, wc)
@@ -384,7 +239,6 @@ class BackgroundWorker(threading.Thread)
### still specific to the ASF setup.
args = [self.svnbin, 'update',
'--quiet',
- '--config-dir', '/home/svnwc/.subversion',
'--non-interactive',
'--trust-server-cert',
'--ignore-externals',
@@ -392,7 +246,7 @@ class BackgroundWorker(threading.Thread)
subprocess.check_call(args, env=self.env)
### check the loglevel before running 'svn info'?
- info = svn_info(self.svnbin, wc.path)
+ info = svn_info(self.svnbin, self.env, wc.path)
logging.info("updated: %s now at r%s", wc.path, info['Revision'])
def _cleanup(self, wc):
@@ -401,7 +255,6 @@ class BackgroundWorker(threading.Thread)
### we need to move some of these args into the config. these are
### still specific to the ASF setup.
args = [self.svnbin, 'cleanup',
- '--config-dir', '/home/svnwc/.subversion',
'--non-interactive',
'--trust-server-cert',
wc.path]
@@ -452,6 +305,45 @@ class ReloadableConfig(ConfigParser.Safe
return str(option)
+class Daemon(daemonize.Daemon):
+ def __init__(self, logfile, pidfile, umask, bdec):
+ daemonize.Daemon.__init__(self, logfile, pidfile)
+
+ self.umask = umask
+ self.bdec = bdec
+
+ def setup(self):
+ # There is no setup which the parent needs to wait for.
+ pass
+
+ def run(self):
+ logging.info('svnwcsub started, pid=%d', os.getpid())
+
+ # Set the umask in the daemon process. Defaults to 000 for
+ # daemonized processes. Foreground processes simply inherit
+ # the value from the parent process.
+ if self.umask is not None:
+ umask = int(self.umask, 8)
+ os.umask(umask)
+ logging.info('umask set to %03o', umask)
+
+ # Start the BDEC (on the main thread), then start the client
+ self.bdec.start()
+
+ mc = svnpubsub.client.MultiClient(self.bdec.hostports,
+ self.bdec.commit,
+ self._event)
+ mc.run_forever()
+
+ def _event(self, host, port, event_name):
+ if event_name == 'error':
+ logging.exception('from %s:%s', host, port)
+ elif event_name == 'ping':
+ logging.debug('ping from %s:%s', host, port)
+ else:
+ logging.info('"%s" from %s:%s', event_name, host, port)
+
+
def prepare_logging(logfile):
"Log to the specified file, or to stdout if None."
@@ -480,20 +372,13 @@ def handle_options(options):
# Set up the logging, then process the rest of the options.
prepare_logging(options.logfile)
- if options.pidfile:
+ # In daemon mode, we let the daemonize module handle the pidfile.
+ # Otherwise, we should write this (foreground) PID into the file.
+ if options.pidfile and not options.daemon:
pid = os.getpid()
open(options.pidfile, 'w').write('%s\n' % pid)
logging.info('pid %d written to %s', pid, options.pidfile)
- if options.uid:
- try:
- uid = int(options.uid)
- except ValueError:
- import pwd
- uid = pwd.getpwnam(options.uid)[2]
- logging.info('setting uid %d', uid)
- os.setuid(uid)
-
if options.gid:
try:
gid = int(options.gid)
@@ -503,10 +388,14 @@ def handle_options(options):
logging.info('setting gid %d', gid)
os.setgid(gid)
- if options.umask:
- umask = int(options.umask, 8)
- os.umask(umask)
- logging.info('umask set to %03o', umask)
+ if options.uid:
+ try:
+ uid = int(options.uid)
+ except ValueError:
+ import pwd
+ uid = pwd.getpwnam(options.uid)[2]
+ logging.info('setting uid %d', uid)
+ os.setuid(uid)
def main(args):
@@ -525,6 +414,8 @@ def main(args):
help='switch to this GID before running')
parser.add_option('--umask',
help='set this (octal) umask before running')
+ parser.add_option('--daemon', action='store_true',
+ help='run as a background daemon')
options, extra = parser.parse_args(args)
@@ -532,12 +423,26 @@ def main(args):
parser.error('CONFIG_FILE is required')
config_file = extra[0]
+ if options.daemon and not options.logfile:
+ parser.error('LOGFILE is required when running as a daemon')
+ if options.daemon and not options.pidfile:
+ parser.error('PIDFILE is required when running as a daemon')
+
# Process any provided options.
handle_options(options)
c = ReloadableConfig(config_file)
- big = BigDoEverythingClasss(c)
- reactor.run()
+ bdec = BigDoEverythingClasss(c)
+
+ # We manage the logfile ourselves (along with possible rotation). The
+ # daemon process can just drop stdout/stderr into /dev/null.
+ d = Daemon('/dev/null', options.pidfile, options.umask, bdec)
+ if options.daemon:
+ # Daemonize the process and call sys.exit() with appropriate code
+ d.daemonize_exit()
+ else:
+ # Just run in the foreground (the default)
+ d.foreground()
if __name__ == "__main__":