Posted to commits@allura.apache.org by je...@apache.org on 2015/07/09 10:33:56 UTC

[6/9] allura git commit: [#6373] Delete scripts that are no longer needed; rename a few to clarify their purpose

[#6373] Delete scripts that are no longer needed; rename a few to clarify their purpose


Project: http://git-wip-us.apache.org/repos/asf/allura/repo
Commit: http://git-wip-us.apache.org/repos/asf/allura/commit/3189602c
Tree: http://git-wip-us.apache.org/repos/asf/allura/tree/3189602c
Diff: http://git-wip-us.apache.org/repos/asf/allura/diff/3189602c

Branch: refs/heads/master
Commit: 3189602c97b6c04414b60cbf1cfaf82893dc2437
Parents: c95885e
Author: Dave Brondsema <da...@brondsema.net>
Authored: Wed Jul 8 15:59:29 2015 -0400
Committer: Dave Brondsema <da...@brondsema.net>
Committed: Wed Jul 8 17:00:50 2015 -0400

----------------------------------------------------------------------
 scripts/allura_import.py                     | 119 ------------------
 scripts/import_trove_categories.py           |  48 -------
 scripts/open_relay.py                        |  89 -------------
 scripts/prep-scm-sandbox.py                  |  94 --------------
 scripts/prepare-allura-tickets-for-import.py | 147 ----------------------
 scripts/recover-user-databases.py            |  72 -----------
 scripts/setup-scm-server.py                  | 127 -------------------
 scripts/trac_export_wiki.py                  |  56 +++++++++
 scripts/trac_import.py                       | 119 ++++++++++++++++++
 scripts/wiki-export.py                       |  56 ---------
 10 files changed, 175 insertions(+), 752 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/allura_import.py
----------------------------------------------------------------------
diff --git a/scripts/allura_import.py b/scripts/allura_import.py
deleted file mode 100644
index d4e51fd..0000000
--- a/scripts/allura_import.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import json
-from optparse import OptionParser
-
-from allura.lib.import_api import AlluraImportApiClient
-from tracwikiimporter.scripts.wiki_from_trac.loaders import import_wiki
-
-
-def main():
-    optparser, options, args = parse_options()
-
-    import_options = {}
-    for s in options.import_opts:
-        k, v = s.split('=', 1)
-        if v == 'false':
-            v = False
-        import_options[k] = v
-
-    user_map = {}
-    if options.user_map_file:
-        f = open(options.user_map_file)
-        try:
-            user_map = json.load(f)
-            if type(user_map) is not type({}):
-                raise ValueError
-            for k, v in user_map.iteritems():
-                print k, v
-                if not isinstance(k, basestring) or not isinstance(v, basestring):
-                    raise ValueError
-        except ValueError:
-            optparser.error(
-                '--user-map should specify JSON file with format {"original_user": "sf_user", ...}')
-        finally:
-            f.close()
-
-    import_options['user_map'] = user_map
-
-    cli = AlluraImportApiClient(options.base_url, options.token, options.verbose)
-    doc_txt = open(args[0]).read()
-
-    if options.forum:
-        import_forum(cli, options.project, options.forum, user_map, doc_txt,
-                     validate=options.validate, neighborhood=options.neighborhood)
-    elif options.wiki:
-        import_wiki(cli, options.project, options.wiki, options, doc_txt)
-
-
-
-def import_forum(cli, project, tool, user_map, doc_txt, validate=True,
-        neighborhood='p'):
-    url = '/rest/{neighborhood}/{project}/{tool}'.format(
-            neighborhood=neighborhood,
-            project=project,
-            tool=tool,
-            )
-    if validate:
-        url += '/validate_import'
-        print cli.call(url, doc=doc_txt, user_map=json.dumps(user_map))
-    else:
-        url += '/perform_import'
-        print cli.call(url, doc=doc_txt, user_map=json.dumps(user_map))
-
-
-def parse_options():
-    optparser = OptionParser(usage='''%prog [options] <JSON dump>
-
-Import project data dump in JSON format into an Allura project.''')
-    optparser.add_option('-t', '--token', dest='token',
-                         help='OAuth bearer token (generate at /auth/oauth/)')
-    optparser.add_option('-p', '--project', dest='project',
-                         help='Project to import to')
-    optparser.add_option('-n', '--neighborhood', dest='neighborhood',
-                         help="URL prefix of destination neighborhood (default is 'p')",
-                         default='p')
-    optparser.add_option('-f', '--forum', dest='forum',
-                         help='Forum tool to import to')
-    optparser.add_option('-w', '--wiki', dest='wiki',
-                         help='Wiki tool to import to')
-    optparser.add_option('-u', '--base-url', dest='base_url',
-                         default='https://sourceforge.net', help='Base Allura URL (%default)')
-    optparser.add_option('-o', dest='import_opts',
-                         default=[], action='append', help='Specify import option(s)', metavar='opt=val')
-    optparser.add_option('--user-map', dest='user_map_file',
-                         help='Map original users to SF.net users', metavar='JSON_FILE')
-    optparser.add_option('--validate', dest='validate',
-                         action='store_true', help='Validate import data')
-    optparser.add_option('-v', '--verbose', dest='verbose',
-                         action='store_true', help='Verbose operation')
-    optparser.add_option('-c', '--continue', dest='cont',
-                         action='store_true', help='Continue import into existing tracker')
-    options, args = optparser.parse_args()
-    if len(args) != 1:
-        optparser.error("Wrong number of arguments")
-    if not options.token:
-        optparser.error("OAuth bearer token is required")
-    if not options.project:
-        optparser.error("Target project is required")
-    options.neighborhood = options.neighborhood.strip('/')
-    return optparser, options, args
-
-
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/import_trove_categories.py
----------------------------------------------------------------------
diff --git a/scripts/import_trove_categories.py b/scripts/import_trove_categories.py
deleted file mode 100644
index 123a56c..0000000
--- a/scripts/import_trove_categories.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import logging
-
-from tg import config
-
-from ming.orm import session
-
-import sfx
-from allura import model as M
-from allura.lib import helpers as h
-from sfx.model import tables as T
-
-log = logging.getLogger(__name__)
-
-
-def main():
-    sfx.middleware.configure_databases(h.config_with_prefix(config, 'sfx.'))
-    topic_trove = T.trove_cat.select(
-        T.trove_cat.c.shortname == 'topic').execute().fetchone()
-    M.ProjectCategory.query.remove()
-    for t in T.trove_cat.select(
-            T.trove_cat.c.parent == topic_trove.trove_cat_id).execute():
-        parent = M.ProjectCategory(
-            name=t.shortname, label=t.fullname, description=t.description)
-        for tt in T.trove_cat.select(
-                T.trove_cat.c.parent == t.trove_cat_id).execute():
-            M.ProjectCategory(parent_id=parent._id,
-                              name=tt.shortname, label=tt.fullname, description=tt.description)
-    session(M.ProjectCategory).flush()
-
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/open_relay.py
----------------------------------------------------------------------
diff --git a/scripts/open_relay.py b/scripts/open_relay.py
deleted file mode 100644
index ba21862..0000000
--- a/scripts/open_relay.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python
-
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import logging
-import os
-import smtpd
-import smtplib
-import asyncore
-from ConfigParser import ConfigParser
-
-log = logging.getLogger(__name__)
-
-
-def main():
-    cp = ConfigParser()
-    log.info('Read config from: %s',
-             cp.read([os.path.join(os.environ['HOME'], '.open_relay.ini')]))
-    host = cp.get('open_relay', 'host')
-    port = cp.getint('open_relay', 'port')
-    ssl = cp.getboolean('open_relay', 'ssl')
-    tls = cp.getboolean('open_relay', 'tls')
-    username = cp.get('open_relay', 'username')
-    password = cp.get('open_relay', 'password')
-    smtp_client = MailClient(host,
-                             port,
-                             ssl, tls,
-                             username, password)
-    MailServer(('0.0.0.0', 8826), None,
-               smtp_client=smtp_client)
-    asyncore.loop()
-
-
-class MailClient(object):
-
-    def __init__(self, host, port, ssl, tls, username, password):
-        self.host, self.port, self.ssl, self.tls, self.username, self.password = \
-            host, port, ssl, tls, username, password
-        self._client = None
-        self._connect()
-
-    def sendmail(self, mailfrom, rcpttos, data):
-        if str(mailfrom) == 'None':
-            mailfrom = rcpttos[0]
-        log.info('Sending mail to %s' % rcpttos)
-        log.info('Sending mail from %s' % mailfrom)
-        try:
-            self._client.sendmail(mailfrom, rcpttos, data)
-        except:
-            self._connect()
-            self._client.sendmail(mailfrom, rcpttos, data)
-
-    def _connect(self):
-        if self.ssl:
-            self._client = smtplib.SMTP_SSL(self.host, int(self.port))
-        else:
-            self._client = smtplib.SMTP(self.host, int(self.port))
-        if self.tls:
-            self._client.starttls()
-        if self.username:
-            self._client.login(self.username, self.password)
-
-
-class MailServer(smtpd.SMTPServer):
-
-    def __init__(self, *args, **kwargs):
-        self._client = kwargs.pop('smtp_client')
-        smtpd.SMTPServer.__init__(self, *args, **kwargs)
-
-    def process_message(self, peer, mailfrom, rcpttos, data):
-        self._client.sendmail(mailfrom, rcpttos, data)
-
-if __name__ == '__main__':
-    main()
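
For context, the deleted relay script read its connection settings from an [open_relay] section in ~/.open_relay.ini via ConfigParser. A minimal sketch of that file, with placeholder values, looks like:

    [open_relay]
    host = smtp.example.com
    port = 465
    ssl = true
    tls = false
    username = relayuser
    password = changeme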

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/prep-scm-sandbox.py
----------------------------------------------------------------------
diff --git a/scripts/prep-scm-sandbox.py b/scripts/prep-scm-sandbox.py
deleted file mode 100644
index 414280f..0000000
--- a/scripts/prep-scm-sandbox.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import os
-import string
-
-HOME = os.environ['HOME']
-
-USERS = ['user%.2d' % i for i in range(1, 21)]
-USERS += [
-    'admin1', 'admin2',
-    'dovethunder', 'dovetail', 'dovestream', 'dovetree', 'dovespangle',
-    'dovemeade', 'dovestar', 'dovebuyer', 'dovesomething', 'dovesweet', 'dovewood']
-SSH_CONFIG = '%s/.ssh/config' % HOME
-LDIF_FILE = '%s/users.ldif' % HOME
-KEYFILE = '%s/.ssh/allura_rsa' % HOME
-
-
-def main():
-
-    # Generate ssh key for SCM login
-    os.system('cp %s %s.bak' % (SSH_CONFIG, SSH_CONFIG))
-    with open(SSH_CONFIG) as fp:
-        lines = fp.readlines()
-    new_lines = [
-        SSH_TMPL.substitute(
-            sb_host=sb_host,
-            sb=sb,
-            veid='%d0%.2d' % (sb_host, sb))
-        for sb_host in 5, 6, 7, 9
-        for sb in range(99)]
-    new_lines = '\n'.join(new_lines)
-    found_star = False
-    with open(SSH_CONFIG, 'w') as fp:
-        for line in lines:
-            if not found_star and line.startswith('Host *'):
-                print >> fp, new_lines
-                found_star = True
-            print >> fp, line.rstrip()
-        if not found_star:
-            print >> fp, new_lines
-    os.system("ssh-keygen -t rsa -b 2048 -N '' -f %s" % KEYFILE)
-
-    # Generate ldif
-    pubkey = open(KEYFILE + '.pub').read()
-    with open(LDIF_FILE, 'w') as fp:
-        for user in USERS:
-            print >> fp, LDIF_TMPL.substitute(
-                user=user, pubkey=pubkey)
-
-    # Update LDAP
-    assert 0 == os.system('/usr/local/sbin/ldaptool modify -v -f %s' %
-                          LDIF_FILE)
-
-SSH_TMPL = string.Template('''
-Host hg*-$veid hg*-${veid}.sb.sf.net
-  Hostname 10.58.${sb_host}.${sb}
-  Port 17
-  IdentityFile ~/.ssh/allura_rsa
-
-Host svn*-$veid svn*-${veid}.sb.sf.net
-  Hostname 10.58.${sb_host}.${sb}
-  Port 16
-  IdentityFile ~/.ssh/allura_rsa
-
-Host git*-$veid git*-${veid}.sb.sf.net
-  Hostname 10.58.${sb_host}.${sb}
-  Port 23
-  IdentityFile ~/.ssh/allura_rsa
-''')
-
-LDIF_TMPL = string.Template('''
-dn: cn=$user,ou=users,dc=sf,dc=net
-changetype: modify
-add: sshPublicKey
-sshPublicKey: $pubkey
-''')
-
-if __name__ == '__main__':
-    main()
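
As a worked example of the template substitution above (the numbers come straight from the loops, not from any real sandbox): sb_host=5 and sb=42 give veid '5042', so SSH_TMPL expands to stanzas such as:

    Host hg*-5042 hg*-5042.sb.sf.net
      Hostname 10.58.5.42
      Port 17
      IdentityFile ~/.ssh/allura_rsa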

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/prepare-allura-tickets-for-import.py
----------------------------------------------------------------------
diff --git a/scripts/prepare-allura-tickets-for-import.py b/scripts/prepare-allura-tickets-for-import.py
deleted file mode 100644
index dc6a695..0000000
--- a/scripts/prepare-allura-tickets-for-import.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env python
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-from itertools import tee, izip, chain
-import json
-import git
-from collections import Counter
-
-'''
-This script is for one-time conversion of Allura's own tickets from SourceForge
-host to forge-allura.apache.org hosting
-
-Change path variables here:
-'''
-
-filename = "/var/local/allura/tickets.json"  # Absolute path to exported tickets.json
-output = "/var/local/allura/updated_tickets.json"  # Absolute path to the output
-ticket_list = "/var/local/allura/ticket_ids.list"
-top_usernames = "/var/local/allura/top_usernames.list"
-gitrepository = "/var/local/allura"  # Path to allura repository
-g = git.Git(gitrepository)
-
-reviews = ['code-review', 'design-review']
-
-with open(filename, 'r') as json_file:
-    data = json.loads(json_file.read())
-
-
-def pairwise(iterable):
-    """s -> (s0,s1), (s1,s2), (s2, s3), ..."""
-    a, b = tee(iterable)
-    next(b, None)
-    return izip(a, b)
-
-
-tags = ['asf_release_1.0.0', 'asf_release_1.0.0-RC1', 'asf_release_1.0.1', 'asf_release_1.1.0', 'HEAD']
-
-tag_log = dict()
-for tag1, tag2 in pairwise(tags):
-    log = g.log('%s...%s' % (tag1, tag2), '--pretty=oneline')
-    tag_log[tag2] = log
-
-ticket_tag = dict()
-tickets = data.pop('tickets')
-for ticket in tickets:
-    for key, value in tag_log.iteritems():
-        if "[#%s]" % ticket['ticket_num'] in value:
-            ticket_tag[ticket['ticket_num']] = key
-            continue
-
-data.pop('milestones', None)
-data.pop('saved_bins', None)
-
-updated = []
-for ticket in tickets:
-    if not ticket['private'] or ticket['ticket_num'] == 6054:
-        if ticket['status'] in reviews:
-            ticket['status'] = 'review'
-
-        milestone = ticket_tag.get(ticket['ticket_num'], None)
-        if not milestone:
-            if ticket['status'] == 'closed':
-                milestone = tags[0]
-        ticket['custom_fields']['_milestone'] = milestone if milestone and milestone != 'HEAD' else 'unreleased'
-        if '_size' in ticket['custom_fields'].keys():
-            size = ticket['custom_fields']['_size']
-            if size:
-                ticket['labels'].append("sf-%d" % int(size))
-            ticket['custom_fields'].pop('_size', None)
-
-        if '_qa' in ticket['custom_fields'].keys():
-            ticket['custom_fields']['_reviewer'] = ticket['custom_fields']['_qa']
-            ticket['custom_fields'].pop('_qa', None)
-        updated.append(ticket)
-tags[-1] = 'unreleased'
-
-data['tickets'] = updated
-
-# Remove milestones from the list
-custom_fields = filter(lambda d: d.get('name') not in ['_milestone', 'name', '_size', '_qa'], data['custom_fields'])
-data['custom_fields'] = custom_fields
-
-milestones = {
-    "milestones": [
-        dict(name=milestone_name,
-             old_name=milestone_name,
-             default=False,
-             complete=False,
-             due_date="",
-             description="")
-        for milestone_name in tags
-    ],
-    "name": "_milestone",
-    "show_in_search": False,
-    "label": "Milestone",
-    "type": "milestone",
-    "options": ""
-}
-data['custom_fields'].append(milestones)
-data['custom_fields'].append({
-    "show_in_search": True,
-    "label": "Reviewer",
-    "type": "user",
-    "options": "",
-    "name": "_reviewer"
-})
-data['milestones'] = milestones
-data['saved_bins'] = []
-
-# Count top used usernames
-
-assigned_to = [ticket.get('assigned_to', None) for ticket in updated]
-reported_by = [ticket.get('reported_by', None) for ticket in updated]
-reviewed_by = [ticket['custom_fields'].get('_reviewer', None) for ticket in updated]
-
-posts = [ticket['discussion_thread']['posts'] for ticket in updated]
-
-post_authors = [post.get('author', None) for post in list(chain(*posts))]
-
-usernames = filter(lambda x: bool(x), chain(assigned_to, reported_by, reviewed_by, post_authors))
-
-top_users = Counter(usernames).most_common(50)
-
-with open(output, 'w') as outfile:
-    json.dump(data, outfile, indent=2)
-
-with open(ticket_list, 'w') as outfile:
-    outfile.write('\n'.join(sorted([str(ticket['ticket_num']) for ticket in updated])))
-
-with open(top_usernames, 'w') as outfile:
-    lines = ["%s - %s" % (username, frequency) for username, frequency in top_users]
-    outfile.write('\n'.join(lines))
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/recover-user-databases.py
----------------------------------------------------------------------
diff --git a/scripts/recover-user-databases.py b/scripts/recover-user-databases.py
deleted file mode 100644
index 5abf6f3..0000000
--- a/scripts/recover-user-databases.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import sys
-import logging
-
-from ming.orm import session
-
-from allura import model as M
-
-log = logging.getLogger(__name__)
-
-IGNORED_COLLECTIONS = [
-    '_flyway_migration_info',
-    'user',
-    'config',
-    'system.indexes']
-
-
-def main():
-    conn = M.session.main_doc_session.bind.conn
-    n = M.Neighborhood.query.get(url_prefix='/u/')
-    for p in M.Project.query.find(dict(neighborhood_id=n._id)):
-        if not p.database_configured:
-            continue
-        if not p.shortname.startswith('u/'):
-            continue
-        log.info('Checking to see if %s is configured...', p.database)
-        db = conn[p.database]
-        if is_unconfigured(db):
-            if sys.argv[-1] == 'test':
-                log.info('... it is not, so I would drop it.')
-                continue
-            log.info('... it is not, so dropping it.')
-            conn.drop_database(p.database)
-            p.database_configured = False
-            session(p).flush()
-        else:
-            log.info('... it is.')
-
-
-def is_unconfigured(db):
-    # Check for data in collections other than those we pre-fill with data
-    for collection_name in db.collection_names():
-        if collection_name in IGNORED_COLLECTIONS:
-            continue
-        collection = db[collection_name]
-        if collection.count():
-            log.info('...%s has data', collection_name)
-            return False
-    # DB is configured if it has more than profile/admin/search tools installed
-    if db.config.count() != 3:
-        log.info('...has %d tools', db.config.count())
-        return False
-    return True
-
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/setup-scm-server.py
----------------------------------------------------------------------
diff --git a/scripts/setup-scm-server.py b/scripts/setup-scm-server.py
deleted file mode 100644
index 050122c..0000000
--- a/scripts/setup-scm-server.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import os
-import string
-from tempfile import mkstemp
-from ConfigParser import ConfigParser, NoOptionError
-
-config = ConfigParser()
-
-
-def main():
-    config.read('.setup-scm-cache')
-    if not config.has_section('scm'):
-        config.add_section('scm')
-    domain = get_value('domain', 'dc=example,dc=com')
-    if config.get('start slapd', 'y') == 'y':
-        run('service slapd start')
-    if config.get('add base ldap schemas', 'y') == 'y':
-        run('ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/ldap/schema/cosine.ldif')
-        run('ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/ldap/schema/nis.ldif')
-        run('ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/ldap/schema/inetorgperson.ldif')
-    secret = config.get('admin password', 'secret')
-    if config.get('add backend ldif', 'y') == 'y':
-        add_ldif(backend_ldif, domain=domain, secret=secret)
-    if config.get('add frontend ldif', 'y') == 'y':
-        add_ldif(frontend_ldif, domain=domain, secret=secret)
-
-
-def get_value(key, default):
-    try:
-        value = config.get('scm', key)
-    except NoOptionError:
-        value = raw_input('%s? [%s]' % key, default)
-        if not value:
-            value = default
-        config.set('scm', key, value)
-    return value
-
-
-def run(command):
-    rc = os.system(command)
-    assert rc == 0
-    return rc
-
-
-def add_ldif(template, **values):
-    fd, name = mkstemp()
-    os.write(fd, template.substitute(values))
-    os.close(fd)
-    run('ldapadd -Y EXTERNAL -H ldapi:/// -f %s' % name)
-    os.remove(name)
-
-backend_ldif = string.Template('''
-# Load dynamic backend modules
-dn: cn=module,cn=config
-objectClass: olcModuleList
-cn: module
-olcModulepath: /usr/lib/ldap
-olcModuleload: back_hdb
-
-# Database settings
-dn: olcDatabase=hdb,cn=config
-objectClass: olcDatabaseConfig
-objectClass: olcHdbConfig
-olcDatabase: {1}hdb
-olcSuffix: $domain
-olcDbDirectory: /var/lib/ldap
-olcRootDN: cn=admin,$domain
-olcRootPW: $secret
-olcDbConfig: set_cachesize 0 2097152 0
-olcDbConfig: set_lk_max_objects 1500
-olcDbConfig: set_lk_max_locks 1500
-olcDbConfig: set_lk_max_lockers 1500
-olcDbIndex: objectClass eq
-olcLastMod: TRUE
-olcDbCheckpoint: 512 30
-olcAccess: to attrs=userPassword by dn="cn=admin,$domain" write by anonymous auth by self write by * none
-olcAccess: to attrs=shadowLastChange by self write by * read
-olcAccess: to dn.base="" by * read
-olcAccess: to * by dn="cn=admin,$domain" write by * read
-
-''')
-
-frontend_ldif = string.Template('''
-# Create top-level object in domain
-dn: $domain
-objectClass: top
-objectClass: dcObject
-objectclass: organization
-o: SCM Host Organization
-dc: SCM
-description: SCM Host Server
-
-# Admin user.
-dn: cn=admin,$domain
-objectClass: simpleSecurityObject
-objectClass: organizationalRole
-cn: admin
-description: LDAP administrator
-userPassword: $secret
-
-dn: ou=people,$domain
-objectClass: organizationalUnit
-ou: people
-
-dn: ou=groups,$domain
-objectClass: organizationalUnit
-ou: groups
-''')
-
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/trac_export_wiki.py
----------------------------------------------------------------------
diff --git a/scripts/trac_export_wiki.py b/scripts/trac_export_wiki.py
new file mode 100755
index 0000000..025d3ca
--- /dev/null
+++ b/scripts/trac_export_wiki.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+#       Licensed to the Apache Software Foundation (ASF) under one
+#       or more contributor license agreements.  See the NOTICE file
+#       distributed with this work for additional information
+#       regarding copyright ownership.  The ASF licenses this file
+#       to you under the Apache License, Version 2.0 (the
+#       "License"); you may not use this file except in compliance
+#       with the License.  You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#       Unless required by applicable law or agreed to in writing,
+#       software distributed under the License is distributed on an
+#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#       KIND, either express or implied.  See the License for the
+#       specific language governing permissions and limitations
+#       under the License.
+
+import sys
+from optparse import OptionParser
+
+from tracwikiimporter.scripts.wiki_from_trac.extractors import WikiExporter
+
+
+def parse_options():
+    parser = OptionParser(
+        usage='%prog <Trac URL>\n\nExport wiki pages from a trac instance')
+
+    parser.add_option('-o', '--out-file', dest='out_filename',
+                      help='Write to file (default stdout)')
+    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
+                      help='Verbose operation')
+    parser.add_option('-c', '--converter', dest='converter',
+                      default='html2text',
+                      help='Converter to use on wiki text. '
+                           'Available options: html2text (default) or regex')
+    options, args = parser.parse_args()
+    if len(args) != 1:
+        parser.error('Wrong number of arguments.')
+    converters = ['html2text', 'regex']
+    if options.converter not in converters:
+        parser.error('Wrong converter. Available options: ' +
+                     ', '.join(converters))
+    return options, args
+
+
+if __name__ == '__main__':
+    options, args = parse_options()
+    exporter = WikiExporter(args[0], options)
+
+    out = sys.stdout
+    if options.out_filename:
+        out = open(options.out_filename, 'w')
+
+    exporter.export(out)
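
For reference, a typical invocation of the renamed export script (the Trac URL and output filename are placeholders) would be:

    python scripts/trac_export_wiki.py -c html2text -o wiki-pages.out http://trac.example.org/myproject

The single positional argument is the Trac instance URL; without -o the exported pages are written to stdout.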

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/trac_import.py
----------------------------------------------------------------------
diff --git a/scripts/trac_import.py b/scripts/trac_import.py
new file mode 100644
index 0000000..d4e51fd
--- /dev/null
+++ b/scripts/trac_import.py
@@ -0,0 +1,119 @@
+#       Licensed to the Apache Software Foundation (ASF) under one
+#       or more contributor license agreements.  See the NOTICE file
+#       distributed with this work for additional information
+#       regarding copyright ownership.  The ASF licenses this file
+#       to you under the Apache License, Version 2.0 (the
+#       "License"); you may not use this file except in compliance
+#       with the License.  You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#       Unless required by applicable law or agreed to in writing,
+#       software distributed under the License is distributed on an
+#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#       KIND, either express or implied.  See the License for the
+#       specific language governing permissions and limitations
+#       under the License.
+
+import json
+from optparse import OptionParser
+
+from allura.lib.import_api import AlluraImportApiClient
+from tracwikiimporter.scripts.wiki_from_trac.loaders import import_wiki
+
+
+def main():
+    optparser, options, args = parse_options()
+
+    import_options = {}
+    for s in options.import_opts:
+        k, v = s.split('=', 1)
+        if v == 'false':
+            v = False
+        import_options[k] = v
+
+    user_map = {}
+    if options.user_map_file:
+        f = open(options.user_map_file)
+        try:
+            user_map = json.load(f)
+            if type(user_map) is not type({}):
+                raise ValueError
+            for k, v in user_map.iteritems():
+                print k, v
+                if not isinstance(k, basestring) or not isinstance(v, basestring):
+                    raise ValueError
+        except ValueError:
+            optparser.error(
+                '--user-map should specify JSON file with format {"original_user": "sf_user", ...}')
+        finally:
+            f.close()
+
+    import_options['user_map'] = user_map
+
+    cli = AlluraImportApiClient(options.base_url, options.token, options.verbose)
+    doc_txt = open(args[0]).read()
+
+    if options.forum:
+        import_forum(cli, options.project, options.forum, user_map, doc_txt,
+                     validate=options.validate, neighborhood=options.neighborhood)
+    elif options.wiki:
+        import_wiki(cli, options.project, options.wiki, options, doc_txt)
+
+
+
+def import_forum(cli, project, tool, user_map, doc_txt, validate=True,
+        neighborhood='p'):
+    url = '/rest/{neighborhood}/{project}/{tool}'.format(
+            neighborhood=neighborhood,
+            project=project,
+            tool=tool,
+            )
+    if validate:
+        url += '/validate_import'
+        print cli.call(url, doc=doc_txt, user_map=json.dumps(user_map))
+    else:
+        url += '/perform_import'
+        print cli.call(url, doc=doc_txt, user_map=json.dumps(user_map))
+
+
+def parse_options():
+    optparser = OptionParser(usage='''%prog [options] <JSON dump>
+
+Import project data dump in JSON format into an Allura project.''')
+    optparser.add_option('-t', '--token', dest='token',
+                         help='OAuth bearer token (generate at /auth/oauth/)')
+    optparser.add_option('-p', '--project', dest='project',
+                         help='Project to import to')
+    optparser.add_option('-n', '--neighborhood', dest='neighborhood',
+                         help="URL prefix of destination neighborhood (default is 'p')",
+                         default='p')
+    optparser.add_option('-f', '--forum', dest='forum',
+                         help='Forum tool to import to')
+    optparser.add_option('-w', '--wiki', dest='wiki',
+                         help='Wiki tool to import to')
+    optparser.add_option('-u', '--base-url', dest='base_url',
+                         default='https://sourceforge.net', help='Base Allura URL (%default)')
+    optparser.add_option('-o', dest='import_opts',
+                         default=[], action='append', help='Specify import option(s)', metavar='opt=val')
+    optparser.add_option('--user-map', dest='user_map_file',
+                         help='Map original users to SF.net users', metavar='JSON_FILE')
+    optparser.add_option('--validate', dest='validate',
+                         action='store_true', help='Validate import data')
+    optparser.add_option('-v', '--verbose', dest='verbose',
+                         action='store_true', help='Verbose operation')
+    optparser.add_option('-c', '--continue', dest='cont',
+                         action='store_true', help='Continue import into existing tracker')
+    options, args = optparser.parse_args()
+    if len(args) != 1:
+        optparser.error("Wrong number of arguments")
+    if not options.token:
+        optparser.error("OAuth bearer token is required")
+    if not options.project:
+        optparser.error("Target project is required")
+    options.neighborhood = options.neighborhood.strip('/')
+    return optparser, options, args
+
+
+if __name__ == '__main__':
+    main()
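
For reference, a typical invocation of the renamed import script against a forum tool (the token, project name, tool mount point, and filenames are placeholders) might be:

    python scripts/trac_import.py -t <oauth-token> -p myproject -f discussion \
        --user-map users.json --validate project-dump.json

Use -w <mount-point> instead of -f to target a wiki tool; --validate makes the forum import hit the validate_import endpoint instead of perform_import.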

http://git-wip-us.apache.org/repos/asf/allura/blob/3189602c/scripts/wiki-export.py
----------------------------------------------------------------------
diff --git a/scripts/wiki-export.py b/scripts/wiki-export.py
deleted file mode 100755
index 025d3ca..0000000
--- a/scripts/wiki-export.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import sys
-from optparse import OptionParser
-
-from tracwikiimporter.scripts.wiki_from_trac.extractors import WikiExporter
-
-
-def parse_options():
-    parser = OptionParser(
-        usage='%prog <Trac URL>\n\nExport wiki pages from a trac instance')
-
-    parser.add_option('-o', '--out-file', dest='out_filename',
-                      help='Write to file (default stdout)')
-    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
-                      help='Verbose operation')
-    parser.add_option('-c', '--converter', dest='converter',
-                      default='html2text',
-                      help='Converter to use on wiki text. '
-                           'Available options: html2text (default) or regex')
-    options, args = parser.parse_args()
-    if len(args) != 1:
-        parser.error('Wrong number of arguments.')
-    converters = ['html2text', 'regex']
-    if options.converter not in converters:
-        parser.error('Wrong converter. Available options: ' +
-                     ', '.join(converters))
-    return options, args
-
-
-if __name__ == '__main__':
-    options, args = parse_options()
-    exporter = WikiExporter(args[0], options)
-
-    out = sys.stdout
-    if options.out_filename:
-        out = open(options.out_filename, 'w')
-
-    exporter.export(out)