Posted to commits@allura.apache.org by br...@apache.org on 2014/01/02 21:55:23 UTC

[2/3] git commit: [#4091] ticket:489 Fix scripts which depend on per project database_uri

[#4091] ticket:489 Fix scripts which depend on per project database_uri


Project: http://git-wip-us.apache.org/repos/asf/incubator-allura/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-allura/commit/0e8e6de5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-allura/tree/0e8e6de5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-allura/diff/0e8e6de5

Branch: refs/heads/master
Commit: 0e8e6de57a4fac1d27ff6e39fea66458d0c10c07
Parents: ef0d3ea
Author: Igor Bondarenko <je...@gmail.com>
Authored: Thu Dec 26 12:52:27 2013 +0200
Committer: Dave Brondsema <db...@slashdotmedia.com>
Committed: Thu Jan 2 20:55:05 2014 +0000

----------------------------------------------------------------------
 scripts/migrate_project_database.py             | 95 --------------------
 scripts/migrations/003-migrate_project_roles.py | 26 ++----
 .../004-make-attachments-polymorphic.py         | 50 +++++------
 scripts/migrations/006-migrate-artifact-refs.py | 36 +++-----
 scripts/migrations/007-update-acls.py           |  4 +-
 5 files changed, 46 insertions(+), 165 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/0e8e6de5/scripts/migrate_project_database.py
----------------------------------------------------------------------
diff --git a/scripts/migrate_project_database.py b/scripts/migrate_project_database.py
deleted file mode 100644
index a30af02..0000000
--- a/scripts/migrate_project_database.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#       Licensed to the Apache Software Foundation (ASF) under one
-#       or more contributor license agreements.  See the NOTICE file
-#       distributed with this work for additional information
-#       regarding copyright ownership.  The ASF licenses this file
-#       to you under the Apache License, Version 2.0 (the
-#       "License"); you may not use this file except in compliance
-#       with the License.  You may obtain a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#       Unless required by applicable law or agreed to in writing,
-#       software distributed under the License is distributed on an
-#       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#       KIND, either express or implied.  See the License for the
-#       specific language governing permissions and limitations
-#       under the License.
-
-import os
-import sys
-import shutil
-import logging
-
-from pylons import tmpl_context as c
-from ming.orm import session
-from allura import model as M
-log = logging.getLogger(__name__)
-
-MONGO_HOME=os.environ.get('MONGO_HOME', '/usr')
-MONGO_DUMP=os.path.join(MONGO_HOME, 'bin/mongodump')
-MONGO_RESTORE=os.path.join(MONGO_HOME, 'bin/mongorestore')
-SHARD_LENGTH=1
-
-def main():
-    if len(sys.argv) > 1:
-        shortnames = sys.argv[1:]
-    else:
-        shortnames = [ p.shortname for p in M.Project.query.find(dict(is_root=True)) ]
-    M.main_orm_session.clear()
-    for pname in shortnames:
-        # This needs to be a .find() instead of a .get() because of the
-        # __init__ projects, which have the same shortname but exist in
-        # multiple neighborhoods.
-        for project in M.Project.query.find(dict(shortname=pname)):
-            migrate_project_database(project)
-
-def migrate_project_database(project):
-    c.project = project
-    target_uri = M.Project.default_database_uri(project.shortname)
-    target_db = target_uri.rsplit('/')[-1]
-    if project.database_uri == target_uri:
-        log.info('Project %s is already migrated to %s', project.shortname, project.database_uri)
-        return 2
-    conn = M.session.main_doc_session.db.connection
-    host = '%s:%s' % (conn.host, conn.port)
-    dirname = os.tempnam()
-    try:
-        log.info('Backing up %s to %s', project.shortname, dirname)
-        db_uri = project.database_uri
-        db = db_uri.rsplit('/')[-1]
-        assert 0 == os.system('%s --host %s --db %s -o %s' % (
-                MONGO_DUMP, host, db, dirname))
-        assert 0 == os.system('%s --host %s --db %s %s/%s ' % (
-                MONGO_RESTORE, host, target_db, dirname, db))
-        for p in M.Project.query.find(dict(database_uri=db_uri)):
-            p.database_uri = M.Project.default_database_uri(project.shortname)
-        session(project).flush()
-        conn.drop_database(db)
-    finally:
-        if os.path.exists(dirname):
-            shutil.rmtree(dirname)
-    return 0
-
-def pm(etype, value, tb): # pragma no cover
-    import pdb, traceback
-    try:
-        from IPython.ipapi import make_session; make_session()
-        from IPython.Debugger import Pdb
-        sys.stderr.write('Entering post-mortem IPDB shell\n')
-        p = Pdb(color_scheme='Linux')
-        p.reset()
-        p.setup(None, tb)
-        p.print_stack_trace()
-        sys.stderr.write('%s: %s\n' % ( etype, value))
-        p.cmdloop()
-        p.forget()
-        # p.interaction(None, tb)
-    except ImportError:
-        sys.stderr.write('Entering post-mortem PDB shell\n')
-        traceback.print_exception(etype, value, tb)
-        pdb.post_mortem(tb)
-
-sys.excepthook = pm
-
-if __name__ == '__main__':
-    sys.exit(main())
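
For context on the deletion: the script above copied each project's private Mongo database into the shared default database via mongodump/mongorestore, so once per-project database_uri values are gone it has nothing left to do. The updated migrations below all read one shared database instead. A minimal sketch of that access pattern, using only names that appear elsewhere in this commit and assuming an already-initialized Allura environment:

    # Sketch only (not part of the commit): the shared-database access
    # pattern the updated migration scripts rely on.
    from allura import model as M

    def shared_project_db():
        # One class-level URI for every project now, instead of a
        # per-project project.database_uri attribute.
        print('shared project database: %s' % M.Project.database_uri())
        return M.project_doc_session.db  # pymongo Database object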

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/0e8e6de5/scripts/migrations/003-migrate_project_roles.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/003-migrate_project_roles.py b/scripts/migrations/003-migrate_project_roles.py
index 51dc2d8..580f93f 100644
--- a/scripts/migrations/003-migrate_project_roles.py
+++ b/scripts/migrations/003-migrate_project_roles.py
@@ -15,30 +15,18 @@
 #       specific language governing permissions and limitations
 #       under the License.
 
-'''Merge all the OldProjectRole collections in various project databases into a
-central ProjectRole collection.
+'''Merge all the OldProjectRole collections into a ProjectRole collection.
 '''
 import logging
 
-from pylons import tmpl_context as c
-
 from ming.orm import session, state
 from allura import model as M
 
 log = logging.getLogger(__name__)
 
-seen_databases = set()
-projects = M.Project.query.find().all()
-
-for p in projects:
-    if p.database_uri in seen_databases:
-        continue
-    seen_databases.add(p.database_uri)
-    log.info('Moving project roles in database %s to main DB',
-             p.database_uri)
-    c.project = p
-    for opr in M.OldProjectRole.query.find():
-        pr = M.ProjectRole(**state(opr).document)
-    session(opr).clear()
-    session(pr).flush()
-    session(pr).clear()
+log.info('Moving project roles in database %s to main DB', M.Project.database_uri())
+for opr in M.OldProjectRole.query.find():
+    pr = M.ProjectRole(**state(opr).document)
+session(opr).clear()
+session(pr).flush()
+session(pr).clear()
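
One detail carried over from the old per-project loop: the clear/flush calls stay outside the for loop, so opr and pr are only bound when at least one OldProjectRole document exists. A defensive variant with the same semantics, using the script's existing imports (a sketch only, not part of this commit):

    # Sketch only: skip the flush when there is nothing to migrate, so the
    # script cannot raise NameError on an empty collection.
    pr = opr = None
    for opr in M.OldProjectRole.query.find():
        pr = M.ProjectRole(**state(opr).document)
    if pr is not None:
        session(opr).clear()
        session(pr).flush()
        session(pr).clear()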

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/0e8e6de5/scripts/migrations/004-make-attachments-polymorphic.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/004-make-attachments-polymorphic.py b/scripts/migrations/004-make-attachments-polymorphic.py
index 8839e51..0c34d58 100644
--- a/scripts/migrations/004-make-attachments-polymorphic.py
+++ b/scripts/migrations/004-make-attachments-polymorphic.py
@@ -17,8 +17,6 @@
 
 import logging
 
-from pylons import tmpl_context as c
-
 from ming.orm import ThreadLocalORMSession
 from allura import model as M
 from forgetracker import model as TM
@@ -28,31 +26,29 @@ from forgediscussion import model as DM
 log = logging.getLogger(__name__)
 
 def main():
-    dbs = dict((p.database_uri, p) for p in M.Project.query.find())
-    for db, p in sorted(dbs.items()):
-        log.info('=== Making attachments in %s polymorphic ===', db)
-        c.project = p
-        log.info('Fixing %d discussions', M.Discussion.query.find().count())
-        for d in M.Discussion.query.find():
-            for a in M.DiscussionAttachment.query.find(dict(
-                    discussion_id=d._id)):
-                log.info('%s: %s', d.url(), a.filename)
-        log.info('Fixing %d forums', DM.Forum.query.find().count())
-        for d in DM.Forum.query.find():
-            for a in DM.ForumAttachment.query.find(dict(
-                    discussion_id=d._id)):
-                log.info('%s: %s', d.url(), a.filename)
-        log.info('Fixing %d tickets', TM.Ticket.query.find().count())
-        for t in TM.Ticket.query.find():
-            for a in TM.TicketAttachment.query.find(dict(
-                    artifact_id=t._id)):
-                log.info('%s: %s', t.url(), a.filename)
-        log.info('Fixing %d wikis', WM.Page.query.find().count())
-        for p in WM.Page.query.find():
-            for a in WM.WikiAttachment.query.find(dict(
-                    artifact_id=p._id)):
-                log.info('%s: %s', p.url(), a.filename)
-        ThreadLocalORMSession.flush_all()
+    db = M.project_doc_session.db
+    log.info('=== Making attachments in %s polymorphic ===', db)
+    log.info('Fixing %d discussions', M.Discussion.query.find().count())
+    for d in M.Discussion.query.find():
+        for a in M.DiscussionAttachment.query.find(dict(
+                discussion_id=d._id)):
+            log.info('%s: %s', d.url(), a.filename)
+    log.info('Fixing %d forums', DM.Forum.query.find().count())
+    for d in DM.Forum.query.find():
+        for a in DM.ForumAttachment.query.find(dict(
+                discussion_id=d._id)):
+            log.info('%s: %s', d.url(), a.filename)
+    log.info('Fixing %d tickets', TM.Ticket.query.find().count())
+    for t in TM.Ticket.query.find():
+        for a in TM.TicketAttachment.query.find(dict(
+                artifact_id=t._id)):
+            log.info('%s: %s', t.url(), a.filename)
+    log.info('Fixing %d wikis', WM.Page.query.find().count())
+    for p in WM.Page.query.find():
+        for a in WM.WikiAttachment.query.find(dict(
+                artifact_id=p._id)):
+            log.info('%s: %s', p.url(), a.filename)
+    ThreadLocalORMSession.flush_all()
 
 if __name__ == '__main__':
     main()
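
With the outer per-database loop removed, the four remaining loops in this script share an identical shape. Purely as an illustration built from the script's existing imports (not part of the commit), they could be expressed with one helper:

    # Illustration only: each artifact/attachment pair above follows the
    # same "log every attachment" pattern.
    def log_attachments(Artifact, Attachment, key):
        log.info('Fixing %d %s documents',
                 Artifact.query.find().count(), Artifact.__name__)
        for obj in Artifact.query.find():
            for a in Attachment.query.find({key: obj._id}):
                log.info('%s: %s', obj.url(), a.filename)

    log_attachments(M.Discussion, M.DiscussionAttachment, 'discussion_id')
    log_attachments(DM.Forum, DM.ForumAttachment, 'discussion_id')
    log_attachments(TM.Ticket, TM.TicketAttachment, 'artifact_id')
    log_attachments(WM.Page, WM.WikiAttachment, 'artifact_id')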

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/0e8e6de5/scripts/migrations/006-migrate-artifact-refs.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/006-migrate-artifact-refs.py b/scripts/migrations/006-migrate-artifact-refs.py
index f1afa84..dc9149c 100644
--- a/scripts/migrations/006-migrate-artifact-refs.py
+++ b/scripts/migrations/006-migrate-artifact-refs.py
@@ -19,8 +19,6 @@ import sys
 import logging
 from cPickle import loads
 
-from pylons import tmpl_context as c
-
 from allura import model as M
 
 log = logging.getLogger('allura.migrate-artifact-refs')
@@ -28,27 +26,21 @@ log = logging.getLogger('allura.migrate-artifact-refs')
 # Threads have artifact references that must be migrated to the new system
 def main():
     test = sys.argv[-1] == 'test'
-    all_projects = M.Project.query.find().all()
     log.info('Fixing artifact references in threads')
-    seen_dbs = set()
-    for project in all_projects:
-        if project.database_uri in seen_dbs: continue
-        seen_dbs.add(project.database_uri)
-        c.project = project
-        db = M.project_doc_session.db
-        for thread in db.thread.find():
-            ref = thread.pop('artifact_reference', None)
-            if ref is None: continue
-            Artifact = loads(ref['artifact_type'])
-            artifact = Artifact.query.get(_id=ref['artifact_id'])
-            M.ArtifactReference.from_artifact(artifact)
-            thread['ref_id'] = artifact.index_id()
-            if not test:
-                db.thread.save(thread)
-                log.info('saving thread %s', thread['_id'])
-            else:
-                log.info('would save thread %s', thread['_id'])
-            M.artifact_orm_session.clear()
+    db = M.project_doc_session.db
+    for thread in db.thread.find():
+        ref = thread.pop('artifact_reference', None)
+        if ref is None: continue
+        Artifact = loads(ref['artifact_type'])
+        artifact = Artifact.query.get(_id=ref['artifact_id'])
+        M.ArtifactReference.from_artifact(artifact)
+        thread['ref_id'] = artifact.index_id()
+        if not test:
+            db.thread.save(thread)
+            log.info('saving thread %s', thread['_id'])
+        else:
+            log.info('would save thread %s', thread['_id'])
+        M.artifact_orm_session.clear()
 
 if __name__ == '__main__':
     main()
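
For reference, the loop above rewrites each raw thread document in place, and passing 'test' as the last command-line argument gives a dry run: threads are logged as 'would save' but never written back. The document shapes below are inferred from the code, not taken from real data:

    # Inferred thread document shapes (illustration only):
    #   before: {'_id': ..., 'artifact_reference': {'artifact_type': <pickled class>,
    #                                               'artifact_id': <id>}, ...}
    #   after:  {'_id': ..., 'ref_id': <artifact.index_id()>, ...}
    # Dry run, within an initialized Allura environment (path per the diff header):
    #   python scripts/migrations/006-migrate-artifact-refs.py test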

http://git-wip-us.apache.org/repos/asf/incubator-allura/blob/0e8e6de5/scripts/migrations/007-update-acls.py
----------------------------------------------------------------------
diff --git a/scripts/migrations/007-update-acls.py b/scripts/migrations/007-update-acls.py
index 4c7aaa3..16e28a8 100644
--- a/scripts/migrations/007-update-acls.py
+++ b/scripts/migrations/007-update-acls.py
@@ -38,7 +38,7 @@ c_project =  main_db.project
 c_user = main_db.user
 c_project_role = main_db.project_role
 c.project = Object(
-    database_uri=c_project.find().next()['database_uri'])
+    database_uri=M.Project.database_uri())
 
 project_db = M.project_doc_session.db
 c_app_config = project_db.config
@@ -171,7 +171,7 @@ def _project_role(project_id, name=None, user_id=None):
         roles=[])
     c_project_role.save(doc)
     return doc
-                                 
+
 
 def simple_acl_update(doc, collection_name):
     '''Update dict-style to list-style ACL'''
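
Of the two hunks here, the first swaps the raw c_project.find().next()['database_uri'] lookup for the class-level M.Project.database_uri(); the second only strips trailing whitespace. A short sketch of what the stub keeps providing to the rest of the script, treating Object as a simple attribute container as its use above suggests (script's existing imports assumed):

    # Sketch only: downstream code keeps reading c.project.database_uri, so
    # the stub just needs to expose that single attribute.
    c.project = Object(database_uri=M.Project.database_uri())
    project_db = M.project_doc_session.db  # shared DB the rest of the script queries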