You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bloodhound.apache.org by gj...@apache.org on 2012/12/20 04:26:20 UTC
svn commit: r1424277 -
/incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py
Author: gjm
Date: Thu Dec 20 03:26:20 2012
New Revision: 1424277
URL: http://svn.apache.org/viewvc?rev=1424277&view=rev
Log:
adding admin commands for dumping and loading fixtures - towards #314
Modified:
incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py
Modified: incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py
URL: http://svn.apache.org/viewvc/incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py?rev=1424277&r1=1424276&r2=1424277&view=diff
==============================================================================
--- incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py (original)
+++ incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py Thu Dec 20 03:26:20 2012
@@ -23,16 +23,41 @@ r"""Project dashboard for Apache(TM) Blo
Administration commands for Bloodhound Dashboard.
"""
+import json
import pkg_resources
+from sys import stdout
from trac.admin.api import IAdminCommandProvider, AdminCommandError
from trac.core import Component, implements
+from trac.db_default import schema as tracschema
from trac.util.text import printout
from trac.util.translation import _
from trac.wiki.admin import WikiAdmin
from trac.wiki.model import WikiPage
from bhdashboard import wiki
+try:
+ from multiproduct.model import Product, ProductResourceMap
+except ImportError:
+ Product = None
+ ProductResourceMap = None
+
+schema = tracschema[:]
+if Product is not None:
+ schema.extend([Product._get_schema(), ProductResourceMap._get_schema()])
+
+structure = dict([(table.name, [col.name for col in table.columns])
+ for table in schema])
+
+# add product for any columns required
+for table in ['ticket',]:
+ structure[table].append('product')
+
+# probably no point in keeping data from these tables
+ignored = ['auth_cookie', 'session', 'session_attribute', 'cache']
+IGNORED_DB_STRUCTURE = dict([(k, structure[k]) for k in ignored])
+DB_STRUCTURE = dict([(k, structure[k]) for k in structure if k not in ignored])
+
class BloodhoundAdmin(Component):
"""Bloodhound administration commands.
"""
@@ -47,6 +72,21 @@ class BloodhoundAdmin(Component):
'Move Trac* wiki pages to %s/*' % wiki.GUIDE_NAME,
None, self._do_wiki_upgrade)
+ yield ('devfixture dump', '[filename]',
+ """Dumps database to stdout in a form suitable for reloading
+
+ If a filename is not provided, data will be sent to standard out.
+ """,
+ None, self._dump_as_fixture)
+
+ yield ('devfixture load', '<filename> <backedup>',
+ """Loads database fixture from json dump file
+
+ You need to specify a filename and confirm that you have backed
+ up your data.
+ """,
+ None, self._load_fixture_from_file)
+
def _do_wiki_upgrade(self):
"""Move all wiki pages starting with Trac prefix to unbranded user
guide pages.
@@ -95,3 +135,56 @@ class BloodhoundAdmin(Component):
WHERE name=%s
""",
(re.sub(r'\b%s\b' % old_name, new_name, text), name))
+
+ def _get_tdump(self, db, table, fields):
+ """Dumps all the data from a table for a known set of fields"""
+ return db("SELECT %s from %s" %(', '.join(fields), table))
+
+ def _dump_as_fixture(self, *args):
+ """Dumps database to a json fixture"""
+ def dump_json(fp):
+ """Dump to json given a file"""
+ with self.env.db_query as db:
+ data = [(k, v, self._get_tdump(db, k, v))
+ for k, v in DB_STRUCTURE.iteritems()]
+ jd = json.dumps(data, sort_keys=True, indent=2,
+ separators=(',', ':'))
+ fp.write(jd)
+
+ if len(args):
+ f = open(args[0], mode='w+')
+ dump_json(f)
+ f.close()
+ else:
+ dump_json(stdout)
+
+ def _load_fixture_from_file(self, fname):
+ """Calls _load_fixture with an open file"""
+ try:
+ fp = open(fname, mode='r')
+ self._load_fixture(fp)
+ fp.close()
+ except IOError:
+ printout(_("The file '%(fname)s' does not exist", fname=fname))
+
+ def _load_fixture(self, fp):
+ """Extract fixture data from a file like object, expecting json"""
+ # Only delete if we think it unlikely that there is data to lose
+ with self.env.db_query as db:
+ if db('SELECT * FROM ticket'):
+ printout(_("This command is only intended to run on fresh "
+ "environments as it will overwrite the database.\n"
+ "If it is safe to lose bloodhound data, delete the "
+ "environment and re-run python bloodhound_setup.py "
+ "before attempting to load the fixture again."))
+ return
+ data = json.load(fp)
+ with self.env.db_transaction as db:
+ for tab, cols, vals in data:
+ db("DELETE FROM %s" %(tab))
+ for tab, cols, vals in data:
+ printout("Populating %s table" % tab)
+ db.executemany("INSERT INTO %s (%s) VALUES (%s)" % (tab,
+ ','.join(cols), ','.join(['%s' for c in cols])), vals)
+ printout("%d records added" % len(vals))
+
Re: svn commit: r1424277 - /incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py
Posted by Gary Martin <ga...@wandisco.com>.
Hi,
I just added some admin commands to help with dumping and loading fairly
simple json fixture data. I've not attempted anything too clever - for a
start there is a fairly dumb restriction on loading the data as I didn't
want to think too hard about how to protect production installs from
losing data.
Anyway, there are bound to be a number of other problems to sort out at
some point but I think it is good to have something.
The associated ticket is here:
https://issues.apache.org/bloodhound/ticket/314
Cheers,
Gary
On 20/12/12 03:26, gjm@apache.org wrote:
> Author: gjm
> Date: Thu Dec 20 03:26:20 2012
> New Revision: 1424277
>
> URL: http://svn.apache.org/viewvc?rev=1424277&view=rev
> Log:
> adding admin commands for dumping and loading fixtures - towards #314
>
> Modified:
> incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py
>
> Modified: incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py
> URL: http://svn.apache.org/viewvc/incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py?rev=1424277&r1=1424276&r2=1424277&view=diff
> ==============================================================================
> --- incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py (original)
> +++ incubator/bloodhound/trunk/bloodhound_dashboard/bhdashboard/admin.py Thu Dec 20 03:26:20 2012
> @@ -23,16 +23,41 @@ r"""Project dashboard for Apache(TM) Blo
>
> Administration commands for Bloodhound Dashboard.
> """
> +import json
> import pkg_resources
> +from sys import stdout
>
> from trac.admin.api import IAdminCommandProvider, AdminCommandError
> from trac.core import Component, implements
> +from trac.db_default import schema as tracschema
> from trac.util.text import printout
> from trac.util.translation import _
> from trac.wiki.admin import WikiAdmin
> from trac.wiki.model import WikiPage
> from bhdashboard import wiki
>
> +try:
> + from multiproduct.model import Product, ProductResourceMap
> +except ImportError:
> + Product = None
> + ProductResourceMap = None
> +
> +schema = tracschema[:]
> +if Product is not None:
> + schema.extend([Product._get_schema(), ProductResourceMap._get_schema()])
> +
> +structure = dict([(table.name, [col.name for col in table.columns])
> + for table in schema])
> +
> +# add product for any columns required
> +for table in ['ticket',]:
> + structure[table].append('product')
> +
> +# probably no point in keeping data from these tables
> +ignored = ['auth_cookie', 'session', 'session_attribute', 'cache']
> +IGNORED_DB_STRUCTURE = dict([(k, structure[k]) for k in ignored])
> +DB_STRUCTURE = dict([(k, structure[k]) for k in structure if k not in ignored])
> +
> class BloodhoundAdmin(Component):
> """Bloodhound administration commands.
> """
> @@ -47,6 +72,21 @@ class BloodhoundAdmin(Component):
> 'Move Trac* wiki pages to %s/*' % wiki.GUIDE_NAME,
> None, self._do_wiki_upgrade)
>
> + yield ('devfixture dump', '[filename]',
> + """Dumps database to stdout in a form suitable for reloading
> +
> + If a filename is not provided, data will be sent to standard out.
> + """,
> + None, self._dump_as_fixture)
> +
> + yield ('devfixture load', '<filename> <backedup>',
> + """Loads database fixture from json dump file
> +
> + You need to specify a filename and confirm that you have backed
> + up your data.
> + """,
> + None, self._load_fixture_from_file)
> +
> def _do_wiki_upgrade(self):
> """Move all wiki pages starting with Trac prefix to unbranded user
> guide pages.
> @@ -95,3 +135,56 @@ class BloodhoundAdmin(Component):
> WHERE name=%s
> """,
> (re.sub(r'\b%s\b' % old_name, new_name, text), name))
> +
> + def _get_tdump(self, db, table, fields):
> + """Dumps all the data from a table for a known set of fields"""
> + return db("SELECT %s from %s" %(', '.join(fields), table))
> +
> + def _dump_as_fixture(self, *args):
> + """Dumps database to a json fixture"""
> + def dump_json(fp):
> + """Dump to json given a file"""
> + with self.env.db_query as db:
> + data = [(k, v, self._get_tdump(db, k, v))
> + for k, v in DB_STRUCTURE.iteritems()]
> + jd = json.dumps(data, sort_keys=True, indent=2,
> + separators=(',', ':'))
> + fp.write(jd)
> +
> + if len(args):
> + f = open(args[0], mode='w+')
> + dump_json(f)
> + f.close()
> + else:
> + dump_json(stdout)
> +
> + def _load_fixture_from_file(self, fname):
> + """Calls _load_fixture with an open file"""
> + try:
> + fp = open(fname, mode='r')
> + self._load_fixture(fp)
> + fp.close()
> + except IOError:
> + printout(_("The file '%(fname)s' does not exist", fname=fname))
> +
> + def _load_fixture(self, fp):
> + """Extract fixture data from a file like object, expecting json"""
> + # Only delete if we think it unlikely that there is data to lose
> + with self.env.db_query as db:
> + if db('SELECT * FROM ticket'):
> + printout(_("This command is only intended to run on fresh "
> + "environments as it will overwrite the database.\n"
> + "If it is safe to lose bloodhound data, delete the "
> + "environment and re-run python bloodhound_setup.py "
> + "before attempting to load the fixture again."))
> + return
> + data = json.load(fp)
> + with self.env.db_transaction as db:
> + for tab, cols, vals in data:
> + db("DELETE FROM %s" %(tab))
> + for tab, cols, vals in data:
> + printout("Populating %s table" % tab)
> + db.executemany("INSERT INTO %s (%s) VALUES (%s)" % (tab,
> + ','.join(cols), ','.join(['%s' for c in cols])), vals)
> + printout("%d records added" % len(vals))
> +
>
>