From: Thierry Parmentelat Date: Wed, 18 Jan 2012 10:59:43 +0000 (+0100) Subject: make persistentobjs loadable from the client-side X-Git-Tag: after-alchemy-1~3 X-Git-Url: http://git.onelab.eu/?a=commitdiff_plain;h=6813131cbee1cd54b0af890f325d1a6667790bad;p=sfa.git make persistentobjs loadable from the client-side --- diff --git a/sfa/importer/sfa-import-plc.py b/sfa/importer/sfa-import-plc.py index f3ac37ef..768473f3 100755 --- a/sfa/importer/sfa-import-plc.py +++ b/sfa/importer/sfa-import-plc.py @@ -66,7 +66,7 @@ def _get_site_hrn(interface_hrn, site): hrn = ".".join([interface_hrn, "internet2", site['login_base']]) return hrn -# xxx could use a call to persistentobjs.init_tables somewhere +# maybe could use a call to persistentobjs.init_tables somewhere # however now import s expected to be done after service creation.. def main(): diff --git a/sfa/importer/sfa-nuke-plc.py b/sfa/importer/sfa-nuke-plc.py index cfc7d070..6cf58367 100755 --- a/sfa/importer/sfa-nuke-plc.py +++ b/sfa/importer/sfa-nuke-plc.py @@ -13,6 +13,7 @@ from optparse import OptionParser from sfa.util.sfalogging import logger +from sfa.storage.alchemy import dbsession from sfa.storage.persistentobjs import init_tables,drop_tables def main(): @@ -29,13 +30,13 @@ def main(): parser.print_help() sys.exit(1) logger.info("Purging SFA records from database") - drop_tables() + drop_tables(dbsession) # for convenience we re-create the schema here, so there's no need for an explicit # service sfa restart # however in some (upgrade) scenarios this might be wrong if options.reinit: logger.info("re-creating empty schema") - init_tables() + init_tables(dbsession) if options.clean_certs: # remove the server certificate and all gids found in /var/lib/sfa/authorities diff --git a/sfa/server/registry.py b/sfa/server/registry.py index 72d4abb3..5bc7af56 100644 --- a/sfa/server/registry.py +++ b/sfa/server/registry.py @@ -21,8 +21,9 @@ class Registry(SfaServer): def __init__(self, ip, port, key_file, cert_file): 
SfaServer.__init__(self, ip, port, key_file, cert_file,'registry') # initialize db schema + from sfa.storage.alchemy import dbsession from sfa.storage.persistentobjs import init_tables - init_tables() + init_tables(dbsession) # # Registries is a dictionary of registry connections keyed on the registry hrn diff --git a/sfa/storage/alchemy.py b/sfa/storage/alchemy.py index fce441b4..e83a0f1e 100644 --- a/sfa/storage/alchemy.py +++ b/sfa/storage/alchemy.py @@ -3,14 +3,14 @@ from types import StringTypes from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker -from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import Column, Integer, String from sqlalchemy.orm import relationship, backref from sqlalchemy import ForeignKey from sfa.util.sfalogging import logger -Base=declarative_base() +# this module is designed to be loaded when the configured db server is reachable +# OTOH persistentobjs can be loaded from anywhere including the client-side class Alchemy: @@ -48,15 +48,6 @@ class Alchemy: def check (self): self.engine.execute ("select 1").scalar() - # create schema - # warning: need to have all Base subclass loaded for this to work - def create_schema (self): - return Base.metadata.create_all(self.engine) - - # does a complete wipe of the schema, use with care - def drop_schema (self): - return Base.metadata.drop_all(self.engine) - def session (self): if self._session is None: Session=sessionmaker () @@ -85,54 +76,6 @@ class Alchemy: del record self.commit() -#################### -# dicts vs objects -#################### -# historically the front end to the db dealt with dicts, so the code was only dealing with dicts -# sqlalchemy however offers an object interface, meaning that you write obj.id instead of obj['id'] -# which is admittedly much nicer -# however we still need to deal with dictionaries if only for the xmlrpc layer -# -# here are a few utilities for this -# -# (*) first off, when an old pieve of code needs to be 
used as-is, if only temporarily, the simplest trick -# is to use obj.__dict__ -# this behaves exactly like required, i.e. obj.__dict__['field']='new value' does change obj.field -# however this depends on sqlalchemy's implementation so it should be avoided -# -# (*) second, when an object needs to be exposed to the xmlrpc layer, we need to convert it into a dict -# remember though that writing the resulting dictionary won't change the object -# essentially obj.__dict__ would be fine too, except that we want to discard alchemy private keys starting with '_' -# 2 ways are provided for that: -# . dict(obj) -# . obj.todict() -# the former dict(obj) relies on __iter__() and next() below, and does not rely on the fields names -# although it seems to work fine, I've found cases where it issues a weird python error that I could not get right -# so the latter obj.todict() seems more reliable but more hacky as is relies on the form of fields, so this can probably be improved -# -# (*) finally for converting a dictionary into an sqlalchemy object, we provide -# obj.set_from_dict(dict) - -from sqlalchemy.orm import object_mapper -class AlchemyObj: - def __iter__(self): - self._i = iter(object_mapper(self).columns) - return self - def next(self): - n = self._i.next().name - return n, getattr(self, n) - def todict (self): - d=self.__dict__ - keys=[k for k in d.keys() if not k.startswith('_')] - return dict ( [ (k,d[k]) for k in keys ] ) - def set_from_dict (self, d): - for (k,v) in d.iteritems(): - # experimental - if isinstance(v, StringTypes): - if v.lower() in ['true']: v=True - if v.lower() in ['false']: v=False - setattr(self,k,v) - #################### from sfa.util.config import Config diff --git a/sfa/storage/persistentobjs.py b/sfa/storage/persistentobjs.py index acfda8ca..a1f4cb60 100644 --- a/sfa/storage/persistentobjs.py +++ b/sfa/storage/persistentobjs.py @@ -1,18 +1,66 @@ from types import StringTypes from datetime import datetime -from sqlalchemy import 
create_engine from sqlalchemy import Column, Integer, String, DateTime from sqlalchemy import Table, Column, MetaData, join, ForeignKey from sqlalchemy.orm import relationship, backref from sqlalchemy.orm import column_property +from sqlalchemy.orm import object_mapper from sqlalchemy.ext.declarative import declarative_base from sfa.util.sfalogging import logger from sfa.trust.gid import GID -from sfa.storage.alchemy import Base, alchemy, dbsession, engine, AlchemyObj +############################## +Base=declarative_base() + +#################### +# dicts vs objects +#################### +# historically the front end to the db dealt with dicts, so the code was only dealing with dicts +# sqlalchemy however offers an object interface, meaning that you write obj.id instead of obj['id'] +# which is admittedly much nicer +# however we still need to deal with dictionaries if only for the xmlrpc layer +# +# here are a few utilities for this +# +# (*) first off, when an old piece of code needs to be used as-is, if only temporarily, the simplest trick +# is to use obj.__dict__ +# this behaves exactly like required, i.e. obj.__dict__['field']='new value' does change obj.field +# however this depends on sqlalchemy's implementation so it should be avoided +# +# (*) second, when an object needs to be exposed to the xmlrpc layer, we need to convert it into a dict +# remember though that writing the resulting dictionary won't change the object +# essentially obj.__dict__ would be fine too, except that we want to discard alchemy private keys starting with '_' +# 2 ways are provided for that: +# . dict(obj) +# . 
obj.todict() +# the former dict(obj) relies on __iter__() and next() below, and does not rely on the fields names +# although it seems to work fine, I've found cases where it issues a weird python error that I could not get right +# so the latter obj.todict() seems more reliable but more hacky as it relies on the form of fields, so this can probably be improved +# +# (*) finally for converting a dictionary into an sqlalchemy object, we provide +# obj.set_from_dict(dict) + +class AlchemyObj: + def __iter__(self): + self._i = iter(object_mapper(self).columns) + return self + def next(self): + n = self._i.next().name + return n, getattr(self, n) + def todict (self): + d=self.__dict__ + keys=[k for k in d.keys() if not k.startswith('_')] + return dict ( [ (k,d[k]) for k in keys ] ) + def set_from_dict (self, d): + for (k,v) in d.iteritems(): + # experimental + if isinstance(v, StringTypes): + if v.lower() in ['true']: v=True + if v.lower() in ['false']: v=False + setattr(self,k,v) ############################## class Type (Base): @@ -27,8 +75,7 @@ class Type (Base): BUILTIN_TYPES = [ 'authority', 'slice', 'node', 'user', 'authority+sa', 'authority+am', 'authority+sm' ] -def insert_builtin_types(engine,dbsession): - Base.metadata.create_all(engine) +def insert_builtin_types(dbsession): for type in BUILTIN_TYPES : count = dbsession.query (Type).filter_by (type=type).count() if count==0: @@ -106,11 +153,13 @@ class UserRecord (Base): def __repr__ (self): return ""%(self.email,self.gid) ############################## -def init_tables(): +def init_tables(dbsession): logger.info("Initializing db schema and builtin types") + engine=dbsession.get_bind() Base.metadata.create_all(engine) - insert_builtin_types(engine,dbsession) + insert_builtin_types(dbsession) -def drop_tables(): +def drop_tables(dbsession): logger.info("Dropping tables") + engine=dbsession.get_bind() Base.metadata.drop_all(engine)