From eb7d0272ff33c228dbb5d1207fbcb057f835d00b Mon Sep 17 00:00:00 2001 From: Thierry Parmentelat Date: Tue, 17 Jan 2012 12:59:01 +0100 Subject: [PATCH] checkpoint commit - import, register, create_slice all work fine update & delete ahve not moved to sqlalchemy yet --- sfa/importer/sfa-import-openstack.py | 47 +-- sfa/importer/sfa-import-plc.py | 85 ++--- sfa/importer/sfa-nuke-plc.py | 23 +- sfa/importer/sfaImport.py | 43 ++- sfa/managers/registry_manager.py | 156 ++++----- sfa/plc/pldriver.py | 18 +- sfa/server/registry.py | 3 + sfa/storage/alchemy.py | 65 +++- sfa/storage/persistentobjs.py | 106 ++++++ sfa/storage/record.py | 463 --------------------------- sfa/storage/table.py | 151 --------- sfa/util/xrn.py | 9 +- 12 files changed, 373 insertions(+), 796 deletions(-) create mode 100644 sfa/storage/persistentobjs.py delete mode 100644 sfa/storage/record.py delete mode 100644 sfa/storage/table.py diff --git a/sfa/importer/sfa-import-openstack.py b/sfa/importer/sfa-import-openstack.py index ec785e81..9794ec71 100755 --- a/sfa/importer/sfa-import-openstack.py +++ b/sfa/importer/sfa-import-openstack.py @@ -21,14 +21,19 @@ import sys from sfa.util.config import Config from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn from sfa.util.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename -from sfa.storage.table import SfaTable -from sfa.storage.record import SfaRecord +from sfa.util.sfalogging import logger + from sfa.trust.certificate import convert_public_key, Keypair from sfa.trust.gid import create_uuid -from sfa.importer.sfaImport import sfaImport, _cleanup_string -from sfa.util.sfalogging import logger + from sfa.openstack.openstack_shell import OpenstackShell +from sfa.storage.alchemy import dbsession +from sfa.storage.persistentobjs import RegRecord + +from sfa.importer.sfaImport import sfaImport, _cleanup_string + + def process_options(): (options, args) = getopt.getopt(sys.argv[1:], '', []) @@ -71,11 +76,9 @@ def main(): existing_records = {} existing_hrns = [] key_ids = [] - table = SfaTable() - results = table.find() - for result in results: - existing_records[(result['hrn'], result['type'])] = result - existing_hrns.append(result['hrn']) + for record in dbsession.query(RegRecord): + existing_records[ (record.hrn, record.type,) ] = record + existing_hrns.append(record.hrn) # Get all users @@ -107,10 +110,11 @@ def main(): logger.warn("Import: person %s does not have a PL public key"%hrn) pkey = Keypair(create=True) person_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey) - person_record = SfaRecord(hrn=hrn, gid=person_gid, type="user", \ - authority=get_authority(hrn)) - logger.info("Import: importing %s " % person_record.summary_string()) - person_record.sync() + person_record = RegRecord("user", hrn=hrn, gid=person_gid, + authority=get_authority(hrn)) + logger.info("Import: importing %s " % person_record) + dbsession.add(person_record) + dbsession.commit() # Get all projects projects = shell.project_get_all() @@ -123,11 +127,12 @@ def main(): pkey = Keypair(create=True) urn = hrn_to_urn(hrn, 'slice') project_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey) - project_record = SfaRecord(hrn=hrn, gid=project_gid, type="slice", + project_record = RegRecord("slice", hrn=hrn, gid=project_gid, authority=get_authority(hrn)) - projects_dict[project_record['hrn']] = project_record - logger.info("Import: importing %s " % project_record.summary_string()) - project_record.sync() + 
projects_dict[project_record.hrn] = project_record + logger.info("Import: importing %s " % project_record) + dbsession.add(project_record) + dbsession.commit() # remove stale records system_records = [interface_hrn, root_auth, interface_hrn + '.slicemanager'] @@ -136,7 +141,7 @@ def main(): continue record = existing_records[(record_hrn, type)] - if record['peer_authority']: + if record.peer_authority: continue if type == 'user': @@ -148,10 +153,10 @@ def main(): else: continue - record_object = existing_records[(record_hrn, type)] - record = SfaRecord(dict=record_object) + record_object = existing_records[ (record_hrn, type) ] logger.info("Import: removing %s " % record.summary_string()) - record.delete() + del record_object + dbsession.commit() # save pub keys logger.info('Import: saving current pub keys') diff --git a/sfa/importer/sfa-import-plc.py b/sfa/importer/sfa-import-plc.py index 723f473f..f3ac37ef 100755 --- a/sfa/importer/sfa-import-plc.py +++ b/sfa/importer/sfa-import-plc.py @@ -21,13 +21,17 @@ import sys from sfa.util.config import Config from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn from sfa.util.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename -from sfa.storage.table import SfaTable -from sfa.storage.record import SfaRecord + from sfa.trust.gid import create_uuid from sfa.trust.certificate import convert_public_key, Keypair -from sfa.importer.sfaImport import sfaImport, _cleanup_string + from sfa.plc.plshell import PlShell +from sfa.storage.alchemy import dbsession +from sfa.storage.persistentobjs import RegRecord + +from sfa.importer.sfaImport import sfaImport, _cleanup_string + def process_options(): (options, args) = getopt.getopt(sys.argv[1:], '', []) @@ -62,6 +66,8 @@ def _get_site_hrn(interface_hrn, site): hrn = ".".join([interface_hrn, "internet2", site['login_base']]) return hrn +# xxx could use a call to persistentobjs.init_tables somewhere +# however now import s expected to be done after service creation.. 
def main(): process_options() @@ -82,11 +88,9 @@ def main(): existing_hrns = [] key_ids = [] person_keys = {} - table = SfaTable() - results = table.find() - for result in results: - existing_records[(result['hrn'], result['type'])] = result - existing_hrns.append(result['hrn']) + for record in dbsession.query(RegRecord): + existing_records[ (record.hrn, record.type,) ] = record + existing_hrns.append(record.hrn) # Get all plc sites sites = shell.GetSites({'peer_id': None}) @@ -144,10 +148,12 @@ def main(): if not sfaImporter.AuthHierarchy.auth_exists(urn): sfaImporter.AuthHierarchy.create_auth(urn) auth_info = sfaImporter.AuthHierarchy.get_auth_info(urn) - auth_record = SfaRecord(hrn=site_hrn, gid=auth_info.get_gid_object(), \ - type="authority", pointer=site['site_id'], + auth_record = RegRecord("authority", hrn=site_hrn, gid=auth_info.get_gid_object(), + pointer=site['site_id'], authority=get_authority(site_hrn)) - auth_record.sync(verbose=True) + logger.info("Import: Importing auth %s"%auth_record) + dbsession.add(auth_record) + dbsession.commit() # start importing for site in sites: @@ -159,16 +165,16 @@ def main(): if site_hrn not in existing_hrns or \ (site_hrn, 'authority') not in existing_records: try: - logger.info("Import: site %s " % site_hrn) urn = hrn_to_urn(site_hrn, 'authority') if not sfaImporter.AuthHierarchy.auth_exists(urn): sfaImporter.AuthHierarchy.create_auth(urn) auth_info = sfaImporter.AuthHierarchy.get_auth_info(urn) - auth_record = SfaRecord(hrn=site_hrn, gid=auth_info.get_gid_object(), \ - type="authority", pointer=site['site_id'], + auth_record = RegRecord("authority", hrn=site_hrn, gid=auth_info.get_gid_object(), + pointer=site['site_id'], authority=get_authority(site_hrn)) - logger.info("Import: importing site: %s" % auth_record.summary_string()) - auth_record.sync() + logger.info("Import: importing site: %s" % auth_record) + dbsession.add(auth_record) + dbsession.commit() except: # if the site import fails then there is no point in trying to import the # site's child records (node, slices, persons), so skip them. 
@@ -191,9 +197,12 @@ def main(): pkey = Keypair(create=True) urn = hrn_to_urn(hrn, 'node') node_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey) - node_record = SfaRecord(hrn=hrn, gid=node_gid, type="node", pointer=node['node_id'], authority=get_authority(hrn)) - logger.info("Import: importing node: %s" % node_record.summary_string()) - node_record.sync() + node_record = RegRecord("node", hrn=hrn, gid=node_gid, + pointer=node['node_id'], + authority=get_authority(hrn)) + logger.info("Import: importing node: %s" % node_record) + dbsession.add(node_record) + dbsession.commit() except: logger.log_exc("Import: failed to import node") @@ -212,10 +221,12 @@ def main(): pkey = Keypair(create=True) urn = hrn_to_urn(hrn, 'slice') slice_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey) - slice_record = SfaRecord(hrn=hrn, gid=slice_gid, type="slice", pointer=slice['slice_id'], + slice_record = RegRecord("slice", hrn=hrn, gid=slice_gid, + pointer=slice['slice_id'], authority=get_authority(hrn)) - logger.info("Import: importing slice: %s" % slice_record.summary_string()) - slice_record.sync() + logger.info("Import: importing slice: %s" % slice_record) + dbsession.add(slice_record) + dbsession.commit() except: logger.log_exc("Import: failed to import slice") @@ -228,8 +239,8 @@ def main(): if len(hrn) > 64: hrn = hrn[:64] - # if user's primary key has chnaged then we need to update the - # users gid by forcing a update here + # if user's primary key has changed then we need to update the + # users gid by forcing an update here old_keys = [] new_keys = [] if person_id in old_person_keys: @@ -257,10 +268,12 @@ def main(): pkey = Keypair(create=True) urn = hrn_to_urn(hrn, 'user') person_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey) - person_record = SfaRecord(hrn=hrn, gid=person_gid, type="user", \ - pointer=person['person_id'], authority=get_authority(hrn)) - logger.info("Import: importing person: %s" % person_record.summary_string()) - person_record.sync() + person_record = RegRecord("user", hrn=hrn, gid=person_gid, + pointer=person['person_id'], + authority=get_authority(hrn)) + logger.info("Import: importing person: %s" % person_record) + dbsession.add (person_record) + dbsession.commit() except: logger.log_exc("Import: failed to import person.") @@ -271,7 +284,7 @@ def main(): continue record = existing_records[(record_hrn, type)] - if record['peer_authority']: + if record.peer_authority: continue # dont delete vini's internet2 placeholdder record @@ -285,7 +298,7 @@ def main(): if type == 'authority': for site in sites: site_hrn = interface_hrn + "." 
+ site['login_base'] - if site_hrn == record_hrn and site['site_id'] == record['pointer']: + if site_hrn == record_hrn and site['site_id'] == record.pointer: found = True break @@ -299,7 +312,7 @@ def main(): alt_username = person['email'].split("@")[0].replace(".", "_").replace("+", "_") if username in [tmp_username, alt_username] and \ site['site_id'] in person['site_ids'] and \ - person['person_id'] == record['pointer']: + person['person_id'] == record.pointer: found = True break @@ -307,7 +320,7 @@ def main(): slicename = hrn_to_pl_slicename(record_hrn) for slice in slices: if slicename == slice['name'] and \ - slice['slice_id'] == record['pointer']: + slice['slice_id'] == record.pointer: found = True break @@ -320,7 +333,7 @@ def main(): tmp_nodename = node['hostname'] if tmp_nodename == nodename and \ node['site_id'] == site['site_id'] and \ - node['node_id'] == record['pointer']: + node['node_id'] == record.pointer: found = True break else: @@ -329,9 +342,9 @@ def main(): if not found: try: record_object = existing_records[(record_hrn, type)] - record = SfaRecord(dict=record_object) - logger.info("Import: deleting record: %s" % record.summary_string()) - record.delete() + logger.info("Import: deleting record: %s" % record) + del record_object + dbsession.commit() except: logger.log_exc("Import: failded to delete record") # save pub keys diff --git a/sfa/importer/sfa-nuke-plc.py b/sfa/importer/sfa-nuke-plc.py index bd116c88..cfc7d070 100755 --- a/sfa/importer/sfa-nuke-plc.py +++ b/sfa/importer/sfa-nuke-plc.py @@ -13,22 +13,29 @@ from optparse import OptionParser from sfa.util.sfalogging import logger -from sfa.storage.table import SfaTable +from sfa.storage.persistentobjs import init_tables,drop_tables def main(): - usage="%prog: trash the registry DB (the 'sfa' table in the 'planetlab5' database)" + usage="%prog: trash the registry DB" parser = OptionParser(usage=usage) - parser.add_option('-f','--file-system',dest='clean_fs',action='store_true',default=False, - help='Clean up the /var/lib/sfa/authorities area as well') - parser.add_option('-c','--certs',dest='clean_certs',action='store_true',default=False, - help='Remove all cached certs/gids found in /var/lib/sfa/authorities area as well') + parser.add_option("-f","--file-system",dest='clean_fs',action='store_true',default=False, + help="Clean up the /var/lib/sfa/authorities area as well") + parser.add_option("-c","--certs",dest='clean_certs',action='store_true',default=False, + help="Remove all cached certs/gids found in /var/lib/sfa/authorities area as well") + parser.add_option("-0","--no-reinit",dest='reinit',action='store_false',default=True, + help="Do not reinitialize the database schema") (options,args)=parser.parse_args() if args: parser.print_help() sys.exit(1) logger.info("Purging SFA records from database") - table = SfaTable() - table.nuke() + drop_tables() + # for convenience we re-create the schema here, so there's no need for an explicit + # service sfa restart + # however in some (upgrade) scenarios this might be wrong + if options.reinit: + logger.info("re-creating empty schema") + init_tables() if options.clean_certs: # remove the server certificate and all gids found in /var/lib/sfa/authorities diff --git a/sfa/importer/sfaImport.py b/sfa/importer/sfaImport.py index ca0bff2f..c95599cd 100644 --- a/sfa/importer/sfaImport.py +++ b/sfa/importer/sfaImport.py @@ -16,9 +16,8 @@ from sfa.trust.certificate import convert_public_key, Keypair from sfa.trust.trustedroots import TrustedRoots from sfa.trust.hierarchy import 
Hierarchy from sfa.trust.gid import create_uuid -from sfa.storage.table import SfaTable -from sfa.storage.record import SfaRecord - +from sfa.storage.persistentobjs import RegRecord +from sfa.storage.alchemy import dbsession def _un_unicode(str): if isinstance(str, unicode): @@ -49,7 +48,6 @@ class sfaImport: def __init__(self): self.logger = _SfaLogger(logfile='/var/log/sfa_import.log', loggername='importlog') self.AuthHierarchy = Hierarchy() -# self.table = SfaTable() self.config = Config() self.TrustedRoots = TrustedRoots(Config.get_trustedroots_dir(self.config)) self.root_auth = self.config.SFA_REGISTRY_ROOT_AUTH @@ -85,13 +83,15 @@ class sfaImport: if not parent_hrn == hrn: self.create_top_level_auth_records(parent_hrn) - # enxure key and cert exists: + # ensure key and cert exists: self.AuthHierarchy.create_top_level_auth(hrn) # create the db record if it doesnt already exist auth_info = self.AuthHierarchy.get_auth_info(hrn) - auth_record = SfaRecord(hrn=hrn, gid=auth_info.get_gid_object(), type="authority", pointer=-1, authority=get_authority(hrn)) - self.logger.info("Import: importing %s " % auth_record.summary_string()) - auth_record.sync() + auth_record = RegRecord("authority", hrn=hrn, gid=auth_info.get_gid_object(), + authority=get_authority(hrn)) + self.logger.info("Import: importing auth %s " % auth_record) + dbsession.add (auth_record) + dbsession.commit() def create_sm_client_record(self): """ @@ -104,10 +104,11 @@ class sfaImport: self.AuthHierarchy.create_auth(urn) auth_info = self.AuthHierarchy.get_auth_info(hrn) - record = SfaRecord(hrn=hrn, gid=auth_info.get_gid_object(), \ - type="user", pointer=-1, authority=get_authority(hrn)) - self.logger.info("Import: importing %s " % record.summary_string()) - record.sync() + user_record = RegRecord("user", hrn=hrn, gid=auth_info.get_gid_object(), \ + authority=get_authority(hrn)) + self.logger.info("Import: importing user %s " % user_record) + dbsession.add (user_record) + dbsession.commit() def create_interface_records(self): """ @@ -117,21 +118,19 @@ class sfaImport: # arent enabled hrn = self.config.SFA_INTERFACE_HRN interfaces = ['authority+sa', 'authority+am', 'authority+sm'] - table = SfaTable() auth_info = self.AuthHierarchy.get_auth_info(hrn) pkey = auth_info.get_pkey_object() for interface in interfaces: urn = hrn_to_urn(hrn, interface) gid = self.AuthHierarchy.create_gid(urn, create_uuid(), pkey) - interface_record = SfaRecord(hrn=hrn, type=interface, pointer=-1, - gid = gid, authority=get_authority(hrn)) - self.logger.info("Import: importing %s " % interface_record.summary_string()) - interface_record.sync() + interface_record = RegRecord(interface, hrn=hrn, gid = gid, + authority=get_authority(hrn)) + self.logger.info("Import: importing %s " % interface_record) + dbsession.add (interface_record) + dbsession.commit() def delete_record(self, hrn, type): # delete the record - table = SfaTable() - record_list = table.find({'type': type, 'hrn': hrn}) - for record in record_list: - self.logger.info("Import: removing record %s %s" % (type, hrn)) - table.remove(record) + for rec in dbsession.query(RegRecord).filter_by(type=type,hrn=hrn): + del rec + dbsession.commit() diff --git a/sfa/managers/registry_manager.py b/sfa/managers/registry_manager.py index b5be45ff..d957c578 100644 --- a/sfa/managers/registry_manager.py +++ b/sfa/managers/registry_manager.py @@ -19,8 +19,8 @@ from sfa.trust.credential import Credential from sfa.trust.certificate import Certificate, Keypair, convert_public_key from sfa.trust.gid import 
create_uuid -from sfa.storage.record import SfaRecord -from sfa.storage.table import SfaTable +from sfa.storage.persistentobjs import RegRecord +from sfa.storage.alchemy import dbsession class RegistryManager: @@ -49,19 +49,19 @@ class RegistryManager: auth_hrn = api.auth.get_authority(hrn) if not auth_hrn or hrn == api.config.SFA_INTERFACE_HRN: auth_hrn = hrn - # get record info auth_info = api.auth.get_auth_info(auth_hrn) - table = SfaTable() - records = table.findObjects({'type': type, 'hrn': hrn}) - if not records: + # get record info + record=dbsession.query(RegRecord).filter_by(type=type,hrn=hrn).first() + if not record: raise RecordNotFound(hrn) - record = records[0] # verify_cancreate_credential requires that the member lists # (researchers, pis, etc) be filled in - self.driver.augment_records_with_testbed_info (record) - if not self.driver.is_enabled (record): - raise AccountNotEnabled(": PlanetLab account %s is not enabled. Please contact your site PI" %(record['email'])) + logger.debug("get credential before augment dict, keys=%s"%record.__dict__.keys()) + self.driver.augment_records_with_testbed_info (record.__dict__) + logger.debug("get credential after augment dict, keys=%s"%record.__dict__.keys()) + if not self.driver.is_enabled (record.__dict__): + raise AccountNotEnabled(": PlanetLab account %s is not enabled. Please contact your site PI" %(record.email)) # get the callers gid # if this is a self cred the record's gid is the caller's gid @@ -73,12 +73,12 @@ class RegistryManager: caller_hrn = caller_gid.get_hrn() object_hrn = record.get_gid_object().get_hrn() - rights = api.auth.determine_user_rights(caller_hrn, record) + rights = api.auth.determine_user_rights(caller_hrn, record.__dict__) # make sure caller has rights to this object if rights.is_empty(): - raise PermissionError(caller_hrn + " has no rights to " + record['name']) + raise PermissionError(caller_hrn + " has no rights to " + record.hrn) - object_gid = GID(string=record['gid']) + object_gid = GID(string=record.gid) new_cred = Credential(subject = object_gid.get_subject()) new_cred.set_gid_caller(caller_gid) new_cred.set_gid_object(object_gid) @@ -86,8 +86,8 @@ class RegistryManager: #new_cred.set_pubkey(object_gid.get_pubkey()) new_cred.set_privileges(rights) new_cred.get_privileges().delegate_all_privileges(True) - if 'expires' in record: - date = utcparse(record['expires']) + if hasattr(record,'expires'): + date = utcparse(record.expires) expires = datetime_to_epoch(date) new_cred.set_expiration(int(expires)) auth_kind = "authority,ma,sa" @@ -99,18 +99,19 @@ class RegistryManager: return new_cred.save_to_string(save_parents=True) - def Resolve(self, api, xrns, type=None, full=True): + def Resolve(self, api, xrns, intype=None, full=True): if not isinstance(xrns, types.ListType): xrns = [xrns] # try to infer type if not set and we get a single input - if not type: - type = Xrn(xrns).get_type() + if not intype: + intype = Xrn(xrns).get_type() hrns = [urn_to_hrn(xrn)[0] for xrn in xrns] + # load all known registry names into a prefix tree and attempt to find # the longest matching prefix - # create a dict where key is a registry hrn and its value is a - # hrns at that registry (determined by the known prefix tree). + # create a dict where key is a registry hrn and its value is a list + # of hrns at that registry (determined by the known prefix tree). 
xrn_dict = {} registries = api.registries tree = prefixTree() @@ -136,47 +137,49 @@ class RegistryManager: credential = api.getCredential() interface = api.registries[registry_hrn] server_proxy = api.server_proxy(interface, credential) - peer_records = server_proxy.Resolve(xrns, credential) - records.extend([SfaRecord(dict=record).as_dict() for record in peer_records]) + peer_records = server_proxy.Resolve(xrns, credential,intype) + # pass foreign records as-is + records.extend(peer_records) # try resolving the remaining unfound records at the local registry local_hrns = list ( set(hrns).difference([record['hrn'] for record in records]) ) # - table = SfaTable() - local_records = table.findObjects({'hrn': local_hrns}) + local_records = dbsession.query(RegRecord).filter(RegRecord.hrn.in_(local_hrns)) + if intype: + local_records = local_records.filter_by(type=intype) + local_records=local_records.all() + local_dicts = [ record.__dict__ for record in local_records ] if full: # in full mode we get as much info as we can, which involves contacting the # testbed for getting implementation details about the record - self.driver.augment_records_with_testbed_info(local_records) - # also we fill the 'url' field for known authorities - # used to be in the driver code, sounds like a poorman thing though + for record in local_dicts: logger.info("resolve augment %s"%record) + self.driver.augment_records_with_testbed_info(local_dicts) +# # also we fill the 'url' field for known authorities +# # used to be in the driver code, sounds like a poorman thing though def solve_neighbour_url (record): - if not record['type'].startswith('authority'): return - hrn=record['hrn'] + if not record.type.startswith('authority'): return + hrn=record.hrn for neighbour_dict in [ api.aggregates, api.registries ]: if hrn in neighbour_dict: - record['url']=neighbour_dict[hrn].get_url() + record.url=neighbour_dict[hrn].get_url() return - [ solve_neighbour_url (record) for record in local_records ] - - + for record in local_records: solve_neighbour_url (record) - # convert local record objects to dicts - records.extend([dict(record) for record in local_records]) - if type: - records = filter(lambda rec: rec['type'] in [type], records) - + # convert local record objects to dicts for xmlrpc + # xxx somehow here calling dict(record) issues a weird error + # however record.todict() seems to work fine +# records.extend( [ dict(record) for record in local_records ] ) + records.extend( [ record.todict() for record in local_records ] ) if not records: raise RecordNotFound(str(hrns)) return records - def List(self, api, xrn, origin_hrn=None): + def List (self, api, xrn, origin_hrn=None): hrn, type = urn_to_hrn(xrn) # load all know registry names into a prefix tree and attempt to find # the longest matching prefix - records = [] registries = api.registries registry_hrns = registries.keys() tree = prefixTree() @@ -188,23 +191,23 @@ class RegistryManager: raise MissingAuthority(xrn) # if the best match (longest matching hrn) is not the local registry, # forward the request - records = [] + record_dicts = [] if registry_hrn != api.hrn: credential = api.getCredential() interface = api.registries[registry_hrn] server_proxy = api.server_proxy(interface, credential) record_list = server_proxy.List(xrn, credential) - records = [SfaRecord(dict=record).as_dict() for record in record_list] + # pass foreign records as-is + record_dicts = record_list # if we still have not found the record yet, try the local registry - if not records: + if not 
record_dicts: if not api.auth.hierarchy.auth_exists(hrn): raise MissingAuthority(hrn) + records = dbsession.query(RegRecord).filter_by(authority=hrn) + record_dicts=[ record.__dict__ for record in records ] - table = SfaTable() - records = table.find({'authority': hrn}) - - return records + return record_dicts def CreateGid(self, api, xrn, cert): @@ -227,57 +230,57 @@ class RegistryManager: # subject_record describes the subject of the relationships # ref_record contains the target values for the various relationships we need to manage # (to begin with, this is just the slice x person relationship) - def update_relations (self, subject_record, ref_record): - type=subject_record['type'] + def update_relations (self, subject_obj, ref_obj): + type=subject_obj.type if type=='slice': - self.update_relation(subject_record, 'researcher', ref_record.get('researcher'), 'user') + self.update_relation(subject_obj, 'researcher', ref_obj.researcher, 'user') # field_key is the name of one field in the record, typically 'researcher' for a 'slice' record # hrns is the list of hrns that should be linked to the subject from now on # target_type would be e.g. 'user' in the 'slice' x 'researcher' example - def update_relation (self, sfa_record, field_key, hrns, target_type): + def update_relation (self, record_obj, field_key, hrns, target_type): # locate the linked objects in our db - subject_type=sfa_record['type'] - subject_id=sfa_record['pointer'] - table = SfaTable() - link_sfa_records = table.find ({'type':target_type, 'hrn': hrns}) - link_ids = [ rec.get('pointer') for rec in link_sfa_records ] + subject_type=record_obj.type + subject_id=record_obj.pointer + # get the 'pointer' field of all matching records + link_id_tuples = dbsession.query(RegRecord.pointer).filter_by(type=target_type).filter(RegRecord.hrn.in_(hrns)).all() + # sqlalchemy returns named tuples for columns + link_ids = [ tuple.pointer for tuple in link_id_tuples ] self.driver.update_relation (subject_type, target_type, subject_id, link_ids) - - def Register(self, api, record): + def Register(self, api, record_dict): - hrn, type = record['hrn'], record['type'] + hrn, type = record_dict['hrn'], record_dict['type'] urn = hrn_to_urn(hrn,type) # validate the type if type not in ['authority', 'slice', 'node', 'user']: raise UnknownSfaType(type) - # check if record already exists - table = SfaTable() - existing_records = table.find({'type': type, 'hrn': hrn}) + # check if record_dict already exists + existing_records = dbsession.query(RegRecord).filter_by(type=type,hrn=hrn).all() if existing_records: raise ExistingRecord(hrn) - record = SfaRecord(dict = record) - record['authority'] = get_authority(record['hrn']) - auth_info = api.auth.get_auth_info(record['authority']) + assert ('type' in record_dict) + record = RegRecord("undefined") + record.set_from_dict(record_dict) + record.authority = get_authority(record.hrn) + auth_info = api.auth.get_auth_info(record.authority) pub_key = None # make sure record has a gid - if 'gid' not in record: + if not record.gid: uuid = create_uuid() pkey = Keypair(create=True) - if 'keys' in record and record['keys']: - pub_key=record['keys'] + if getattr(record,'keys',None): + pub_key=record.keys # use only first key in record - if isinstance(record['keys'], types.ListType): - pub_key = record['keys'][0] + if isinstance(record.keys, types.ListType): + pub_key = record.keys[0] pkey = convert_public_key(pub_key) gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey) gid = 
gid_object.save_to_string(save_parents=True) - record['gid'] = gid - record.set_gid(gid) + record.gid = gid if type in ["authority"]: # update the tree @@ -286,14 +289,13 @@ class RegistryManager: # get the GID from the newly created authority gid = auth_info.get_gid_object() - record.set_gid(gid.save_to_string(save_parents=True)) + record.gid=gid.save_to_string(save_parents=True) # update testbed-specific data if needed - pointer = self.driver.register (record, hrn, pub_key) + pointer = self.driver.register (record.__dict__, hrn, pub_key) - record.set_pointer(pointer) - record_id = table.insert(record) - record['record_id'] = record_id + record.pointer=pointer + dbsession.add(record) # update membership for researchers, pis, owners, operators self.update_relations (record, record) diff --git a/sfa/plc/pldriver.py b/sfa/plc/pldriver.py index 3f01e7f3..9a7db2dd 100644 --- a/sfa/plc/pldriver.py +++ b/sfa/plc/pldriver.py @@ -11,7 +11,8 @@ from sfa.util.xrn import hrn_to_urn, get_leaf, urn_to_sliver_id from sfa.util.cache import Cache # one would think the driver should not need to mess with the SFA db, but.. -from sfa.storage.table import SfaTable +from sfa.storage.alchemy import dbsession +from sfa.storage.persistentobjs import RegRecord # used to be used in get_ticket #from sfa.trust.sfaticket import SfaTicket @@ -206,7 +207,7 @@ class PlDriver (Driver): ## - # Convert SFA fields to PLC fields for use when registering up updating + # Convert SFA fields to PLC fields for use when registering or updating # registry record in the PLC database # @@ -446,16 +447,15 @@ class PlDriver (Driver): # we'll replace pl ids (person_ids) with hrns from the sfa records # we obtain - # get the sfa records - table = SfaTable() + # get the registry records person_list, persons = [], {} - person_list = table.find({'type': 'user', 'pointer': person_ids}) + person_list = dbsession.query (RegRecord).filter(RegRecord.pointer.in_(person_ids)) # create a hrns keyed on the sfa record's pointer. # Its possible for multiple records to have the same pointer so # the dict's value will be a list of hrns. 
persons = defaultdict(list) for person in person_list: - persons[person['pointer']].append(person) + persons[person.pointer].append(person) # get the pl records pl_person_list, pl_persons = [], {} @@ -475,7 +475,7 @@ class PlDriver (Driver): record['PI'] = [] record['researcher'] = [] for person_id in record.get('person_ids', []): - hrns = [person['hrn'] for person in persons[person_id]] + hrns = [person.hrn for person in persons[person_id]] record['researcher'].extend(hrns) # pis at the slice's site @@ -483,7 +483,7 @@ class PlDriver (Driver): pl_pis = site_pis[record['site_id']] pi_ids = [pi['person_id'] for pi in pl_pis] for person_id in pi_ids: - hrns = [person['hrn'] for person in persons[person_id]] + hrns = [person.hrn for person in persons[person_id]] record['PI'].extend(hrns) record['geni_creator'] = record['PI'] @@ -497,7 +497,7 @@ class PlDriver (Driver): if pointer not in persons or pointer not in pl_persons: # this means there is not sfa or pl record for this user continue - hrns = [person['hrn'] for person in persons[pointer]] + hrns = [person.hrn for person in persons[pointer]] roles = pl_persons[pointer]['roles'] if 'pi' in roles: record['PI'].extend(hrns) diff --git a/sfa/server/registry.py b/sfa/server/registry.py index bdad7df2..72d4abb3 100644 --- a/sfa/server/registry.py +++ b/sfa/server/registry.py @@ -20,6 +20,9 @@ class Registry(SfaServer): def __init__(self, ip, port, key_file, cert_file): SfaServer.__init__(self, ip, port, key_file, cert_file,'registry') + # initialize db schema + from sfa.storage.persistentobjs import init_tables + init_tables() # # Registries is a dictionary of registry connections keyed on the registry hrn diff --git a/sfa/storage/alchemy.py b/sfa/storage/alchemy.py index dff6151f..9e00e1bb 100644 --- a/sfa/storage/alchemy.py +++ b/sfa/storage/alchemy.py @@ -1,25 +1,21 @@ from sqlalchemy import create_engine - from sqlalchemy.orm import sessionmaker -Session=sessionmaker () -session=Session(bind=engine) -#session.configure(bind=engine) from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import Column, Integer, String from sqlalchemy.orm import relationship, backref from sqlalchemy import ForeignKey -from sfa.util.sfalogger import logger +from sfa.util.sfalogging import logger Base=declarative_base() -class DB: +class Alchemy: def __init__ (self, config): dbname="sfa" # will be created lazily on-demand - self.session = None + self._session = None # the former PostgreSQL.py used the psycopg2 directly and was doing #self.connection.set_client_encoding("UNICODE") # it's unclear how to achieve this in sqlalchemy, nor if it's needed at all @@ -32,7 +28,7 @@ class DB: # the TCP fallback method tcp_desc = "postgresql+psycopg2://%s:%s@%s:%s/%s"%\ (config.SFA_DB_USER,config.SFA_DB_PASSWORD,config.SFA_DB_HOST,config.SFA_DB_PORT,dbname) - for desc in [ unix_desc, tcp_desc ] : + for engine_desc in [ unix_desc, tcp_desc ] : try: self.engine = create_engine (engine_desc) self.check() @@ -51,6 +47,7 @@ class DB: self.engine.execute ("select 1").scalar() # create schema + # warning: need to have all Base subclass loaded for this to work def create_schema (self): return Base.metadata.create_all(self.engine) @@ -85,3 +82,55 @@ class DB: def remove (self, record): del record self.commit() + +#################### +# dicts vs objects +#################### +# historically the front end to the db dealt with dicts, so the code was only dealing with dicts +# sqlalchemy however offers an object interface, meaning that you write obj.id instead of 
obj['id'] +# which is admittedly much nicer +# however we still need to deal with dictionaries if only for the xmlrpc layer +# +# here are a few utilities for this +# +# (*) first off, when an old pieve of code needs to be used as-is, if only temporarily, the simplest trick +# is to use obj.__dict__ +# this behaves exactly like required, i.e. obj.__dict__['field']='new value' does change obj.field +# however this depends on sqlalchemy's implementation so it should be avoided +# +# (*) second, when an object needs to be exposed to the xmlrpc layer, we need to convert it into a dict +# remember though that writing the resulting dictionary won't change the object +# essentially obj.__dict__ would be fine too, except that we want to discard alchemy private keys starting with '_' +# 2 ways are provided for that: +# . dict(obj) +# . obj.todict() +# the former dict(obj) relies on __iter__() and next() below, and does not rely on the fields names +# although it seems to work fine, I've found cases where it issues a weird python error that I could not get right +# so the latter obj.todict() seems more reliable but more hacky as is relies on the form of fields, so this can probably be improved +# +# (*) finally for converting a dictionary into an sqlalchemy object, we provide +# obj.set_from_dict(dict) + +from sqlalchemy.orm import object_mapper +class AlchemyObj: + def __iter__(self): + self._i = iter(object_mapper(self).columns) + return self + def next(self): + n = self._i.next().name + return n, getattr(self, n) + def todict (self): + d=self.__dict__ + keys=[k for k in d.keys() if not k.startswith('_')] + return dict ( [ (k,d[k]) for k in keys ] ) + def set_from_dict (self, d): + for (k,v) in d.iteritems(): + setattr(self,k,v) + +#################### +from sfa.util.config import Config + +alchemy=Alchemy (Config()) +engine=alchemy.engine +dbsession=alchemy.session() + diff --git a/sfa/storage/persistentobjs.py b/sfa/storage/persistentobjs.py new file mode 100644 index 00000000..be0d49f6 --- /dev/null +++ b/sfa/storage/persistentobjs.py @@ -0,0 +1,106 @@ +from types import StringTypes + +from sqlalchemy import create_engine +from sqlalchemy import Column, Integer, String +from sqlalchemy import Table, Column, MetaData, join, ForeignKey +from sqlalchemy.orm import relationship, backref +from sqlalchemy.orm import column_property +from sqlalchemy.ext.declarative import declarative_base + +from sfa.util.sfalogging import logger + +from sfa.trust.gid import GID + +from sfa.storage.alchemy import Base, alchemy, dbsession, engine, AlchemyObj + +############################## +class Type (Base): + __table__ = Table ('types', Base.metadata, + Column ('type',String, primary_key=True) + ) + def __init__ (self, type): self.type=type + def __repr__ (self): return ""%self.type + +#BUILTIN_TYPES = [ 'authority', 'slice', 'node', 'user' ] +# xxx for compat but sounds useless +BUILTIN_TYPES = [ 'authority', 'slice', 'node', 'user', + 'authority+sa', 'authority+am', 'authority+sm' ] + +def insert_builtin_types(engine,dbsession): + Base.metadata.create_all(engine) + for type in BUILTIN_TYPES : + count = dbsession.query (Type).filter_by (type=type).count() + if count==0: + dbsession.add (Type (type)) + dbsession.commit() + +############################## +class RegRecord (Base,AlchemyObj): + # xxx tmp would be 'records' + __table__ = Table ('records', Base.metadata, + Column ('record_id', Integer, primary_key=True), + Column ('type', String, ForeignKey ("types.type")), + Column ('hrn',String), + Column 
('gid',String), + Column ('authority',String), + Column ('peer_authority',String), + Column ('pointer',Integer,default=-1), + Column ('date_created',String), + Column ('last_updated',String), + ) + def __init__ (self, type, hrn=None, gid=None, authority=None, peer_authority=None, pointer=-1): + self.type=type + if hrn: self.hrn=hrn + if gid: + if isinstance(gid, StringTypes): self.gid=gid + else: self.gid=gid.save_to_string(save_parents=True) + if authority: self.authority=authority + if peer_authority: self.peer_authority=peer_authority + self.pointer=pointer + + def __repr__(self): + result="[Record(record_id=%s, hrn=%s, type=%s, authority=%s, pointer=%s" % \ + (self.record_id, self.hrn, self.type, self.authority, self.pointer) + if self.gid: result+=" %s..."%self.gid[:10] + else: result+=" no-gid" + result += "]" + return result + + def get_gid_object (self): + if not self.gid: return None + else: return GID(string=self.gid) + +############################## +class User (Base): + __table__ = Table ('users', Base.metadata, + Column ('user_id', Integer, primary_key=True), + Column ('record_id',Integer, ForeignKey('records.record_id')), + Column ('email', String), + ) + def __init__ (self, email): + self.email=email + def __repr__ (self): return ""%(self.user_id,self.email,self.record_id,) + +record_table = RegRecord.__table__ +user_table = User.__table__ +record_user_join = join (record_table, user_table) + +class UserRecord (Base): + __table__ = record_user_join + record_id = column_property (record_table.c.record_id, user_table.c.record_id) + user_id = user_table.c.user_id + def __init__ (self, gid, email): + self.type='user' + self.gid=gid + self.email=email + def __repr__ (self): return ""%(self.email,self.gid) + +############################## +def init_tables(): + logger.info("Initializing db schema and builtin types") + Base.metadata.create_all(engine) + insert_builtin_types(engine,dbsession) + +def drop_tables(): + logger.info("Dropping tables") + Base.metadata.drop_all(engine) diff --git a/sfa/storage/record.py b/sfa/storage/record.py deleted file mode 100644 index f9f807e9..00000000 --- a/sfa/storage/record.py +++ /dev/null @@ -1,463 +0,0 @@ -## -# Implements support for SFA records -# -# TODO: Use existing PLC database methods? or keep this separate? -## - -from types import StringTypes -from sfa.trust.gid import GID -from sfa.storage.parameter import Parameter -from sfa.util.xrn import get_authority -from sfa.storage.row import Row -from sfa.util.xml import XML -from sfa.util.sfalogging import logger - -class SfaRecord(Row): - """ - The SfaRecord class implements an SFA Record. A SfaRecord is a tuple - (Hrn, GID, Type, Info). - - Hrn specifies the Human Readable Name of the object - GID is the GID of the object - Type is user | authority | slice | component - - Info is comprised of the following sub-fields - pointer = a pointer to the record in the PL database - - The pointer is interpreted depending on the type of the record. For example, - if the type=="user", then pointer is assumed to be a person_id that indexes - into the persons table. - - A given HRN may have more than one record, provided that the records are - of different types. 
- """ - -# table_name = 'sfa' -# primary_key = 'record_id' - - ### the wsdl generator assumes this is named 'fields' - internal_fields = { - 'record_id': Parameter(int, "An id that uniquely identifies this record", ro=True), - 'pointer': Parameter(int, "An id that uniquely identifies this record in an external database") - } - - fields = { - 'authority': Parameter(str, "The authority for this record"), - 'peer_authority': Parameter(str, "The peer authority for this record"), - 'hrn': Parameter(str, "Human readable name of object"), - 'gid': Parameter(str, "GID of the object"), - 'type': Parameter(str, "Record type"), - 'last_updated': Parameter(int, "Date and time of last update", ro=True), - 'date_created': Parameter(int, "Date and time this record was created", ro=True), - } - all_fields = dict(fields.items() + internal_fields.items()) - ## - # Create an SFA Record - # - # @param name if !=None, assign the name of the record - # @param gid if !=None, assign the gid of the record - # @param type one of user | authority | slice | component - # @param pointer is a pointer to a PLC record - # @param dict if !=None, then fill in this record from the dictionary - - def __init__(self, hrn=None, gid=None, type=None, pointer=None, authority=None, - peer_authority=None, dict=None, string=None): - self.dirty = True - self.hrn = None - self.gid = None - self.type = None - self.pointer = None - self.set_peer_auth(peer_authority) - self.set_authority(authority) - if hrn: - self.set_name(hrn) - if gid: - self.set_gid(gid) - if type: - self.set_type(type) - if pointer: - self.set_pointer(pointer) - if dict: - self.load_from_dict(dict) - if string: - self.load_from_string(string) - - - def validate_last_updated(self, last_updated): - return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()) - - def update(self, new_dict): - if isinstance(new_dict, list): - new_dict = new_dict[0] - - # Convert any boolean strings to real bools - for key in new_dict: - if isinstance(new_dict[key], StringTypes): - if new_dict[key].lower() in ["true"]: - new_dict[key] = True - elif new_dict[key].lower() in ["false"]: - new_dict[key] = False - dict.update(self, new_dict) - - ## - # Set the name of the record - # - # @param hrn is a string containing the HRN - - def set_name(self, hrn): - """ - Set the name of the record - """ - self.hrn = hrn - self['hrn'] = hrn - self.dirty = True - - def set_authority(self, authority): - """ - Set the authority - """ - if not authority: - authority = "" - self.authority = authority - self['authority'] = authority - self.dirty = True - - - ## - # Set the GID of the record - # - # @param gid is a GID object or the string representation of a GID object - - def set_gid(self, gid): - """ - Set the GID of the record - """ - - if isinstance(gid, StringTypes): - self.gid = gid - self['gid'] = gid - else: - self.gid = gid.save_to_string(save_parents=True) - self['gid'] = gid.save_to_string(save_parents=True) - self.dirty = True - - ## - # Set the type of the record - # - # @param type is a string: user | authority | slice | component - - def set_type(self, type): - """ - Set the type of the record - """ - self.type = type - self['type'] = type - self.dirty = True - - ## - # Set the pointer of the record - # - # @param pointer is an integer containing the ID of a PLC record - - def set_pointer(self, pointer): - """ - Set the pointer of the record - """ - self.pointer = pointer - self['pointer'] = pointer - self.dirty = True - - - def set_peer_auth(self, peer_authority): - self.peer_authority = 
peer_authority - self['peer_authority'] = peer_authority - self.dirty = True - - ## - # Return the name (HRN) of the record - - def get_name(self): - """ - Return the name (HRN) of the record - """ - return self.hrn - - ## - # Return the type of the record - - def get_type(self): - """ - Return the type of the record - """ - return self.type - - ## - # Return the pointer of the record. The pointer is an integer that may be - # used to look up the record in the PLC database. The evaluation of pointer - # depends on the type of the record - - def get_pointer(self): - """ - Return the pointer of the record. The pointer is an integer that may be - used to look up the record in the PLC database. The evaluation of pointer - depends on the type of the record - """ - return self.pointer - - ## - # Return the GID of the record, in the form of a GID object - # TODO: not the best name for the function, because we have things called - # gidObjects in the Cred - - def get_gid_object(self): - """ - Return the GID of the record, in the form of a GID object - """ - return GID(string=self.gid) - - ## - # Returns the value of a field - - def get_field(self, fieldname, default=None): - # sometimes records act like classes, and sometimes they act like dicts - try: - return getattr(self, fieldname) - except AttributeError: - try: - return self[fieldname] - except KeyError: - if default != None: - return default - else: - raise - - ## - # Returns a list of field names in this record. - - def get_field_names(self): - """ - Returns a list of field names in this record. - """ - return self.fields.keys() - - ## - # Given a field name ("hrn", "gid", ...) return the value of that field. - # - # @param fieldname is the name of field to be returned - - def get_field_value_string(self, fieldname): - """ - Given a field name ("hrn", "gid", ...) return the value of that field. - """ - if fieldname == "authority": - val = get_authority(self['hrn']) - else: - try: - val = getattr(self, fieldname) - except: - val = self[fieldname] - if isinstance(val, str): - return "'" + str(val) + "'" - else: - return str(val) - - ## - # Given a list of field names, return a list of values for those public. - # - # @param fieldnames is a list of field names - - def get_field_value_strings(self, fieldnames): - """ - Given a list of field names, return a list of values for those public. - """ - return [ self.get_field_value_string (fieldname) for fieldname in fieldnames ] - - ## - # Return the record in the form of a dictionary - - def as_dict(self): - """ - Return the record in the form of a dictionary - """ - return dict(self) - - ## - # Load the record from a dictionary - # - # @param dict dictionary to load record public from - - def load_from_dict(self, dict): - """ - Load the record from a dictionary - """ - - self.set_name(dict['hrn']) - gidstr = dict.get("gid", None) - if gidstr: - self.set_gid(dict['gid']) - - if "pointer" in dict: - self.set_pointer(dict['pointer']) - - self.set_type(dict['type']) - self.update(dict) - - ## - # Save the record to a string. The string contains an XML representation of - # the record. - - def save_to_string(self): - """ - Save the record to a string. The string contains an XML representation of - the record. - """ - recorddict = self.as_dict() - filteredDict = dict([(key, val) for (key, val) in recorddict.iteritems() if key in self.fields.keys()]) - xml_record = XML('') - xml_record.parse_dict(filteredDict) - str = xml_record.toxml() - return str - - ## - # Load the record from a string. 
The string is assumed to contain an XML - # representation of the record. - - def load_from_string(self, str): - """ - Load the record from a string. The string is assumed to contain an XML - representation of the record. - """ - #dict = xmlrpclib.loads(str)[0][0] - - xml_record = XML(str) - self.load_from_dict(xml_record.todict()) - - ## - # Dump the record to stdout - # - # @param dump_parents if true, then the parents of the GID will be dumped - - def dump(self, dump_parents=False): - """ - Walk tree and dump records. - """ - #print "RECORD", self.name - #print " hrn:", self.name - #print " type:", self.type - #print " gid:" - #if (not self.gid): - # print " None" - #else: - # self.get_gid_object().dump(8, dump_parents) - #print " pointer:", self.pointer - - order = SfaRecord.fields.keys() - for key in self.keys(): - if key not in order: - order.append(key) - for key in order: - if key in self and key in self.fields: - if key in 'gid' and self[key]: - gid = GID(string=self[key]) - print " %s:" % key - gid.dump(8, dump_parents) - else: - print " %s: %s" % (key, self[key]) - - def summary_string(self): - return "Record(record_id=%s, hrn=%s, type=%s, authority=%s, pointer=%s)" % \ - (self.get('record_id'), self.get('hrn'), self.get('type'), self.get('authority'), \ - self.get('pointer')) - - def getdict(self): - return dict(self) - - def sync(self): - """ - Sync this record with the database. - """ - from sfa.storage.table import SfaTable - table = SfaTable() - filter = {} - if self.get('record_id'): - filter['record_id'] = self.get('record_id') - if self.get('hrn') and self.get('type'): - filter['hrn'] = self.get('hrn') - filter['type'] = self.get('type') - if self.get('pointer'): - filter['pointer'] = self.get('pointer') - existing_records = table.find(filter) - if not existing_records: - table.insert(self) - else: - existing_record = existing_records[0] - self['record_id'] = existing_record['record_id'] - table.update(self) - - def delete(self): - """ - Remove record from the database. 
- """ - from sfa.storage.table import SfaTable - table = SfaTable() - filter = {} - if self.get('record_id'): - filter['record_id'] = self.get('record_id') - if self.get('hrn') and self.get('type'): - filter['hrn'] = self.get('hrn') - filter['type'] = self.get('type') - if self.get('pointer'): - filter['pointer'] = self.get('pointer') - if filter: - existing_records = table.find(filter) - for record in existing_records: - table.remove(record) - -class UserRecord(SfaRecord): - - fields = { - 'email': Parameter(str, 'email'), - 'first_name': Parameter(str, 'First name'), - 'last_name': Parameter(str, 'Last name'), - 'phone': Parameter(str, 'Phone Number'), - 'keys': Parameter(str, 'Public key'), - 'slices': Parameter([str], 'List of slices this user belongs to'), - } - fields.update(SfaRecord.fields) - -class SliceRecord(SfaRecord): - fields = { - 'name': Parameter(str, 'Slice name'), - 'url': Parameter(str, 'Slice url'), - 'expires': Parameter(int, 'Date and time this slice exipres'), - 'researcher': Parameter([str], 'List of users for this slice'), - 'PI': Parameter([str], 'List of PIs responsible for this slice'), - 'description': Parameter([str], 'Description of this slice'), - } - fields.update(SfaRecord.fields) - - -class NodeRecord(SfaRecord): - fields = { - 'hostname': Parameter(str, 'This nodes dns name'), - 'node_type': Parameter(str, 'Type of node this is'), - 'latitude': Parameter(str, 'latitude'), - 'longitude': Parameter(str, 'longitude'), - } - fields.update(SfaRecord.fields) - - -class AuthorityRecord(SfaRecord): - fields = { - 'name': Parameter(str, 'Name'), - 'login_base': Parameter(str, 'login base'), - 'enabled': Parameter(bool, 'Is this site enabled'), - 'url': Parameter(str, 'URL'), - 'nodes': Parameter([str], 'List of nodes at this site'), - 'operator': Parameter([str], 'List of operators'), - 'researcher': Parameter([str], 'List of researchers'), - 'PI': Parameter([str], 'List of Principal Investigators'), - } - fields.update(SfaRecord.fields) - - diff --git a/sfa/storage/table.py b/sfa/storage/table.py deleted file mode 100644 index 678d5dc5..00000000 --- a/sfa/storage/table.py +++ /dev/null @@ -1,151 +0,0 @@ -# -# implements support for SFA records stored in db tables -# -# TODO: Use existing PLC database methods? or keep this separate? - -from types import StringTypes - -from sfa.util.config import Config - -from sfa.storage.alchemy import DB -#from sfa.storage.PostgreSQL import PostgreSQL -from sfa.storage.parameter import Parameter -from sfa.storage.filter import Filter -from sfa.storage.record import SfaRecord, AuthorityRecord, NodeRecord, SliceRecord, UserRecord - -class SfaTable(list): - - SFA_TABLE_PREFIX = "records" - - def __init__(self, record_filter = None): - - self.tablename = SfaTable.SFA_TABLE_PREFIX - self.config = Config() - self.db = PostgreSQL(self.config) - - if record_filter: - records = self.find(record_filter) - for record in records: - self.append(record) - - def db_fields(self, obj=None): - - db_fields = self.db.fields(self.SFA_TABLE_PREFIX) - return dict( [ (key,value) for (key, value) in obj.iteritems() \ - if key in db_fields and - self.is_writable(key, value, SfaRecord.fields)] ) - - @staticmethod - def is_writable (key,value,dict): - # if not mentioned, assume it's writable (e.g. deleted ...) 
- if key not in dict: return True - # if mentioned but not linked to a Parameter object, idem - if not isinstance(dict[key], Parameter): return True - # if not marked ro, it's writable - if not dict[key].ro: return True - - return False - - - def clear (self): - self.db.do("DELETE from %s"%self.tablename) - self.db.commit() - - # what sfa-nuke does - def nuke (self): - self.clear() - - def remove(self, record): - params = {'record_id': record['record_id']} - template = "DELETE FROM %s " % self.tablename - sql = template + "WHERE record_id = %(record_id)s" - self.db.do(sql, params) - - # if this is a site, remove all records where 'authority' == the - # site's hrn - if record['type'] == 'authority': - params = {'authority': record['hrn']} - sql = template + "WHERE authority = %(authority)s" - self.db.do(sql, params) - self.db.commit() - - def insert(self, record): - db_fields = self.db_fields(record) - keys = db_fields.keys() - values = [self.db.param(key, value) for (key, value) in db_fields.iteritems()] - query_str = "INSERT INTO " + self.tablename + \ - "(" + ",".join(keys) + ") " + \ - "VALUES(" + ",".join(values) + ")" - self.db.do(query_str, db_fields) - self.db.commit() - result = self.find({'hrn': record['hrn'], 'type': record['type'], 'peer_authority': record['peer_authority']}) - if not result: - record_id = None - elif isinstance(result, list): - record_id = result[0]['record_id'] - else: - record_id = result['record_id'] - - return record_id - - def update(self, record): - db_fields = self.db_fields(record) - keys = db_fields.keys() - values = [self.db.param(key, value) for (key, value) in db_fields.iteritems()] - columns = ["%s = %s" % (key, value) for (key, value) in zip(keys, values)] - query_str = "UPDATE %s SET %s WHERE record_id = %s" % \ - (self.tablename, ", ".join(columns), record['record_id']) - self.db.do(query_str, db_fields) - self.db.commit() - - def quote_string(self, value): - return str(self.db.quote(value)) - - def quote(self, value): - return self.db.quote(value) - - def find(self, record_filter = None, columns=None): - if not columns: - columns = "*" - else: - columns = ",".join(columns) - sql = "SELECT %s FROM %s WHERE True " % (columns, self.tablename) - - if isinstance(record_filter, (list, tuple, set)): - ints = filter(lambda x: isinstance(x, (int, long)), record_filter) - strs = filter(lambda x: isinstance(x, StringTypes), record_filter) - record_filter = Filter(SfaRecord.all_fields, {'record_id': ints, 'hrn': strs}) - sql += "AND (%s) %s " % record_filter.sql("OR") - elif isinstance(record_filter, dict): - record_filter = Filter(SfaRecord.all_fields, record_filter) - sql += " AND (%s) %s" % record_filter.sql("AND") - elif isinstance(record_filter, StringTypes): - record_filter = Filter(SfaRecord.all_fields, {'hrn':[record_filter]}) - sql += " AND (%s) %s" % record_filter.sql("AND") - elif isinstance(record_filter, int): - record_filter = Filter(SfaRecord.all_fields, {'record_id':[record_filter]}) - sql += " AND (%s) %s" % record_filter.sql("AND") - - results = self.db.selectall(sql) - if isinstance(results, dict): - results = [results] - return results - - def findObjects(self, record_filter = None, columns=None): - - results = self.find(record_filter, columns) - result_rec_list = [] - for result in results: - if result['type'] in ['authority']: - result_rec_list.append(AuthorityRecord(dict=result)) - elif result['type'] in ['node']: - result_rec_list.append(NodeRecord(dict=result)) - elif result['type'] in ['slice']: - 
result_rec_list.append(SliceRecord(dict=result)) - elif result['type'] in ['user']: - result_rec_list.append(UserRecord(dict=result)) - else: - result_rec_list.append(SfaRecord(dict=result)) - return result_rec_list - - diff --git a/sfa/util/xrn.py b/sfa/util/xrn.py index 1f506289..fb9e864f 100644 --- a/sfa/util/xrn.py +++ b/sfa/util/xrn.py @@ -130,6 +130,13 @@ class Xrn: # if not type: # debug_logger.debug("type-less Xrn's are not safe") + def __repr__ (self): + result="
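For readers unfamiliar with the SQLAlchemy idiom this patch migrates to, here is a minimal, self-contained sketch of the create/query/delete pattern that replaces the old SfaTable/SfaRecord calls. This is not SFA code: the in-memory SQLite engine, the example hrn, and the trimmed-down RegRecord columns are illustrative stand-ins only; the real definitions live in sfa/storage/alchemy.py and sfa/storage/persistentobjs.py and bind to PostgreSQL via the SFA config.

from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class RegRecord(Base):
    # trimmed-down stand-in for the 'records' table defined in persistentobjs.py
    __tablename__ = 'records'
    record_id = Column(Integer, primary_key=True)
    type = Column(String)
    hrn = Column(String)
    pointer = Column(Integer, default=-1)

# illustration only: the real Alchemy class builds a postgresql+psycopg2 URL
# from the SFA config (unix socket first, then a TCP fallback)
engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)         # the role played by init_tables()
dbsession = sessionmaker(bind=engine)()  # the shared session exported by alchemy.py

# create -- replaces the old record.sync() / table.insert()
dbsession.add(RegRecord(type='user', hrn='plc.example.alice'))
dbsession.commit()

# read -- replaces table.find({'type': ..., 'hrn': ...}) / table.findObjects(...)
record = dbsession.query(RegRecord).filter_by(type='user', hrn='plc.example.alice').first()
assert record is not None and record.pointer == -1

# delete -- a bare 'del record' only drops the Python reference; removing the
# row itself takes dbsession.delete() followed by a commit
dbsession.delete(record)
dbsession.commit()

Note that the importer code in this checkpoint still uses 'del record_object' when pruning stale records, which leaves the row in place; as the commit message says, update and delete have not moved to sqlalchemy yet, so that path will eventually need dbsession.delete() as sketched above.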