X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=sfa%2Fimporter%2Fplimporter.py;h=c3204824f79d9799107473fb2d90b869817e0365;hb=f743a7aa83001dd4bcbbf48e49c4d4e0fb38f883;hp=94854ae313d734baa068154cb441c58620dcfd1c;hpb=afca89d90825fcfe3da53c417324a375f145d123;p=sfa.git

diff --git a/sfa/importer/plimporter.py b/sfa/importer/plimporter.py
index 94854ae3..c3204824 100644
--- a/sfa/importer/plimporter.py
+++ b/sfa/importer/plimporter.py
@@ -24,7 +24,9 @@ from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
 from sfa.trust.gid import create_uuid
 from sfa.trust.certificate import convert_public_key, Keypair
 
-from sfa.storage.alchemy import dbsession
+# using global alchemy.session() here is fine
+# as importer is on standalone one-shot process
+from sfa.storage.alchemy import global_dbsession
 from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
 
 from sfa.planetlab.plshell import PlShell
@@ -115,8 +117,8 @@ class PlImporter:
                                            pointer=site['site_id'],
                                            authority=get_authority(site_hrn))
                 auth_record.just_created()
-                dbsession.add(auth_record)
-                dbsession.commit()
+                global_dbsession.add(auth_record)
+                global_dbsession.commit()
                 self.logger.info("PlImporter: Imported authority (vini site) %s"%auth_record)
                 self.remember_record ( site_record )
 
@@ -127,7 +129,7 @@ class PlImporter:
         shell = PlShell (config)
 
         ######## retrieve all existing SFA objects
-        all_records = dbsession.query(RegRecord).all()
+        all_records = global_dbsession.query(RegRecord).all()
 
         # create hash by (type,hrn)
         # we essentially use this to know if a given record is already known to SFA
@@ -145,12 +147,12 @@ class PlImporter:
         # Get all plc sites
         # retrieve only required stuf
         sites = shell.GetSites({'peer_id': None, 'enabled' : True},
-                               ['site_id','login_base','node_ids','slice_ids','person_ids',])
+                               ['site_id','login_base','node_ids','slice_ids','person_ids', 'name', 'hrn'])
         # create a hash of sites by login_base
         # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
         # Get all plc users
         persons = shell.GetPersons({'peer_id': None, 'enabled': True},
-                                   ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
+                                   ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids', 'hrn'])
         # create a hash of persons by person_id
         persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
         # also gather non-enabled user accounts so as to issue relevant warnings
@@ -183,16 +185,28 @@ class PlImporter:
         # create hash by node_id
         nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
         # Get all plc slices
-        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids'])
+        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids', 'hrn'])
         # create hash by slice_id
         slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )
 
         # isolate special vini case in separate method
         self.create_special_vini_record (interface_hrn)
 
+        # Get top authority record
+        top_auth_record=self.locate_by_type_hrn ('authority', root_auth)
+        admins = []
+
         # start importing
         for site in sites:
-            site_hrn = _get_site_hrn(interface_hrn, site)
+            try:
+                site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
+            except:
+                site_sfa_created = None
+            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
+                continue
+
+            #site_hrn = _get_site_hrn(interface_hrn, site)
+            site_hrn = site['hrn']
             # import if hrn is not in list of existing hrns or if the hrn exists
             # but its not a site record
             site_record=self.locate_by_type_hrn ('authority', site_hrn)
@@ -206,8 +220,8 @@ class PlImporter:
                                                pointer=site['site_id'],
                                                authority=get_authority(site_hrn))
                     site_record.just_created()
-                    dbsession.add(site_record)
-                    dbsession.commit()
+                    global_dbsession.add(site_record)
+                    global_dbsession.commit()
                     self.logger.info("PlImporter: imported authority (site) : %s" % site_record)
                     self.remember_record (site_record)
                 except:
@@ -242,8 +256,8 @@ class PlImporter:
                                                pointer =node['node_id'],
                                                authority=get_authority(node_hrn))
                         node_record.just_created()
-                        dbsession.add(node_record)
-                        dbsession.commit()
+                        global_dbsession.add(node_record)
+                        global_dbsession.commit()
                         self.logger.info("PlImporter: imported node: %s" % node_record)
                         self.remember_record (node_record)
                     except:
@@ -268,7 +282,8 @@ class PlImporter:
                 # make sure to NOT run this if anything is wrong
                 if not proceed: continue
 
-                person_hrn = email_to_hrn(site_hrn, person['email'])
+                #person_hrn = email_to_hrn(site_hrn, person['email'])
+                person_hrn = person['hrn']
                 # xxx suspicious again
                 if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                 person_urn = hrn_to_urn(person_hrn, 'user')
@@ -297,8 +312,7 @@ class PlImporter:
                     plc_keys = keys_by_person_id.get(person['person_id'],[])
                     if not user_record:
                         (pubkey,pkey) = init_person_key (person, plc_keys )
-                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
-                        person_gid.set_email(person['email'])
+                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
                         user_record = RegUser (hrn=person_hrn, gid=person_gid,
                                                pointer=person['person_id'],
                                                authority=get_authority(person_hrn),
@@ -308,8 +322,8 @@ class PlImporter:
                         else:
                             self.logger.warning("No key found for user %s"%user_record)
                         user_record.just_created()
-                        dbsession.add (user_record)
-                        dbsession.commit()
+                        global_dbsession.add (user_record)
+                        global_dbsession.commit()
                         self.logger.info("PlImporter: imported person: %s" % user_record)
                         self.remember_record ( user_record )
                     else:
@@ -357,13 +371,18 @@ class PlImporter:
                             user_record.just_updated()
                             self.logger.info("PlImporter: updated person: %s" % user_record)
                     user_record.email = person['email']
-                    dbsession.commit()
+                    global_dbsession.commit()
                     user_record.stale=False
                     # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                     # this is valid for all sites she is in..
                     # PI is coded with role_id==20
                     if 20 in person['role_ids']:
                         site_pis.append (user_record)
+
+                    # PL Admins need to marked as PI of the top authority record
+                    if 10 in person['role_ids'] and user_record not in top_auth_record.reg_pis:
+                        admins.append(user_record)
+
                 except:
                     self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
 
@@ -374,8 +393,8 @@ class PlImporter:
             # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
-            site_record.reg_pis = site_pis
-            dbsession.commit()
+            site_record.reg_pis = list(set(site_pis))
+            global_dbsession.commit()
 
             # import slices
             for slice_id in site['slice_ids']:
@@ -383,7 +402,9 @@ class PlImporter:
                     slice = slices_by_id[slice_id]
                 except:
                     self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
-                slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
+                    continue
+                #slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
+                slice_hrn = slice['hrn']
                 slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                 if not slice_record:
                     try:
@@ -394,8 +415,8 @@ class PlImporter:
                                                  pointer=slice['slice_id'],
                                                  authority=get_authority(slice_hrn))
                         slice_record.just_created()
-                        dbsession.add(slice_record)
-                        dbsession.commit()
+                        global_dbsession.add(slice_record)
+                        global_dbsession.commit()
                         self.logger.info("PlImporter: imported slice: %s" % slice_record)
                         self.remember_record ( slice_record )
                     except:
@@ -408,9 +429,15 @@ class PlImporter:
                 # record current users affiliated with the slice
                 slice_record.reg_researchers = \
                     [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
-                dbsession.commit()
+                global_dbsession.commit()
                 slice_record.stale=False
 
+        # Set PL Admins as PI's of the top authority
+        if admins:
+            top_auth_record.reg_pis = list(set(admins))
+            global_dbsession.commit()
+            self.logger.info('PlImporter: set PL admins %s as PIs of %s'%(admins,top_auth_record.hrn))
+
         ### remove stale records
         # special records must be preserved
         system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
@@ -430,5 +457,5 @@ class PlImporter:
                 self.logger.warning("stale not found with %s"%record)
             if stale:
                 self.logger.info("PlImporter: deleting stale record: %s" % record)
-                dbsession.delete(record)
-                dbsession.commit()
+                global_dbsession.delete(record)
+                global_dbsession.commit()
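
Usage sketch (not part of the patch): a minimal illustration of the pattern every hunk above switches to, i.e. building a registry record, marking it new, and persisting it through the process-wide session. RegAuthority, just_created(), global_dbsession.add() and global_dbsession.commit() are taken from the patched code; the helper name add_site_authority and its arguments are hypothetical, and a configured SFA registry database is assumed.

    # illustrative helper only; assumes an initialized SFA registry database
    from sfa.storage.alchemy import global_dbsession
    from sfa.storage.model import RegAuthority

    def add_site_authority(hrn, gid, site_id, parent_hrn):
        # mirror the import loop: build the record, mark it as freshly created,
        # then add and commit on the single global session of the one-shot importer
        record = RegAuthority(hrn=hrn, gid=gid, pointer=site_id, authority=parent_hrn)
        record.just_created()
        global_dbsession.add(record)
        global_dbsession.commit()
        return record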