def locate_by_type_pointer (self, type, pointer):
    # look up an already-known record by its (type, pointer) composite key;
    # yields None when no such record has been remembered yet
    return self.records_by_type_pointer.get((type, pointer))
- # convenience : try to locate first based on type+pointer
- # if so, the record was created already even if e.g. its hrn has changed meanwhile
- # otherwise we try by type+hrn (is this truly useful ?)
- def locate (self, type, hrn=None, pointer=-1):
- if pointer!=-1:
- attempt = self.locate_by_type_pointer (type, pointer)
- if attempt : return attempt
- if hrn is not None:
- attempt = self.locate_by_type_hrn (type, hrn,)
- if attempt : return attempt
- return None
+ # a convenience/helper function to see if a record is already known
+ # a former, broken, attempt (in 2.1-9) had been made
+ # to try and use 'pointer' as a first, most significant attempt
+ # the idea being to preserve stuff as much as possible, and thus
+ # to avoid creating a new gid in the case of a simple hrn rename
+ # however this of course doesn't work as the gid depends on the hrn...
+ #def locate (self, type, hrn=None, pointer=-1):
+ # if pointer!=-1:
+ # attempt = self.locate_by_type_pointer (type, pointer)
+ # if attempt : return attempt
+ # if hrn is not None:
+ # attempt = self.locate_by_type_hrn (type, hrn,)
+ # if attempt : return attempt
+ # return None
    # this makes the run method a bit abstruse - out of the way
def create_special_vini_record (self, interface_hrn):
['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
# create a hash of persons by person_id
persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
+ # also gather non-enabled user accounts so as to issue relevant warnings
+ disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
+ disabled_person_ids = [ person['person_id'] for person in disabled_persons ]
# Get all plc public keys
# accumulate key ids for keys retrieval
key_ids = []
for person in persons:
pubkeys = []
for key_id in person['key_ids']:
- key = keys_by_id[key_id]
- if key['key_type'] == 'ssh':
+ # by construction all the keys we fetched are ssh keys
+ # so gpg keys won't be in there
+ try:
+ key = keys_by_id[key_id]
pubkeys.append(key)
+ except:
+ self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
keys_by_person_id[person['person_id']] = pubkeys
# Get all plc nodes
nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
site_hrn = _get_site_hrn(interface_hrn, site)
        # import if hrn is not in the list of existing hrns, or if the hrn exists
        # but it's not a site record
- site_record=self.locate ('authority', site_hrn, site['site_id'])
+ site_record=self.locate_by_type_hrn ('authority', site_hrn)
if not site_record:
try:
urn = hrn_to_urn(site_hrn, 'authority')
except:
# if the site import fails then there is no point in trying to import the
# site's child records (node, slices, persons), so skip them.
- self.logger.log_exc("PlImporter: failed to import site. Skipping child records")
+ self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn)
continue
else:
# xxx update the record ...
continue
site_auth = get_authority(site_hrn)
site_name = site['login_base']
- hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
+ node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
# xxx this sounds suspicious
- if len(hrn) > 64: hrn = hrn[:64]
- node_record = self.locate ( 'node', hrn , node['node_id'] )
+ if len(node_hrn) > 64: node_hrn = node_hrn[:64]
+ node_record = self.locate_by_type_hrn ( 'node', node_hrn )
if not node_record:
try:
pkey = Keypair(create=True)
- urn = hrn_to_urn(hrn, 'node')
+ urn = hrn_to_urn(node_hrn, 'node')
node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
- node_record = RegNode (hrn=hrn, gid=node_gid,
+ node_record = RegNode (hrn=node_hrn, gid=node_gid,
pointer =node['node_id'],
- authority=get_authority(hrn))
+ authority=get_authority(node_hrn))
node_record.just_created()
dbsession.add(node_record)
dbsession.commit()
self.logger.info("PlImporter: imported node: %s" % node_record)
self.remember_record (node_record)
except:
- self.logger.log_exc("PlImporter: failed to import node")
+ self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn)
+ continue
else:
# xxx update the record ...
pass
site_pis=[]
# import persons
for person_id in site['person_ids']:
- try:
- person = persons_by_id[person_id]
- except:
- self.logger.warning ("PlImporter: cannot locate person_id %s - ignored"%person_id)
+ proceed=False
+ if person_id in persons_by_id:
+ person=persons_by_id[person_id]
+ proceed=True
+ elif person_id in disabled_person_ids:
+ pass
+ else:
+ self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
+ # make sure to NOT run this if anything is wrong
+ if not proceed: continue
+
person_hrn = email_to_hrn(site_hrn, person['email'])
# xxx suspicious again
if len(person_hrn) > 64: person_hrn = person_hrn[:64]
person_urn = hrn_to_urn(person_hrn, 'user')
- user_record = self.locate ( 'user', person_hrn, person['person_id'])
+ user_record = self.locate_by_type_hrn ( 'user', person_hrn)
# return a tuple pubkey (a plc key object) and pkey (a Keypair object)
def init_person_key (person, plc_keys):
person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
person_gid.set_email(person['email'])
user_record = RegUser (hrn=person_hrn, gid=person_gid,
- pointer=person['person_id'],
- authority=get_authority(person_hrn),
- email=person['email'])
+ pointer=person['person_id'],
+ authority=get_authority(person_hrn),
+ email=person['email'])
if pubkey:
user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
else:
self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
# maintain the list of PIs for a given site
+ # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
+ # site_pis = list(set(site_pis))
+ # this was likely due to a bug in the above logic,
+ # that had to do with enabled persons, and where the whole loop on persons
+ # could be performed twice with the same person...
+ # so hopefully we do not need to eliminate duplicates explicitly here anymore
site_record.reg_pis = site_pis
+ dbsession.commit()
# import slices
for slice_id in site['slice_ids']:
except:
self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
- slice_record = self.locate ('slice', slice_hrn, slice['slice_id'])
+ slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
if not slice_record:
try:
pkey = Keypair(create=True)
self.logger.info("PlImporter: imported slice: %s" % slice_record)
self.remember_record ( slice_record )
except:
- self.logger.log_exc("PlImporter: failed to import slice")
+ self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
else:
# xxx update the record ...
- self.logger.warning ("Slice update not yet implemented")
+ # given that we record the current set of users anyways, there does not seem to be much left to do here
+ # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
pass
# record current users affiliated with the slice
slice_record.reg_researchers = \