6 # read the planetlab database and update the local registry database accordingly
7 # (in other words, with this testbed, the SFA registry is *not* authoritative)
8 # so we update the following collections
9 # . authorities (from pl sites)
10 # . node (from pl nodes)
11 # . users+keys (from pl persons and attached keys)
12 # known limitation : *one* of the ssh keys is chosen at random here
13 # xxx todo/check xxx at the very least, when a key is known to the registry
14 # and is still current in plc
15 # then we should definitely make sure to keep that one in sfa...
16 # . slice+researchers (from pl slices and attached users)
21 from sfa.util.config import Config
22 from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
24 from sfa.trust.gid import create_uuid
25 from sfa.trust.certificate import convert_public_key, Keypair
27 # using global alchemy.session() here is fine
28 # as importer is on standalone one-shot process
29 from sfa.storage.alchemy import global_dbsession
30 from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
32 from sfa.planetlab.plshell import PlShell
33 from sfa.planetlab.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename
35 def _get_site_hrn(interface_hrn, site):
36 # Hardcode 'internet2' into the hrn for sites hosting
37 # internet2 nodes. This is a special operation for some vini
39 hrn = ".".join([interface_hrn, site['login_base']])
40 if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
41 if site['login_base'].startswith("i2") or site['login_base'].startswith("nlr"):
42 hrn = ".".join([interface_hrn, "internet2", site['login_base']])
48 def __init__ (self, auth_hierarchy, logger):
49 self.auth_hierarchy = auth_hierarchy
def add_options (self, parser):
    """Register importer-specific command-line options on parser.

    The PL importer currently has no options of its own, so this is a
    deliberate no-op (the body was missing entirely, which is a syntax
    error in Python).
    """
    pass
56 # hrn hash is initialized from current db
57 # remember just-created records as we go
58 # xxx might make sense to add a UNIQUE constraint in the db itself
def remember_record_by_hrn (self, record):
    """Index record in self.records_by_type_hrn, keyed by (type, hrn).

    Duplicates are logged and ignored - the first record seen wins.
    """
    # renamed from 'tuple', which shadowed the builtin
    key = (record.type, record.hrn)
    if key in self.records_by_type_hrn:
        self.logger.warning ("PlImporter.remember_record_by_hrn: duplicate {}".format(key))
        # BUGFIX: bail out instead of silently overwriting the record
        # that was indexed first
        return
    self.records_by_type_hrn[key] = record
66 # ditto for pointer hash
def remember_record_by_pointer (self, record):
    """Index record in self.records_by_type_pointer, keyed by (type, pointer).

    Records with a void pointer (-1) are logged and skipped, consistent
    with the (type, pointer) hash built in run() that excludes them.
    Duplicates are logged and ignored - the first record seen wins.
    """
    if record.pointer == -1:
        self.logger.warning ("PlImporter.remember_record_by_pointer: pointer is void")
        # BUGFIX: do not index records that have no valid pointer
        return
    key = (record.type, record.pointer)
    if key in self.records_by_type_pointer:
        self.logger.warning ("PlImporter.remember_record_by_pointer: duplicate {}".format(key))
        # BUGFIX: keep the first record rather than overwriting it
        return
    self.records_by_type_pointer[key] = record
def remember_record (self, record):
    """Index record in both hashes: by (type, hrn) and by (type, pointer)."""
    for index in (self.remember_record_by_hrn, self.remember_record_by_pointer):
        index(record)
def locate_by_type_hrn (self, type, hrn):
    """Return the record already known for this (type, hrn), or None."""
    key = (type, hrn)
    if key in self.records_by_type_hrn:
        return self.records_by_type_hrn[key]
    return None
def locate_by_type_pointer (self, type, pointer):
    """Return the record already known for this (type, pointer), or None."""
    key = (type, pointer)
    if key in self.records_by_type_pointer:
        return self.records_by_type_pointer[key]
    return None
87 # a convenience/helper function to see if a record is already known
88 # a former, broken, attempt (in 2.1-9) had been made
89 # to try and use 'pointer' as a first, most significant attempt
90 # the idea being to preserve stuff as much as possible, and thus
91 # to avoid creating a new gid in the case of a simple hrn rename
92 # however this of course doesn't work as the gid depends on the hrn...
93 #def locate (self, type, hrn=None, pointer=-1):
95 # attempt = self.locate_by_type_pointer (type, pointer)
96 # if attempt : return attempt
98 # attempt = self.locate_by_type_hrn (type, hrn,)
99 # if attempt : return attempt
# this makes the run method a bit abstruse - moved out of the way
def create_special_vini_record (self, interface_hrn):
    """On vini deployments, pre-create the fake 'internet2' authority.

    _get_site_hrn() roots i2*/nlr* sites under <interface_hrn>.internet2,
    so that authority must exist in the registry before the site loop in
    run() imports them.  This is a no-op for non-vini interfaces, or when
    the authority record is already known.
    """
    # special case for vini
    if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
        # create a fake internet2 site first
        # site_id -1 marks it as having no counterpart in PLC
        i2site = {'name': 'Internet2', 'login_base': 'internet2', 'site_id': -1}
        site_hrn = _get_site_hrn(interface_hrn, i2site)
        # import if hrn is not in list of existing hrns or if the hrn exists
        # but it's not a site record
        if ( 'authority', site_hrn, ) not in self.records_by_type_hrn:
            urn = hrn_to_urn(site_hrn, 'authority')
            if not self.auth_hierarchy.auth_exists(urn):
                self.auth_hierarchy.create_auth(urn)
            auth_info = self.auth_hierarchy.get_auth_info(urn)
            auth_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                       # BUGFIX: was site['site_id'] - 'site' is not
                                       # defined in this scope (NameError at runtime);
                                       # the fake site dict is 'i2site'
                                       pointer=i2site['site_id'],
                                       authority=get_authority(site_hrn))
            auth_record.just_created()
            global_dbsession.add(auth_record)
            global_dbsession.commit()
            self.logger.info("PlImporter: Imported authority (vini site) {}".format(auth_record))
            # BUGFIX: was self.remember_record(site_record) - another NameError;
            # the record just created is auth_record
            self.remember_record ( auth_record )
def run (self, options):
    """Mirror the PLC database into the local SFA registry (one-shot import).

    Overall flow:
      (1) hash all existing SFA records by (type, hrn) and (type, pointer)
      (2) fetch sites, persons, keys, nodes and slices from PLC
      (3) create or update a registry record for each PLC object,
          clearing its 'stale' flag as it is seen
      (4) delete the records whose 'stale' flag was never cleared

    NOTE(review): this copy of the method is visibly truncated - several
    control-flow lines (loop headers, try/except clauses, a few simple
    assignments) are missing from the source seen here; the obvious gaps
    are flagged inline and must be restored from version control before
    this code can run.
    """
    # NOTE(review): a 'config = Config()' assignment appears to be missing here
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = PlShell (config)

    ######## retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()

    # create hash by (type,hrn)
    # we essentially use this to know if a given record is already known to SFA
    self.records_by_type_hrn = \
        dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
    # create hash by (type,pointer)
    # void pointers (-1) are excluded - see remember_record_by_pointer
    self.records_by_type_pointer = \
        dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
                 if record.pointer != -1] )

    # initialize record.stale to True by default, then mark stale=False on the ones that are in use
    for record in all_records:
        # NOTE(review): loop body is missing - presumably 'record.stale = True'

    ######## retrieve PLC data
    # retrieve only required stuff
    sites = shell.GetSites({'peer_id': None, 'enabled' : True},
                           ['site_id','login_base','node_ids','slice_ids','person_ids', 'name', 'hrn'])
    # create a hash of sites by login_base
    # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
    persons = shell.GetPersons({'peer_id': None, 'enabled': True},
                               ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids', 'hrn'])
    # create a hash of persons by person_id
    persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
    # also gather non-enabled user accounts so as to issue relevant warnings
    disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
    disabled_person_ids = [ person['person_id'] for person in disabled_persons ]
    # Get all plc public keys
    # accumulate key ids for keys retrieval
    # NOTE(review): a 'key_ids = []' initialization appears to be missing here
    for person in persons:
        key_ids.extend(person['key_ids'])
    # NOTE(review): the GetKeys call below is truncated (remaining filter
    # arguments and closing parenthesis are missing)
    keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
    # create a hash of keys by key_id
    keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] )
    # create a dict person_id -> [ (plc)keys ]
    keys_by_person_id = {}
    for person in persons:
        # NOTE(review): a 'pubkeys = []' initialization appears to be missing here
        for key_id in person['key_ids']:
            # by construction all the keys we fetched are ssh keys
            # so gpg keys won't be in there
            # NOTE(review): the try clause (with the append of 'key') and its
            # except clause are missing around the next two lines
            key = keys_by_id[key_id]
            self.logger.warning("Could not spot key {} - probably non-ssh".format(key_id))
        keys_by_person_id[person['person_id']] = pubkeys

    nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
    # create hash by node_id
    nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )

    slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids', 'hrn'])
    # create hash by slice_id
    slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

    # isolate special vini case in separate method
    self.create_special_vini_record (interface_hrn)

    # Get top authority record
    top_auth_record = self.locate_by_type_hrn ('authority', root_auth)

    # NOTE(review): the 'for site in sites:' loop header is missing here, as
    # are (presumably) an 'admins = []' accumulator, a per-site 'site_pis'
    # accumulator, and a try: around the next call
    site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
    # NOTE(review): the except clause belonging to this fallback is missing
    site_sfa_created = None
    # skip sites that were created through SFA itself
    if site['name'].startswith('sfa:') or site_sfa_created == 'True':
        # NOTE(review): branch body missing - presumably 'continue'

    #site_hrn = _get_site_hrn(interface_hrn, site)
    site_hrn = site['hrn']
    # import if hrn is not in list of existing hrns or if the hrn exists
    # but its not a site record
    site_record = self.locate_by_type_hrn ('authority', site_hrn)
    # NOTE(review): an 'if not site_record:' guard (and a try:) is missing here
    urn = hrn_to_urn(site_hrn, 'authority')
    if not self.auth_hierarchy.auth_exists(urn):
        self.auth_hierarchy.create_auth(urn)
    auth_info = self.auth_hierarchy.get_auth_info(urn)
    # NOTE(review): this RegAuthority(...) call is truncated (trailing
    # keyword arguments and closing parenthesis are missing)
    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                               pointer=site['site_id'],
                               authority=get_authority(site_hrn),
    site_record.just_created()
    global_dbsession.add(site_record)
    global_dbsession.commit()
    self.logger.info("PlImporter: imported authority (site) : {}".format(site_record))
    self.remember_record(site_record)

    # if the site import fails then there is no point in trying to import the
    # site's child records (node, slices, persons), so skip them.
    # NOTE(review): the except clause this log line belongs to is missing,
    # and the call itself is truncated
    self.logger.log_exc("PlImporter: failed to import site {}. Skipping child records"\
    # xxx update the record ...
    site_record.name = site['name']
    site_record.stale = False

    # import node records
    for node_id in site['node_ids']:
        # NOTE(review): a try/except around this lookup is missing; the
        # warning below is its (orphaned, truncated) except body
        node = nodes_by_id[node_id]
        self.logger.warning ("PlImporter: cannot find node_id {} - ignored"
        site_auth = get_authority(site_hrn)
        site_name = site['login_base']
        node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
        # xxx this sounds suspicious
        # (hrn silently truncated to 64 chars, presumably a db column limit)
        if len(node_hrn) > 64: node_hrn = node_hrn[:64]
        node_record = self.locate_by_type_hrn ( 'node', node_hrn )
        # NOTE(review): an 'if not node_record:' guard (and a try:) is missing here
        pkey = Keypair(create=True)
        urn = hrn_to_urn(node_hrn, 'node')
        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
        node_record = RegNode (hrn=node_hrn, gid=node_gid,
                               pointer =node['node_id'],
                               authority=get_authority(node_hrn))
        node_record.just_created()
        global_dbsession.add(node_record)
        global_dbsession.commit()
        self.logger.info("PlImporter: imported node: {}".format(node_record))
        self.remember_record (node_record)
        # NOTE(review): the except clause this log line belongs to is missing
        self.logger.log_exc("PlImporter: failed to import node {}".format(node_hrn))
        # xxx update the record ...
        node_record.stale = False

    # import persons attached to this site
    for person_id in site['person_ids']:
        # NOTE(review): a 'proceed = False'-style flag initialization appears
        # to be missing here (see 'if not proceed' below)
        if person_id in persons_by_id:
            person = persons_by_id[person_id]
            # NOTE(review): presumably 'proceed = True' is missing here
        elif person_id in disabled_person_ids:
            # NOTE(review): the disabled-account branch body is missing; the
            # warning below is the final 'else' branch
            self.logger.warning ("PlImporter: cannot locate person_id {} in site {} - ignored"\
                                 .format(person_id, site_hrn))
        # make sure to NOT run this if anything is wrong
        if not proceed: continue

        #person_hrn = email_to_hrn(site_hrn, person['email'])
        person_hrn = person['hrn']
        if person_hrn is None:
            self.logger.warn("Person {} has no hrn - skipped".format(person['email']))
            # NOTE(review): presumably 'continue' is missing here
        # xxx suspicious again
        if len(person_hrn) > 64:
            person_hrn = person_hrn[:64]
        person_urn = hrn_to_urn(person_hrn, 'user')

        user_record = self.locate_by_type_hrn ( 'user', person_hrn)

        # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
        def init_person_key (person, plc_keys):
            # NOTE(review): a 'pubkey = None' initialization appears to be missing
            if person['key_ids']:
                # randomly pick first key in set
                # NOTE(review): the 'pubkey = plc_keys[0]' pick and a try:
                # are missing above this conversion
                pkey = convert_public_key(pubkey['key'])
                # NOTE(review): except clause missing; this warn call is truncated
                self.logger.warn('PlImporter: unable to convert public key for {}'
                pkey = Keypair(create=True)
                # the user has no keys. Creating a random keypair for the user's gid
                # NOTE(review): the 'else:' introducing this branch is missing,
                # and this warn call is truncated
                self.logger.warn("PlImporter: person {} does not have a PL public key"
                pkey = Keypair(create=True)
            return (pubkey, pkey)

        # NOTE(review): an 'if not user_record:' guard (and a try:) is missing here
        plc_keys = keys_by_person_id.get(person['person_id'],[])
        (pubkey, pkey) = init_person_key (person, plc_keys )
        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey,
                                                    email=person['email'])
        user_record = RegUser (hrn=person_hrn, gid=person_gid,
                               pointer=person['person_id'],
                               authority=get_authority(person_hrn),
                               email=person['email'])
        # NOTE(review): an 'if pubkey:' guard appears to be missing here
        user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
        # NOTE(review): and an 'else:' before this warning
        self.logger.warning("No key found for user {}".format(user_record))
        user_record.just_created()
        global_dbsession.add (user_record)
        global_dbsession.commit()
        self.logger.info("PlImporter: imported person: {}".format(user_record))
        self.remember_record ( user_record )
        # NOTE(review): the 'else:' (update path for already-known users)
        # introducing the block below is missing
        # update the record ?
        # if a user key has changed then we need to update the
        # users gid by forcing an update here
        # right now, SFA only has *one* key attached to a user, and this is
        # the key that the GID was made with
        # so the logic here is, we consider that things are OK (unchanged) if
        # all the SFA keys are present as PLC keys
        # otherwise we trigger the creation of a new gid from *some* plc key
        # and record this on the SFA side
        # it would make sense to add a feature in PLC so that one could pick a 'primary'
        # key but this is not available on the myplc side for now
        # = or = it would be much better to support several keys in SFA but that
        # does not seem doable without a major overhaul in the data model as
        # a GID is attached to a hrn, but it's also linked to a key, so...
        # NOTE: with this logic, the first key entered in PLC remains the one
        # current in SFA until it is removed from PLC
        sfa_keys = user_record.reg_keys
        def sfa_key_in_list (sfa_key,plc_keys):
            # membership test: is this SFA key present among the PLC keys?
            for plc_key in plc_keys:
                if plc_key['key'] == sfa_key.key:
                    # NOTE(review): 'return True' here and a trailing
                    # 'return False' are missing
        # are all the SFA keys known to PLC ?
        # NOTE(review): a 'new_keys = False'-style flag appears to be missing
        if not sfa_keys and plc_keys:
            # NOTE(review): branch body missing
        for sfa_key in sfa_keys:
            if not sfa_key_in_list (sfa_key,plc_keys):
                # NOTE(review): branch body missing; an 'if new_keys:' guard
                # presumably precedes the gid regeneration below
        (pubkey,pkey) = init_person_key (person, plc_keys)
        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
        person_gid.set_email(person['email'])
        # NOTE(review): an 'if not pubkey:'/'else:' pair appears to be missing
        # around the next two assignments
        user_record.reg_keys = []
        user_record.reg_keys = [ RegKey (pubkey['key'], pubkey['key_id'])]
        user_record.gid = person_gid
        user_record.just_updated()
        self.logger.info("PlImporter: updated person: {}".format(user_record))
        user_record.email = person['email']
        global_dbsession.commit()
        user_record.stale = False
        # accumulate PIs - PLCAPI has a limitation that when someone has PI role
        # this is valid for all sites she is in..
        # PI is coded with role_id == 20
        if 20 in person['role_ids']:
            site_pis.append (user_record)

        # PL Admins need to be marked as PI of the top authority record
        # (role_id == 10 presumably is the admin role - TODO confirm)
        if 10 in person['role_ids'] and user_record not in top_auth_record.reg_pis:
            admins.append(user_record)
        # NOTE(review): the except clause this log line belongs to is missing
        self.logger.log_exc("PlImporter: failed to import person {} {}"
                            .format(person['person_id'], person['email']))

    # maintain the list of PIs for a given site
    # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
    # site_pis = list(set(site_pis))
    # this was likely due to a bug in the above logic, that had to do with disabled persons
    # being improperly handled, and where the whole loop on persons
    # could be performed twice with the same person...
    # so hopefully we do not need to eliminate duplicates explicitly here anymore
    site_record.reg_pis = list(set(site_pis))
    global_dbsession.commit()

    # import slices attached to this site
    for slice_id in site['slice_ids']:
        # NOTE(review): a try/except around this lookup is missing; the
        # warning below is its (orphaned, truncated) except body
        slice = slices_by_id[slice_id]
        self.logger.warning ("PlImporter: cannot locate slice_id {} - ignored"
        #slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
        slice_hrn = slice['hrn']
        if slice_hrn is None:
            self.logger.warning("Slice {} has no hrn - skipped"
                                .format(slice['name']))
            # NOTE(review): presumably 'continue' is missing here
        slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
        # NOTE(review): an 'if not slice_record:' guard (and a try:) is missing
        pkey = Keypair(create=True)
        urn = hrn_to_urn(slice_hrn, 'slice')
        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                 pointer=slice['slice_id'],
                                 authority=get_authority(slice_hrn))
        slice_record.just_created()
        global_dbsession.add(slice_record)
        global_dbsession.commit()
        self.logger.info("PlImporter: imported slice: {}".format(slice_record))
        self.remember_record ( slice_record )
        # NOTE(review): the except clause this log line belongs to is missing
        self.logger.log_exc("PlImporter: failed to import slice {} ({})"
                            .format(slice_hrn, slice['name']))
        # xxx update the record ...
        # given that we record the current set of users anyways, there does not seem to be much left to do here
        # self.logger.warning ("Slice update not yet implemented on slice {} ({})"
        #                      .format(slice_hrn, slice['name']))

        # record current users affiliated with the slice
        slice_record.reg_researchers = \
            [ self.locate_by_type_pointer ('user', user_id) for user_id in slice['person_ids'] ]
        # remove any weird value (looks like we can get 'None' here)
        slice_record.reg_researchers = [ x for x in slice_record.reg_researchers if x ]
        global_dbsession.commit()
        slice_record.stale = False

    # Set PL Admins as PI's of the top authority
    # NOTE(review): an 'if admins:' guard appears to be missing here
    top_auth_record.reg_pis = list(set(admins))
    global_dbsession.commit()
    self.logger.info('PlImporter: set PL admins {} as PIs of {}'
                     .format(admins, top_auth_record.hrn))

    ### remove stale records
    # special records must be preserved
    system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
    for record in all_records:
        if record.hrn in system_hrns:
            # NOTE(review): branch body missing - presumably 'record.stale = False'
        if record.peer_authority:
            # NOTE(review): branch body missing - presumably 'record.stale = False'
        if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
           record.hrn.endswith("internet2"):
            # NOTE(review): branch body missing - presumably 'record.stale = False'

    for record in all_records:
        # records whose stale flag was never cleared above get deleted
        try: stale = record.stale
        # NOTE(review): the except clause (defaulting 'stale' and emitting the
        # warning below) is missing
        self.logger.warning("stale not found with {}".format(record))
        # NOTE(review): an 'if stale:' guard appears to be missing here
        self.logger.info("PlImporter: deleting stale record: {}".format(record))
        global_dbsession.delete(record)
        global_dbsession.commit()