6 # read the planetlab database and update the local registry database accordingly
7 # (in other words, with this testbed, the SFA registry is *not* authoritative)
8 # so we update the following collections
9 # . authorities (from pl sites)
10 # . node (from pl nodes)
11 # . users+keys (from pl persons and attached keys)
12 # known limitation : *one* of the ssh keys is chosen at random here
13 # xxx todo/check xxx at the very least, when a key is known to the registry
14 # and is still current in plc
15 # then we should definitely make sure to keep that one in sfa...
16 # . slice+researchers (from pl slices and attached users)
21 from sfa.util.config import Config
22 from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
24 from sfa.trust.gid import create_uuid
25 from sfa.trust.certificate import convert_public_key, Keypair
27 # using global alchemy.session() here is fine
28 # as importer is on standalone one-shot process
29 from sfa.storage.alchemy import global_dbsession
30 from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
32 from sfa.planetlab.plshell import PlShell
33 from sfa.planetlab.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename
35 def _get_site_hrn(interface_hrn, site):
36 # Hardcode 'internet2' into the hrn for sites hosting
37 # internet2 nodes. This is a special operation for some vini
39 hrn = ".".join([interface_hrn, site['login_base']])
40 if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
41 if site['login_base'].startswith("i2") or site['login_base'].startswith("nlr"):
42 hrn = ".".join([interface_hrn, "internet2", site['login_base']])
def __init__ (self, auth_hierarchy, logger):
    # hierarchy used to create and look up authorities and gids
    self.auth_hierarchy = auth_hierarchy
    # keep the logger around: every other method reports through self.logger
    self.logger = logger
def add_options (self, parser):
    # we don't have any options for now
    pass
56 # hrn hash is initialized from current db
57 # remember just-created records as we go
58 # xxx might make sense to add a UNIQUE constraint in the db itself
def remember_record_by_hrn (self, record):
    """Index *record* in the (type, hrn) hash; warn and keep the first
    entry on duplicates instead of silently overwriting it."""
    key = (record.type, record.hrn)
    if key in self.records_by_type_hrn:
        self.logger.warning ("PlImporter.remember_record_by_hrn: duplicate (%s,%s)"%key)
        # do not clobber the record already indexed under this key
        return
    self.records_by_type_hrn [ key ] = record
66 # ditto for pointer hash
def remember_record_by_pointer (self, record):
    """Index *record* in the (type, pointer) hash.

    Records with pointer == -1 (no PLC counterpart) cannot be indexed by
    pointer; duplicates are warned about and left untouched.
    """
    if record.pointer == -1:
        self.logger.warning ("PlImporter.remember_record_by_pointer: pointer is void")
        # a void pointer must not be indexed
        return
    key = (record.type, record.pointer)
    if key in self.records_by_type_pointer:
        self.logger.warning ("PlImporter.remember_record_by_pointer: duplicate (%s,%s)"%key)
        # keep the record already indexed under this key
        return
    self.records_by_type_pointer [ key ] = record
def remember_record (self, record):
    """Register *record* in both indexes: by (type, hrn) and by (type, pointer)."""
    for index_it in (self.remember_record_by_hrn,
                     self.remember_record_by_pointer):
        index_it(record)
def locate_by_type_hrn (self, type, hrn):
    """Return the record already known under (type, hrn), or None."""
    lookup_key = (type, hrn)
    return self.records_by_type_hrn.get(lookup_key, None)
def locate_by_type_pointer (self, type, pointer):
    """Return the record already known under (type, pointer), or None."""
    try:
        return self.records_by_type_pointer[(type, pointer)]
    except KeyError:
        return None
87 # a convenience/helper function to see if a record is already known
88 # a former, broken, attempt (in 2.1-9) had been made
89 # to try and use 'pointer' as a first, most significant attempt
90 # the idea being to preserve stuff as much as possible, and thus
91 # to avoid creating a new gid in the case of a simple hrn rename
92 # however this of course doesn't work as the gid depends on the hrn...
93 #def locate (self, type, hrn=None, pointer=-1):
95 # attempt = self.locate_by_type_pointer (type, pointer)
96 # if attempt : return attempt
98 # attempt = self.locate_by_type_hrn (type, hrn,)
99 # if attempt : return attempt
# this makes the run method a bit abstruse - out of the way
def create_special_vini_record (self, interface_hrn):
    """Create the fake 'internet2' authority record needed by vini deployments.

    No-op unless *interface_hrn* denotes a vini deployment. Idempotent: the
    record is only created when no ('authority', site_hrn) entry is known yet.
    """
    # special case for vini
    if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
        # create a fake internet2 site first
        i2site = {'name': 'Internet2', 'login_base': 'internet2', 'site_id': -1}
        site_hrn = _get_site_hrn(interface_hrn, i2site)
        # import if hrn is not in list of existing hrns or if the hrn exists
        # but it's not a site record
        if ( 'authority', site_hrn, ) not in self.records_by_type_hrn:
            urn = hrn_to_urn(site_hrn, 'authority')
            if not self.auth_hierarchy.auth_exists(urn):
                self.auth_hierarchy.create_auth(urn)
            auth_info = self.auth_hierarchy.get_auth_info(urn)
            auth_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                       # bugfix: was site['site_id'] - 'site' is not
                                       # defined here, the fake site is 'i2site'
                                       pointer=i2site['site_id'],
                                       authority=get_authority(site_hrn))
            auth_record.just_created()
            global_dbsession.add(auth_record)
            global_dbsession.commit()
            self.logger.info("PlImporter: Imported authority (vini site) %s"%auth_record)
            # bugfix: was remember_record(site_record) - undefined name;
            # the record just created is auth_record
            self.remember_record ( auth_record )
def run (self, options):
    """Import sites, nodes, persons+keys and slices from PLCAPI into the SFA
    registry database, then delete the SFA records that are no longer backed
    by a PLC object (stale records).

    NOTE(review): this excerpt is missing a number of original lines
    (loop headers, try/except headers, several assignments); the code below
    reproduces only the visible statements, indented according to the
    apparent structure - confirm against the full file.
    """
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = PlShell (config)

    ######## retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()

    # create hash by (type,hrn)
    # we essentially use this to know if a given record is already known to SFA
    self.records_by_type_hrn = \
        dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
    # create hash by (type,pointer)
    self.records_by_type_pointer = \
        dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
                 if record.pointer != -1] )

    # initialize record.stale to True by default, then mark stale=False on the ones that are in use
    for record in all_records: record.stale=True

    ######## retrieve PLC data
    # retrieve only required stuff
    sites = shell.GetSites({'peer_id': None, 'enabled' : True},
                           ['site_id','login_base','node_ids','slice_ids','person_ids', 'name'])
    # create a hash of sites by login_base
    # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
    persons = shell.GetPersons({'peer_id': None, 'enabled': True},
                               ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
    # create a hash of persons by person_id
    persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
    # also gather non-enabled user accounts so as to issue relevant warnings
    disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
    disabled_person_ids = [ person['person_id'] for person in disabled_persons ]
    # Get all plc public keys
    # accumulate key ids for keys retrieval
    # NOTE(review): the initialization of key_ids appears missing from this excerpt
    for person in persons:
        key_ids.extend(person['key_ids'])
    # NOTE(review): the closing of this GetKeys call appears missing from this excerpt
    keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
    # create a hash of keys by key_id
    keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] )
    # create a dict person_id -> [ (plc)keys ]
    keys_by_person_id = {}
    for person in persons:
        for key_id in person['key_ids']:
            # by construction all the keys we fetched are ssh keys
            # so gpg keys won't be in there
            # NOTE(review): the try/except around this lookup appears missing
            key = keys_by_id[key_id]
            self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
        keys_by_person_id[person['person_id']] = pubkeys
    nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
    # create hash by node_id
    nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
    slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids'])
    # create hash by slice_id
    slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

    # isolate special vini case in separate method
    self.create_special_vini_record (interface_hrn)

    # NOTE(review): the enclosing 'for site in ...' loop header and the
    # try/except around GetSiteSfaCreated appear missing from this excerpt
        site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
        site_sfa_created = None
        # skip sites that were created through SFA itself
        if site['name'].startswith('sfa:') or site_sfa_created == 'True':

        site_hrn = _get_site_hrn(interface_hrn, site)
        # import if hrn is not in list of existing hrns or if the hrn exists
        # but its not a site record
        site_record=self.locate_by_type_hrn ('authority', site_hrn)
                # new site: create the authority and its registry record
                urn = hrn_to_urn(site_hrn, 'authority')
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                           pointer=site['site_id'],
                                           authority=get_authority(site_hrn))
                site_record.just_created()
                global_dbsession.add(site_record)
                global_dbsession.commit()
                self.logger.info("PlImporter: imported authority (site) : %s" % site_record)
                self.remember_record (site_record)
                # if the site import fails then there is no point in trying to import the
                # site's child records (node, slices, persons), so skip them.
                self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn)
            # xxx update the record ...
        site_record.stale=False

        # import node records
        for node_id in site['node_ids']:
                # NOTE(review): try/except around this lookup appears missing
                node = nodes_by_id[node_id]
                self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
            site_auth = get_authority(site_hrn)
            site_name = site['login_base']
            node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
            # xxx this sounds suspicious
            if len(node_hrn) > 64: node_hrn = node_hrn[:64]
            node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                    # new node: mint a gid with a throwaway keypair
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(node_hrn, 'node')
                    node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    node_record = RegNode (hrn=node_hrn, gid=node_gid,
                                           pointer =node['node_id'],
                                           authority=get_authority(node_hrn))
                    node_record.just_created()
                    global_dbsession.add(node_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported node: %s" % node_record)
                    self.remember_record (node_record)
                    self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn)
                # xxx update the record ...
            node_record.stale=False

        # import persons attached to this site
        for person_id in site['person_ids']:
            if person_id in persons_by_id:
                person=persons_by_id[person_id]
            elif person_id in disabled_person_ids:
                self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
            # make sure to NOT run this if anything is wrong
            if not proceed: continue

            person_hrn = email_to_hrn(site_hrn, person['email'])
            # xxx suspicious again
            if len(person_hrn) > 64: person_hrn = person_hrn[:64]
            person_urn = hrn_to_urn(person_hrn, 'user')

            user_record = self.locate_by_type_hrn ( 'user', person_hrn)

            # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
            def init_person_key (person, plc_keys):
                if person['key_ids']:
                    # randomly pick first key in set
                        pkey = convert_public_key(pubkey['key'])
                        # fall back to a fresh keypair when conversion fails
                        self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
                        pkey = Keypair(create=True)
                    # the user has no keys. Creating a random keypair for the user's gid
                    self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
                    pkey = Keypair(create=True)
                return (pubkey, pkey)

                plc_keys = keys_by_person_id.get(person['person_id'],[])
                    # new person: create gid + RegUser record
                    (pubkey,pkey) = init_person_key (person, plc_keys )
                    person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
                    user_record = RegUser (hrn=person_hrn, gid=person_gid,
                                           pointer=person['person_id'],
                                           authority=get_authority(person_hrn),
                                           email=person['email'])
                        user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
                        self.logger.warning("No key found for user %s"%user_record)
                    user_record.just_created()
                    global_dbsession.add (user_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported person: %s" % user_record)
                    self.remember_record ( user_record )
                    # update the record ?
                    # if a user key has changed then we need to update the
                    # users gid by forcing an update here
                    # right now, SFA only has *one* key attached to a user, and this is
                    # the key that the GID was made with
                    # so the logic here is, we consider that things are OK (unchanged) if
                    # all the SFA keys are present as PLC keys
                    # otherwise we trigger the creation of a new gid from *some* plc key
                    # and record this on the SFA side
                    # it would make sense to add a feature in PLC so that one could pick a 'primary'
                    # key but this is not available on the myplc side for now
                    # = or = it would be much better to support several keys in SFA but that
                    # does not seem doable without a major overhaul in the data model as
                    # a GID is attached to a hrn, but it's also linked to a key, so...
                    # NOTE: with this logic, the first key entered in PLC remains the one
                    # current in SFA until it is removed from PLC
                    sfa_keys = user_record.reg_keys
                    def sfa_key_in_list (sfa_key,plc_keys):
                        for plc_key in plc_keys:
                            if plc_key['key']==sfa_key.key:
                    # are all the SFA keys known to PLC ?
                    if not sfa_keys and plc_keys:
                        for sfa_key in sfa_keys:
                            if not sfa_key_in_list (sfa_key,plc_keys):
                        # some key changed: rebuild the gid from a plc key
                        (pubkey,pkey) = init_person_key (person, plc_keys)
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        person_gid.set_email(person['email'])
                            user_record.reg_keys=[]
                            user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
                        user_record.gid = person_gid
                        user_record.just_updated()
                        self.logger.info("PlImporter: updated person: %s" % user_record)
                user_record.email = person['email']
                global_dbsession.commit()
                user_record.stale=False
                # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                # this is valid for all sites she is in..
                # PI is coded with role_id==20
                if 20 in person['role_ids']:
                    site_pis.append (user_record)
                self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))

        # maintain the list of PIs for a given site
        # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
        # site_pis = list(set(site_pis))
        # this was likely due to a bug in the above logic, that had to do with disabled persons
        # being improperly handled, and where the whole loop on persons
        # could be performed twice with the same person...
        # so hopefully we do not need to eliminate duplicates explicitly here anymore
        site_record.reg_pis = list(set(site_pis))
        global_dbsession.commit()

        # import slices attached to this site
        for slice_id in site['slice_ids']:
                # NOTE(review): try/except around this lookup appears missing
                slice = slices_by_id[slice_id]
                self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
            slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
            slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                    # new slice: mint a gid with a throwaway keypair
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                             pointer=slice['slice_id'],
                                             authority=get_authority(slice_hrn))
                    slice_record.just_created()
                    global_dbsession.add(slice_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported slice: %s" % slice_record)
                    self.remember_record ( slice_record )
                    self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
                # xxx update the record ...
                # given that we record the current set of users anyways, there does not seem to be much left to do here
                # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
            # record current users affiliated with the slice
            slice_record.reg_researchers = \
                [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
            global_dbsession.commit()
            slice_record.stale=False

    ### remove stale records
    # special records must be preserved
    system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
    for record in all_records:
        if record.hrn in system_hrns:
        if record.peer_authority:
        # vini's fake internet2 authority must be preserved as well
        if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
            record.hrn.endswith("internet2"):

    for record in all_records:
        try: stale=record.stale
            # records created after the stale-marking pass have no 'stale' attribute
            self.logger.warning("stale not found with %s"%record)
            self.logger.info("PlImporter: deleting stale record: %s" % record)
            global_dbsession.delete(record)
            global_dbsession.commit()