# read the dummy testbed database and update the local registry database accordingly
7 # so we update the following collections
8 # . authorities (from pl sites)
9 # . node (from pl nodes)
10 # . users+keys (from pl persons and attached keys)
11 # known limitation : *one* of the ssh keys is chosen at random here
12 # xxx todo/check xxx at the very least, when a key is known to the registry
13 # and is still current in plc
14 # then we should definitely make sure to keep that one in sfa...
15 # . slice+researchers (from pl slices and attached users)
20 from sfa.util.config import Config
21 from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
23 from sfa.trust.gid import create_uuid
24 from sfa.trust.certificate import convert_public_key, Keypair
26 # using global alchemy.session() here is fine
27 # as importer is on standalone one-shot process
28 from sfa.storage.alchemy import global_dbsession
29 from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
31 from sfa.dummy.dummyshell import DummyShell
32 from sfa.dummy.dummyxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_dummy_slicename
def _get_site_hrn(interface_hrn, site):
    """Return the hrn of a testbed site: the interface (root) hrn with
    the site name appended.

    :param interface_hrn: hrn of the SFA interface / root authority
    :param site: a site dict exposing at least a 'name' entry
    :returns: the dotted hrn string for the site
    """
    hrn = ".".join([interface_hrn, site['name']])
    # BUG FIX: 'hrn' was computed but never returned, so every caller
    # received None; return the joined hrn as callers expect
    return hrn
def __init__(self, auth_hierarchy, logger):
    """Keep the authority hierarchy and the logger for later use.

    :param auth_hierarchy: hierarchy used to create authorities and gids
    :param logger: logger used by the import methods for reporting
    """
    self.auth_hierarchy = auth_hierarchy
    # BUG FIX: 'logger' was accepted but never stored, while the other
    # methods of this class rely on self.logger
    self.logger = logger
def add_options(self, parser):
    """Register importer-specific command line options on parser.

    The dummy importer currently defines no options; this method exists
    only to satisfy the importer interface.
    """
    # we don't have any options for now
    # BUG FIX: the def had no statement body (only a comment), which is
    # a syntax error; 'pass' makes it a valid no-op
    pass
50 # hrn hash is initialized from current db
51 # remember just-created records as we go
52 # xxx might make sense to add a UNIQUE constraint in the db itself
def remember_record_by_hrn(self, record):
    """Index record in the (type, hrn) hash.

    On a duplicate, keep the record already registered and report the
    anomaly instead of silently overwriting it.

    :param record: a RegRecord-like object exposing .type and .hrn
    """
    # renamed from 'tuple' which shadowed the builtin
    key = (record.type, record.hrn)
    if key in self.records_by_type_hrn:
        # BUG FIX: the duplicate branch was a bare string expression
        # (the logger call was lost) and fell through, overwriting the
        # first record; log the duplicate and keep the original
        self.logger.error(
            "DummyImporter.remember_record_by_hrn: duplicate (%s,%s)" % key)
        return
    self.records_by_type_hrn[key] = record
61 # ditto for pointer hash
def remember_record_by_pointer(self, record):
    """Index record in the (type, pointer) hash.

    Records with a void pointer (-1) are not indexed; duplicates are
    reported and the first record seen is kept.

    :param record: a RegRecord-like object exposing .type and .pointer
    """
    if record.pointer == -1:
        # BUG FIX: this branch was a bare string expression (the logger
        # call was lost) and did not return, so void-pointer records
        # were indexed under (type, -1); log and skip instead
        self.logger.error(
            "DummyImporter.remember_record_by_pointer: pointer is void")
        return
    # renamed from 'tuple' which shadowed the builtin
    key = (record.type, record.pointer)
    if key in self.records_by_type_pointer:
        # BUG FIX: same dangling-string problem here; log the duplicate
        # and keep the record already registered
        self.logger.error(
            "DummyImporter.remember_record_by_pointer: duplicate (%s,%s)" % key)
        return
    self.records_by_type_pointer[key] = record
def remember_record(self, record):
    """Register record in both lookup hashes: first by (type, hrn),
    then by (type, pointer)."""
    for register in (self.remember_record_by_hrn,
                     self.remember_record_by_pointer):
        register(record)
def locate_by_type_hrn(self, type, hrn):
    """Return the record already known under (type, hrn), or None."""
    try:
        return self.records_by_type_hrn[(type, hrn)]
    except KeyError:
        return None
def locate_by_type_pointer(self, type, pointer):
    """Return the record already known under (type, pointer), or None."""
    known = self.records_by_type_pointer
    lookup_key = (type, pointer)
    return known[lookup_key] if lookup_key in known else None
84 # a convenience/helper function to see if a record is already known
85 # a former, broken, attempt (in 2.1-9) had been made
86 # to try and use 'pointer' as a first, most significant attempt
87 # the idea being to preserve stuff as much as possible, and thus
88 # to avoid creating a new gid in the case of a simple hrn rename
89 # however this of course doesn't work as the gid depends on the hrn...
90 # def locate (self, type, hrn=None, pointer=-1):
92 # attempt = self.locate_by_type_pointer (type, pointer)
93 # if attempt : return attempt
95 # attempt = self.locate_by_type_hrn (type, hrn,)
96 # if attempt : return attempt
# this makes the run method a bit abstruse - out of the way
def run(self, options):
    """One-shot import of the dummy testbed into the SFA registry.

    Reads sites, nodes, users (with their ssh keys) and slices from the
    testbed shell and creates the corresponding RegAuthority, RegNode,
    RegUser/RegKey and RegSlice records, then deletes records no longer
    backed by the testbed (stale).

    NOTE(review): many structural lines (loop/if/try headers and
    logger-call openings) appear to be missing from this excerpt; the
    hedged notes below flag the obvious gaps — confirm against the full
    file before changing anything.
    """
    # NOTE(review): 'config' is read below but its initialization
    # (presumably config = Config()) is not visible in this excerpt
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = DummyShell(config)

    # retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()

    # create hash by (type,hrn)
    # we essentially use this to know if a given record is already known to
    # the local registry
    self.records_by_type_hrn = \
        dict([((record.type, record.hrn), record)
              for record in all_records])
    # create hash by (type,pointer)
    self.records_by_type_pointer = \
        dict([((record.type, record.pointer), record) for record in all_records
              if record.pointer != -1])

    # initialize record.stale to True by default, then mark stale=False on
    # the ones that are in use
    for record in all_records:
        # NOTE(review): the loop body (presumably 'record.stale = True')
        # is missing from this excerpt

    # retrieve Dummy TB data
    # retrieve only required stuff
    sites = [shell.GetTestbedInfo()]
    # create a hash of sites by login_base
    # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
    # Get all dummy TB users
    users = shell.GetUsers()
    # create a hash of users by user_id
    users_by_id = dict([(user['user_id'], user) for user in users])
    # Get all dummy TB public keys
    # NOTE(review): the initialization of 'keys' and the enclosing
    # 'for user in users:' loop header are missing from this excerpt
    keys.extend(user['keys'])
    # create a dict user_id -> [ keys ]
    keys_by_person_id = {}
    # NOTE(review): enclosing 'for user in users:' loop header missing
    keys_by_person_id[user['user_id']] = user['keys']
    # Get all dummy TB nodes
    nodes = shell.GetNodes()
    # create hash by node_id
    nodes_by_id = dict([(node['node_id'], node, ) for node in nodes])
    # Get all dummy TB slices
    slices = shell.GetSlices()
    # create hash by slice_id
    slices_by_id = dict([(slice['slice_id'], slice) for slice in slices])

    # --- import the site(s) as authorities ---
    # NOTE(review): enclosing 'for site in sites:' loop header missing
    site_hrn = _get_site_hrn(interface_hrn, site)
    # import if hrn is not in list of existing hrns or if the hrn exists
    # but its not a site record
    site_record = self.locate_by_type_hrn('authority', site_hrn)
    # NOTE(review): an 'if not site_record:' / 'try:' guard presumably
    # opened here; not visible in this excerpt
    urn = hrn_to_urn(site_hrn, 'authority')
    if not self.auth_hierarchy.auth_exists(urn):
        self.auth_hierarchy.create_auth(urn)
    auth_info = self.auth_hierarchy.get_auth_info(urn)
    # new authority record for the site; a pointer argument may have
    # been lost between these two visible lines
    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                               authority=get_authority(site_hrn))
    site_record.just_created()
    global_dbsession.add(site_record)
    global_dbsession.commit()
    # NOTE(review): the 'self.logger.info(' opening of this call is missing
    "DummyImporter: imported authority (site) : %s" % site_record)
    self.remember_record(site_record)

    # if the site import fails then there is no point in trying to import the
    # site's child records (node, slices, persons), so skip
    # NOTE(review): the 'except:' / 'self.logger.log_exc(' lines are missing
    "DummyImporter: failed to import site. Skipping child records")
    # xxx update the record ...
    site_record.stale = False

    # import node records
    # NOTE(review): enclosing 'for node in nodes:' loop header missing
    site_auth = get_authority(site_hrn)
    site_name = site['name']
    node_hrn = hostname_to_hrn(
        site_auth, site_name, node['hostname'])
    # xxx this sounds suspicious
    if len(node_hrn) > 64:
        node_hrn = node_hrn[:64]
    node_record = self.locate_by_type_hrn('node', node_hrn)
    # NOTE(review): 'if not node_record:' / 'try:' presumably opened here
    pkey = Keypair(create=True)
    urn = hrn_to_urn(node_hrn, 'node')
    node_gid = self.auth_hierarchy.create_gid(
        urn, create_uuid(), pkey)
    node_record = RegNode(hrn=node_hrn, gid=node_gid,
                          pointer=node['node_id'],
                          authority=get_authority(node_hrn))
    node_record.just_created()
    global_dbsession.add(node_record)
    global_dbsession.commit()
    # NOTE(review): logger call opening missing
    "DummyImporter: imported node: %s" % node_record)
    self.remember_record(node_record)
    # NOTE(review): 'except:' / log_exc opening missing
    "DummyImporter: failed to import node")
    # xxx update the record ...
    node_record.stale = False

    # import persons
    # NOTE(review): enclosing 'for user in users:' loop header missing
    user_hrn = email_to_hrn(site_hrn, user['email'])
    # xxx suspicious again
    if len(user_hrn) > 64:
        user_hrn = user_hrn[:64]
    user_urn = hrn_to_urn(user_hrn, 'user')
    user_record = self.locate_by_type_hrn('user', user_hrn)

    # return a tuple pubkey (a dummy TB key object) and pkey (a
    # Keypair) — NOTE(review): this comment is truncated in the excerpt
    def init_user_key(user):
        # NOTE(review): the initialization of pubkey/pkey and the
        # if/try structure of this helper are missing from this excerpt
        # randomly pick first key in set
        for key in user['keys']:
            pkey = convert_public_key(pubkey)
        # NOTE(review): warning-call opening missing
        'DummyImporter: unable to convert public key for %s' % user_hrn)
        pkey = Keypair(create=True)
        # the user has no keys. Creating a random keypair for
        # NOTE(review): logger-call opening missing; the message mentions
        # NITOS although this is the dummy importer — likely copy/paste
        "DummyImporter: user %s does not have a NITOS public key" % user_hrn)
        pkey = Keypair(create=True)
        return (pubkey, pkey)

    # NOTE(review): 'if not user_record:' / 'try:' presumably opened here
    (pubkey, pkey) = init_user_key(user)
    user_gid = self.auth_hierarchy.create_gid(
        user_urn, create_uuid(), pkey)
    user_gid.set_email(user['email'])
    user_record = RegUser(hrn=user_hrn, gid=user_gid,
                          pointer=user['user_id'],
                          authority=get_authority(
    # NOTE(review): the closing of the get_authority(...) call and an
    # 'if pubkey:' guard appear to be missing from this excerpt
    user_record.reg_keys = [RegKey(pubkey)]
    # NOTE(review): 'else:' / logger.warning opening missing
    "No key found for user %s" % user_record)
    user_record.just_created()
    global_dbsession.add(user_record)
    global_dbsession.commit()
    # NOTE(review): logger call opening missing
    "DummyImporter: imported person: %s" % user_record)
    self.remember_record(user_record)
    # update the record ?
    # if user's primary key has changed then we need to update the
    # users gid by forcing an update here
    sfa_keys = user_record.reg_keys

    def key_in_list(key, sfa_keys):
        # membership test of a raw key against the registry keys
        for reg_key in sfa_keys:
            if reg_key.key == key:
                # NOTE(review): 'return True' and the final
                # 'return False' lines are missing from this excerpt

    # is there a new key in Dummy TB ?
    for key in user['keys']:
        if not key_in_list(key, sfa_keys):
            # NOTE(review): the body recording that a new key was found
            # (and the guard on it) is missing from this excerpt
    (pubkey, pkey) = init_user_key(user)
    user_gid = self.auth_hierarchy.create_gid(
        user_urn, create_uuid(), pkey)
    user_record.reg_keys = []
    user_record.reg_keys = [RegKey(pubkey)]
    # NOTE(review): logger call opening missing
    "DummyImporter: updated person: %s" % user_record)
    user_record.email = user['email']
    global_dbsession.commit()
    user_record.stale = False
    # NOTE(review): the 'except:' this log_exc belongs to is missing
    self.logger.log_exc("DummyImporter: failed to import user %d %s" % (
        user['user_id'], user['email']))

    # import slices
    # NOTE(review): enclosing 'for slice in slices:' loop header missing
    slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name'])
    slice_record = self.locate_by_type_hrn('slice', slice_hrn)
    # NOTE(review): 'if not slice_record:' / 'try:' presumably opened here
    pkey = Keypair(create=True)
    urn = hrn_to_urn(slice_hrn, 'slice')
    slice_gid = self.auth_hierarchy.create_gid(
        urn, create_uuid(), pkey)
    slice_record = RegSlice(hrn=slice_hrn, gid=slice_gid,
                            pointer=slice['slice_id'],
                            authority=get_authority(slice_hrn))
    slice_record.just_created()
    global_dbsession.add(slice_record)
    global_dbsession.commit()
    # NOTE(review): logger call opening missing
    "DummyImporter: imported slice: %s" % slice_record)
    self.remember_record(slice_record)
    # NOTE(review): 'except:' / log_exc opening missing
    "DummyImporter: failed to import slice")
    # xxx update the record ...
    self.logger.warning("Slice update not yet implemented")
    # record current users affiliated with the slice
    slice_record.reg_researchers = \
        [self.locate_by_type_pointer(
            'user', user_id) for user_id in slice['user_ids']]
    global_dbsession.commit()
    slice_record.stale = False

    # remove stale records
    # special records must be preserved
    system_hrns = [interface_hrn, root_auth,
                   interface_hrn + '.slicemanager']
    for record in all_records:
        if record.hrn in system_hrns:
            # NOTE(review): body (presumably 'record.stale = False')
            # missing from this excerpt
        if record.peer_authority:
            # NOTE(review): body (presumably 'record.stale = False')
            # missing from this excerpt
    for record in all_records:
        # NOTE(review): a try/except reading record.stale presumably
        # surrounded this warning in the original
        self.logger.warning("stale not found with %s" % record)
        # NOTE(review): an 'if stale:' guard and the logger.info opening
        # for the message below are missing from this excerpt
        "DummyImporter: deleting stale record: %s" % record)
        global_dbsession.delete(record)
        global_dbsession.commit()