6 # read the dummy testbed data (via the dummy shell) and update the local registry database accordingly
7 # so we update the following collections
8 # . authorities (from pl sites)
9 # . node (from pl nodes)
10 # . users+keys (from pl persons and attached keys)
11 # known limitation : *one* of the ssh keys is chosen at random here
12 # xxx todo/check xxx at the very least, when a key is known to the registry
13 # and is still current in plc
14 # then we should definitely make sure to keep that one in sfa...
15 # . slice+researchers (from pl slices and attached users)
20 from sfa.util.config import Config
21 from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
23 from sfa.trust.gid import create_uuid
24 from sfa.trust.certificate import convert_public_key, Keypair
26 from sfa.storage.alchemy import dbsession
27 from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
29 from sfa.dummy.dummyshell import PlShell
30 from sfa.dummy.dummyxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_dummy_slicename
def _get_site_hrn(interface_hrn, site):
    """Return the hrn of *site* under *interface_hrn*.

    The site hrn is simply the interface hrn with the site's name appended,
    e.g. ("topauth", {'name': 'dummy'}) -> "topauth.dummy".

    Fix: the computed hrn was built but never returned, so callers
    (e.g. site_hrn = _get_site_hrn(...) in run()) received None.
    """
    hrn = ".".join([interface_hrn, site['name']])
    return hrn
39 def __init__ (self, auth_hierarchy, logger):
40 self.auth_hierarchy = auth_hierarchy
43 def add_options (self, parser):
44 # we don't have any options for now
47 # hrn hash is initialized from current db
48 # remember just-created records as we go
49 # xxx might make sense to add a UNIQUE constraint in the db itself
50 def remember_record_by_hrn (self, record):
51 tuple = (record.type, record.hrn)
52 if tuple in self.records_by_type_hrn:
53 self.logger.warning ("DummyImporter.remember_record_by_hrn: duplicate (%s,%s)"%tuple)
55 self.records_by_type_hrn [ tuple ] = record
57 # ditto for pointer hash
58 def remember_record_by_pointer (self, record):
59 if record.pointer == -1:
60 self.logger.warning ("DummyImporter.remember_record_by_pointer: pointer is void")
62 tuple = (record.type, record.pointer)
63 if tuple in self.records_by_type_pointer:
64 self.logger.warning ("DummyImporter.remember_record_by_pointer: duplicate (%s,%s)"%tuple)
66 self.records_by_type_pointer [ ( record.type, record.pointer,) ] = record
68 def remember_record (self, record):
69 self.remember_record_by_hrn (record)
70 self.remember_record_by_pointer (record)
72 def locate_by_type_hrn (self, type, hrn):
73 return self.records_by_type_hrn.get ( (type, hrn), None)
75 def locate_by_type_pointer (self, type, pointer):
76 return self.records_by_type_pointer.get ( (type, pointer), None)
78 # a convenience/helper function to see if a record is already known
79 # a former, broken, attempt (in 2.1-9) had been made
80 # to try and use 'pointer' as a first, most significant attempt
81 # the idea being to preserve stuff as much as possible, and thus
82 # to avoid creating a new gid in the case of a simple hrn rename
83 # however this of course doesn't work as the gid depends on the hrn...
84 #def locate (self, type, hrn=None, pointer=-1):
86 # attempt = self.locate_by_type_pointer (type, pointer)
87 # if attempt : return attempt
89 # attempt = self.locate_by_type_hrn (type, hrn,)
90 # if attempt : return attempt
93 # this makes the run method a bit abtruse - out of the way
    def run (self, options):
        """Import sites, nodes, users (+ssh keys) and slices from the dummy
        testbed into the SFA registry, then delete registry records that no
        longer have a testbed counterpart (the 'stale' ones).

        NOTE(review): this view of the file is missing many lines (loop and
        try/except headers, some assignments, commits).  The statements below
        are kept verbatim, indented to reflect their apparent nesting, with
        NOTE(review) comments flagging both the gaps and the concrete defects
        to confirm against the complete file.
        """
        # NOTE(review): 'config' is read here but its assignment (presumably
        # config = Config()) is not visible in this chunk
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        # NOTE(review): only PlShell is imported from sfa.dummy.dummyshell at
        # the top of the file, so 'DummyShell' raises NameError as written --
        # the import line looks wrong; confirm and fix the import
        shell = DummyShell (config)

        ######## retrieve all existing SFA objects
        all_records = dbsession.query(RegRecord).all()

        # create hash by (type,hrn)
        # we essentially use this to know if a given record is already known to SFA
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer)
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True

        ######## retrieve Dummy TB data
        # retrieve only required stuff
        # the whole testbed is modeled as a single 'site'
        sites = [shell.GetTestbedInfo()]
        # create a hash of sites by login_base
        # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all dummy TB users
        users = shell.GetUsers()
        # create a hash of users by user_id
        users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
        # Get all dummy TB public keys
        # NOTE(review): the 'keys = []' initialization and the enclosing
        # 'for user in users:' loop are not visible in this chunk
        keys.extend(user['keys'])
        # create a dict user_id -> [ keys ]
        keys_by_person_id = {}
        # NOTE(review): the enclosing 'for user in users:' loop is not visible here
        keys_by_person_id[user['user_id']] = user['keys']
        # Get all dummy TB nodes
        nodes = shell.GetNodes()
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all dummy TB slices
        slices = shell.GetSlices()
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

        # --- site (authority) import ---
        # NOTE(review): the enclosing 'for site in sites:' loop header is not visible here
        site_hrn = _get_site_hrn(interface_hrn, site)
        # import if hrn is not in list of existing hrns or if the hrn exists
        # but it's not a site record
        site_record=self.locate_by_type_hrn ('authority', site_hrn)
        # NOTE(review): the 'if not site_record:' and 'try:' lines are not visible here
        urn = hrn_to_urn(site_hrn, 'authority')
        if not self.auth_hierarchy.auth_exists(urn):
            self.auth_hierarchy.create_auth(urn)
        auth_info = self.auth_hierarchy.get_auth_info(urn)
        site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                   # NOTE(review): an argument line (presumably pointer=...) is missing here
                                   authority=get_authority(site_hrn))
        site_record.just_created()
        dbsession.add(site_record)
        # NOTE(review): the dbsession.commit() line is not visible here
        self.logger.info("DummyImporter: imported authority (site) : %s" % site_record)
        self.remember_record (site_record)
        # NOTE(review): the 'except:' introducing this handler is not visible here
        # if the site import fails then there is no point in trying to import the
        # site's child records (node, slices, persons), so skip them.
        self.logger.log_exc("DummyImporter: failed to import site. Skipping child records")
        # xxx update the record ...
        site_record.stale=False

        # --- node import ---
        # NOTE(review): the enclosing 'for node in nodes:' loop header is not visible here
        site_auth = get_authority(site_hrn)
        site_name = site['name']
        node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
        # xxx this sounds suspicious -- silent truncation can collide hrns
        if len(node_hrn) > 64: node_hrn = node_hrn[:64]
        node_record = self.locate_by_type_hrn ( 'node', node_hrn )
        # NOTE(review): the 'if not node_record:' and 'try:' lines are not visible here
        pkey = Keypair(create=True)
        urn = hrn_to_urn(node_hrn, 'node')
        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
        node_record = RegNode (hrn=node_hrn, gid=node_gid,
                               pointer =node['node_id'],
                               authority=get_authority(node_hrn))
        node_record.just_created()
        dbsession.add(node_record)
        self.logger.info("DummyImporter: imported node: %s" % node_record)
        self.remember_record (node_record)
        # NOTE(review): the 'except:' handler header is not visible here
        self.logger.log_exc("DummyImporter: failed to import node")
        # xxx update the record ...
        node_record.stale=False

        # --- person (user) import ---
        # NOTE(review): the enclosing 'for user in users:' loop header is not visible here
        user_hrn = email_to_hrn(site_hrn, user['email'])
        # xxx suspicious again
        # NOTE(review): 'person_hrn' is undefined -- this line presumably
        # intended to truncate 'user_hrn'; confirm against the complete file
        if len(person_hrn) > 64: person_hrn = person_hrn[:64]
        user_urn = hrn_to_urn(user_hrn, 'user')
        user_record = self.locate_by_type_hrn ( 'user', user_hrn)

        # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object)
        def init_user_key (user):
            # NOTE(review): the pubkey/pkey initializations, the
            # "if user['keys']:" guard and the 'try:' around the conversion
            # are not visible in this chunk
            # randomly pick first key in set
            for key in user['keys']:
                # NOTE(review): the assignment of 'pubkey' (from 'key') is not visible here
                pkey = convert_public_key(pubkey)
                # NOTE(review): the 'except:' handler header is not visible here
                self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn)
                pkey = Keypair(create=True)
            # the user has no keys. Creating a random keypair for the user's gid
            # NOTE(review): the message says 'NITOS' inside the Dummy importer --
            # looks like a copy/paste leftover; confirm the intended wording
            self.logger.warn("DummyImporter: user %s does not have a NITOS public key"%user_hrn)
            pkey = Keypair(create=True)
            return (pubkey, pkey)

        # NOTE(review): the 'if not user_record:' and 'try:' lines are not visible here
        (pubkey,pkey) = init_user_key (user)
        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
        user_gid.set_email(user['email'])
        user_record = RegUser (hrn=user_hrn, gid=user_gid,
                               pointer=user['user_id'],
                               authority=get_authority(user_hrn),
                               # NOTE(review): the closing argument line(s) of this call
                               # (presumably email=user['email'])) are not visible here
        # NOTE(review): the 'if pubkey:' guard is not visible here
        user_record.reg_keys=[RegKey (pubkey)]
        # NOTE(review): the 'else:' branch header is not visible here
        self.logger.warning("No key found for user %s"%user_record)
        user_record.just_created()
        dbsession.add (user_record)
        self.logger.info("DummyImporter: imported person: %s" % user_record)
        self.remember_record ( user_record )
        # NOTE(review): the 'else:' (record already known) branch header is not visible here
        # update the record ?
        # if user's primary key has changed then we need to update the
        # users gid by forcing an update here
        sfa_keys = user_record.reg_keys
        def key_in_list (key,sfa_keys):
            # True iff *key* matches one of the RegKey objects in *sfa_keys*
            for reg_key in sfa_keys:
                if reg_key.key==key: return True
            # NOTE(review): falls through returning None when not found (truthy-False)
        # is there a new key in Dummy TB ?
        # NOTE(review): the 'new_keys=False' initialization is not visible here
        for key in user['keys']:
            if not key_in_list (key,sfa_keys):
                # NOTE(review): the 'new_keys = True' body of this branch is not visible here
        # NOTE(review): the 'if new_keys:' guard is not visible here
        (pubkey,pkey) = init_user_key (user)
        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
        # NOTE(review): the 'if not pubkey:' guard is not visible here
        user_record.reg_keys=[]
        # NOTE(review): the 'else:' branch header is not visible here
        user_record.reg_keys=[ RegKey (pubkey)]
        self.logger.info("DummyImporter: updated person: %s" % user_record)
        # NOTE(review): 'person' is undefined -- presumably should read
        # user['email']; confirm against the complete file
        user_record.email = person['email']
        user_record.stale=False
        # NOTE(review): the 'except:' handler header is not visible here
        self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))

        # --- slice import ---
        # NOTE(review): the enclosing 'for slice in slices:' loop header is not visible here
        # NOTE(review): 'slice_ame' looks like a typo for 'slice_name'; confirm the key name
        slice_hrn = slicename_to_hrn(interface_hrn, slice['slice_ame'])
        slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
        # NOTE(review): the 'if not slice_record:' and 'try:' lines are not visible here
        pkey = Keypair(create=True)
        urn = hrn_to_urn(slice_hrn, 'slice')
        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                 pointer=slice['slice_id'],
                                 authority=get_authority(slice_hrn))
        slice_record.just_created()
        dbsession.add(slice_record)
        self.logger.info("DummyImporter: imported slice: %s" % slice_record)
        self.remember_record ( slice_record )
        # NOTE(review): the 'except:' handler header is not visible here
        self.logger.log_exc("DummyImporter: failed to import slice")
        # xxx update the record ...
        self.logger.warning ("Slice update not yet implemented")
        # record current users affiliated with the slice
        slice_record.reg_researchers = \
            [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
        slice_record.stale=False

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records:
            if record.hrn in system_hrns:
                # NOTE(review): this branch's body (presumably record.stale=False) is not visible here
            if record.peer_authority:
                # NOTE(review): this branch's body (presumably record.stale=False) is not visible here
        for record in all_records:
            try: stale=record.stale
            # NOTE(review): the 'except:' header (presumably defaulting stale=True) is not visible here
            self.logger.warning("stale not found with %s"%record)
            # NOTE(review): the 'if stale:' guard is not visible here
            self.logger.info("DummyImporter: deleting stale record: %s" % record)
            dbsession.delete(record)
        # NOTE(review): the final dbsession.commit() is not visible in this chunk