sfa/importer/dummyimporter.py
#
# Dummy importer

# requirements

# read the dummy testbed database and update the local registry database accordingly
# so we update the following collections
# . authorities                 (from testbed sites)
# . node                        (from testbed nodes)
# . users+keys                  (from testbed users and attached keys)
#                       known limitation : *one* of the ssh keys is chosen at random here
#                       xxx todo/check xxx at the very least, when a key is known to the registry
#                       and is still current in the testbed
#                       then we should definitely make sure to keep that one in sfa...
# . slice+researchers           (from testbed slices and attached users)

import os

from sfa.util.config import Config
from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn

from sfa.trust.gid import create_uuid
from sfa.trust.certificate import convert_public_key, Keypair

# using global alchemy.session() here is fine
# as the importer is a standalone one-shot process
from sfa.storage.alchemy import global_dbsession
from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey

from sfa.dummy.dummyshell import DummyShell
from sfa.dummy.dummyxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_dummy_slicename

def _get_site_hrn(interface_hrn, site):
    hrn = ".".join([interface_hrn, site['name']])
    return hrn

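# for illustration (sample values only, not actual config): with interface_hrn
# 'topauth' and a site whose 'name' is 'dummy', _get_site_hrn returns 'topauth.dummy'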

class DummyImporter:

    def __init__ (self, auth_hierarchy, logger):
        self.auth_hierarchy = auth_hierarchy
        self.logger=logger

    def add_options (self, parser):
        # we don't have any options for now
        pass

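    # for context, a rough (hypothetical) driving sequence - the actual sfa-import
    # wrapper that instantiates this class lives outside this file:
    #   importer = DummyImporter(auth_hierarchy, logger)
    #   importer.add_options(parser)
    #   importer.run(options)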
    # hrn hash is initialized from current db
    # remember just-created records as we go
    # xxx might make sense to add a UNIQUE constraint in the db itself
    def remember_record_by_hrn (self, record):
        tuple = (record.type, record.hrn)
        if tuple in self.records_by_type_hrn:
            self.logger.warning ("DummyImporter.remember_record_by_hrn: duplicate (%s,%s)"%tuple)
            return
        self.records_by_type_hrn [ tuple ] = record

    # ditto for pointer hash
    def remember_record_by_pointer (self, record):
        if record.pointer == -1:
            self.logger.warning ("DummyImporter.remember_record_by_pointer: pointer is void")
            return
        tuple = (record.type, record.pointer)
        if tuple in self.records_by_type_pointer:
            self.logger.warning ("DummyImporter.remember_record_by_pointer: duplicate (%s,%s)"%tuple)
            return
        self.records_by_type_pointer [ ( record.type, record.pointer,) ] = record

    def remember_record (self, record):
        self.remember_record_by_hrn (record)
        self.remember_record_by_pointer (record)

    def locate_by_type_hrn (self, type, hrn):
        return self.records_by_type_hrn.get ( (type, hrn), None)

    def locate_by_type_pointer (self, type, pointer):
        return self.records_by_type_pointer.get ( (type, pointer), None)

    # a convenience/helper function to see if a record is already known
    # a former, broken, attempt (in 2.1-9) had been made
    # to try and use 'pointer' as a first, most significant attempt
    # the idea being to preserve stuff as much as possible, and thus
    # to avoid creating a new gid in the case of a simple hrn rename
    # however this of course doesn't work as the gid depends on the hrn...
    #def locate (self, type, hrn=None, pointer=-1):
    #    if pointer!=-1:
    #        attempt = self.locate_by_type_pointer (type, pointer)
    #        if attempt : return attempt
    #    if hrn is not None:
    #        attempt = self.locate_by_type_hrn (type, hrn,)
    #        if attempt : return attempt
    #    return None

    # this would make the run method a bit abstruse - kept out of the way

    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = DummyShell (config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn)
        # we essentially use this to know if a given record is already known to SFA
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer)
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
                     if record.pointer != -1] )

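        # for illustration, records_by_type_hrn ends up shaped like
        #   { ('authority', 'topauth.dummy'): <RegAuthority>, ('user', 'topauth.dummy.alice'): <RegUser>, ... }
        # and records_by_type_pointer like { ('user', 3): <RegUser>, ... }
        # (keys shown are made-up samples)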
        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True
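        # (mark-and-sweep: whatever is still flagged stale at the end of run() gets deleted below)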

        ######## retrieve Dummy TB data
        # Get the dummy testbed info - it acts as the one and only site
        # retrieve only the required stuff
        sites = [shell.GetTestbedInfo()]
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all dummy TB users
        users = shell.GetUsers()
        # create a hash of users by user_id
        users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
        # Get all dummy TB public keys
        keys = []
        for user in users:
            if 'keys' in user:
                keys.extend(user['keys'])
        # create a dict user_id -> [ keys ]
        keys_by_person_id = {}
        for user in users:
            if 'keys' in user:
                keys_by_person_id[user['user_id']] = user['keys']
        # Get all dummy TB nodes
        nodes = shell.GetNodes()
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all dummy TB slices
        slices = shell.GetSlices()
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

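        # for reference, the dummy TB objects handled below are expected to carry at
        # least the fields this importer reads, e.g. (made-up sample values):
        #   user : {'user_id': 3, 'email': 'alice@foo.com', 'keys': ['ssh-rsa AAAA...']}
        #   node : {'node_id': 7, 'hostname': 'node7.testbed.example.org'}
        #   slice: {'slice_id': 1, 'slice_name': 'dummy_slice1', 'user_ids': [3]}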
        # start importing
        for site in sites:
            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but it's not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("DummyImporter: imported authority (site) : %s" % site_record)
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("DummyImporter: failed to import site. Skipping child records")
                    continue
            else:
                # xxx update the record ...
                pass
            site_record.stale=False

            # import node records
            for node in nodes:
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid,
                                               pointer=node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported node: %s" % node_record)
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("DummyImporter: failed to import node")
                        # skip this node altogether, node_record is not usable here
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False

            site_pis=[]
            # import users
            for user in users:
                user_hrn = email_to_hrn(site_hrn, user['email'])
                # xxx suspicious again
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', user_hrn)

                # returns a tuple (pubkey, pkey): pubkey is a dummy TB key, pkey a Keypair object

                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if user['keys']:
                        # pick the first key in the set that converts successfully
                        for key in user['keys']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys: create a random keypair for the user's gid
                        self.logger.warn("DummyImporter: user %s does not have a public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

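                # for illustration: a user with at least one convertible ssh key gets
                # (that key, its Keypair); with no keys at all, (None, a fresh Keypair);
                # with only unconvertible keys, (last key tried, a fresh Keypair)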
                # new user
                try:
                    if not user_record:
                        (pubkey,pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email(user['email'])
                        user_record = RegUser (hrn=user_hrn, gid=user_gid,
                                               pointer=user['user_id'],
                                               authority=get_authority(user_hrn),
                                               email=user['email'])
                        if pubkey:
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )

                    else:
                        # update the record ?
                        # if the user's primary key has changed then we need to update the
                        # user's gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key in Dummy TB ?
                        new_keys=False
                        for key in user['keys']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("DummyImporter: updated person: %s" % user_record)
                    user_record.email = user['email']
                    global_dbsession.commit()
                    user_record.stale=False
                except:
                    self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))

            # import slices
            for slice in slices:
                slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name'])
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported slice: %s" % slice_record)
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("DummyImporter: failed to import slice")
                        # skip this slice altogether, slice_record is not usable here
                        continue
                else:
                    # xxx update the record ...
                    self.logger.warning ("Slice update not yet implemented")
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
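                # note: locate_by_type_pointer returns None for user_ids that are not
                # (yet) known to the registry, so such users show up as None entries here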
                global_dbsession.commit()
                slice_record.stale=False

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale=False
            if record.peer_authority:
                record.stale=False

        for record in all_records:
            try:
                stale=record.stale
            except:
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("DummyImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()