3 from sfa.util.config import Config
4 from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
5 from sfa.util.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename
7 from sfa.trust.gid import create_uuid
8 from sfa.trust.certificate import convert_public_key, Keypair
10 from sfa.storage.alchemy import dbsession
11 from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
13 from sfa.plc.plshell import PlShell
def _get_site_hrn(interface_hrn, site):
    """Return the hrn for a PLC site under *interface_hrn*.

    Normally this is '<interface_hrn>.<login_base>'; on vini interfaces,
    sites hosting internet2 nodes (login_base starting with 'i2' or 'nlr')
    are filed under an extra 'internet2' intermediate authority.
    """
    # Hardcode 'internet2' into the hrn for sites hosting
    # internet2 nodes. This is a special operation for some vini sites only.
    hrn = ".".join([interface_hrn, site['login_base']])
    if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
        if site['login_base'].startswith("i2") or site['login_base'].startswith("nlr"):
            hrn = ".".join([interface_hrn, "internet2", site['login_base']])
    # bug fix: the computed hrn was never returned (the function fell
    # through and returned None, breaking every caller)
    return hrn
28 def __init__ (self, auth_hierarchy, logger):
29 self.auth_hierarchy = auth_hierarchy
32 def add_options (self, parser):
33 # we don't have any options for now
# this makes the run method a bit abstruse - out of the way
37 def create_special_vini_record (self, interface_hrn):
38 # special case for vini
39 if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
40 # create a fake internet2 site first
41 i2site = {'name': 'Internet2', 'login_base': 'internet2', 'site_id': -1}
42 site_hrn = _get_site_hrn(interface_hrn, i2site)
43 # import if hrn is not in list of existing hrns or if the hrn exists
44 # but its not a site record
45 if ( 'authority', site_hrn, ) not in self.records_by_type_hrn:
46 urn = hrn_to_urn(site_hrn, 'authority')
47 if not self.auth_hierarchy.auth_exists(urn):
48 self.auth_hierarchy.create_auth(urn)
49 auth_info = self.auth_hierarchy.get_auth_info(urn)
50 auth_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
51 pointer=site['site_id'],
52 authority=get_authority(site_hrn))
53 auth_record.just_created()
54 dbsession.add(auth_record)
56 self.logger.info("PlImporter: Imported authority (vini site) %s"%auth_record)
58 def locate_by_type_hrn (self, type, hrn):
59 return self.records_by_type_hrn.get ( (type, hrn), None)
61 def run (self, options):
63 interface_hrn = config.SFA_INTERFACE_HRN
64 root_auth = config.SFA_REGISTRY_ROOT_AUTH
65 shell = PlShell (config)
67 ######## retrieve all existing SFA objects
68 all_records = dbsession.query(RegRecord).all()
70 # create indexes / hashes by (type,hrn)
71 self.records_by_type_hrn = dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
72 # and by (type,pointer)
73 # the idea was to try and retrieve an sfa record from the pointer data,
74 # in case the plc name (e.g. email address) has changed
75 # and the new hrn can't be located in the current sfa records
76 # however it sounds like it's eventually safer to just create a new sfa record
77 # and let the old one get garbage-collected with stale records
78 # self.records_by_type_pointer = \
79 # dict ( [ ( (record.type, record.pointer) , record ) for record in all_records if record.pointer != -1 ] )
81 # initialize record.stale to True by default, then mark stale=False on the ones that are in use
82 for record in all_records: record.stale=True
84 ######## retrieve PLC data
86 # retrieve only required stuf
87 sites = shell.GetSites({'peer_id': None, 'enabled' : True},
88 ['site_id','login_base','node_ids','slice_ids','person_ids',])
89 # create a hash of sites by login_base
90 sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
92 persons = shell.GetPersons({'peer_id': None, 'enabled': True},
93 ['person_id', 'email', 'key_ids', 'site_ids'])
94 # create a hash of persons by person_id
95 persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
96 # Get all plc public keys
97 # accumulate key ids for keys retrieval
99 for person in persons:
100 key_ids.extend(person['key_ids'])
101 keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids} )
102 # create a hash of keys by key_id
103 keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] )
104 # create a dict person_id -> [ (plc)keys ]
105 keys_by_person_id = {}
106 for person in persons:
108 for key_id in person['key_ids']:
109 pubkeys.append(keys_by_id[key_id])
110 keys_by_person_id[person['person_id']] = pubkeys
112 nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
113 # create hash by node_id
114 nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
116 slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name'])
117 # create hash by slice_id
118 slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )
120 # isolate special vini case in separate method
121 self.create_special_vini_record (interface_hrn)
125 site_hrn = _get_site_hrn(interface_hrn, site)
127 # import if hrn is not in list of existing hrns or if the hrn exists
128 # but its not a site record
129 site_record=self.locate_by_type_hrn ('authority', site_hrn)
132 urn = hrn_to_urn(site_hrn, 'authority')
133 if not self.auth_hierarchy.auth_exists(urn):
134 self.auth_hierarchy.create_auth(urn)
135 auth_info = self.auth_hierarchy.get_auth_info(urn)
136 site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
137 pointer=site['site_id'],
138 authority=get_authority(site_hrn))
139 site_record.just_created()
140 dbsession.add(site_record)
142 self.logger.info("PlImporter: imported authority (site) : %s" % site_record)
144 # if the site import fails then there is no point in trying to import the
145 # site's child records (node, slices, persons), so skip them.
146 self.logger.log_exc("PlImporter: failed to import site. Skipping child records")
148 site_record.stale=False
150 # import node records
151 for node_id in site['node_ids']:
153 node = nodes_by_id[node_id]
155 self.logger.warning ("PlImporter: cannot locate node_id %s - ignored"%node_id)
157 site_auth = get_authority(site_hrn)
158 site_name = site['login_base']
159 hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
160 # xxx this sounds suspicious
161 if len(hrn) > 64: hrn = hrn[:64]
162 node_record = self.locate_by_type_hrn ( 'node', hrn )
165 pkey = Keypair(create=True)
166 urn = hrn_to_urn(hrn, 'node')
167 node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
168 node_record = RegNode (hrn=hrn, gid=node_gid,
169 pointer =node['node_id'],
170 authority=get_authority(hrn))
171 node_record.just_created()
172 dbsession.add(node_record)
174 self.logger.info("PlImporter: imported node: %s" % node_record)
176 self.logger.log_exc("PlImporter: failed to import node")
177 node_record.stale=False
180 for slice_id in site['slice_ids']:
182 slice = slices_by_id[slice_id]
184 self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
185 slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
186 slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
189 pkey = Keypair(create=True)
190 urn = hrn_to_urn(slice_hrn, 'slice')
191 slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
192 slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
193 pointer=slice['slice_id'],
194 authority=get_authority(slice_hrn))
195 slice_record.just_created()
196 dbsession.add(slice_record)
198 self.logger.info("PlImporter: imported slice: %s" % slice_record)
200 self.logger.log_exc("PlImporter: failed to import slice")
201 slice_record.stale=False
204 for person_id in site['person_ids']:
206 person = persons_by_id[person_id]
208 self.logger.warning ("PlImporter: cannot locate person_id %s - ignored"%person_id)
209 person_hrn = email_to_hrn(site_hrn, person['email'])
210 # xxx suspicious again
211 if len(person_hrn) > 64: person_hrn = person_hrn[:64]
213 person_record = self.locate_by_type_hrn( 'user', person_hrn)
215 # if not person_record:
216 # person_record = self.records_by_type_pointer.get ( ('user', person_id,) )
217 # if user's primary key has changed then we need to update the
218 # users gid by forcing an update here
222 sfa_keys = person_record.reg_keys
223 if person_id in keys_by_person_id:
224 plc_keys = keys_by_person_id[person_id]
225 update_record = False
226 def key_in_list (key,sfa_keys):
227 for reg_key in sfa_keys:
228 if reg_key.key==key['key']: return True
231 if not key_in_list (key,sfa_keys):
234 if not person_record or update_record:
237 if 'key_ids' in person and person['key_ids']:
238 # randomly pick first key in set
241 pkey = convert_public_key(pubkey['key'])
243 self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
244 pkey = Keypair(create=True)
246 # the user has no keys. Creating a random keypair for the user's gid
247 self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
248 pkey = Keypair(create=True)
249 urn = hrn_to_urn(person_hrn, 'user')
250 person_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
252 person_record.gid=person_gid
253 if pubkey: person_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
254 self.logger.info("PlImporter: updated person: %s" % person_record)
256 person_record = RegUser (hrn=person_hrn, gid=person_gid,
257 pointer=person['person_id'],
258 authority=get_authority(person_hrn),
259 email=person['email'])
261 person_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
263 self.logger.warning("No key found for user %s"%person_record)
264 dbsession.add (person_record)
266 self.logger.info("PlImporter: imported person: %s" % person_record)
268 self.logger.log_exc("PlImporter: failed to import person %s"%person_id)
269 person_record.stale=False
271 ### remove stale records
272 # special records must be preserved
273 system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
274 for record in all_records:
275 if record.hrn in system_hrns:
277 if record.peer_authority:
279 if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
280 record.hrn.endswith("internet2"):
283 for record in all_records:
284 try: stale=record.stale
287 self.logger.warning("stale not found with %s"%record)
289 self.logger.info("PlImporter: deleting stale record: %s" % record)
290 dbsession.delete(record)