# Import PLC records into the SFA database. It is intended that this tool be
# run once to create SFA records that reflect the current state of the
# planetlab database.
#
# The import tool assumes that the existing PLC hierarchy should all be part
# of "planetlab.us" (see the root_auth and level1_auth variables below).
#
# Public keys are extracted from the users' SSH keys automatically and used
# to create GIDs. This is relatively experimental, as a custom tool had to be
# written to perform the conversion from SSH to OpenSSL format (an
# illustrative sketch of that conversion appears just below the imports).
# Only RSA keys are supported at this time, not DSA keys.

import os
import sys
import getopt
import logging
import logging.handlers

from sfa.util.record import *
from sfa.util.table import SfaTable
from sfa.util.namespace import *
from sfa.util.config import Config
from sfa.trust.certificate import convert_public_key, Keypair
from sfa.trust.trustedroot import *
from sfa.trust.hierarchy import *
from sfa.plc.api import *
from sfa.trust.gid import create_uuid
from sfa.plc.sfaImport import *
from sfa.util.report import trace, error
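

# The SSH-to-OpenSSL conversion and GID creation mentioned in the header are
# handled inside sfaImport (sfa.plc.sfaImport) when records are imported; the
# helper below is only an illustrative sketch of that path. It assumes the
# create_gid(hrn, uuid, pkey) signature used elsewhere in SFA and is never
# called by this script.
def _example_gid_from_ssh_key(auth_hierarchy, hrn, ssh_pub_key):
    # convert an SSH-format RSA public key into an OpenSSL keypair object
    pkey = convert_public_key(ssh_pub_key)
    # mint a GID for this hrn from a fresh uuid and the converted public key
    return auth_hierarchy.create_gid(hrn, create_uuid(), pkey)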


def process_options():
    (options, args) = getopt.getopt(sys.argv[1:], '', [])

def load_keys(filename):
    keys = {}
    tmp_dict = {}
    if os.path.exists(filename):
        execfile(filename, tmp_dict)
        if 'keys' in tmp_dict:
            keys = tmp_dict['keys']
    return keys

def save_keys(filename, keys):
    f = open(filename, 'w')
    f.write("keys = %s" % str(keys))
    f.close()


def main():
    # set up logging to a rotating file
    LOGFILE = '/var/log/sfa_import_plc.log'
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(message)s',
                        filename=LOGFILE)
    rotate_handler = logging.handlers.RotatingFileHandler(LOGFILE, maxBytes=1000000, backupCount=5)
    logger = logging.getLogger()
    logger.addHandler(rotate_handler)

    process_options()
    config = Config()
    if not config.SFA_REGISTRY_ENABLED:
        sys.exit(0)
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    level1_auth = config.SFA_REGISTRY_LEVEL1_AUTH
    keys_filename = config.config_path + os.sep + 'person_keys.py'
    sfaImporter = sfaImport(logger)
    shell = sfaImporter.shell
    plc_auth = sfaImporter.plc_auth
    AuthHierarchy = sfaImporter.AuthHierarchy
    TrustedRoots = sfaImporter.TrustedRoots

    table = SfaTable()
    if not table.exists():
        table.create()

    # an empty level1_auth means everything is imported directly under root_auth
    if not level1_auth or level1_auth in ['']:
        level1_auth = None
    if not level1_auth:
        sfaImporter.create_top_level_auth_records(root_auth)
        import_auth = root_auth
    else:
        if not AuthHierarchy.auth_exists(level1_auth):
            AuthHierarchy.create_auth(level1_auth)
        sfaImporter.create_top_level_auth_records(level1_auth)
        import_auth = level1_auth

    trace("Import: adding " + import_auth + " to trusted list", logger)
    authority = AuthHierarchy.get_auth_info(import_auth)
    TrustedRoots.add_gid(authority.get_gid_object())

    if ".vini" in import_auth and import_auth.endswith('vini'):
        # create a fake Internet2 site first
        i2site = {'name': 'Internet2', 'abbreviated_name': 'I2',
                  'login_base': 'internet2', 'site_id': -1}
        sfaImporter.import_site(import_auth, i2site)

    # create dict of all existing sfa records
    existing_records = {}
    existing_hrns = []
    key_ids = []
    person_keys = {}
    results = table.find()
    for result in results:
        existing_records[(result['hrn'], result['type'])] = result
        existing_hrns.append(result['hrn'])
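
    # Get all plc sites, keyed on login_base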
    sites = shell.GetSites(plc_auth, {'peer_id': None})
    sites_dict = {}
    for site in sites:
        sites_dict[site['login_base']] = site
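
    # Get all plc users, keyed on person_id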
    persons = shell.GetPersons(plc_auth, {'peer_id': None}, ['person_id', 'email', 'key_ids', 'site_ids'])
    persons_dict = {}
    for person in persons:
        persons_dict[person['person_id']] = person
        key_ids.extend(person['key_ids'])

    # Get all public keys
    keys = shell.GetKeys(plc_auth, {'peer_id': None, 'key_id': key_ids})
    keys_dict = {}
    for key in keys:
        keys_dict[key['key_id']] = key['key']

    # create a dict of person keys keyed on person_id
    old_person_keys = load_keys(keys_filename)
    for person in persons:
        pubkeys = []
        for key_id in person['key_ids']:
            pubkeys.append(keys_dict[key_id])
        person_keys[person['person_id']] = pubkeys
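
    # Get all plc nodes, keyed on node_id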
    nodes = shell.GetNodes(plc_auth, {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
    nodes_dict = {}
    for node in nodes:
        nodes_dict[node['node_id']] = node
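
    # Get all plc slices, keyed on slice_id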
    slices = shell.GetSlices(plc_auth, {'peer_id': None}, ['slice_id', 'name'])
    slices_dict = {}
    for slice in slices:
        slices_dict[slice['slice_id']] = slice
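
    # start importing: walk every site and import its nodes, slices and users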
    for site in sites:
        site_hrn = import_auth + "." + site['login_base']
        # import if the hrn is not in the list of existing hrns, or if the
        # hrn exists but is not a site record
        if site_hrn not in existing_hrns or \
           (site_hrn, 'authority') not in existing_records:
            site_hrn = sfaImporter.import_site(import_auth, site)

        # import node records
        for node_id in site['node_ids']:
            if node_id not in nodes_dict:
                continue
            node = nodes_dict[node_id]
            hrn = hostname_to_hrn(import_auth, site['login_base'], node['hostname'])
            if hrn not in existing_hrns or \
               (hrn, 'node') not in existing_records:
                sfaImporter.import_node(site_hrn, node)
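
        # import slice records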
        for slice_id in site['slice_ids']:
            if slice_id not in slices_dict:
                continue
            slice = slices_dict[slice_id]
            hrn = slicename_to_hrn(import_auth, slice['name'])
            if hrn not in existing_hrns or \
               (hrn, 'slice') not in existing_records:
                sfaImporter.import_slice(site_hrn, slice)
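
        # import user records, re-importing whenever a user's keys have changed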
        for person_id in site['person_ids']:
            if person_id not in persons_dict:
                continue
            person = persons_dict[person_id]
            hrn = email_to_hrn(site_hrn, person['email'])
            old_keys = []
            new_keys = []
            if person_id in old_person_keys:
                old_keys = old_person_keys[person_id]
            if person_id in person_keys:
                new_keys = person_keys[person_id]
            update_record = False
            for key in new_keys:
                if key not in old_keys:
                    update_record = True
            if hrn not in existing_hrns or \
               (hrn, 'user') not in existing_records or update_record:
                sfaImporter.import_person(site_hrn, person)

    # remove stale records
    for (record_hrn, type) in existing_records.keys():
        record = existing_records[(record_hrn, type)]
        # if this is the interface name, don't do anything
        if record_hrn == import_auth or record['peer_authority']:
            continue
        # don't delete vini's Internet2 placeholder record; it would normally
        # be deleted because it does not have a corresponding plc record
        if ".vini" in import_auth and import_auth.endswith('vini') and \
           record_hrn.endswith("internet2"):
            continue

        found = False

        if type == 'authority':
            for site in sites:
                site_hrn = import_auth + "." + site['login_base']
                if site_hrn == record_hrn and site['site_id'] == record['pointer']:
                    found = True
                    break

        elif type == 'user':
            login_base = get_leaf(get_authority(record_hrn))
            username = get_leaf(record_hrn)
            if login_base in sites_dict:
                site = sites_dict[login_base]
                for person in persons:
                    tmp_username = person['email'].split("@")[0]
                    alt_username = person['email'].split("@")[0].replace(".", "_")
                    if username in [tmp_username, alt_username] and \
                       site['site_id'] in person['site_ids'] and \
                       person['person_id'] == record['pointer']:
                        found = True
                        break

        elif type == 'slice':
            slicename = hrn_to_pl_slicename(record_hrn)
            for slice in slices:
                if slicename == slice['name'] and \
                   slice['slice_id'] == record['pointer']:
                    found = True
                    break

        elif type == 'node':
            login_base = get_leaf(get_authority(record_hrn))
            nodename = get_leaf(record_hrn)
            if login_base in sites_dict:
                site = sites_dict[login_base]
                for node in nodes:
                    tmp_nodename = node['hostname'].split(".")[0]
                    if tmp_nodename == nodename and \
                       node['site_id'] == site['site_id'] and \
                       node['node_id'] == record['pointer']:
                        found = True
                        break

        if not found:
            record_object = existing_records[(record_hrn, type)]
            sfaImporter.delete_record(record_hrn, type)
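
    # save the current public keys so the next run can detect key changes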
    trace('Import: saving current pub keys', logger)
    save_keys(keys_filename, person_keys)


if __name__ == "__main__":
    main()