7 # Import PLC records into the SFA database. It is intended that this tool be
8 # run once to create SFA records that reflect the current state of the
11 # The import tool assumes that the existing PLC hierarchy should all be part
12 # of "planetlab.us" (see the root_auth and level1_auth variables below).
14 # Public keys are extracted from the users' SSH keys automatically and used to
15 # create GIDs. This is relatively experimental as a custom tool had to be
16 # written to perform conversion from SSH to OpenSSL format. It only supports
17 # RSA keys at this time, not DSA keys.
23 import logging.handlers
25 from sfa.util.record import *
26 from sfa.util.table import SfaTable
27 from sfa.util.namespace import *
28 from sfa.util.config import Config
29 from sfa.trust.certificate import convert_public_key, Keypair
30 from sfa.trust.trustedroot import *
31 from sfa.trust.hierarchy import *
32 from sfa.plc.api import *
33 from sfa.trust.gid import create_uuid
34 from sfa.plc.sfaImport import *
35 from sfa.util.report import trace, error
37 def process_options():
40 (options, args) = getopt.getopt(sys.argv[1:], '', [])
46 def load_keys(filename):
50 execfile(filename, tmp_dict)
51 if 'keys' in tmp_dict:
52 keys = tmp_dict['keys']
57 def save_keys(filename, keys):
58 f = open(filename, 'w')
59 f.write("keys = %s" % str(keys))
64 LOGFILE='/var/log/sfa_import_plc.log'
65 logging.basicConfig(level=logging.INFO,
66 format='%(asctime)s - %(message)s',
68 rotate_handler = logging.handlers.RotatingFileHandler(LOGFILE, maxBytes=1000000, backupCount=5)
69 logger = logging.getLogger()
70 logger.addHandler(rotate_handler)
74 if not config.SFA_REGISTRY_ENABLED:
76 root_auth = config.SFA_REGISTRY_ROOT_AUTH
77 level1_auth = config.SFA_REGISTRY_LEVEL1_AUTH
78 keys_filename = config.config_path + os.sep + 'person_keys.py'
79 sfaImporter = sfaImport(logger)
80 shell = sfaImporter.shell
81 plc_auth = sfaImporter.plc_auth
82 AuthHierarchy = sfaImporter.AuthHierarchy
83 TrustedRoots = sfaImporter.TrustedRoots
86 if not table.exists():
89 if not level1_auth or level1_auth in ['']:
93 sfaImporter.create_top_level_auth_records(root_auth)
94 import_auth = root_auth
96 if not AuthHierarchy.auth_exists(level1_auth):
97 AuthHierarchy.create_auth(level1_auth)
98 sfaImporter.create_top_level_auth_records(level1_auth)
99 import_auth = level1_auth
101 trace("Import: adding " + import_auth + " to trusted list", logger)
102 authority = AuthHierarchy.get_auth_info(import_auth)
103 TrustedRoots.add_gid(authority.get_gid_object())
105 if ".vini" in import_auth and import_auth.endswith('vini'):
106 # create a fake internet2 site first
107 i2site = {'name': 'Internet2', 'abbreviated_name': 'I2',
108 'login_base': 'internet2', 'site_id': -1}
109 sfaImporter.import_site(import_auth, i2site)
111 # create dict of all existing sfa records
112 existing_records = {}
116 results = table.find()
117 for result in results:
118 existing_records[(result['hrn'], result['type'])] = result
119 existing_hrns.append(result['hrn'])
122 sites = shell.GetSites(plc_auth, {'peer_id': None})
125 sites_dict[site['login_base']] = site
128 persons = shell.GetPersons(plc_auth, {'peer_id': None}, ['person_id', 'email', 'key_ids', 'site_ids'])
130 for person in persons:
131 persons_dict[person['person_id']] = person
132 key_ids.extend(person['key_ids'])
134 # Get all public keys
135 keys = shell.GetKeys(plc_auth, {'peer_id': None, 'key_id': key_ids})
138 keys_dict[key['key_id']] = key['key']
140 # create a dict of person keys keyed on key_id
141 old_person_keys = load_keys(keys_filename)
142 for person in persons:
144 for key_id in person['key_ids']:
145 pubkeys.append(keys_dict[key_id])
146 person_keys[person['person_id']] = pubkeys
149 nodes = shell.GetNodes(plc_auth, {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
152 nodes_dict[node['node_id']] = node
155 slices = shell.GetSlices(plc_auth, {'peer_id': None}, ['slice_id', 'name'])
158 slices_dict[slice['slice_id']] = slice
161 site_hrn = import_auth + "." + site['login_base']
162 print "Importing site: %s" % site_hrn
164 # import if hrn is not in list of existing hrns or if the hrn exists
165 # but its not a site record
166 if site_hrn not in existing_hrns or \
167 (site_hrn, 'authority') not in existing_records:
168 site_hrn = sfaImporter.import_site(import_auth, site)
170 # import node records
171 for node_id in site['node_ids']:
172 if node_id not in nodes_dict:
174 node = nodes_dict[node_id]
175 hrn = hostname_to_hrn(import_auth, site['login_base'], node['hostname'])
176 if hrn not in existing_hrns or \
177 (hrn, 'node') not in existing_records:
178 sfaImporter.import_node(site_hrn, node)
181 for slice_id in site['slice_ids']:
182 if slice_id not in slices_dict:
184 slice = slices_dict[slice_id]
185 hrn = slicename_to_hrn(import_auth, slice['name'])
186 if hrn not in existing_hrns or \
187 (hrn, 'slice') not in existing_records:
188 sfaImporter.import_slice(site_hrn, slice)
191 for person_id in site['person_ids']:
192 if person_id not in persons_dict:
194 person = persons_dict[person_id]
195 hrn = email_to_hrn(site_hrn, person['email'])
198 if person_id in old_person_keys:
199 old_keys = old_person_keys[person_id]
200 if person_id in person_keys:
201 new_keys = person_keys[person_id]
202 update_record = False
204 if key not in old_keys:
207 if hrn not in existing_hrns or \
208 (hrn, 'user') not in existing_records or update_record:
209 sfaImporter.import_person(site_hrn, person)
211 # remove stale records
212 for (record_hrn, type) in existing_records.keys():
213 record = existing_records[(record_hrn, type)]
214 # if this is the interface name dont do anything
215 if record_hrn == import_auth or record['peer_authority']:
217 # dont delete vini's internet2 placeholdder record
218 # normally this would be deleted becuase it does not have a plc record
219 if ".vini" in import_auth and import_auth.endswith('vini') and \
220 record_hrn.endswith("internet2"):
225 if type == 'authority':
227 site_hrn = import_auth + "." + site['login_base']
228 if site_hrn == record_hrn and site['site_id'] == record['pointer']:
233 login_base = get_leaf(get_authority(record_hrn))
234 username = get_leaf(record_hrn)
235 if login_base in sites_dict:
236 site = sites_dict[login_base]
237 for person in persons:
238 tmp_username = person['email'].split("@")[0]
239 alt_username = person['email'].split("@")[0].replace(".", "_")
240 if username in [tmp_username, alt_username] and \
241 site['site_id'] in person['site_ids'] and \
242 person['person_id'] == record['pointer']:
246 elif type == 'slice':
247 slicename = hrn_to_pl_slicename(record_hrn)
249 if slicename == slice['name'] and \
250 slice['slice_id'] == record['pointer']:
255 login_base = get_leaf(get_authority(record_hrn))
256 nodename = get_leaf(record_hrn)
257 if login_base in sites_dict:
258 site = sites_dict[login_base]
260 tmp_nodename = node['hostname'].split(".")[0]
261 if tmp_nodename == nodename and \
262 node['site_id'] == site['site_id'] and \
263 node['node_id'] == record['pointer']:
270 record_object = existing_records[(record_hrn, type)]
271 sfaImporter.delete_record(record_hrn, type)
274 trace('Import: saving current pub keys', logger)
275 save_keys(keys_filename, person_keys)
277 if __name__ == "__main__":