From 19d4a01ccf66af9e00914351b3eacd5fc880f988 Mon Sep 17 00:00:00 2001 From: Thierry Parmentelat Date: Sun, 18 Jun 2017 23:44:14 +0200 Subject: [PATCH] ==== RefreshPeer reviewed . disabled remote persons don't get created locally . remote nodes referenced in slices but not exposed are sanitized (ignored) from GetPeerData it remains to check if slices that span several myplc's get created as expected ==== new method DeleteAllPeerEntries() for cleaning up --- PLC/Methods/DeleteAllPeerEntries.py | 105 +++++++++ PLC/Methods/RefreshPeer.py | 331 ++++++++++++++++++---------- PLC/Nodes.py | 1 + 3 files changed, 315 insertions(+), 122 deletions(-) create mode 100644 PLC/Methods/DeleteAllPeerEntries.py diff --git a/PLC/Methods/DeleteAllPeerEntries.py b/PLC/Methods/DeleteAllPeerEntries.py new file mode 100644 index 0000000..5499ba8 --- /dev/null +++ b/PLC/Methods/DeleteAllPeerEntries.py @@ -0,0 +1,105 @@ +# +# Thierry Parmentelat - INRIA +# +# utility to clear all entries from a peer +# initially duplicated from RefreshPeer +# + +import sys + +from PLC.Logger import logger +from PLC.Faults import * +from PLC.Method import Method +from PLC.Parameter import Parameter, Mixed +from PLC.Auth import Auth + +from PLC.Peers import Peer, Peers +from PLC.Sites import Site, Sites +from PLC.Persons import Person, Persons +from PLC.KeyTypes import KeyType, KeyTypes +from PLC.Keys import Key, Keys +from PLC.BootStates import BootState, BootStates +from PLC.Nodes import Node, Nodes +from PLC.SliceInstantiations import SliceInstantiations +from PLC.Slices import Slice, Slices +from PLC.Roles import Role, Roles + +commit_mode = True + +dry_run = False +verbose = False +#dry_run = True + +########## helpers + +def message(to_print=None, verbose_only=False): + if verbose_only and not verbose: + return + logger.info(to_print) + + +def message_verbose(to_print=None, header='VERBOSE'): + message("{}> {}".format(header, to_print), verbose_only=True) + + +class DeleteAllPeerEntries(Method): + """ + This 
method is designed for situations where a federation link + is misbehaving and one wants to restart from a clean slate. + It is *not* designed for regular operations, but as a repairing + tool only. + + As the name suggests, clear all local entries that are marked as + belonging to peer peer_id - or peername + if verbose is True said entries are only printed + + Note that remote/foreign entities cannot be deleted + normally with the API + + Returns 1 if successful, faults otherwise. + """ + + roles = ['admin'] + + accepts = [ + Auth(), + Mixed(Peer.fields['peer_id'], + Peer.fields['peername']), + ] + + returns = Parameter(int, "1 if successful") + + def call(self, auth, peer_id_or_peername): + + peer = Peers(self.api, [peer_id_or_peername])[0] + peer_id = peer['peer_id'] + peername = peer['peername'] + + logger.info("DeleteAllPeerEntries on peer {} = {}" + .format(peername, peer_id)) + for singular, plural in ( + (Slice, Slices), + (Key, Keys), + (Person, Persons), + (Node, Nodes), + (Site, Sites)): + classname = singular.__name__ + objs = plural(self.api, {'peer_id': peer_id}) + print("Found {len} {classname}s from peer {peername}" + .format(len=len(objs), + classname=classname, + peername=peername)) + if dry_run: + print("dry-run mode: skipping actual deletion") + else: + print("Deleting {classname}s".format(classname=classname)) + for obj in objs: + print '.', + sys.stdout.flush() + obj.delete(commit=commit_mode) + print + + # Update peer itself and commit + peer.sync(commit=True) + + return 1 diff --git a/PLC/Methods/RefreshPeer.py b/PLC/Methods/RefreshPeer.py index be3fc24..7af8569 100644 --- a/PLC/Methods/RefreshPeer.py +++ b/PLC/Methods/RefreshPeer.py @@ -39,21 +39,23 @@ compatibility = True # debugging # for verbose output verbose = False -# set to a filename for using cached data when debugging -# WARNING: does not actually connect to the peer in this case use_cache = None # for debugging specific entries - display detailed info on selected objs focus_type 
= None # set to e.g. 'Person' # set to a list of ids (e.g. person_ids) - remote or local ids should work focus_ids = [] -# example -# use_cache="/var/log/peers/getpeerdata.pickle" -# verbose=True -# focus_type='Person' -# focus_ids=[621,1088] +# DEBUGGING SETTINGS example +# set to a filename for using cached data when debugging +# WARNING: does *not* actually connect to the peer in this case +# or more precisely, connect only if the file is not found +# i.e. the first time +# use_cache = "/var/log/peers/getpeerdata.json" +# verbose = True +# focus_type = 'Person' +# focus_ids = [621, 1088] -# helpers +########## helpers def message(to_print=None, verbose_only=False): if verbose_only and not verbose: @@ -62,7 +64,7 @@ def message(to_print=None, verbose_only=False): def message_verbose(to_print=None, header='VERBOSE'): - message("%s> %r" % (header, to_print), verbose_only=True) + message("{}> {}".format(header, to_print), verbose_only=True) # to avoid several instances running at the same time @@ -82,13 +84,13 @@ class FileLock: try: os.unlink(self.fpath) except Exception, e: - message('FileLock.lock(%s) : %s' % (self.fpath, e)) + message('FileLock.lock({}) : {}'.format(self.fpath, e)) return False try: self.fd = open(self.fpath, 'w') fcntl.flock(self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError, e: - message('FileLock.lock(%s) : %s' % (self.fpath, e)) + message('FileLock.lock({}) : {}'.format(self.fpath, e)) return False return True @@ -97,7 +99,7 @@ class FileLock: fcntl.flock(self.fd, fcntl.LOCK_UN | fcntl.LOCK_NB) self.fd.close() except IOError, e: - message('FileLock.unlock(%s) : %s' % (self.fpath, e)) + message('FileLock.unlock({}) : {}'.format(self.fpath, e)) class RefreshPeer(Method): @@ -116,32 +118,57 @@ class RefreshPeer(Method): Peer.fields['peername']), ] - returns = Parameter(int, "1 if successful") - - ignore_site_fields = ['peer_id', 'peer_site_id', 'last_updated', 'date_created', - 'address_ids', 'node_ids', 'person_ids', 'pcu_ids', 
'slice_ids'] - ignore_key_fields = ['peer_id', 'peer_key_id', 'person_id'] - ignore_person_fields = ['peer_id', 'peer_person_id', 'last_updated', 'date_created', - 'key_ids', 'slice_ids', 'person_tag_ids'] - ignore_node_fields = ['peer_id', 'peer_node_id', 'last_updated', 'last_contact', 'date_created', - 'node_tag_ids', 'interface_ids', 'slice_ids', 'nodegroup_ids', 'pcu_ids', 'ports'] - ignore_slice_fields = ['peer_id', 'peer_slice_id', 'created', - 'person_ids', 'slice_tag_ids', 'node_ids', ] + returns = Parameter(dict, "various timers") + + # the columns ignored in the comparison - it is safe to ignore: + # (*) the primary key - obviously this is not expected to match + # (*) peer_id and the transcode key, likewise + # (*) all identifiers that refer to other objects in the db + # like e.g. Person.site_ids since this is managed separately + # and does not make sense any way + # (*) we also ignore things like date_created and last_updated + # that refer to local db creation/update + ignore_site_fields = [ + 'site_id', 'peer_id', 'peer_site_id', + 'address_ids', 'node_ids', 'person_ids', 'pcu_ids', 'slice_ids', 'site_tag_ids', + 'date_created', 'last_updated', + ] + ignore_key_fields = [ + 'key_id', 'peer_id', 'peer_key_id', + 'person_id', + ] + ignore_person_fields = [ + 'person_id', 'peer_id', 'peer_person_id', + 'key_ids', 'slice_ids', 'person_tag_ids', 'role_ids', 'roles', 'site_ids', + 'date_created', 'last_updated', + ] + ignore_node_fields = [ + 'node_id', 'peer_id', 'peer_node_id', + 'node_tag_ids', 'interface_ids', 'slice_ids', 'nodegroup_ids', 'pcu_ids', 'ports', + 'date_created', 'last_updated', + # somehow those won't print in the ple db + 'last_download', 'last_contact', 'last_pcu_reboot', 'last_boot', + 'last_time_spent_offline', 'last_time_spent_online', 'last_pcu_confirmation', + ] + ignore_slice_fields = [ + 'slice_id', 'peer_id', 'peer_slice_id', + 'person_ids', 'slice_tag_ids', 'node_ids', + 'created', + ] def call(self, auth, 
peer_id_or_peername): ret_val = None peername = Peers(self.api, [peer_id_or_peername], [ 'peername'])[0]['peername'] - file_lock = FileLock("/tmp/refresh-peer-%s.lock" % peername) + file_lock = FileLock("/tmp/refresh-peer-{peername}.lock" + .format(peername=peername)) if not file_lock.lock(): raise Exception, "Another instance of RefreshPeer is running." try: ret_val = self.real_call(auth, peer_id_or_peername) except Exception, e: file_lock.unlock() - message("RefreshPeer caught exception - BEG") - import traceback - traceback.print_exc(file=log) + logger.exception("RefreshPeer caught exception - BEG") message("RefreshPeer caught exception - END") raise Exception, e file_lock.unlock() @@ -151,9 +178,10 @@ class RefreshPeer(Method): # Get peer peers = Peers(self.api, [peer_id_or_peername]) if not peers: - raise PLCInvalidArgument, "No such peer '%s'" % unicode(peer_id_or_peername) + raise PLCInvalidArgument, "No such peer '{}'".format(unicode(peer_id_or_peername)) peer = peers[0] peer_id = peer['peer_id'] + peername = peer['peername'] # Connect to peer API peer.connect() @@ -162,20 +190,52 @@ class RefreshPeer(Method): # Get peer data start = time.time() - message('RefreshPeer starting up (commit_mode=%r)' % commit_mode) + message('RefreshPeer starting up (commit_mode={})'.format(commit_mode)) if not use_cache: message('Issuing GetPeerData') peer_tables = peer.GetPeerData() else: - import pickle + import json if os.path.isfile(use_cache): message("use_cache: WARNING: using cached getpeerdata") - peer_tables = pickle.load(file(use_cache, 'rb')) + with open(use_cache) as storage: + peer_tables = json.load(storage) else: - message("use_cache: issuing getpeerdata") + message("use_cache: issuing GetPeerData") peer_tables = peer.GetPeerData() - message("use_cache: saving in cache %s", use_cache) - pickle.dump(peer_tables, file(use_cache, 'wb')) + message("use_cache: saving in cache {}".format(use_cache)) + with open(use_cache, 'w') as storage: + json.dump(peer_tables, 
storage) + + # additions in June 2017 + + # remove entries not marked as enabled + # actually the 'enabled' flag is present on 'Sites' and 'Persons' + # however we accept disabled slices as + # (*) they don't come and go too often + # (*) they may contain valid nodes, that we would then lose + # if we were to discard those sites + # so bottom line, we filter out only disabled persons + for cls in ('Persons',) : + peer_tables[cls] = [ + obj for obj in peer_tables[cls] if obj['enabled'] + ] + + # somehow we can see GetPeerData from PLC that contains references + # to nodes that are not exposed themselves + # which suggests some inconsistency on their end + # anyway, it's safer to sanitize the dataset to avoid corruption + exposed_peer_node_ids = { n['node_id'] for n in peer_tables['Nodes']} + for slice in peer_tables['Slices']: + before = len(slice['node_ids']) + slice['node_ids'] = [x for x in slice['node_ids'] if x in exposed_peer_node_ids] + after = len(slice['node_ids']) + if after != before: + message("{peername} slice {slicename} got sanitized - {diff} node entries removed out of {before}" + .format(peername=peername, slicename=slice['name'], + diff=before-after, before=before)) + + # end of additions # for smooth federation with 4.2 - ignore fields that are useless # anyway, and rewrite boot_state @@ -193,8 +253,8 @@ class RefreshPeer(Method): del slice[key] timers['transport'] = time.time() - start - peer_tables['db_time'] timers['peer_db'] = peer_tables['db_time'] - message_verbose('GetPeerData returned -> db=%d transport=%d' % - (timers['peer_db'], timers['transport'])) + message_verbose('GetPeerData returned -> db={} transport={}' + .format(timers['peer_db'], timers['transport'])) def sync(objects, peer_objects, classobj, columns): """ @@ -215,8 +275,8 @@ class RefreshPeer(Method): if classname in secondary_keys: secondary_key = secondary_keys[classname] - message_verbose('Entering sync on %s (%s)' % - (classname, primary_key)) + message_verbose('Entering 
sync on {} ({})' + .format(classname, primary_key)) synced = {} @@ -224,63 +284,63 @@ class RefreshPeer(Method): for peer_object_id, object in objects.iteritems(): if peer_object_id not in peer_objects: object.delete(commit=commit_mode) - message("%s %s %s deleted" % - (peer['peername'], classname, object[primary_key])) + message("{} {} {} deleted" + .format(peername, classname, object[primary_key])) total = len(peer_objects) count = 1 - # peer_object_id, peer_object and object are dynamically bound in the loop below... + # peer_object_id, peer_object and object are open variables in the loop below... # (local) object might be None if creating a new one def in_focus(): if classname != focus_type: return False - return peer_object_id in focus_ids or \ - (object and primary_key in object and object[ - primary_key] in focus_ids) + return (peer_object_id in focus_ids) \ + or (object and (primary_key in object) + and (object[primary_key] in focus_ids)) def message_focus(message): if in_focus(): # always show remote - message_verbose("peer_obj : %d [[%r]]" % (peer_object_id, peer_object), + message_verbose("peer_obj : {} [[{}]]".format(peer_object_id, peer_object), header='FOCUS ' + message) # show local object if a match was found if object: - message_verbose("local_obj : <<%r>>" % (object), + message_verbose("local_obj : <<{}>>".format(object), header='FOCUS ' + message) - # the function to compare a local object with its cadidate peer obj + # the function to compare a local object with its candidate peer obj # xxx probably faster when compatibility is False... def equal_fields(object, peer_object, columns): # fast version: must use __eq__() instead of == since # peer_object may be a raw dict instead of a Peer object. 
if not compatibility: - return object.__eq__(peer_object) - elif not verbose: + result = object.__eq__(peer_object) + if not result: + message_verbose("fast mode: difference found between {} and {}" + .format(object, peer_object)) + return result + else: for column in columns: - # if in_focus(): message ('FOCUS comparing column %s'%column) if object[column] != peer_object[column]: + message_verbose("difference found in column {}".format(column)) + message_verbose("our object {}".format(object[column])) + message_verbose("remote object {}".format(peer_object[column])) return False return True - else: - result = True - for column in columns: - test = object[column] == peer_object[column] - if not test: - result = False - return result # Add/update new/existing objects for peer_object_id, peer_object in peer_objects.iteritems(): peer_object_name = "" if secondary_key: - peer_object_name = "(%s)" % peer_object[secondary_key] - message_verbose('%s peer_object_id=%d %s (%d/%d)' - % (classname, peer_object_id, peer_object_name, count, total)) + peer_object_name = "({})".format(peer_object[secondary_key]) + message_verbose('{} peer_object_id={} {} ({}/{})' + .format(classname, peer_object_id, peer_object_name, count, total)) count += 1 if peer_object_id in synced: - message("Warning: %s Skipping already added %s: %r" % ( - peer['peername'], classname, peer_object)) + message("Warning: {peername} Skipping already added {classname}: {obj}" + .format(peername=peername, + classname=classname, obj=peer_object)) continue if peer_object_id in objects: @@ -316,25 +376,29 @@ class RefreshPeer(Method): action = "added" if sync: - message_verbose("syncing %s %d - commit_mode=%r" - % (classname, peer_object_id, commit_mode)) + message_verbose("syncing {classname} {id} - commit_mode={mode}" + .format(classname=classname, + id=peer_object_id, mode=commit_mode)) try: object.sync(commit=commit_mode) except PLCInvalidArgument, err: - # Skip if validation fails # XXX Log an event instead of 
printing to logfile - message("Warning: %s Skipping invalid %s %r : %r" % - (peer['peername'], classname, peer_object, err)) + # skip if validation fails + message("Warning: {peername} Skipping invalid {classname} ({err})\n{object}" + .format(peername=peername, classname=classname, + object=peer_object, err=err)) continue synced[peer_object_id] = object if action: - message("%s: (%d/%d) %s %d %s %s" - % (peer['peername'], count, total, classname, - object[primary_key], peer_object_name, action)) + message("{peername}: ({count}/{total}) {classname} {primary} {name} {action}" + .format(peername=peername, + count=count, total=total, + classname=classname, primary=object[primary_key], + name=peer_object_name, action=action)) - message_verbose("Exiting sync on %s" % classname) + message_verbose("Exiting sync on {}".format(classname)) return synced @@ -358,7 +422,7 @@ class RefreshPeer(Method): start = time.time() - message('Dealing with Sites') + message('(1) Dealing with Sites') # Compare only the columns returned by the GetPeerData() call if peer_tables['Sites']: @@ -390,7 +454,7 @@ class RefreshPeer(Method): # XXX Synchronize foreign key types # - message('Dealing with Keys') + message('(2) Dealing with Keys') key_types = KeyTypes(self.api).dict() @@ -417,8 +481,8 @@ class RefreshPeer(Method): for peer_key_id, key in keys_at_peer.items(): if key['key_type'] not in key_types: # XXX Log an event instead of printing to logfile - message("Warning: Skipping invalid %s key %r" % - (peer['peername'], key)) + message("Warning: Skipping invalid {peername} key {key}" + .format(peername=peername, key=key)) del keys_at_peer[peer_key_id] continue @@ -440,7 +504,7 @@ class RefreshPeer(Method): start = time.time() - message('Dealing with Persons') + message('(3) Dealing with Persons') # Compare only the columns returned by the GetPeerData() call if peer_tables['Persons']: @@ -495,16 +559,17 @@ class RefreshPeer(Method): # Remove stale keys from user for key_id in 
(set(old_person_key_ids) - set(person_key_ids)): person.remove_key(peer_keys[key_id], commit=commit_mode) - message("%s Key %d removed from person %s" % - (peer['peername'], key_id, person['email'])) + message("{peername} Key {key_id} removed from person {email}" + .format(peername=peername, + key_id=key_id, email=person['email'])) # Add new keys to user for key_id in (set(person_key_ids) - set(old_person_key_ids)): - message("before add_key, passing person=%r" % person) - message("before add_key, passing key=%r" % peer_keys[key_id]) + #message("before add_key, passing person={}".format(person)) + #message("before add_key, passing key={}".format(peer_keys[key_id])) person.add_key(peer_keys[key_id], commit=commit_mode) - message("%s Key %d added into person %s" % - (peer['peername'], key_id, person['email'])) + message("{} Key {} added into person {}" + .format(peername, key_id, person['email'])) timers['persons'] = time.time() - start @@ -520,7 +585,8 @@ class RefreshPeer(Method): start = time.time() - message('Dealing with Nodes (1)') + # NOTE: we do import disabled sites + message('(4) Dealing with Nodes (1)') # Compare only the columns returned by the GetPeerData() call if peer_tables['Nodes']: @@ -539,13 +605,14 @@ class RefreshPeer(Method): for peer_node_id, node in nodes_at_peer.items(): errors = [] if node['site_id'] not in peer_sites: - errors.append("invalid site %d" % node['site_id']) + errors.append("invalid (or disabled) site {}".format(node['site_id'])) if node['boot_state'] not in boot_states: - errors.append("invalid boot state %s" % node['boot_state']) + errors.append("invalid boot state {}".format(node['boot_state'])) if errors: # XXX Log an event instead of printing to logfile - message("Warning: Skipping invalid %s node %r : " % (peer['peername'], node) - + ", ".join(errors)) + message("Warning: Skipping invalid {peername} node {hostname} - {errors}" + .format(peername=peername, + hostname=node['hostname'], errors=", ".join(errors))) del 
nodes_at_peer[peer_node_id] continue else: @@ -569,7 +636,7 @@ class RefreshPeer(Method): # start = time.time() - message('Dealing with Nodes (2)') + message('(5) Dealing with Nodes (2)') # Keyed on local node_id local_nodes = Nodes(self.api).dict() @@ -598,7 +665,7 @@ class RefreshPeer(Method): start = time.time() - message('Dealing with Slices (1)') + message('(6) Dealing with Slices') # Compare only the columns returned by the GetPeerData() call if peer_tables['Slices']: @@ -617,10 +684,10 @@ class RefreshPeer(Method): for peer_slice_id, slice in slices_at_peer.items(): errors = [] if slice['site_id'] not in peer_sites: - errors.append("invalid site %d" % slice['site_id']) + errors.append("invalid site {}".format(slice['site_id'])) if slice['instantiation'] not in slice_instantiations: - errors.append("invalid instantiation %s" % - slice['instantiation']) + errors.append("invalid instantiation {}" + .format(slice['instantiation'])) if slice['creator_person_id'] not in peer_persons: # Just NULL it out slice['creator_person_id'] = None @@ -628,18 +695,19 @@ class RefreshPeer(Method): slice['creator_person_id'] = peer_persons[ slice['creator_person_id']]['person_id'] if errors: - message("Warning: Skipping invalid %s slice %r : " % (peer['peername'], slice) - + ", ".join(errors)) + message("Warning: Skipping invalid {peername} slice {slice} : {errors}" + .format(peername=peername, + slice=slice, errors=", ".join(errors))) del slices_at_peer[peer_slice_id] continue else: slice['site_id'] = peer_sites[slice['site_id']]['site_id'] - # Synchronize new set - peer_slices = sync(old_peer_slices, slices_at_peer, Slice, ignore( - columns, RefreshPeer.ignore_slice_fields)) + # Synchronize new set + peer_slices = sync(old_peer_slices, slices_at_peer, Slice, + ignore(columns, RefreshPeer.ignore_slice_fields)) - message('Dealing with Slices (2)') + message('(7) Dealing with Nodes in Slices') # transcoder : retrieve a local node_id from a peer_node_id node_transcoder = 
dict([(node['node_id'], peer_node_id) for peer_node_id, node in peer_nodes.iteritems()]) @@ -663,20 +731,25 @@ if node_id in node_transcoder and node_transcoder[node_id] in peer_nodes] # Nodes that should be part of the slice - slice_node_ids = [node_id for node_id in peer_slice[ 'node_ids'] if node_id in peer_nodes] + slice_node_ids = [node_id for node_id in peer_slice['node_ids'] + if node_id in peer_nodes] # Remove stale nodes from slice for node_id in (set(old_slice_node_ids) - set(slice_node_ids)): slice.remove_node(peer_nodes[node_id], commit=commit_mode) - message("%s node %s removed from slice %s" % ( - peer['peername'], peer_nodes[node_id]['hostname'], slice['name'])) + message("{peername} node {hostname} (id {node_id}) removed from slice {slicename} (id {slice_id})" + .format(peername=peername, + hostname=peer_nodes[node_id]['hostname'], node_id=peer_nodes[node_id]['node_id'], + slicename=slice['name'], slice_id=slice['slice_id'])) # Add new nodes to slice for node_id in (set(slice_node_ids) - set(old_slice_node_ids)): slice.add_node(peer_nodes[node_id], commit=commit_mode) - message("%s node %s added into slice %s" % ( - peer['peername'], peer_nodes[node_id]['hostname'], slice['name'])) + message("{peername} node {hostname} (id {node_id}) added into slice {slicename} (id {slice_id})" + .format(peername=peername, + hostname=peer_nodes[node_id]['hostname'], node_id=peer_nodes[node_id]['node_id'], + slicename=slice['name'], slice_id=slice['slice_id'])) + # N.B.: Local nodes that may have been added to the slice # by hand, are removed. In other words, don't do this. 
@@ -690,11 +765,11 @@ class RefreshPeer(Method): old_slice_person_ids = [] for person_id in slice['person_ids']: if not person_transcoder.has_key(person_id): - message('WARNING : person_id %d in %s not transcodable (1) - skipped' % - (person_id, slice['name'])) + message('WARNING : person_id {person_id} in {slicename} not transcodable (1) - skipped' + .format(person_id=person_id, slicename=slice['name'])) elif person_transcoder[person_id] not in peer_persons: - message('WARNING : person_id %d in %s not transcodable (2) - skipped' % - (person_id, slice['name'])) + message('WARNING : person_id {person_id} in {slicename} not transcodable (2) - skipped' + .format(person_id=person_id, slicename=slice['name'])) else: old_slice_person_ids += [person_transcoder[person_id]] @@ -706,14 +781,18 @@ class RefreshPeer(Method): for person_id in (set(old_slice_person_ids) - set(slice_person_ids)): slice.remove_person( peer_persons[person_id], commit=commit_mode) - message("%s user %s removed from slice %s" % ( - peer['peername'], peer_persons[person_id]['email'], slice['name'])) + message("{peername} user {email} removed from slice {slicename}" + .format(peername=peername, + email=peer_persons[person_id]['email'], + slicename=slice['name'])) # Add new users to slice for person_id in (set(slice_person_ids) - set(old_slice_person_ids)): slice.add_person(peer_persons[person_id], commit=commit_mode) - message("%s user %s added into slice %s" % ( - peer['peername'], peer_persons[person_id]['email'], slice['name'])) + message("{peername} user {email} added into slice {slicename}" + .format(peername=peername, + email=peer_persons[person_id]['email'], + slicename=slice['name'])) # N.B.: Local users that may have been added to the slice # by hand, are not touched. 
@@ -725,7 +804,7 @@ class RefreshPeer(Method): # start = time.time() - message('Dealing Sites X Persons relationship') + message('(8) Dealing with Persons in Sites') for peer_site_id, site in peer_sites.iteritems(): # Site as viewed by peer @@ -742,14 +821,18 @@ class RefreshPeer(Method): # Remove stale persons from site for person_id in (set(old_site_person_ids) - set(site_person_ids)): site.remove_person(peer_persons[person_id], commit=commit_mode) - message("%s person %s removed from site %s" % ( - peer['peername'], peer_persons[person_id]['email'], site['login_base'])) + message("{peername} person {email} removed from site {login_base}" + .format(peername=peername, + email=peer_persons[person_id]['email'], + login_base=site['login_base'])) # Add new persons to site for person_id in (set(site_person_ids) - set(old_site_person_ids)): site.add_person(peer_persons[person_id], commit=commit_mode) - message("%s person %s added into site %s" % ( - peer['peername'], peer_persons[person_id]['email'], site['login_base'])) + message("{peername} person {email} added into site {login_base}" + .format(peername=peername, + email=peer_persons[person_id]['email'], + login_base=site['login_base'])) timers['sites-persons'] = time.time() - start @@ -758,7 +841,7 @@ class RefreshPeer(Method): # start = time.time() - message('Dealing with Persons Roles relationship') + message('(9) Dealing with Roles for Persons') roles = Roles(self.api) roles_dict = dict([(role['role_id'], role) for role in roles]) @@ -775,14 +858,18 @@ class RefreshPeer(Method): # Remove stale roles for role_id in (set(old_person_role_ids) - set(person_role_ids)): person.remove_role(roles_dict[role_id], commit=commit_mode) - message("%s role %s removed from person %s" % ( - peer['peername'], roles_dict[role_id]['name'], person['email'])) + message("{peername} role {rolename} removed from person {email}" + .format(peername=peername, + rolename=roles_dict[role_id]['name'], + email=person['email'])) # Add new roles 
to person for role_id in (set(person_role_ids) - set(old_person_role_ids)): person.add_role(roles_dict[role_id], commit=commit_mode) - message("%s role %s added from person %s" % ( - peer['peername'], roles_dict[role_id]['name'], person['email'])) + message("{peername} role {rolename} added from person {email}" + .format(peername=peername, + rolename=roles_dict[role_id]['name'], + email=person['email'])) timers['persons-roles'] = time.time() - start diff --git a/PLC/Nodes.py b/PLC/Nodes.py index 87789ea..7e27ed4 100644 --- a/PLC/Nodes.py +++ b/PLC/Nodes.py @@ -16,6 +16,7 @@ from PLC.Table import Row, Table from PLC.NodeTypes import NodeTypes from PLC.BootStates import BootStates from PLC.Interfaces import Interface, Interfaces +from PLC.TagTypes import TagType, TagTypes def valid_hostname(hostname): # 1. Each part begins and ends with a letter or number. -- 2.43.0