2 # Thierry Parmentelat - INRIA
9 from PLC.Logger import logger
10 from PLC.Faults import *
11 from PLC.Method import Method
12 from PLC.Parameter import Parameter, Mixed
13 from PLC.Auth import Auth
15 from PLC.Peers import Peer, Peers
16 from PLC.Sites import Site, Sites
17 from PLC.Persons import Person, Persons
18 from PLC.KeyTypes import KeyType, KeyTypes
19 from PLC.Keys import Key, Keys
20 from PLC.BootStates import BootState, BootStates
21 from PLC.Nodes import Node, Nodes
22 from PLC.SliceInstantiations import SliceInstantiations
23 from PLC.Slices import Slice, Slices
24 from PLC.Roles import Role, Roles
27 # initial version was doing only one final commit
28 # * set commit_mode to False to get that behaviour
29 # * set commit_mode to True to get everything synced at once
30 # the issue with the 'one-commit-at-the-end' approach is
31 # that the db gets basically totally locked during too long
32 # causing various issues/crashes in the rest of the system
35 # turn this to False only if both ends have the same db schema
36 # compatibility mode is a bit slower but probably safer on the long run
42 # set to a filename for using cached data when debugging
43 # WARNING: does not actually connect to the peer in this case
45 # for debugging specific entries - display detailed info on selected objs
46 focus_type = None # set to e.g. 'Person'
47 # set to a list of ids (e.g. person_ids) - remote or local ids should work
50 # use_cache="/var/log/peers/getpeerdata.pickle"
53 # focus_ids=[621,1088]
# Log helper: emit `to_print`; when `verbose_only` is set, the message is
# produced only if the module-level `verbose` flag is on (the guard below).
# NOTE(review): the body is truncated in this view - only the verbosity
# guard is visible; confirm the actual emission path against the full file.
58 def message(to_print=None, verbose_only=False):
59     if verbose_only and not verbose:
def message_verbose(to_print=None, header='VERBOSE'):
    """Forward *to_print* to message() as a verbose-only line, tagged with *header*."""
    tagged = "%s> %r" % (header, to_print)
    message(tagged, verbose_only=True)
68 # to avoid several instances running at the same time
# --- FileLock interior (class header not visible in this view) ---
# Serializes RefreshPeer runs: an exclusive, non-blocking flock on a
# per-peer file under /tmp. A lock file older than `expire` seconds
# (default 2 hours) is treated as stale.
# NOTE(review): lines are missing between the fragments below (the
# `def lock`/`def unlock` headers, try/except scaffolding, self.expire
# assignment) - confirm against the full file before editing logic.
74 def __init__(self, file_path, expire=60 * 60 * 2):
76     self.fpath = file_path
# lock(): if a lock file already exists, check its age (st_ctime)
# against the expiry window, presumably to discard a stale lock.
80 if os.path.exists(self.fpath):
81 if (time.time() - os.stat(self.fpath).st_ctime) > self.expire:
# failures while handling the stale file are logged, not raised
85 message('FileLock.lock(%s) : %s' % (self.fpath, e))
# take an exclusive, non-blocking lock on a freshly opened handle;
# LOCK_NB makes a concurrent holder fail immediately instead of blocking
88 self.fd = open(self.fpath, 'w')
89 fcntl.flock(self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
91 message('FileLock.lock(%s) : %s' % (self.fpath, e))
# unlock(): release the lock; LOCK_NB is superfluous when combined with
# LOCK_UN - NOTE(review): confirm intent, harmless either way.
97 fcntl.flock(self.fd, fcntl.LOCK_UN | fcntl.LOCK_NB)
100 message('FileLock.unlock(%s) : %s' % (self.fpath, e))
103 class RefreshPeer(Method):
105 Fetches site, node, slice, person and key data from the specified peer
106 and caches it locally; also deletes stale entries.
107 Upon successful completion, returns a dict reporting various timers.
115 Mixed(Peer.fields['peer_id'],
116 Peer.fields['peername']),
119 returns = Parameter(int, "1 if successful")
# Per-table field lists excluded when comparing a locally cached object
# against the peer's copy (fed to ignore(columns, ...) before sync()):
# local/peer bookkeeping ids, timestamps, and cross-reference id lists
# that are reconciled separately rather than copied verbatim.
121 ignore_site_fields = ['peer_id', 'peer_site_id', 'last_updated', 'date_created',
122 'address_ids', 'node_ids', 'person_ids', 'pcu_ids', 'slice_ids']
123 ignore_key_fields = ['peer_id', 'peer_key_id', 'person_id']
124 ignore_person_fields = ['peer_id', 'peer_person_id', 'last_updated', 'date_created',
125 'key_ids', 'slice_ids', 'person_tag_ids']
126 ignore_node_fields = ['peer_id', 'peer_node_id', 'last_updated', 'last_contact', 'date_created',
127 'node_tag_ids', 'interface_ids', 'slice_ids', 'nodegroup_ids', 'pcu_ids', 'ports']
128 ignore_slice_fields = ['peer_id', 'peer_slice_id', 'created',
129 'person_ids', 'slice_tag_ids', 'node_ids', ]
# Entry point: wrap real_call() in a per-peer file lock so only one
# RefreshPeer runs against a given peer at a time; exceptions from the
# refresh are logged with a traceback.
# NOTE(review): the try/except (and any finally/unlock and return)
# lines are missing from this view - confirm against the full file.
131 def call(self, auth, peer_id_or_peername):
# resolve the peer name first: the lock file path is derived from it
133 peername = Peers(self.api, [peer_id_or_peername], [
134 'peername'])[0]['peername']
135 file_lock = FileLock("/tmp/refresh-peer-%s.lock" % peername)
136 if not file_lock.lock():
137 raise Exception, "Another instance of RefreshPeer is running."
# delegate the actual synchronization work
139 ret_val = self.real_call(auth, peer_id_or_peername)
# on failure: bracket the traceback in the log for easy grepping
142 message("RefreshPeer caught exception - BEG")
144 traceback.print_exc(file=log)
145 message("RefreshPeer caught exception - END")
150 def real_call(self, auth, peer_id_or_peername):
152 peers = Peers(self.api, [peer_id_or_peername])
154 raise PLCInvalidArgument, "No such peer '%s'" % unicode(peer_id_or_peername)
156 peer_id = peer['peer_id']
158 # Connect to peer API
165 message('RefreshPeer starting up (commit_mode=%r)' % commit_mode)
167 message('Issuing GetPeerData')
168 peer_tables = peer.GetPeerData()
171 if os.path.isfile(use_cache):
172 message("use_cache: WARNING: using cached getpeerdata")
173 peer_tables = pickle.load(file(use_cache, 'rb'))
175 message("use_cache: issuing getpeerdata")
176 peer_tables = peer.GetPeerData()
177 message("use_cache: saving in cache %s", use_cache)
178 pickle.dump(peer_tables, file(use_cache, 'wb'))
180 # for smooth federation with 4.2 - ignore fields that are useless
181 # anyway, and rewrite boot_state
182 boot_state_rewrite = {'dbg': 'safeboot', 'diag': 'safeboot', 'disable': 'disabled',
183 'inst': 'reinstall', 'rins': 'reinstall', 'new': 'reinstall', 'rcnf': 'reinstall'}
184 for node in peer_tables['Nodes']:
185 for key in ['nodenetwork_ids', 'dummybox_id']:
188 if node['boot_state'] in boot_state_rewrite:
189 node['boot_state'] = boot_state_rewrite[node['boot_state']]
190 for slice in peer_tables['Slices']:
191 for key in ['slice_attribute_ids']:
194 timers['transport'] = time.time() - start - peer_tables['db_time']
195 timers['peer_db'] = peer_tables['db_time']
196 message_verbose('GetPeerData returned -> db=%d transport=%d' %
197 (timers['peer_db'], timers['transport']))
199 def sync(objects, peer_objects, classobj, columns):
201 Synchronizes two dictionaries of objects. objects should
202 be a dictionary of local objects keyed on their foreign
203 identifiers. peer_objects should be a dictionary of
204 foreign objects keyed on their local (i.e., foreign to us)
205 identifiers. Returns a final dictionary of local objects
206 keyed on their foreign identifiers.
209 classname = classobj(self.api).__class__.__name__
210 primary_key = getattr(classobj, 'primary_key')
211 # display all peer objects of these types while looping
212 secondary_keys = {'Node': 'hostname', 'Slice': 'name',
213 'Person': 'email', 'Site': 'login_base'}
215 if classname in secondary_keys:
216 secondary_key = secondary_keys[classname]
218 message_verbose('Entering sync on %s (%s)' %
219 (classname, primary_key))
223 # Delete stale objects
224 for peer_object_id, object in objects.iteritems():
225 if peer_object_id not in peer_objects:
226 object.delete(commit=commit_mode)
227 message("%s %s %s deleted" %
228 (peer['peername'], classname, object[primary_key]))
230 total = len(peer_objects)
233 # peer_object_id, peer_object and object are dynamically bound in the loop below...
234 # (local) object might be None if creating a new one
236 if classname != focus_type:
238 return peer_object_id in focus_ids or \
239 (object and primary_key in object and object[
240 primary_key] in focus_ids)
242 def message_focus(message):
245 message_verbose("peer_obj : %d [[%r]]" % (peer_object_id, peer_object),
246 header='FOCUS ' + message)
247 # show local object if a match was found
249 message_verbose("local_obj : <<%r>>" % (object),
250 header='FOCUS ' + message)
252 # the function to compare a local object with its candidate peer obj
253 # xxx probably faster when compatibility is False...
254 def equal_fields(object, peer_object, columns):
255 # fast version: must use __eq__() instead of == since
256 # peer_object may be a raw dict instead of a Peer object.
257 if not compatibility:
258 return object.__eq__(peer_object)
260 for column in columns:
261 # if in_focus(): message ('FOCUS comparing column %s'%column)
262 if object[column] != peer_object[column]:
267 for column in columns:
268 test = object[column] == peer_object[column]
273 # Add/update new/existing objects
274 for peer_object_id, peer_object in peer_objects.iteritems():
275 peer_object_name = ""
277 peer_object_name = "(%s)" % peer_object[secondary_key]
278 message_verbose('%s peer_object_id=%d %s (%d/%d)'
279 % (classname, peer_object_id, peer_object_name, count, total))
281 if peer_object_id in synced:
282 message("Warning: %s Skipping already added %s: %r" % (
283 peer['peername'], classname, peer_object))
286 if peer_object_id in objects:
287 # Update existing object
288 object = objects[peer_object_id]
290 # Replace foreign identifier with existing local
291 # identifier temporarily for the purposes of
293 peer_object[primary_key] = object[primary_key]
295 if not equal_fields(object, peer_object, columns):
296 # Only update intrinsic fields
297 object.update(object.db_fields(peer_object))
298 message_focus("DIFFERENCES : updated / syncing")
302 message_focus("UNCHANGED - left intact / not syncing")
306 # Restore foreign identifier
307 peer_object[primary_key] = peer_object_id
311 object = classobj(self.api, peer_object)
312 # Replace foreign identifier with new local identifier
313 del object[primary_key]
314 message_focus("NEW -- created with clean id - syncing")
319 message_verbose("syncing %s %d - commit_mode=%r"
320 % (classname, peer_object_id, commit_mode))
322 object.sync(commit=commit_mode)
323 except PLCInvalidArgument, err:
324 # Skip if validation fails
325 # XXX Log an event instead of printing to logfile
326 message("Warning: %s Skipping invalid %s %r : %r" %
327 (peer['peername'], classname, peer_object, err))
330 synced[peer_object_id] = object
333 message("%s: (%d/%d) %s %d %s %s"
334 % (peer['peername'], count, total, classname,
335 object[primary_key], peer_object_name, action))
337 message_verbose("Exiting sync on %s" % classname)
341 # over time, we've had issues with a given column being
342 # added on one side and not on the other
343 # this helper function computes the intersection of two list of
345 def intersect(l1, l2):
347 return list(set(l1).intersection(set(l2)))
351 # some fields definitely need to be ignored
353 return list(set(l1).difference(set(l2)))
356 # Synchronize foreign sites
361 message('Dealing with Sites')
363 # Compare only the columns returned by the GetPeerData() call
364 if peer_tables['Sites']:
365 columns = peer_tables['Sites'][0].keys()
366 columns = intersect(columns, Site.fields)
370 # Keyed on foreign site_id
371 old_peer_sites = Sites(
372 self.api, {'peer_id': peer_id}, columns).dict('peer_site_id')
373 sites_at_peer = dict([(site['site_id'], site)
374 for site in peer_tables['Sites']])
376 # Synchronize new set (still keyed on foreign site_id)
377 peer_sites = sync(old_peer_sites, sites_at_peer, Site,
378 ignore(columns, RefreshPeer.ignore_site_fields))
380 for peer_site_id, site in peer_sites.iteritems():
381 # Bind any newly cached sites to peer
382 if peer_site_id not in old_peer_sites:
383 peer.add_site(site, peer_site_id, commit=commit_mode)
384 site['peer_id'] = peer_id
385 site['peer_site_id'] = peer_site_id
387 timers['site'] = time.time() - start
390 # XXX Synchronize foreign key types
393 message('Dealing with Keys')
395 key_types = KeyTypes(self.api).dict()
398 # Synchronize foreign keys
403 # Compare only the columns returned by the GetPeerData() call
404 if peer_tables['Keys']:
405 columns = peer_tables['Keys'][0].keys()
406 columns = intersect(columns, Key.fields)
410 # Keyed on foreign key_id
411 old_peer_keys = Keys(
412 self.api, {'peer_id': peer_id}, columns).dict('peer_key_id')
413 keys_at_peer = dict([(key['key_id'], key)
414 for key in peer_tables['Keys']])
416 # Fix up key_type references
417 for peer_key_id, key in keys_at_peer.items():
418 if key['key_type'] not in key_types:
419 # XXX Log an event instead of printing to logfile
420 message("Warning: Skipping invalid %s key %r" %
421 (peer['peername'], key))
422 del keys_at_peer[peer_key_id]
425 # Synchronize new set (still keyed on foreign key_id)
426 peer_keys = sync(old_peer_keys, keys_at_peer, Key,
427 ignore(columns, RefreshPeer.ignore_key_fields))
428 for peer_key_id, key in peer_keys.iteritems():
429 # Bind any newly cached keys to peer
430 if peer_key_id not in old_peer_keys:
431 peer.add_key(key, peer_key_id, commit=commit_mode)
432 key['peer_id'] = peer_id
433 key['peer_key_id'] = peer_key_id
435 timers['keys'] = time.time() - start
438 # Synchronize foreign users
443 message('Dealing with Persons')
445 # Compare only the columns returned by the GetPeerData() call
446 if peer_tables['Persons']:
447 columns = peer_tables['Persons'][0].keys()
448 columns = intersect(columns, Person.fields)
452 # Keyed on foreign person_id
453 old_peer_persons = Persons(
454 self.api, {'peer_id': peer_id}, columns).dict('peer_person_id')
456 # artificially attach the persons returned by GetPeerData to the new peer
457 # this is because validate_email needs peer_id to be correct when
458 # checking for duplicates
459 for person in peer_tables['Persons']:
460 person['peer_id'] = peer_id
461 persons_at_peer = dict([(peer_person['person_id'], peer_person)
462 for peer_person in peer_tables['Persons']])
464 # XXX Do we care about membership in foreign site(s)?
466 # Synchronize new set (still keyed on foreign person_id)
467 peer_persons = sync(old_peer_persons, persons_at_peer, Person,
468 ignore(columns, RefreshPeer.ignore_person_fields))
470 # transcoder : retrieve a local key_id from a peer_key_id
471 key_transcoder = dict([(key['key_id'], peer_key_id)
472 for peer_key_id, key in peer_keys.iteritems()])
474 for peer_person_id, person in peer_persons.iteritems():
475 # Bind any newly cached users to peer
476 if peer_person_id not in old_peer_persons:
477 peer.add_person(person, peer_person_id, commit=commit_mode)
478 person['peer_id'] = peer_id
479 person['peer_person_id'] = peer_person_id
480 person['key_ids'] = []
482 # User as viewed by peer
483 peer_person = persons_at_peer[peer_person_id]
485 # Foreign keys currently belonging to the user
486 old_person_key_ids = [key_transcoder[key_id] for key_id in person['key_ids']
487 if key_transcoder[key_id] in peer_keys]
489 # Foreign keys that should belong to the user
490 # this is basically peer_person['key_ids'], we just check it makes sense
491 # (e.g. we might have failed importing it)
492 person_key_ids = [key_id for key_id in peer_person[
493 'key_ids'] if key_id in peer_keys]
495 # Remove stale keys from user
496 for key_id in (set(old_person_key_ids) - set(person_key_ids)):
497 person.remove_key(peer_keys[key_id], commit=commit_mode)
498 message("%s Key %d removed from person %s" %
499 (peer['peername'], key_id, person['email']))
501 # Add new keys to user
502 for key_id in (set(person_key_ids) - set(old_person_key_ids)):
503 message("before add_key, passing person=%r" % person)
504 message("before add_key, passing key=%r" % peer_keys[key_id])
505 person.add_key(peer_keys[key_id], commit=commit_mode)
506 message("%s Key %d added into person %s" %
507 (peer['peername'], key_id, person['email']))
509 timers['persons'] = time.time() - start
512 # XXX Synchronize foreign boot states
515 boot_states = BootStates(self.api).dict()
518 # Synchronize foreign nodes
523 message('Dealing with Nodes (1)')
525 # Compare only the columns returned by the GetPeerData() call
526 if peer_tables['Nodes']:
527 columns = peer_tables['Nodes'][0].keys()
528 columns = intersect(columns, Node.fields)
530 columns = Node.fields
532 # Keyed on foreign node_id
533 old_peer_nodes = Nodes(
534 self.api, {'peer_id': peer_id}, columns).dict('peer_node_id')
535 nodes_at_peer = dict([(node['node_id'], node)
536 for node in peer_tables['Nodes']])
538 # Fix up site_id and boot_states references
539 for peer_node_id, node in nodes_at_peer.items():
541 if node['site_id'] not in peer_sites:
542 errors.append("invalid site %d" % node['site_id'])
543 if node['boot_state'] not in boot_states:
544 errors.append("invalid boot state %s" % node['boot_state'])
546 # XXX Log an event instead of printing to logfile
547 message("Warning: Skipping invalid %s node %r : " % (peer['peername'], node)
549 del nodes_at_peer[peer_node_id]
552 node['site_id'] = peer_sites[node['site_id']]['site_id']
554 # Synchronize new set
555 peer_nodes = sync(old_peer_nodes, nodes_at_peer, Node,
556 ignore(columns, RefreshPeer.ignore_node_fields))
558 for peer_node_id, node in peer_nodes.iteritems():
559 # Bind any newly cached foreign nodes to peer
560 if peer_node_id not in old_peer_nodes:
561 peer.add_node(node, peer_node_id, commit=commit_mode)
562 node['peer_id'] = peer_id
563 node['peer_node_id'] = peer_node_id
565 timers['nodes'] = time.time() - start
568 # Synchronize local nodes
572 message('Dealing with Nodes (2)')
574 # Keyed on local node_id
575 local_nodes = Nodes(self.api).dict()
577 for node in peer_tables['PeerNodes']:
578 # Foreign identifier for our node as maintained by peer
579 peer_node_id = node['node_id']
580 # Local identifier for our node as cached by peer
581 node_id = node['peer_node_id']
582 if node_id in local_nodes:
583 # Still a valid local node, add it to the synchronized
584 # set of local node objects keyed on foreign node_id.
585 peer_nodes[peer_node_id] = local_nodes[node_id]
587 timers['local_nodes'] = time.time() - start
590 # XXX Synchronize foreign slice instantiation states
593 slice_instantiations = SliceInstantiations(self.api).dict()
596 # Synchronize foreign slices
601 message('Dealing with Slices (1)')
603 # Compare only the columns returned by the GetPeerData() call
604 if peer_tables['Slices']:
605 columns = peer_tables['Slices'][0].keys()
606 columns = intersect(columns, Slice.fields)
610 # Keyed on foreign slice_id
611 old_peer_slices = Slices(
612 self.api, {'peer_id': peer_id}, columns).dict('peer_slice_id')
613 slices_at_peer = dict([(slice['slice_id'], slice)
614 for slice in peer_tables['Slices']])
616 # Fix up site_id, instantiation, and creator_person_id references
617 for peer_slice_id, slice in slices_at_peer.items():
619 if slice['site_id'] not in peer_sites:
620 errors.append("invalid site %d" % slice['site_id'])
621 if slice['instantiation'] not in slice_instantiations:
622 errors.append("invalid instantiation %s" %
623 slice['instantiation'])
624 if slice['creator_person_id'] not in peer_persons:
626 slice['creator_person_id'] = None
628 slice['creator_person_id'] = peer_persons[
629 slice['creator_person_id']]['person_id']
631 message("Warning: Skipping invalid %s slice %r : " % (peer['peername'], slice)
633 del slices_at_peer[peer_slice_id]
636 slice['site_id'] = peer_sites[slice['site_id']]['site_id']
638 # Synchronize new set
639 peer_slices = sync(old_peer_slices, slices_at_peer, Slice, ignore(
640 columns, RefreshPeer.ignore_slice_fields))
642 message('Dealing with Slices (2)')
643 # transcoder : retrieve a local node_id from a peer_node_id
644 node_transcoder = dict([(node['node_id'], peer_node_id)
645 for peer_node_id, node in peer_nodes.iteritems()])
646 person_transcoder = dict([(person['person_id'], peer_person_id)
647 for peer_person_id, person in peer_persons.iteritems()])
649 for peer_slice_id, slice in peer_slices.iteritems():
650 # Bind any newly cached foreign slices to peer
651 if peer_slice_id not in old_peer_slices:
652 peer.add_slice(slice, peer_slice_id, commit=commit_mode)
653 slice['peer_id'] = peer_id
654 slice['peer_slice_id'] = peer_slice_id
655 slice['node_ids'] = []
656 slice['person_ids'] = []
658 # Slice as viewed by peer
659 peer_slice = slices_at_peer[peer_slice_id]
661 # Nodes that are currently part of the slice
662 old_slice_node_ids = [node_transcoder[node_id] for node_id in slice['node_ids']
663 if node_id in node_transcoder and node_transcoder[node_id] in peer_nodes]
665 # Nodes that should be part of the slice
666 slice_node_ids = [node_id for node_id in peer_slice[
667 'node_ids'] if node_id in peer_nodes]
669 # Remove stale nodes from slice
670 for node_id in (set(old_slice_node_ids) - set(slice_node_ids)):
671 slice.remove_node(peer_nodes[node_id], commit=commit_mode)
672 message("%s node %s removed from slice %s" % (
673 peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
675 # Add new nodes to slice
676 for node_id in (set(slice_node_ids) - set(old_slice_node_ids)):
677 slice.add_node(peer_nodes[node_id], commit=commit_mode)
678 message("%s node %s added into slice %s" % (
679 peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
681 # N.B.: Local nodes that may have been added to the slice
682 # by hand, are removed. In other words, don't do this.
684 # Foreign users that are currently part of the slice
685 # old_slice_person_ids = [ person_transcoder[person_id] for person_id in slice['person_ids'] \
686 # if person_transcoder[person_id] in peer_persons]
687 # An issue occurred with a user who registered on both sites (same email)
688 # So the remote person could not get cached locally
689 # The one-line map/filter style is nicer but ineffective here
690 old_slice_person_ids = []
691 for person_id in slice['person_ids']:
692 if not person_transcoder.has_key(person_id):
693 message('WARNING : person_id %d in %s not transcodable (1) - skipped' %
694 (person_id, slice['name']))
695 elif person_transcoder[person_id] not in peer_persons:
696 message('WARNING : person_id %d in %s not transcodable (2) - skipped' %
697 (person_id, slice['name']))
699 old_slice_person_ids += [person_transcoder[person_id]]
701 # Foreign users that should be part of the slice
702 slice_person_ids = [person_id for person_id in peer_slice[
703 'person_ids'] if person_id in peer_persons]
705 # Remove stale users from slice
706 for person_id in (set(old_slice_person_ids) - set(slice_person_ids)):
708 peer_persons[person_id], commit=commit_mode)
709 message("%s user %s removed from slice %s" % (
710 peer['peername'], peer_persons[person_id]['email'], slice['name']))
712 # Add new users to slice
713 for person_id in (set(slice_person_ids) - set(old_slice_person_ids)):
714 slice.add_person(peer_persons[person_id], commit=commit_mode)
715 message("%s user %s added into slice %s" % (
716 peer['peername'], peer_persons[person_id]['email'], slice['name']))
718 # N.B.: Local users that may have been added to the slice
719 # by hand, are not touched.
721 timers['slices'] = time.time() - start
728 message('Dealing Sites X Persons relationship')
730 for peer_site_id, site in peer_sites.iteritems():
731 # Site as viewed by peer
732 peer_site = sites_at_peer[peer_site_id]
734 # Persons that are currently part of the site
735 old_site_person_ids = [person_transcoder[person_id] for person_id in site['person_ids']
736 if person_id in person_transcoder and person_transcoder[person_id] in peer_persons]
738 # Persons that should be part of the site
739 site_person_ids = [person_id for person_id in peer_site[
740 'person_ids'] if person_id in peer_persons]
742 # Remove stale persons from site
743 for person_id in (set(old_site_person_ids) - set(site_person_ids)):
744 site.remove_person(peer_persons[person_id], commit=commit_mode)
745 message("%s person %s removed from site %s" % (
746 peer['peername'], peer_persons[person_id]['email'], site['login_base']))
748 # Add new persons to site
749 for person_id in (set(site_person_ids) - set(old_site_person_ids)):
750 site.add_person(peer_persons[person_id], commit=commit_mode)
751 message("%s person %s added into site %s" % (
752 peer['peername'], peer_persons[person_id]['email'], site['login_base']))
754 timers['sites-persons'] = time.time() - start
761 message('Dealing with Persons Roles relationship')
763 roles = Roles(self.api)
764 roles_dict = dict([(role['role_id'], role) for role in roles])
765 for peer_person_id, person in peer_persons.iteritems():
766 # Person as viewed by peer
767 peer_person = persons_at_peer[peer_person_id]
769 # Roles that are currently attributed for the person
770 old_person_role_ids = [role_id for role_id in person['role_ids']]
772 # Roles that should be attributed to the person
773 person_role_ids = [role_id for role_id in peer_person['role_ids']]
776 for role_id in (set(old_person_role_ids) - set(person_role_ids)):
777 person.remove_role(roles_dict[role_id], commit=commit_mode)
778 message("%s role %s removed from person %s" % (
779 peer['peername'], roles_dict[role_id]['name'], person['email']))
781 # Add new roles to person
782 for role_id in (set(person_role_ids) - set(old_person_role_ids)):
783 person.add_role(roles_dict[role_id], commit=commit_mode)
784 message("%s role %s added from person %s" % (
785 peer['peername'], roles_dict[role_id]['name'], person['email']))
787 timers['persons-roles'] = time.time() - start
789 # Update peer itself and commit
790 peer.sync(commit=True)