2 # Thierry Parmentelat - INRIA
9 from PLC.Logger import logger
10 from PLC.Faults import *
11 from PLC.Method import Method
12 from PLC.Parameter import Parameter, Mixed
13 from PLC.Auth import Auth
15 from PLC.Peers import Peer, Peers
16 from PLC.Sites import Site, Sites
17 from PLC.Persons import Person, Persons
18 from PLC.KeyTypes import KeyType, KeyTypes
19 from PLC.Keys import Key, Keys
20 from PLC.BootStates import BootState, BootStates
21 from PLC.Nodes import Node, Nodes
22 from PLC.SliceInstantiations import SliceInstantiations
23 from PLC.Slices import Slice, Slices
24 from PLC.Roles import Role, Roles
26 #################### settings
27 # initial version was doing only one final commit
28 # * set commit_mode to False to get that behaviour
# * set commit_mode to True to get everything synced at once
30 # the issue with the 'one-commit-at-the-end' approach is
# that the db gets basically totally locked for too long
32 # causing various issues/crashes in the rest of the system
35 # turn this to False only if both ends have the same db schema
36 # compatibility mode is a bit slower but probably safer on the long run
39 #################### debugging
42 # set to a filename for using cached data when debugging
43 # WARNING: does not actually connect to the peer in this case
45 # for debugging specific entries - display detailed info on selected objs
# Debug focus: set focus_type to a classname and focus_ids to a list of
# object ids to get detailed per-object traces during the sync
# (consumed by the in_focus/message_focus helpers further down).
focus_type=None # set to e.g. 'Person'
focus_ids=[] # set to a list of ids (e.g. person_ids) - remote or local ids should work
49 #use_cache="/var/log/peers/getpeerdata.pickle"
54 #################### helpers
def message(to_print=None, verbose_only=False):
    # Emit one log line; lines flagged verbose_only are suppressed unless
    # the module-level 'verbose' flag is on.
    # NOTE(review): the actual output call is elided in this view -- only
    # the suppression guard below is visible.
    if verbose_only and not verbose:
def message_verbose(to_print=None, header='VERBOSE'):
    # Convenience wrapper: emit *to_print* as a verbose-only log line,
    # repr-formatted and tagged with *header*.
    line = "%s> %r" % (header, to_print)
    message(line, verbose_only=True)
64 #################### to avoid several instances running at the same time
def __init__(self, file_path, expire = 60 * 60 * 2):
    # file_path: path of the lock file guarding this peer's refresh.
    # expire: age in seconds after which an existing lock file is
    # considered stale (default 2 hours); self.expire is read in lock().
    # NOTE(review): one line is elided here in this view -- presumably
    # the one storing 'expire' on self.
    self.fpath = file_path
75 if os.path.exists(self.fpath):
76 if (time.time() - os.stat(self.fpath).st_ctime) > self.expire:
80 message('FileLock.lock(%s) : %s' % (self.fpath, e))
83 self.fd = open(self.fpath, 'w')
84 fcntl.flock(self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
86 message('FileLock.lock(%s) : %s' % (self.fpath, e))
92 fcntl.flock(self.fd, fcntl.LOCK_UN | fcntl.LOCK_NB)
95 message('FileLock.unlock(%s) : %s' % (self.fpath, e))
98 class RefreshPeer(Method):
100 Fetches site, node, slice, person and key data from the specified peer
101 and caches it locally; also deletes stale entries.
102 Upon successful completion, returns a dict reporting various timers.
110 Mixed(Peer.fields['peer_id'],
111 Peer.fields['peername']),
# API-declared return value: 1 on success
returns = Parameter(int, "1 if successful")

# Per-object-type fields left out of the comparison between a locally
# cached object and the peer's copy (they are fed to ignore() to build
# the column list passed to sync): local identifiers, timestamps, and
# cross-reference id lists that are maintained by the refresh logic
# itself rather than copied verbatim from the peer.
ignore_site_fields=['peer_id', 'peer_site_id','last_updated', 'date_created',
                    'address_ids', 'node_ids', 'person_ids', 'pcu_ids', 'slice_ids' ]
ignore_key_fields=['peer_id','peer_key_id', 'person_id']
ignore_person_fields=['peer_id','peer_person_id','last_updated','date_created',
                      'key_ids','slice_ids','person_tag_ids']
ignore_node_fields=['peer_id','peer_node_id','last_updated','last_contact','date_created',
                    'node_tag_ids', 'interface_ids', 'slice_ids', 'nodegroup_ids','pcu_ids','ports']
ignore_slice_fields=['peer_id','peer_slice_id','created',
                     'person_ids','slice_tag_ids','node_ids',]
def call(self, auth, peer_id_or_peername):
    """Entry point: serialize refreshes per peer via a file lock, then
    delegate the actual work to real_call.

    NOTE(review): several lines are elided in this view -- in particular
    the try/except that must wrap the real_call invocation (the 'caught
    exception' messages below clearly belong to an except branch), and
    the tail of the method (presumably unlocking and returning ret_val).
    """
    peername = Peers(self.api, [peer_id_or_peername], ['peername'])[0]['peername']
    # one RefreshPeer at a time per peer, enforced through a lock file
    file_lock = FileLock("/tmp/refresh-peer-%s.lock" % peername)
    if not file_lock.lock():
        raise Exception, "Another instance of RefreshPeer is running."
    ret_val = self.real_call(auth, peer_id_or_peername)
    # exception-reporting branch (enclosing try/except elided from view)
    message("RefreshPeer caught exception - BEG")
    traceback.print_exc(file=log)
    message("RefreshPeer caught exception - END")
145 def real_call(self, auth, peer_id_or_peername):
147 peers = Peers(self.api, [peer_id_or_peername])
149 raise PLCInvalidArgument, "No such peer '%s'" % unicode(peer_id_or_peername)
151 peer_id = peer['peer_id']
153 # Connect to peer API
160 message('RefreshPeer starting up (commit_mode=%r)'%commit_mode)
162 message('Issuing GetPeerData')
163 peer_tables = peer.GetPeerData()
166 if os.path.isfile(use_cache):
167 message("use_cache: WARNING: using cached getpeerdata")
168 peer_tables=pickle.load(file(use_cache,'rb'))
170 message("use_cache: issuing getpeerdata")
171 peer_tables = peer.GetPeerData()
172 message("use_cache: saving in cache %s",use_cache)
173 pickle.dump(peer_tables,file(use_cache,'wb'))
175 # for smooth federation with 4.2 - ignore fields that are useless anyway, and rewrite boot_state
176 boot_state_rewrite={'dbg':'safeboot','diag':'safeboot','disable':'disabled',
177 'inst':'reinstall','rins':'reinstall','new':'reinstall','rcnf':'reinstall'}
178 for node in peer_tables['Nodes']:
179 for key in ['nodenetwork_ids','dummybox_id']:
182 if node['boot_state'] in boot_state_rewrite: node['boot_state']=boot_state_rewrite[node['boot_state']]
183 for slice in peer_tables['Slices']:
184 for key in ['slice_attribute_ids']:
187 timers['transport'] = time.time() - start - peer_tables['db_time']
188 timers['peer_db'] = peer_tables['db_time']
189 message_verbose('GetPeerData returned -> db=%d transport=%d'%(timers['peer_db'],timers['transport']))
191 def sync(objects, peer_objects, classobj, columns):
193 Synchronizes two dictionaries of objects. objects should
194 be a dictionary of local objects keyed on their foreign
195 identifiers. peer_objects should be a dictionary of
196 foreign objects keyed on their local (i.e., foreign to us)
197 identifiers. Returns a final dictionary of local objects
198 keyed on their foreign identifiers.
201 classname=classobj(self.api).__class__.__name__
202 primary_key=getattr(classobj,'primary_key')
203 # display all peer objects of these types while looping
204 secondary_keys={'Node':'hostname','Slice':'name','Person':'email','Site':'login_base'}
206 if classname in secondary_keys: secondary_key=secondary_keys[classname]
208 message_verbose('Entering sync on %s (%s)'%(classname,primary_key))
212 # Delete stale objects
213 for peer_object_id, object in objects.iteritems():
214 if peer_object_id not in peer_objects:
215 object.delete(commit = commit_mode)
216 message("%s %s %s deleted"%(peer['peername'],classname, object[primary_key]))
218 total = len(peer_objects)
221 # peer_object_id, peer_object and object are dynamically bound in the loop below...
222 # (local) object might be None if creating a new one
224 if classname != focus_type: return False
225 return peer_object_id in focus_ids or \
226 (object and primary_key in object and object[primary_key] in focus_ids)
228 def message_focus(message):
231 message_verbose("peer_obj : %d [[%r]]"%(peer_object_id,peer_object),
232 header='FOCUS '+message)
233 # show local object if a match was found
234 if object: message_verbose("local_obj : <<%r>>"%(object),
235 header='FOCUS '+message);
# the function to compare a local object with its candidate peer obj
239 # xxx probably faster when compatibility is False...
240 def equal_fields (object, peer_object, columns):
241 # fast version: must use __eq__() instead of == since
242 # peer_object may be a raw dict instead of a Peer object.
243 if not compatibility: return object.__eq__(peer_object)
245 for column in columns:
246 # if in_focus(): message ('FOCUS comparing column %s'%column)
247 if object[column] != peer_object[column]: return False
251 for column in columns:
252 test= object[column] == peer_object[column]
253 if not test: result=False
256 # Add/update new/existing objects
257 for peer_object_id, peer_object in peer_objects.iteritems():
259 if secondary_key: peer_object_name="(%s)"%peer_object[secondary_key]
260 message_verbose('%s peer_object_id=%d %s (%d/%d)'
261 %(classname,peer_object_id,peer_object_name,count,total))
263 if peer_object_id in synced:
264 message("Warning: %s Skipping already added %s: %r"%(
265 peer['peername'], classname, peer_object))
268 if peer_object_id in objects:
269 # Update existing object
270 object = objects[peer_object_id]
272 # Replace foreign identifier with existing local
273 # identifier temporarily for the purposes of
275 peer_object[primary_key] = object[primary_key]
277 if not equal_fields(object,peer_object, columns):
278 # Only update intrinsic fields
279 object.update(object.db_fields(peer_object))
280 message_focus("DIFFERENCES : updated / syncing")
284 message_focus("UNCHANGED - left intact / not syncing")
288 # Restore foreign identifier
289 peer_object[primary_key] = peer_object_id
293 object = classobj(self.api, peer_object)
294 # Replace foreign identifier with new local identifier
295 del object[primary_key]
296 message_focus("NEW -- created with clean id - syncing")
301 message_verbose("syncing %s %d - commit_mode=%r"
302 %(classname,peer_object_id,commit_mode))
304 object.sync(commit = commit_mode)
305 except PLCInvalidArgument, err:
306 # Skip if validation fails
307 # XXX Log an event instead of printing to logfile
308 message("Warning: %s Skipping invalid %s %r : %r"%
309 (peer['peername'], classname, peer_object, err))
312 synced[peer_object_id] = object
315 message("%s: (%d/%d) %s %d %s %s"
316 %(peer['peername'], count,total, classname,
317 object[primary_key], peer_object_name, action))
319 message_verbose("Exiting sync on %s"%classname)
323 ### over time, we've had issues with a given column being
324 ### added on one side and not on the other
325 ### this helper function computes the intersection of two list of fields/columns
326 def intersect (l1,l2):
327 if compatibility: return list (set(l1).intersection(set(l2)))
330 # some fields definitely need to be ignored
332 return list (set(l1).difference(set(l2)))
335 # Synchronize foreign sites
340 message('Dealing with Sites')
342 # Compare only the columns returned by the GetPeerData() call
343 if peer_tables['Sites']:
344 columns = peer_tables['Sites'][0].keys()
345 columns = intersect (columns, Site.fields)
349 # Keyed on foreign site_id
350 old_peer_sites = Sites(self.api, {'peer_id': peer_id}, columns).dict('peer_site_id')
351 sites_at_peer = dict([(site['site_id'], site) for site in peer_tables['Sites']])
353 # Synchronize new set (still keyed on foreign site_id)
354 peer_sites = sync(old_peer_sites, sites_at_peer, Site,
355 ignore(columns, RefreshPeer.ignore_site_fields))
357 for peer_site_id, site in peer_sites.iteritems():
358 # Bind any newly cached sites to peer
359 if peer_site_id not in old_peer_sites:
360 peer.add_site(site, peer_site_id, commit = commit_mode)
361 site['peer_id'] = peer_id
362 site['peer_site_id'] = peer_site_id
364 timers['site'] = time.time() - start
367 # XXX Synchronize foreign key types
370 message('Dealing with Keys')
372 key_types = KeyTypes(self.api).dict()
375 # Synchronize foreign keys
380 # Compare only the columns returned by the GetPeerData() call
381 if peer_tables['Keys']:
382 columns = peer_tables['Keys'][0].keys()
383 columns = intersect (columns, Key.fields)
387 # Keyed on foreign key_id
388 old_peer_keys = Keys(self.api, {'peer_id': peer_id}, columns).dict('peer_key_id')
389 keys_at_peer = dict([(key['key_id'], key) for key in peer_tables['Keys']])
391 # Fix up key_type references
392 for peer_key_id, key in keys_at_peer.items():
393 if key['key_type'] not in key_types:
394 # XXX Log an event instead of printing to logfile
395 message("Warning: Skipping invalid %s key %r" % ( peer['peername'], key))
396 del keys_at_peer[peer_key_id]
399 # Synchronize new set (still keyed on foreign key_id)
400 peer_keys = sync(old_peer_keys, keys_at_peer, Key,
401 ignore(columns, RefreshPeer.ignore_key_fields))
402 for peer_key_id, key in peer_keys.iteritems():
403 # Bind any newly cached keys to peer
404 if peer_key_id not in old_peer_keys:
405 peer.add_key(key, peer_key_id, commit = commit_mode)
406 key['peer_id'] = peer_id
407 key['peer_key_id'] = peer_key_id
409 timers['keys'] = time.time() - start
412 # Synchronize foreign users
417 message('Dealing with Persons')
419 # Compare only the columns returned by the GetPeerData() call
420 if peer_tables['Persons']:
421 columns = peer_tables['Persons'][0].keys()
422 columns = intersect (columns, Person.fields)
426 # Keyed on foreign person_id
427 old_peer_persons = Persons(self.api, {'peer_id': peer_id}, columns).dict('peer_person_id')
429 # artificially attach the persons returned by GetPeerData to the new peer
430 # this is because validate_email needs peer_id to be correct when checking for duplicates
431 for person in peer_tables['Persons']:
432 person['peer_id']=peer_id
433 persons_at_peer = dict([(peer_person['person_id'], peer_person) \
434 for peer_person in peer_tables['Persons']])
436 # XXX Do we care about membership in foreign site(s)?
438 # Synchronize new set (still keyed on foreign person_id)
439 peer_persons = sync(old_peer_persons, persons_at_peer, Person,
440 ignore(columns, RefreshPeer.ignore_person_fields))
442 # transcoder : retrieve a local key_id from a peer_key_id
443 key_transcoder = dict ( [ (key['key_id'],peer_key_id) \
444 for peer_key_id,key in peer_keys.iteritems()])
446 for peer_person_id, person in peer_persons.iteritems():
447 # Bind any newly cached users to peer
448 if peer_person_id not in old_peer_persons:
449 peer.add_person(person, peer_person_id, commit = commit_mode)
450 person['peer_id'] = peer_id
451 person['peer_person_id'] = peer_person_id
452 person['key_ids'] = []
455 # User as viewed by peer
456 peer_person = persons_at_peer[peer_person_id]
458 # Foreign keys currently belonging to the user
459 old_person_key_ids = [key_transcoder[key_id] for key_id in person['key_ids'] \
460 if key_transcoder[key_id] in peer_keys]
462 # Foreign keys that should belong to the user
463 # this is basically peer_person['key_ids'], we just check it makes sense
464 # (e.g. we might have failed importing it)
465 person_key_ids = [ key_id for key_id in peer_person['key_ids'] if key_id in peer_keys]
467 # Remove stale keys from user
468 for key_id in (set(old_person_key_ids) - set(person_key_ids)):
469 person.remove_key(peer_keys[key_id], commit = commit_mode)
470 message ("%s Key %d removed from person %s"%(peer['peername'], key_id, person['email']))
472 # Add new keys to user
473 for key_id in (set(person_key_ids) - set(old_person_key_ids)):
474 message ("before add_key, passing person=%r"%person)
475 message ("before add_key, passing key=%r"%peer_keys[key_id])
476 person.add_key(peer_keys[key_id], commit = commit_mode)
477 message ("%s Key %d added into person %s"%(peer['peername'],key_id, person['email']))
479 timers['persons'] = time.time() - start
482 # XXX Synchronize foreign boot states
485 boot_states = BootStates(self.api).dict()
488 # Synchronize foreign nodes
493 message('Dealing with Nodes (1)')
495 # Compare only the columns returned by the GetPeerData() call
496 if peer_tables['Nodes']:
497 columns = peer_tables['Nodes'][0].keys()
498 columns = intersect (columns, Node.fields)
500 columns = Node.fields
502 # Keyed on foreign node_id
503 old_peer_nodes = Nodes(self.api, {'peer_id': peer_id}, columns).dict('peer_node_id')
504 nodes_at_peer = dict([(node['node_id'], node) \
505 for node in peer_tables['Nodes']])
507 # Fix up site_id and boot_states references
508 for peer_node_id, node in nodes_at_peer.items():
510 if node['site_id'] not in peer_sites:
511 errors.append("invalid site %d" % node['site_id'])
512 if node['boot_state'] not in boot_states:
513 errors.append("invalid boot state %s" % node['boot_state'])
515 # XXX Log an event instead of printing to logfile
516 message ("Warning: Skipping invalid %s node %r : " % (peer['peername'], node)\
518 del nodes_at_peer[peer_node_id]
521 node['site_id'] = peer_sites[node['site_id']]['site_id']
523 # Synchronize new set
524 peer_nodes = sync(old_peer_nodes, nodes_at_peer, Node, ignore (columns, RefreshPeer.ignore_node_fields))
526 for peer_node_id, node in peer_nodes.iteritems():
527 # Bind any newly cached foreign nodes to peer
528 if peer_node_id not in old_peer_nodes:
529 peer.add_node(node, peer_node_id, commit = commit_mode)
530 node['peer_id'] = peer_id
531 node['peer_node_id'] = peer_node_id
533 timers['nodes'] = time.time() - start
536 # Synchronize local nodes
540 message('Dealing with Nodes (2)')
542 # Keyed on local node_id
543 local_nodes = Nodes(self.api).dict()
545 for node in peer_tables['PeerNodes']:
546 # Foreign identifier for our node as maintained by peer
547 peer_node_id = node['node_id']
548 # Local identifier for our node as cached by peer
549 node_id = node['peer_node_id']
550 if node_id in local_nodes:
551 # Still a valid local node, add it to the synchronized
552 # set of local node objects keyed on foreign node_id.
553 peer_nodes[peer_node_id] = local_nodes[node_id]
555 timers['local_nodes'] = time.time() - start
558 # XXX Synchronize foreign slice instantiation states
561 slice_instantiations = SliceInstantiations(self.api).dict()
564 # Synchronize foreign slices
569 message('Dealing with Slices (1)')
571 # Compare only the columns returned by the GetPeerData() call
572 if peer_tables['Slices']:
573 columns = peer_tables['Slices'][0].keys()
574 columns = intersect (columns, Slice.fields)
578 # Keyed on foreign slice_id
579 old_peer_slices = Slices(self.api, {'peer_id': peer_id}, columns).dict('peer_slice_id')
580 slices_at_peer = dict([(slice['slice_id'], slice) \
581 for slice in peer_tables['Slices']])
583 # Fix up site_id, instantiation, and creator_person_id references
584 for peer_slice_id, slice in slices_at_peer.items():
586 if slice['site_id'] not in peer_sites:
587 errors.append("invalid site %d" % slice['site_id'])
588 if slice['instantiation'] not in slice_instantiations:
589 errors.append("invalid instantiation %s" % slice['instantiation'])
590 if slice['creator_person_id'] not in peer_persons:
592 slice['creator_person_id'] = None
594 slice['creator_person_id'] = peer_persons[slice['creator_person_id']]['person_id']
596 message("Warning: Skipping invalid %s slice %r : " % (peer['peername'], slice) \
598 del slices_at_peer[peer_slice_id]
601 slice['site_id'] = peer_sites[slice['site_id']]['site_id']
603 # Synchronize new set
604 peer_slices = sync(old_peer_slices, slices_at_peer, Slice, ignore (columns, RefreshPeer.ignore_slice_fields))
606 message('Dealing with Slices (2)')
607 # transcoder : retrieve a local node_id from a peer_node_id
608 node_transcoder = dict ( [ (node['node_id'],peer_node_id) \
609 for peer_node_id,node in peer_nodes.iteritems()])
610 person_transcoder = dict ( [ (person['person_id'],peer_person_id) \
611 for peer_person_id,person in peer_persons.iteritems()])
613 for peer_slice_id, slice in peer_slices.iteritems():
614 # Bind any newly cached foreign slices to peer
615 if peer_slice_id not in old_peer_slices:
616 peer.add_slice(slice, peer_slice_id, commit = commit_mode)
617 slice['peer_id'] = peer_id
618 slice['peer_slice_id'] = peer_slice_id
619 slice['node_ids'] = []
620 slice['person_ids'] = []
622 # Slice as viewed by peer
623 peer_slice = slices_at_peer[peer_slice_id]
625 # Nodes that are currently part of the slice
626 old_slice_node_ids = [ node_transcoder[node_id] for node_id in slice['node_ids'] \
627 if node_id in node_transcoder and node_transcoder[node_id] in peer_nodes]
629 # Nodes that should be part of the slice
630 slice_node_ids = [ node_id for node_id in peer_slice['node_ids'] if node_id in peer_nodes]
632 # Remove stale nodes from slice
633 for node_id in (set(old_slice_node_ids) - set(slice_node_ids)):
634 slice.remove_node(peer_nodes[node_id], commit = commit_mode)
635 message ("%s node %s removed from slice %s"%(peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
637 # Add new nodes to slice
638 for node_id in (set(slice_node_ids) - set(old_slice_node_ids)):
639 slice.add_node(peer_nodes[node_id], commit = commit_mode)
640 message ("%s node %s added into slice %s"%(peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
642 # N.B.: Local nodes that may have been added to the slice
643 # by hand, are removed. In other words, don't do this.
645 # Foreign users that are currently part of the slice
646 #old_slice_person_ids = [ person_transcoder[person_id] for person_id in slice['person_ids'] \
647 # if person_transcoder[person_id] in peer_persons]
648 # An issue occurred with a user who registered on both sites (same email)
649 # So the remote person could not get cached locally
650 # The one-line map/filter style is nicer but ineffective here
651 old_slice_person_ids = []
652 for person_id in slice['person_ids']:
653 if not person_transcoder.has_key(person_id):
654 message ('WARNING : person_id %d in %s not transcodable (1) - skipped'%(person_id,slice['name']))
655 elif person_transcoder[person_id] not in peer_persons:
656 message('WARNING : person_id %d in %s not transcodable (2) - skipped'%(person_id,slice['name']))
658 old_slice_person_ids += [person_transcoder[person_id]]
660 # Foreign users that should be part of the slice
661 slice_person_ids = [ person_id for person_id in peer_slice['person_ids'] if person_id in peer_persons ]
663 # Remove stale users from slice
664 for person_id in (set(old_slice_person_ids) - set(slice_person_ids)):
665 slice.remove_person(peer_persons[person_id], commit = commit_mode)
666 message ("%s user %s removed from slice %s"%(peer['peername'],peer_persons[person_id]['email'], slice['name']))
668 # Add new users to slice
669 for person_id in (set(slice_person_ids) - set(old_slice_person_ids)):
670 slice.add_person(peer_persons[person_id], commit = commit_mode)
671 message ("%s user %s added into slice %s"%(peer['peername'],peer_persons[person_id]['email'], slice['name']))
673 # N.B.: Local users that may have been added to the slice
674 # by hand, are not touched.
676 timers['slices'] = time.time() - start
684 message('Dealing Sites X Persons relationship')
686 for peer_site_id, site in peer_sites.iteritems():
687 # Site as viewed by peer
688 peer_site = sites_at_peer[peer_site_id]
690 # Persons that are currently part of the site
691 old_site_person_ids = [ person_transcoder[person_id] for person_id in site['person_ids'] \
692 if person_id in person_transcoder and person_transcoder[person_id] in peer_persons]
# Persons that should be part of the site
695 site_person_ids = [ person_id for person_id in peer_site['person_ids'] if person_id in peer_persons]
697 # Remove stale persons from site
698 for person_id in (set(old_site_person_ids) - set(site_person_ids)):
699 site.remove_person(peer_persons[person_id], commit = commit_mode)
700 message ("%s person %s removed from site %s"%(peer['peername'], peer_persons[person_id]['email'], site['login_base']))
702 # Add new persons to site
703 for person_id in (set(site_person_ids) - set(old_site_person_ids)):
704 site.add_person(peer_persons[person_id], commit = commit_mode)
705 message ("%s person %s added into site %s"%(peer['peername'], peer_persons[person_id]['email'], site['login_base']))
707 timers['sites-persons'] = time.time() - start
715 message('Dealing with Persons Roles relationship')
717 roles = Roles(self.api)
718 roles_dict = dict([(role['role_id'], role) for role in roles])
719 for peer_person_id, person in peer_persons.iteritems():
720 # Person as viewed by peer
721 peer_person = persons_at_peer[peer_person_id]
723 # Roles that are currently attributed for the person
724 old_person_role_ids = [ role_id for role_id in person['role_ids'] ]
726 # Roles that should be attributed to the person
727 person_role_ids = [ role_id for role_id in peer_person['role_ids'] ]
730 for role_id in (set(old_person_role_ids) - set(person_role_ids)):
731 person.remove_role(roles_dict[role_id], commit = commit_mode)
732 message ("%s role %s removed from person %s"%(peer['peername'], roles_dict[role_id]['name'], person['email']))
734 # Add new roles to person
735 for role_id in (set(person_role_ids) - set(old_person_role_ids)):
736 person.add_role(roles_dict[role_id], commit = commit_mode)
737 message ("%s role %s added from person %s"%(peer['peername'], roles_dict[role_id]['name'], person['email']))
739 timers['persons-roles'] = time.time() - start
741 # Update peer itself and commit
742 peer.sync(commit = True)