X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=PLC%2FMethods%2FRefreshPeer.py;h=0eb36e6bd7fe279e24e9b20c5c167b549f72c986;hb=e70e20fdbececafef842ec7b330fd48db42e614e;hp=6c0444f26d9c85f550b52aae992d11944064da07;hpb=32a7859efc899b826dc5d7a7cde4f7e0606e09ee;p=plcapi.git

diff --git a/PLC/Methods/RefreshPeer.py b/PLC/Methods/RefreshPeer.py
index 6c0444f..0eb36e6 100644
--- a/PLC/Methods/RefreshPeer.py
+++ b/PLC/Methods/RefreshPeer.py
@@ -1,11 +1,12 @@
 #
 # Thierry Parmentelat - INRIA
-#
-# $Id: RefreshPeer.py,v 1.23 2007/03/23 19:05:16 thierry Exp $
-
+#
+import os
+import sys
+import fcntl
 import time
 
-from PLC.Debug import log
+from PLC.Logger import logger
 from PLC.Faults import *
 from PLC.Method import Method
 from PLC.Parameter import Parameter, Mixed
@@ -20,8 +21,79 @@ from PLC.BootStates import BootState, BootStates
 from PLC.Nodes import Node, Nodes
 from PLC.SliceInstantiations import SliceInstantiations
 from PLC.Slices import Slice, Slices
-
+from PLC.Roles import Role, Roles
+
+#################### settings
+# initial version was doing only one final commit
+# * set commit_mode to False to get that behaviour
+# * set commit_mode to True to commit each change as it is made
+# the issue with the 'one-commit-at-the-end' approach is
+# that the db basically stays locked for too long,
+# causing various issues/crashes in the rest of the system
+commit_mode=True
+
+# turn this to False only if both ends have the same db schema
+# compatibility mode is a bit slower but probably safer in the long run
+compatibility=True
+
+#################### debugging
+# for verbose output
+verbose=False
+# set to a filename for using cached data when debugging
+# WARNING: does not actually connect to the peer in this case
+use_cache=None
+# for debugging specific entries - display detailed info on selected objs
+focus_type=None # set to e.g. 'Person'
+focus_ids=[] # set to a list of ids (e.g. 
person_ids) - remote or local ids should work +#### example +#use_cache="/var/log/peers/getpeerdata.pickle" +#verbose=True +#focus_type='Person' +#focus_ids=[621,1088] + +#################### helpers +def message(to_print=None, verbose_only=False): + if verbose_only and not verbose: + return + logger.info(to_print) + +def message_verbose(to_print=None, header='VERBOSE'): + message("%s> %r"%(header, to_print), verbose_only=True) + + +#################### to avoid several instances running at the same time +class FileLock: + """ + Lock/Unlock file + """ + def __init__(self, file_path, expire = 60 * 60 * 2): + self.expire = expire + self.fpath = file_path + self.fd = None + + def lock(self): + if os.path.exists(self.fpath): + if (time.time() - os.stat(self.fpath).st_ctime) > self.expire: + try: + os.unlink(self.fpath) + except Exception, e: + message('FileLock.lock(%s) : %s' % (self.fpath, e)) + return False + try: + self.fd = open(self.fpath, 'w') + fcntl.flock(self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB) + except IOError, e: + message('FileLock.lock(%s) : %s' % (self.fpath, e)) + return False + return True + + def unlock(self): + try: + fcntl.flock(self.fd, fcntl.LOCK_UN | fcntl.LOCK_NB) + self.fd.close() + except IOError, e: + message('FileLock.unlock(%s) : %s' % (self.fpath, e)) + class RefreshPeer(Method): """ @@ -41,29 +113,82 @@ class RefreshPeer(Method): returns = Parameter(int, "1 if successful") + ignore_site_fields=['peer_id', 'peer_site_id','last_updated', 'date_created', + 'address_ids', 'node_ids', 'person_ids', 'pcu_ids', 'slice_ids' ] + ignore_key_fields=['peer_id','peer_key_id', 'person_id'] + ignore_person_fields=['peer_id','peer_person_id','last_updated','date_created', + 'key_ids','slice_ids','person_tag_ids'] + ignore_node_fields=['peer_id','peer_node_id','last_updated','last_contact','date_created', + 'node_tag_ids', 'interface_ids', 'slice_ids', 'nodegroup_ids','pcu_ids','ports'] + ignore_slice_fields=['peer_id','peer_slice_id','created', + 'person_ids','slice_tag_ids','node_ids',] + def call(self, auth, peer_id_or_peername): + ret_val = None + peername = Peers(self.api, [peer_id_or_peername], ['peername'])[0]['peername'] + file_lock = FileLock("/tmp/refresh-peer-%s.lock" % peername) + if not file_lock.lock(): + raise Exception, "Another instance of RefreshPeer is running." 
+        try:
+            ret_val = self.real_call(auth, peer_id_or_peername)
+        except Exception, e:
+            file_lock.unlock()
+            message("RefreshPeer caught exception - BEG")
+            import traceback
+            message(traceback.format_exc())
+            message("RefreshPeer caught exception - END")
+            raise Exception, e
+        file_lock.unlock()
+        return ret_val
+
+
+    def real_call(self, auth, peer_id_or_peername):
         # Get peer
-        peers = Peers(self.api, [peer_id_or_peername])
+        peers = Peers(self.api, [peer_id_or_peername])
         if not peers:
             raise PLCInvalidArgument, "No such peer '%s'" % unicode(peer_id_or_peername)
         peer = peers[0]
         peer_id = peer['peer_id']
 
-        # Connect to peer API
+        # Connect to peer API
         peer.connect()
 
         timers = {}
 
         # Get peer data
         start = time.time()
-        print >>log, 'Issuing GetPeerData'
-        peer_tables = peer.GetPeerData()
+        message('RefreshPeer starting up (commit_mode=%r)'%commit_mode)
+        if not use_cache:
+            message('Issuing GetPeerData')
+            peer_tables = peer.GetPeerData()
+        else:
+            import pickle
+            if os.path.isfile(use_cache):
+                message("use_cache: WARNING: using cached getpeerdata")
+                peer_tables=pickle.load(file(use_cache,'rb'))
+            else:
+                message("use_cache: issuing getpeerdata")
+                peer_tables = peer.GetPeerData()
+                message("use_cache: saving in cache %s"%use_cache)
+                pickle.dump(peer_tables,file(use_cache,'wb'))
+
+        # for smooth federation with 4.2 - ignore fields that are useless anyway, and rewrite boot_state
+        boot_state_rewrite={'dbg':'safeboot','diag':'safeboot','disable':'disabled',
+                            'inst':'reinstall','rins':'reinstall','new':'reinstall','rcnf':'reinstall'}
+        for node in peer_tables['Nodes']:
+            for key in ['nodenetwork_ids','dummybox_id']:
+                if key in node:
+                    del node[key]
+            if node['boot_state'] in boot_state_rewrite: node['boot_state']=boot_state_rewrite[node['boot_state']]
+        for slice in peer_tables['Slices']:
+            for key in ['slice_attribute_ids']:
+                if key in slice:
+                    del slice[key]
         timers['transport'] = time.time() - start - peer_tables['db_time']
         timers['peer_db'] = peer_tables['db_time']
-        if verbose:
-            print >>log, 'GetPeerData returned -> db=%d transport=%d'%(timers['peer_db'],timers['transport'])
+        message_verbose('GetPeerData returned -> db=%d transport=%d'%(timers['peer_db'],timers['transport']))
 
-        def sync(objects, peer_objects, classobj):
+        def sync(objects, peer_objects, classobj, columns):
             """
             Synchronizes two dictionaries of objects. objects should
             be a dictionary of local objects keyed on their foreign
@@ -73,19 +198,73 @@ class RefreshPeer(Method):
             keyed on their foreign identifiers.
             """
 
-            if verbose:
-                print >>log, 'Entering sync on',classobj(self.api).__class__.__name__
+            classname=classobj(self.api).__class__.__name__
+            primary_key=getattr(classobj,'primary_key')
+            # display all peer objects of these types while looping
+            secondary_keys={'Node':'hostname','Slice':'name','Person':'email','Site':'login_base'}
+            secondary_key=None
+            if classname in secondary_keys: secondary_key=secondary_keys[classname]
+
+            message_verbose('Entering sync on %s (%s)'%(classname,primary_key))
 
             synced = {}
 
             # Delete stale objects
             for peer_object_id, object in objects.iteritems():
                 if peer_object_id not in peer_objects:
-                    object.delete(commit = False)
-                    print >> log, peer['peername'],classobj(self.api).__class__.__name__, object[object.primary_key],"deleted"
+                    object.delete(commit = commit_mode)
+                    message("%s %s %s deleted"%(peer['peername'],classname, object[primary_key]))
+
+            total = len(peer_objects)
+            count=1
+
+            # peer_object_id, peer_object and object are dynamically bound in the loop below...
+            # (local) object might be None if creating a new one
+            def in_focus():
+                if classname != focus_type: return False
+                return peer_object_id in focus_ids or \
+                    (object and primary_key in object and object[primary_key] in focus_ids)
+
+            def message_focus(message):
+                if in_focus():
+                    # always show remote
+                    message_verbose("peer_obj : %d [[%r]]"%(peer_object_id,peer_object),
+                                    header='FOCUS '+message)
+                    # show local object if a match was found
+                    if object: message_verbose("local_obj : <<%r>>"%(object),
+                                               header='FOCUS '+message)
+
+
+            # the function to compare a local object with its candidate peer obj
+            # xxx probably faster when compatibility is False...
+            def equal_fields (object, peer_object, columns):
+                # fast version: must use __eq__() instead of == since
+                # peer_object may be a raw dict instead of a Peer object.
+                if not compatibility: return object.__eq__(peer_object)
+                elif not verbose:
+                    for column in columns:
+#                        if in_focus(): message ('FOCUS comparing column %s'%column)
+                        if object[column] != peer_object[column]: return False
+                    return True
+                else:
+                    result=True
+                    for column in columns:
+                        test= object[column] == peer_object[column]
+                        if not test: result=False
+                    return result
 
             # Add/update new/existing objects
             for peer_object_id, peer_object in peer_objects.iteritems():
+                peer_object_name=""
+                if secondary_key: peer_object_name="(%s)"%peer_object[secondary_key]
+                message_verbose('%s peer_object_id=%d %s (%d/%d)'
+                                %(classname,peer_object_id,peer_object_name,count,total))
+                count += 1
+                if peer_object_id in synced:
+                    message("Warning: %s Skipping already added %s: %r"%(
+                            peer['peername'], classname, peer_object))
+                    continue
+
                 if peer_object_id in objects:
                     # Update existing object
                     object = objects[peer_object_id]
 
                     # Replace foreign identifier with existing local
                     # identifier temporarily for the purposes of
                     # comparison.
-                    peer_object[object.primary_key] = object[object.primary_key]
+                    peer_object[primary_key] = object[primary_key]
 
-                    # Must use __eq__() instead of == since
-                    # peer_object may be a raw dict instead of a Peer
-                    # object. 
-                    if not object.__eq__(peer_object):
+                    if not equal_fields(object,peer_object, columns):
                         # Only update intrinsic fields
                         object.update(object.db_fields(peer_object))
+                        message_focus("DIFFERENCES : updated / syncing")
                         sync = True
-                        dbg = "changed"
+                        action = "changed"
                     else:
+                        message_focus("UNCHANGED - left intact / not syncing")
                         sync = False
-                        dbg = None
+                        action = None
 
                     # Restore foreign identifier
-                    peer_object[object.primary_key] = peer_object_id
+                    peer_object[primary_key] = peer_object_id
                 else:
+                    object=None
                     # Add new object
                     object = classobj(self.api, peer_object)
 
                     # Replace foreign identifier with new local identifier
-                    del object[object.primary_key]
+                    del object[primary_key]
+                    message_focus("NEW -- created with clean id - syncing")
                     sync = True
-                    dbg = "added"
+                    action = "added"
 
                 if sync:
+                    message_verbose("syncing %s %d - commit_mode=%r"
+                                    %(classname,peer_object_id,commit_mode))
                     try:
-                        object.sync(commit = False)
+                        object.sync(commit = commit_mode)
                     except PLCInvalidArgument, err:
                         # Skip if validation fails
                         # XXX Log an event instead of printing to logfile
-                        print >> log, "Warning: Skipping invalid", \
-                              peer['peername'], object.__class__.__name__, \
-                              ":", peer_object, ":", err
+                        message("Warning: %s Skipping invalid %s %r : %r"%
+                                (peer['peername'], classname, peer_object, err))
                         continue
 
                 synced[peer_object_id] = object
 
-                if dbg:
-                    print >> log, peer['peername'], classobj(self.api).__class__.__name__, object[object.primary_key], dbg
+                if action:
+                    message("%s: (%d/%d) %s %d %s %s"
+                            %(peer['peername'], count,total, classname,
+                              object[primary_key], peer_object_name, action))
 
-            if verbose:
-                print >>log, 'Exiting sync on',classobj(self.api).__class__.__name__
+            message_verbose("Exiting sync on %s"%classname)
 
             return synced
 
+        ### over time, we've had issues with a given column being
+        ### added on one side and not on the other
+        ### this helper function computes the intersection of two lists of fields/columns
+        def intersect (l1,l2):
+            if compatibility: return list (set(l1).intersection(set(l2)))
+            else: return l1
+
+        # some fields definitely need to be ignored
+        def ignore (l1,l2):
+            return list (set(l1).difference(set(l2)))
+
         #
         # Synchronize foreign sites
         #
 
         start = time.time()
 
-        print >>log, 'Dealing with Sites'
+        message('Dealing with Sites')
 
         # Compare only the columns returned by the GetPeerData() call
         if peer_tables['Sites']:
             columns = peer_tables['Sites'][0].keys()
+            columns = intersect (columns, Site.fields)
         else:
             columns = None
 
@@ -157,12 +351,13 @@ class RefreshPeer(Method):
         sites_at_peer = dict([(site['site_id'], site) for site in peer_tables['Sites']])
 
         # Synchronize new set (still keyed on foreign site_id)
-        peer_sites = sync(old_peer_sites, sites_at_peer, Site)
+        peer_sites = sync(old_peer_sites, sites_at_peer, Site,
+                          ignore(columns, RefreshPeer.ignore_site_fields))
 
         for peer_site_id, site in peer_sites.iteritems():
             # Bind any newly cached sites to peer
             if peer_site_id not in old_peer_sites:
-                peer.add_site(site, peer_site_id, commit = False)
+                peer.add_site(site, peer_site_id, commit = commit_mode)
                 site['peer_id'] = peer_id
                 site['peer_site_id'] = peer_site_id
 
@@ -172,7 +367,7 @@ class RefreshPeer(Method):
         # XXX Synchronize foreign key types
         #
 
-        print >>log, 'Dealing with Keys'
+        message('Dealing with Keys')
 
         key_types = KeyTypes(self.api).dict()
 
@@ -185,6 +380,7 @@ class RefreshPeer(Method):
         # Compare only the columns returned by the GetPeerData() call
         if peer_tables['Keys']:
             columns = peer_tables['Keys'][0].keys()
+            columns = intersect (columns, Key.fields)
         else:
             columns = None
 
@@ -196,17 +392,17 @@ class RefreshPeer(Method):
         for peer_key_id, key in keys_at_peer.items():
             if key['key_type'] not in key_types:
                 # XXX Log an event instead of printing to logfile
-                print >> log, "Warning: Skipping invalid %s key:" % peer['peername'], \
-                      key, ": invalid key type", key['key_type']
+                message("Warning: Skipping invalid %s key %r" % ( peer['peername'], key))
                 del keys_at_peer[peer_key_id]
                 continue
 
         # Synchronize new set (still keyed on foreign key_id)
-        peer_keys = sync(old_peer_keys, keys_at_peer, Key)
+        peer_keys = sync(old_peer_keys, keys_at_peer, Key,
+                         ignore(columns, RefreshPeer.ignore_key_fields))
         for peer_key_id, key in peer_keys.iteritems():
             # Bind any newly cached keys to peer
             if peer_key_id not in old_peer_keys:
-                peer.add_key(key, peer_key_id, commit = False)
+                peer.add_key(key, peer_key_id, commit = commit_mode)
                 key['peer_id'] = peer_id
                 key['peer_key_id'] = peer_key_id
 
@@ -218,62 +414,67 @@ class RefreshPeer(Method):
 
         start = time.time()
 
-        print >>log, 'Dealing with Persons'
+        message('Dealing with Persons')
 
         # Compare only the columns returned by the GetPeerData() call
        if peer_tables['Persons']:
             columns = peer_tables['Persons'][0].keys()
+            columns = intersect (columns, Person.fields)
         else:
             columns = None
 
         # Keyed on foreign person_id
         old_peer_persons = Persons(self.api, {'peer_id': peer_id}, columns).dict('peer_person_id')
 
-        # artificially attach the persons returned by GetPeerData to the new peer
-        # this is because validate_email needs peer_id to be correct when checking for duplicates
-        for person in peer_tables['Persons']:
-            person['peer_id']=peer_id
+        # artificially attach the persons returned by GetPeerData to the new peer
+        # this is because validate_email needs peer_id to be correct when checking for duplicates
+        for person in peer_tables['Persons']:
+            person['peer_id']=peer_id
         persons_at_peer = dict([(peer_person['person_id'], peer_person) \
                                 for peer_person in peer_tables['Persons']])
 
         # XXX Do we care about membership in foreign site(s)?
 
         # Synchronize new set (still keyed on foreign person_id)
-        peer_persons = sync(old_peer_persons, persons_at_peer, Person)
+        peer_persons = sync(old_peer_persons, persons_at_peer, Person,
+                            ignore(columns, RefreshPeer.ignore_person_fields))
 
-        # transcoder : retrieve a local key_id from a peer_key_id
-        key_transcoder = dict ( [ (key['key_id'],peer_key_id) \
-                                  for peer_key_id,key in peer_keys.iteritems()])
+        # transcoder : retrieve a peer_key_id from a local key_id
+        key_transcoder = dict ( [ (key['key_id'],peer_key_id) \
+                                  for peer_key_id,key in peer_keys.iteritems()])
 
         for peer_person_id, person in peer_persons.iteritems():
             # Bind any newly cached users to peer
             if peer_person_id not in old_peer_persons:
-                peer.add_person(person, peer_person_id, commit = False)
+                peer.add_person(person, peer_person_id, commit = commit_mode)
                 person['peer_id'] = peer_id
                 person['peer_person_id'] = peer_person_id
                 person['key_ids'] = []
 
+            # User as viewed by peer
             peer_person = persons_at_peer[peer_person_id]
-            
+
             # Foreign keys currently belonging to the user
-            old_person_key_ids = [key_transcoder[key_id] for key_id in person['key_ids'] \
-                                  if key_transcoder[key_id] in peer_keys]
+            old_person_key_ids = [key_transcoder[key_id] for key_id in person['key_ids'] \
+                                  if key_id in key_transcoder and key_transcoder[key_id] in peer_keys]
 
             # Foreign keys that should belong to the user
-            # this is basically peer_person['key_ids'], we just check it makes sense
-            # (e.g. 
we might have failed importing it) - person_key_ids = [ key_id for key_id in peer_person['key_ids'] if key_id in peer_keys] + # this is basically peer_person['key_ids'], we just check it makes sense + # (e.g. we might have failed importing it) + person_key_ids = [ key_id for key_id in peer_person['key_ids'] if key_id in peer_keys] # Remove stale keys from user - for key_id in (set(old_person_key_ids) - set(person_key_ids)): - person.remove_key(peer_keys[key_id], commit = False) - print >> log, peer['peername'], 'Key', key_id, 'removed from', person['email'] + for key_id in (set(old_person_key_ids) - set(person_key_ids)): + person.remove_key(peer_keys[key_id], commit = commit_mode) + message ("%s Key %d removed from person %s"%(peer['peername'], key_id, person['email'])) # Add new keys to user - for key_id in (set(person_key_ids) - set(old_person_key_ids)): - person.add_key(peer_keys[key_id], commit = False) - print >> log, peer['peername'], 'Key', key_id, 'added into', person['email'] + for key_id in (set(person_key_ids) - set(old_person_key_ids)): + message ("before add_key, passing person=%r"%person) + message ("before add_key, passing key=%r"%peer_keys[key_id]) + person.add_key(peer_keys[key_id], commit = commit_mode) + message ("%s Key %d added into person %s"%(peer['peername'],key_id, person['email'])) timers['persons'] = time.time() - start @@ -289,13 +490,14 @@ class RefreshPeer(Method): start = time.time() - print >>log, 'Dealing with Nodes' + message('Dealing with Nodes (1)') # Compare only the columns returned by the GetPeerData() call if peer_tables['Nodes']: columns = peer_tables['Nodes'][0].keys() + columns = intersect (columns, Node.fields) else: - columns = None + columns = Node.fields # Keyed on foreign node_id old_peer_nodes = Nodes(self.api, {'peer_id': peer_id}, columns).dict('peer_node_id') @@ -311,20 +513,20 @@ class RefreshPeer(Method): errors.append("invalid boot state %s" % node['boot_state']) if errors: # XXX Log an event instead of printing to logfile - print >> log, "Warning: Skipping invalid %s node:" % peer['peername'], \ - node, ":", ", ".join(errors) + message ("Warning: Skipping invalid %s node %r : " % (peer['peername'], node)\ + + ", ".join(errors)) del nodes_at_peer[peer_node_id] continue else: node['site_id'] = peer_sites[node['site_id']]['site_id'] # Synchronize new set - peer_nodes = sync(old_peer_nodes, nodes_at_peer, Node) + peer_nodes = sync(old_peer_nodes, nodes_at_peer, Node, ignore (columns, RefreshPeer.ignore_node_fields)) for peer_node_id, node in peer_nodes.iteritems(): # Bind any newly cached foreign nodes to peer if peer_node_id not in old_peer_nodes: - peer.add_node(node, peer_node_id, commit = False) + peer.add_node(node, peer_node_id, commit = commit_mode) node['peer_id'] = peer_id node['peer_node_id'] = peer_node_id @@ -335,6 +537,7 @@ class RefreshPeer(Method): # start = time.time() + message('Dealing with Nodes (2)') # Keyed on local node_id local_nodes = Nodes(self.api).dict() @@ -363,11 +566,12 @@ class RefreshPeer(Method): start = time.time() - print >>log, 'Dealing with Slices' + message('Dealing with Slices (1)') # Compare only the columns returned by the GetPeerData() call if peer_tables['Slices']: columns = peer_tables['Slices'][0].keys() + columns = intersect (columns, Slice.fields) else: columns = None @@ -389,26 +593,27 @@ class RefreshPeer(Method): else: slice['creator_person_id'] = peer_persons[slice['creator_person_id']]['person_id'] if errors: - print >> log, "Warning: Skipping invalid %s slice:" % peer['peername'], \ - 
slice, ":", ", ".join(errors)
+                message("Warning: Skipping invalid %s slice %r : " % (peer['peername'], slice) \
+                        + ", ".join(errors))
                 del slices_at_peer[peer_slice_id]
                 continue
             else:
                 slice['site_id'] = peer_sites[slice['site_id']]['site_id']
 
         # Synchronize new set
-        peer_slices = sync(old_peer_slices, slices_at_peer, Slice)
+        peer_slices = sync(old_peer_slices, slices_at_peer, Slice, ignore (columns, RefreshPeer.ignore_slice_fields))
 
-        # transcoder : retrieve a local node_id from a peer_node_id
-        node_transcoder = dict ( [ (node['node_id'],peer_node_id) \
-                                   for peer_node_id,node in peer_nodes.iteritems()])
-        person_transcoder = dict ( [ (person['person_id'],peer_person_id) \
-                                     for peer_person_id,person in peer_persons.iteritems()])
+        message('Dealing with Slices (2)')
+        # transcoder : retrieve a peer_node_id from a local node_id
+        node_transcoder = dict ( [ (node['node_id'],peer_node_id) \
+                                   for peer_node_id,node in peer_nodes.iteritems()])
+        person_transcoder = dict ( [ (person['person_id'],peer_person_id) \
+                                     for peer_person_id,person in peer_persons.iteritems()])
 
         for peer_slice_id, slice in peer_slices.iteritems():
             # Bind any newly cached foreign slices to peer
             if peer_slice_id not in old_peer_slices:
-                peer.add_slice(slice, peer_slice_id, commit = False)
+                peer.add_slice(slice, peer_slice_id, commit = commit_mode)
                 slice['peer_id'] = peer_id
                 slice['peer_slice_id'] = peer_slice_id
                 slice['node_ids'] = []
 
@@ -418,58 +623,121 @@ class RefreshPeer(Method):
             peer_slice = slices_at_peer[peer_slice_id]
 
             # Nodes that are currently part of the slice
-            old_slice_node_ids = [ node_transcoder[node_id] for node_id in slice['node_ids'] \
-                                   if node_transcoder[node_id] in peer_nodes]
+            old_slice_node_ids = [ node_transcoder[node_id] for node_id in slice['node_ids'] \
+                                   if node_id in node_transcoder and node_transcoder[node_id] in peer_nodes]
 
             # Nodes that should be part of the slice
-            slice_node_ids = [ node_id for node_id in peer_slice['node_ids'] if node_id in peer_nodes]
+            slice_node_ids = [ node_id for node_id in peer_slice['node_ids'] if node_id in peer_nodes]
 
             # Remove stale nodes from slice
             for node_id in (set(old_slice_node_ids) - set(slice_node_ids)):
-                slice.remove_node(peer_nodes[node_id], commit = False)
-                print >> log, peer['peername'], 'Node', peer_nodes[node_id]['hostname'], 'removed from', slice['name']
+                slice.remove_node(peer_nodes[node_id], commit = commit_mode)
+                message ("%s node %s removed from slice %s"%(peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
 
             # Add new nodes to slice
             for node_id in (set(slice_node_ids) - set(old_slice_node_ids)):
-                slice.add_node(peer_nodes[node_id], commit = False)
-                print >> log, peer['peername'], 'Node', peer_nodes[node_id]['hostname'], 'added into', slice['name']
+                slice.add_node(peer_nodes[node_id], commit = commit_mode)
+                message ("%s node %s added into slice %s"%(peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
 
             # N.B.: Local nodes that may have been added to the slice
             # by hand, are removed. In other words, don't do this.
# Foreign users that are currently part of the slice - #old_slice_person_ids = [ person_transcoder[person_id] for person_id in slice['person_ids'] \ - # if person_transcoder[person_id] in peer_persons] - # An issue occurred with a user who registered on both sites (same email) - # So the remote person could not get cached locally - # The one-line map/filter style is nicer but ineffective here - old_slice_person_ids = [] - for person_id in slice['person_ids']: - if not person_transcoder.has_key(person_id): - print >> log, 'WARNING : person_id %d in %s not transcodable (1) - skipped'%(person_id,slice['name']) - elif person_transcoder[person_id] not in peer_persons: - print >> log, 'WARNING : person_id %d in %s not transcodable (2) - skipped'%(person_id,slice['name']) - else: - old_slice_person_ids += [person_transcoder[person_id]] + #old_slice_person_ids = [ person_transcoder[person_id] for person_id in slice['person_ids'] \ + # if person_transcoder[person_id] in peer_persons] + # An issue occurred with a user who registered on both sites (same email) + # So the remote person could not get cached locally + # The one-line map/filter style is nicer but ineffective here + old_slice_person_ids = [] + for person_id in slice['person_ids']: + if not person_transcoder.has_key(person_id): + message ('WARNING : person_id %d in %s not transcodable (1) - skipped'%(person_id,slice['name'])) + elif person_transcoder[person_id] not in peer_persons: + message('WARNING : person_id %d in %s not transcodable (2) - skipped'%(person_id,slice['name'])) + else: + old_slice_person_ids += [person_transcoder[person_id]] # Foreign users that should be part of the slice - slice_person_ids = [ person_id for person_id in peer_slice['person_ids'] if person_id in peer_persons ] + slice_person_ids = [ person_id for person_id in peer_slice['person_ids'] if person_id in peer_persons ] # Remove stale users from slice for person_id in (set(old_slice_person_ids) - set(slice_person_ids)): - slice.remove_person(peer_persons[person_id], commit = False) - print >> log, peer['peername'], 'User', peer_persons[person_id]['email'], 'removed from', slice['name'] + slice.remove_person(peer_persons[person_id], commit = commit_mode) + message ("%s user %s removed from slice %s"%(peer['peername'],peer_persons[person_id]['email'], slice['name'])) # Add new users to slice for person_id in (set(slice_person_ids) - set(old_slice_person_ids)): - slice.add_person(peer_persons[person_id], commit = False) - print >> log, peer['peername'], 'User', peer_persons[person_id]['email'], 'added into', slice['name'] + slice.add_person(peer_persons[person_id], commit = commit_mode) + message ("%s user %s added into slice %s"%(peer['peername'],peer_persons[person_id]['email'], slice['name'])) # N.B.: Local users that may have been added to the slice # by hand, are not touched. 
         timers['slices'] = time.time() - start
+
+        #
+        # Persons x Sites
+        #
+        start = time.time()
+
+        message('Dealing with Sites X Persons relationship')
+
+        for peer_site_id, site in peer_sites.iteritems():
+            # Site as viewed by peer
+            peer_site = sites_at_peer[peer_site_id]
+
+            # Persons that are currently part of the site
+            old_site_person_ids = [ person_transcoder[person_id] for person_id in site['person_ids'] \
+                                    if person_id in person_transcoder and person_transcoder[person_id] in peer_persons]
+
+            # Persons that should be part of the site
+            site_person_ids = [ person_id for person_id in peer_site['person_ids'] if person_id in peer_persons]
+
+            # Remove stale persons from site
+            for person_id in (set(old_site_person_ids) - set(site_person_ids)):
+                site.remove_person(peer_persons[person_id], commit = commit_mode)
+                message ("%s person %s removed from site %s"%(peer['peername'], peer_persons[person_id]['email'], site['login_base']))
+
+            # Add new persons to site
+            for person_id in (set(site_person_ids) - set(old_site_person_ids)):
+                site.add_person(peer_persons[person_id], commit = commit_mode)
+                message ("%s person %s added into site %s"%(peer['peername'], peer_persons[person_id]['email'], site['login_base']))
+
+        timers['sites-persons'] = time.time() - start
+
+
+        #
+        # Persons x Roles
+        #
+        start = time.time()
+
+        message('Dealing with Persons Roles relationship')
+
+        roles = Roles(self.api)
+        roles_dict = dict([(role['role_id'], role) for role in roles])
+        for peer_person_id, person in peer_persons.iteritems():
+            # Person as viewed by peer
+            peer_person = persons_at_peer[peer_person_id]
+
+            # Roles that are currently attributed to the person
+            old_person_role_ids = [ role_id for role_id in person['role_ids'] ]
+
+            # Roles that should be attributed to the person
+            person_role_ids = [ role_id for role_id in peer_person['role_ids'] ]
+
+            # Remove stale roles
+            for role_id in (set(old_person_role_ids) - set(person_role_ids)):
+                person.remove_role(roles_dict[role_id], commit = commit_mode)
+                message ("%s role %s removed from person %s"%(peer['peername'], roles_dict[role_id]['name'], person['email']))
+
+            # Add new roles to person
+            for role_id in (set(person_role_ids) - set(old_person_role_ids)):
+                person.add_role(roles_dict[role_id], commit = commit_mode)
+                message ("%s role %s added to person %s"%(peer['peername'], roles_dict[role_id]['name'], person['email']))
+
+        timers['persons-roles'] = time.time() - start
+
         # Update peer itself and commit
         peer.sync(commit = True)
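
Note on the core of this change: every table above (Sites, Keys, Persons, Nodes, Slices) is reconciled by the same nested sync() helper, always in three steps - delete local entries whose foreign identifier has disappeared from the peer, update entries whose compared columns differ, and add the remainder - with entries keyed on the peer's identifiers and comparison restricted to the column list produced by intersect()/ignore(). The following minimal sketch shows that pattern outside the PLCAPI object model; it uses plain dicts, and the names refresh and local_by_peer_id are illustrative only, not part of RefreshPeer:

# illustrative sketch - plain dicts stand in for PLCAPI's Site/Key/Person/Node/Slice objects
def refresh(local_by_peer_id, peer_objects, columns):
    # 1- delete stale entries: their foreign id no longer exists at the peer
    for peer_id in list(local_by_peer_id.keys()):
        if peer_id not in peer_objects:
            del local_by_peer_id[peer_id]
    # 2- add new entries, and update existing ones only when a compared column differs
    synced = {}
    for peer_id, peer_object in peer_objects.items():
        local = local_by_peer_id.get(peer_id)
        if local is None or any(local[c] != peer_object[c] for c in columns):
            local = dict((c, peer_object[c]) for c in columns)
            local_by_peer_id[peer_id] = local
        synced[peer_id] = local
    return synced

# toy run: entry 2 is stale and gets dropped, entry 3 is new and gets added
local = {1: {'hostname': 'a.example.org'}, 2: {'hostname': 'stale.example.org'}}
peer  = {1: {'hostname': 'a.example.org'}, 3: {'hostname': 'new.example.org'}}
refresh(local, peer, ['hostname'])
assert sorted(local.keys()) == [1, 3]

Comparing only an agreed column list is what keeps the compatibility mode above safe when the two ends run different db schemas; the real sync() additionally issues or defers commits per object according to commit_mode, and relinks keys, nodes and persons through the *_transcoder dictionaries that map local identifiers back to peer identifiers.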