X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=PLC%2FMethods%2FRefreshPeer.py;h=4568d9975d5a34f2a2c5fb7d6722ec4fdd116690;hb=c52ada150ab8df273deefa92af79887e81a40bc2;hp=f13f394dd22e6c18da2ea2964bc681c124b88f8b;hpb=9cf6b5c4c2315b48a7fd61d9b39e62ddc3329a9c;p=plcapi.git
diff --git a/PLC/Methods/RefreshPeer.py b/PLC/Methods/RefreshPeer.py
index f13f394..4568d99 100644
--- a/PLC/Methods/RefreshPeer.py
+++ b/PLC/Methods/RefreshPeer.py
@@ -1,6 +1,7 @@
 #
 # Thierry Parmentelat - INRIA
 #
+# $Id$
 
 import time
 
@@ -20,11 +21,29 @@ from PLC.Nodes import Node, Nodes
 from PLC.SliceInstantiations import SliceInstantiations
 from PLC.Slices import Slice, Slices
 
+verbose=False
+
+# the initial version performed only one final commit
+# * set commit_mode to False to get that behaviour
+# * set commit_mode to True to get everything synced at once
+commit_mode=True
+
+def message (to_print=None,verbose_only=False):
+    if verbose_only and not verbose:
+        return
+    print >> log, time.strftime("%m-%d-%H-%M-%S:"),
+    if to_print:
+        print >> log, to_print
+
+def message_verbose(to_print=None):
+    message(to_print,verbose_only=True)
+
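+# A minimal usage sketch of the helpers above -- illustrative only, and
+# assuming 'log' is the file-like object this module logs to:
+#
+#   verbose = True
+#   message('always logged, prefixed with a timestamp')
+#   message_verbose('logged only while verbose is True')
+#   message(None)    # logs just the timestamp prefix
+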
""" - for key in ['delete','sync','process','focus','added','deleted','updated','unchanged','synced','screwed']: - debug_dict[key]=0 + classname=classobj(self.api).__class__.__name__ + message_verbose('Entering sync on %s'%classname) synced = {} - xstart=time.time() # Delete stale objects for peer_object_id, object in objects.iteritems(): if peer_object_id not in peer_objects: - object.delete(commit = False) - print classobj, "object %d deleted" % object[object.primary_key] - debug_dict['deleted'] += 1 - - xnow=time.time() - debug_dict['delete']=xnow-xstart - xstart=xnow - + object.delete(commit = commit_mode) + message("%s %s %s deleted"%(peer['peername'],classname, object[object.primary_key])) + + total = len(peer_objects) + count=1 + # set this to something realistic to trace down a given object(s) + trace_type="Node" + trace_ids=[] + def trace (message): + if classname == trace_type and peer_object_id in trace_ids: + message_verbose('TRACE>>'+message) + # Add/update new/existing objects for peer_object_id, peer_object in peer_objects.iteritems(): - - xnow=time.time() - debug_dict['sync'] += (xnow-xstart) - xstart=xnow - - #if peer_object_id in objects: - if objects.has_key(peer_object_id): + message_verbose ('DBG %s peer_object_id=%d (%d/%d)'%(classname,peer_object_id,count,total)) + count += 1 + if classname == 'Node': + message_verbose ('DBG>> hostname=%s'%peer_object['hostname']) + elif classname == "Slice": + message_verbose ('DBG>> slicename=%s'%peer_object['name']) + if peer_object_id in objects: # Update existing object object = objects[peer_object_id] @@ -109,53 +128,49 @@ class RefreshPeer(Method): # Must use __eq__() instead of == since # peer_object may be a raw dict instead of a Peer # object. + trace ("in objects : comparing") if not object.__eq__(peer_object): # Only update intrinsic fields + trace ("updating") object.update(object.db_fields(peer_object)) + trace ("updated") sync = True dbg = "changed" - debug_dict['updated'] += 1 else: + trace ("intact") sync = False dbg = None - debug_dict['unchanged'] += 1 # Restore foreign identifier peer_object[object.primary_key] = peer_object_id else: + trace ("not in objects -- creating") # Add new object object = classobj(self.api, peer_object) + trace ("created") # Replace foreign identifier with new local identifier del object[object.primary_key] + trace ("forced clean id") sync = True dbg = "added" - debug_dict['added'] += 1 - - xnow=time.time() - debug_dict['process'] += (xnow-xstart) - xstart=xnow if sync: + message_verbose("DBG>> syncing %s %d - commit_mode=%r"%(classname,peer_object_id,commit_mode)) try: - object.sync(commit = False) - debug_dict['synced'] += 1 + object.sync(commit = commit_mode) except PLCInvalidArgument, err: # Skip if validation fails # XXX Log an event instead of printing to logfile - print >> log, "Warning: Skipping invalid", \ - peer['peername'], object.__class__.__name__, \ - ":", peer_object, ":", err - debug_dict['screwed'] += 1 + message("Warning: %s Skipping invalid %s %r : %r"%(\ + peer['peername'], classname, peer_object, err)) continue synced[peer_object_id] = object if dbg: - print >> log, peer['peername'], classobj(self.api).__class__.__name__, object[object.primary_key], dbg + message("%s: %s %d %s"%(peer['peername'], classname, object[object.primary_key], dbg)) - xnow=time.time() - debug_dict['sync'] += (xnow-xstart) - xstart=xnow + message_verbose("Exiting sync on %s"%classname) return synced @@ -163,6 +178,10 @@ class RefreshPeer(Method): # Synchronize foreign sites # + start = 
@@ -163,6 +178,10 @@ class RefreshPeer(Method):
         # Synchronize foreign sites
         #
 
+        start = time.time()
+
+        message('Dealing with Sites')
+
         # Compare only the columns returned by the GetPeerData() call
         if peer_tables['Sites']:
             columns = peer_tables['Sites'][0].keys()
@@ -179,24 +198,25 @@ class RefreshPeer(Method):
         for peer_site_id, site in peer_sites.iteritems():
             # Bind any newly cached sites to peer
             if peer_site_id not in old_peer_sites:
-                peer.add_site(site, peer_site_id, commit = False)
+                peer.add_site(site, peer_site_id, commit = commit_mode)
                 site['peer_id'] = peer_id
                 site['peer_site_id'] = peer_site_id
 
-        now=time.time()
-        timers['site'] = now - start
-        start = now
+        timers['site'] = time.time() - start
 
         #
         # XXX Synchronize foreign key types
         #
 
+        message('Dealing with Keys')
+
         key_types = KeyTypes(self.api).dict()
 
         #
         # Synchronize foreign keys
         #
+        start = time.time()
 
         # Compare only the columns returned by the GetPeerData() call
         if peer_tables['Keys']:
@@ -212,8 +232,7 @@ class RefreshPeer(Method):
         for peer_key_id, key in keys_at_peer.items():
             if key['key_type'] not in key_types:
                 # XXX Log an event instead of printing to logfile
-                print >> log, "Warning: Skipping invalid %s key:" % peer['peername'], \
-                      key, ": invalid key type", key['key_type']
+                message("Warning: Skipping invalid %s key %r : invalid key type %s" % (peer['peername'], key, key['key_type']))
                 del keys_at_peer[peer_key_id]
                 continue
 
@@ -222,7 +241,7 @@ class RefreshPeer(Method):
         for peer_key_id, key in peer_keys.iteritems():
             # Bind any newly cached keys to peer
             if peer_key_id not in old_peer_keys:
-                peer.add_key(key, peer_key_id, commit = False)
+                peer.add_key(key, peer_key_id, commit = commit_mode)
                 key['peer_id'] = peer_id
                 key['peer_key_id'] = peer_key_id
 
@@ -233,7 +252,8 @@ class RefreshPeer(Method):
         #
 
         start = time.time()
-        substart = start
+
+        message('Dealing with Persons')
 
         # Compare only the columns returned by the GetPeerData() call
         if peer_tables['Persons']:
@@ -243,105 +263,52 @@ class RefreshPeer(Method):
 
             # Keyed on foreign person_id
             old_peer_persons = Persons(self.api, {'peer_id': peer_id}, columns).dict('peer_person_id')
+
+        # artificially attach the persons returned by GetPeerData to the new peer;
+        # validate_email needs peer_id to be set correctly when checking for duplicates
+        for person in peer_tables['Persons']:
+            person['peer_id']=peer_id
         persons_at_peer = dict([(peer_person['person_id'], peer_person) \
                                 for peer_person in peer_tables['Persons']])
 
-        now=time.time()
-        timers [ 'persons-1' ] = now - substart
-        substart=now
-
         # XXX Do we care about membership in foreign site(s)?
 
         # Synchronize new set (still keyed on foreign person_id)
-        yyy={}
-        peer_persons = sync(old_peer_persons, persons_at_peer, Person,yyy)
-        for key in yyy:
-            timers[ 'persons-'+key ] = yyy[key]
-
-        now=time.time()
-        timers [ 'persons-2' ] = now - substart
-        substart=now
-        subsubstart=now
-
-        for key in ['persons-31','persons-32','persons-33','persons-34','persons-35','persons-36','person3-added']:
-            timers[key]=0
-
-        # allows to retrieve local_key_id from a peer_key_id, if any
-        peer_key_id_from_local_key_id = dict( \
-            [ (key['key_id'],peer_key_id) for (peer_key_id,key) in peer_keys.items()])
-
-        for peer_person_id, person in peer_persons.iteritems():
+        peer_persons = sync(old_peer_persons, persons_at_peer, Person)
 
-            now=time.time()
-            timers [ 'persons-36' ] += (now - subsubstart)
-            subsubstart=now
+        # transcoder : retrieve a peer_key_id from a local key_id
+        key_transcoder = dict ( [ (key['key_id'],peer_key_id) \
+                                  for peer_key_id,key in peer_keys.iteritems()])
 
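+        # Toy illustration of the transcoder above (hypothetical ids): with
+        #   peer_keys = {10: {'key_id': 4}, 11: {'key_id': 7}}
+        # key_transcoder comes out as {4: 10, 7: 11}, i.e. it maps the id a key
+        # has in the local database back to the id the peer knows it by.
+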
+        for peer_person_id, person in peer_persons.iteritems():
             # Bind any newly cached users to peer
             if peer_person_id not in old_peer_persons:
-                peer.add_person(person, peer_person_id, commit = False)
+                peer.add_person(person, peer_person_id, commit = commit_mode)
                 person['peer_id'] = peer_id
                 person['peer_person_id'] = peer_person_id
                 person['key_ids'] = []
-                timers['person3-added'] += 1
-
-            now=time.time()
-            timers [ 'persons-31' ] += (now - subsubstart)
-            subsubstart=now
 
             # User as viewed by peer
             peer_person = persons_at_peer[peer_person_id]
 
             # Foreign keys currently belonging to the user
-            old_person_keys = dict(filter(lambda (peer_key_id, key): \
-                                          key['key_id'] in person['key_ids'],
-                                          peer_keys.items()))
-            print 'old_person_keys',old_person_keys.keys()
-
-            old_person_key_ids_set = set(\
-                [ peer_key_id_from_local_key_id[local_key_id] for local_key_id in person['key_ids']])
-            print 'old_person_keys_set',old_person_key_ids_set
-
-
-            now=time.time()
-            timers [ 'persons-33' ] += (now - subsubstart)
-            subsubstart=now
+            old_person_key_ids = [key_transcoder[key_id] for key_id in person['key_ids'] \
+                                  if key_id in key_transcoder and key_transcoder[key_id] in peer_keys]
 
             # Foreign keys that should belong to the user
-            person_keys = dict(filter(lambda (peer_key_id, key): \
-                                      peer_key_id in peer_person['key_ids'],
-                                      peer_keys.items()))
-            print 'person_keys',person_keys.keys()
-
-            person_keys_new = dict( [ (peer_key_id,peer_keys[peer_key_id]) \
-                                      for peer_key_id in peer_person['key_ids'] ])
-            print 'person_keys_new',person_keys_new.keys()
-
-
-            now=time.time()
-            timers [ 'persons-34' ] += (now - subsubstart)
-            subsubstart=now
+            # this is basically peer_person['key_ids'], we just check it makes sense
+            # (e.g. we might have failed importing it)
+            person_key_ids = [ key_id for key_id in peer_person['key_ids'] if key_id in peer_keys]
 
             # Remove stale keys from user
-            for peer_key_id in (set(old_person_keys.keys()) - set(person_keys.keys())):
-#            for peer_key_id in (old_person_key_ids_set - set(person_keys.keys())):
-                person.remove_key(old_person_keys[peer_key_id], commit = False)
-
-            now=time.time()
-            timers [ 'persons-35' ] += (now - subsubstart)
-            subsubstart=now
+            for key_id in (set(old_person_key_ids) - set(person_key_ids)):
+                person.remove_key(peer_keys[key_id], commit = commit_mode)
+                message ("%s key %d removed from person %s"%(peer['peername'], key_id, person['email']))
 
             # Add new keys to user
-            for peer_key_id in (set(person_keys.keys()) - set(old_person_keys.keys())):
-#            for peer_key_id in (set(person_keys.keys()) - old_person_key_ids_set):
-                person.add_key(person_keys[peer_key_id], commit = False)
-
-            now=time.time()
-            timers [ 'persons-36' ] += (now - subsubstart)
-            subsubstart=now
-
-            now=time.time()
-            timers [ 'persons-3' ] = now - substart
-            substart=now
+            for key_id in (set(person_key_ids) - set(old_person_key_ids)):
+                person.add_key(peer_keys[key_id], commit = commit_mode)
+                message ("%s key %d added to person %s"%(peer['peername'],key_id, person['email']))
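+
+            # The two loops above follow the usual set-difference reconciliation
+            # idiom; with toy ids for illustration:
+            #   old_person_key_ids = [10, 11]    # currently attached locally
+            #   person_key_ids     = [11, 12]    # what the peer advertises
+            # set(old) - set(new) removes key 10, set(new) - set(old) adds key 12.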
 
         timers['persons'] = time.time() - start
 
@@ -349,14 +316,15 @@ class RefreshPeer(Method):
         # XXX Synchronize foreign boot states
         #
 
-        start = time.time()
-
         boot_states = BootStates(self.api).dict()
 
         #
         # Synchronize foreign nodes
         #
+        start = time.time()
+
+        message('Dealing with Nodes (1)')
 
         # Compare only the columns returned by the GetPeerData() call
         if peer_tables['Nodes']:
@@ -378,8 +346,8 @@ class RefreshPeer(Method):
                 errors.append("invalid boot state %s" % node['boot_state'])
             if errors:
                 # XXX Log an event instead of printing to logfile
-                print >> log, "Warning: Skipping invalid %s node:" % peer['peername'], \
-                      node, ":", ", ".join(errors)
+                message("Warning: Skipping invalid %s node %r : %s" % \
+                        (peer['peername'], node, ", ".join(errors)))
                 del nodes_at_peer[peer_node_id]
                 continue
             else:
@@ -391,7 +359,7 @@ class RefreshPeer(Method):
         for peer_node_id, node in peer_nodes.iteritems():
             # Bind any newly cached foreign nodes to peer
             if peer_node_id not in old_peer_nodes:
-                peer.add_node(node, peer_node_id, commit = False)
+                peer.add_node(node, peer_node_id, commit = commit_mode)
                 node['peer_id'] = peer_id
                 node['peer_node_id'] = peer_node_id
 
@@ -402,6 +370,7 @@ class RefreshPeer(Method):
         #
 
         start = time.time()
+        message('Dealing with Nodes (2)')
 
         # Keyed on local node_id
         local_nodes = Nodes(self.api).dict()
@@ -422,14 +391,16 @@ class RefreshPeer(Method):
         # XXX Synchronize foreign slice instantiation states
         #
 
-        start = time.time()
-
         slice_instantiations = SliceInstantiations(self.api).dict()
 
         #
         # Synchronize foreign slices
         #
+        start = time.time()
+
+        message('Dealing with Slices (1)')
+
         # Compare only the columns returned by the GetPeerData() call
         if peer_tables['Slices']:
             columns = peer_tables['Slices'][0].keys()
@@ -454,8 +425,8 @@ class RefreshPeer(Method):
             else:
                 slice['creator_person_id'] = peer_persons[slice['creator_person_id']]['person_id']
             if errors:
-                print >> log, "Warning: Skipping invalid %s slice:" % peer['peername'], \
-                      slice, ":", ", ".join(errors)
+                message("Warning: Skipping invalid %s slice %r : %s" % \
+                        (peer['peername'], slice, ", ".join(errors)))
                 del slices_at_peer[peer_slice_id]
                 continue
             else:
@@ -464,10 +435,17 @@ class RefreshPeer(Method):
 
         # Synchronize new set
         peer_slices = sync(old_peer_slices, slices_at_peer, Slice)
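+
+        # Both the node and slice loops above apply the same validation-skip
+        # pattern; as a sketch (names are illustrative, not from this module):
+        #
+        #   for rec_id, rec in records.items():
+        #       errors = validate(rec)       # e.g. unknown boot_state
+        #       if errors:
+        #           message("Warning: skipping %r : %s" % (rec, ", ".join(errors)))
+        #           del records[rec_id]
+        #           continue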
 
+        message('Dealing with Slices (2)')
+        # transcoders : retrieve a peer_node_id / peer_person_id from local ids
+        node_transcoder = dict ( [ (node['node_id'],peer_node_id) \
+                                   for peer_node_id,node in peer_nodes.iteritems()])
+        person_transcoder = dict ( [ (person['person_id'],peer_person_id) \
+                                     for peer_person_id,person in peer_persons.iteritems()])
+
         for peer_slice_id, slice in peer_slices.iteritems():
             # Bind any newly cached foreign slices to peer
             if peer_slice_id not in old_peer_slices:
-                peer.add_slice(slice, peer_slice_id, commit = False)
+                peer.add_slice(slice, peer_slice_id, commit = commit_mode)
                 slice['peer_id'] = peer_id
                 slice['peer_slice_id'] = peer_slice_id
                 slice['node_ids'] = []
@@ -477,53 +455,59 @@ class RefreshPeer(Method):
             peer_slice = slices_at_peer[peer_slice_id]
 
             # Nodes that are currently part of the slice
-            old_slice_nodes = dict(filter(lambda (peer_node_id, node): \
-                                          node['node_id'] in slice['node_ids'],
-                                          peer_nodes.items()))
+            old_slice_node_ids = [ node_transcoder[node_id] for node_id in slice['node_ids'] \
+                                   if node_id in node_transcoder and node_transcoder[node_id] in peer_nodes]
 
             # Nodes that should be part of the slice
-            slice_nodes = dict(filter(lambda (peer_node_id, node): \
-                                      peer_node_id in peer_slice['node_ids'],
-                                      peer_nodes.items()))
+            slice_node_ids = [ node_id for node_id in peer_slice['node_ids'] if node_id in peer_nodes]
 
             # Remove stale nodes from slice
-            for node_id in (set(old_slice_nodes.keys()) - set(slice_nodes.keys())):
-                slice.remove_node(old_slice_nodes[node_id], commit = False)
+            for node_id in (set(old_slice_node_ids) - set(slice_node_ids)):
+                slice.remove_node(peer_nodes[node_id], commit = commit_mode)
+                message ("%s node %s removed from slice %s"%(peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
 
             # Add new nodes to slice
-            for node_id in (set(slice_nodes.keys()) - set(old_slice_nodes.keys())):
-                slice.add_node(slice_nodes[node_id], commit = False)
+            for node_id in (set(slice_node_ids) - set(old_slice_node_ids)):
+                slice.add_node(peer_nodes[node_id], commit = commit_mode)
+                message ("%s node %s added to slice %s"%(peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
 
             # N.B.: Local nodes that may have been added to the slice
             # by hand are removed. In other words, don't do this.
 
             # Foreign users that are currently part of the slice
-            old_slice_persons = dict(filter(lambda (peer_person_id, person): \
-                                            person['person_id'] in slice['person_ids'],
-                                            peer_persons.items()))
+            #old_slice_person_ids = [ person_transcoder[person_id] for person_id in slice['person_ids'] \
+            #                   if person_transcoder[person_id] in peer_persons]
+            # An issue occurred with a user who had registered on both sites with
+            # the same email: the remote person could not get cached locally, so
+            # some person_ids are not transcodable. The one-line comprehension
+            # style above is nicer but would raise KeyError here.
+            old_slice_person_ids = []
+            for person_id in slice['person_ids']:
+                if person_id not in person_transcoder:
+                    message ('WARNING : person_id %d in %s not transcodable (1) - skipped'%(person_id,slice['name']))
+                elif person_transcoder[person_id] not in peer_persons:
+                    message ('WARNING : person_id %d in %s not transcodable (2) - skipped'%(person_id,slice['name']))
+                else:
+                    old_slice_person_ids.append(person_transcoder[person_id])
 
             # Foreign users that should be part of the slice
-            slice_persons = dict(filter(lambda (peer_person_id, person): \
-                                        peer_person_id in peer_slice['person_ids'],
-                                        peer_persons.items()))
+            slice_person_ids = [ person_id for person_id in peer_slice['person_ids'] if person_id in peer_persons ]
 
             # Remove stale users from slice
-            for peer_person_id in (set(old_slice_persons.keys()) - set(slice_persons.keys())):
-                slice.remove_person(old_slice_persons[peer_person_id], commit = False)
+            for person_id in (set(old_slice_person_ids) - set(slice_person_ids)):
+                slice.remove_person(peer_persons[person_id], commit = commit_mode)
+                message ("%s user %s removed from slice %s"%(peer['peername'],peer_persons[person_id]['email'], slice['name']))
 
             # Add new users to slice
-            for peer_person_id in (set(slice_persons.keys()) - set(old_slice_persons.keys())):
-                slice.add_person(slice_persons[peer_person_id], commit = False)
+            for person_id in (set(slice_person_ids) - set(old_slice_person_ids)):
+                slice.add_person(peer_persons[person_id], commit = commit_mode)
+                message ("%s user %s added to slice %s"%(peer['peername'],peer_persons[person_id]['email'], slice['name']))
 
             # N.B.: Local users that may have been added to the slice
             # by hand are not touched.
 
         timers['slices'] = time.time() - start
-        start=time.time()
 
         # Update peer itself and commit
         peer.sync(commit = True)
 
-        timers['sync'] = time.time() - start
-
         return timers
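
For context, a RefreshPeer call is typically issued against a MyPLC's XML-RPC
endpoint. A minimal sketch, assuming a local API at https://localhost/PLCAPI/
and an admin account (both hypothetical); it prints the timers dict that this
method returns:

    import xmlrpclib

    api = xmlrpclib.ServerProxy('https://localhost/PLCAPI/', allow_none=True)
    auth = {'AuthMethod': 'password',
            'Username': 'admin@example.com',
            'AuthString': 'secret'}
    timers = api.RefreshPeer(auth, 'other-peer')
    for name, value in sorted(timers.iteritems()):
        print name, value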