#
# Thierry Parmentelat - INRIA
#
+# $Id: RefreshPeer.py,v 1.23 2007/03/23 19:05:16 thierry Exp $
import time
from PLC.SliceInstantiations import SliceInstantiations
from PLC.Slices import Slice, Slices
+verbose=False
+
# NOTE(review): this file is unified-diff residue ('+'/'-' prefixed lines, with
# unchanged hunk context missing) of the Python 2 PLCAPI RefreshPeer method --
# it is not valid Python as-is. Recover the real RefreshPeer.py from version
# control; comments below only document what the residue shows.
class RefreshPeer(Method):
"""
Fetches site, node, slice, person and key data from the specified peer
and caches it locally; also deletes stale entries.
Upon successful completion, returns a dict reporting various timers.
Faults otherwise.
"""
roles = ['admin']
# NOTE(review): stale -- per the docstring above, call() now returns a dict of
# timers (see 'return timers' at the end of call()), not the int 1; the
# Parameter declaration was not updated. Code change, out of scope here.
returns = Parameter(int, "1 if successful")
def call(self, auth, peer_id_or_peername):
"""
Refresh the local cache of the given peer's data.

Looks the peer up by id or name, pulls its tables via GetPeerData(),
and records timing information in the 'timers' dict that is returned
at the end of the method.
"""
-
- start = time.time()
-
# Get peer
peers = Peers(self.api, [peer_id_or_peername])
if not peers:
# NOTE(review): diff residue -- the body of this 'if not peers:' branch
# (presumably raising a fault for an unknown peer -- TODO confirm against
# VCS) and the assignment of the 'peer' variable used below were dropped
# with the omitted hunk context.
timers = {}
# Get peer data
+ start = time.time()
+ print >>log, 'Issuing GetPeerData'
peer_tables = peer.GetPeerData()
# transport time = total wall-clock minus the time the peer spent in its DB,
# as reported by the peer itself in peer_tables['db_time']
timers['transport'] = time.time() - start - peer_tables['db_time']
timers['peer_db'] = peer_tables['db_time']
+ if verbose:
+ print >>log, 'GetPeerData returned -> db=%d transport=%d'%(timers['peer_db'],timers['transport'])
- now=time.time()
- timers['prepare'] = now-start-timers['peer_db']-timers['transport']
- start=now
-
- def sync(objects, peer_objects, classobj,debug_dict={}):
+ def sync(objects, peer_objects, classobj):
"""
Synchronizes two dictionaries of objects. objects should be a
dictionary of local objects keyed on their foreign identifiers;
peer_objects a dictionary of the peer's objects, likewise keyed on
their foreign identifiers. classobj is the local model class used
for instantiation. Returns the dict of successfully synced objects,
keyed on foreign identifier.
"""
- for key in ['delete','sync','process','focus','added','deleted','updated','unchanged','synced','screwed']:
- debug_dict[key]=0
+ if verbose:
+ print >>log, 'Entering sync on',classobj(self.api).__class__.__name__
synced = {}
- xstart=time.time()
# Delete stale objects
for peer_object_id, object in objects.iteritems():
if peer_object_id not in peer_objects:
# deletions are batched: commit=False, committed by peer.sync() later
object.delete(commit = False)
- print classobj, "object %d deleted" % object[object.primary_key]
- debug_dict['deleted'] += 1
-
- xnow=time.time()
- debug_dict['delete']=xnow-xstart
- xstart=xnow
+ print >> log, peer['peername'],classobj(self.api).__class__.__name__, object[object.primary_key],"deleted"
# Add/update new/existing objects
for peer_object_id, peer_object in peer_objects.iteritems():
-
- xnow=time.time()
- debug_dict['sync'] += (xnow-xstart)
- xstart=xnow
-
- #if peer_object_id in objects:
- if objects.has_key(peer_object_id):
+ if peer_object_id in objects:
# Update existing object
object = objects[peer_object_id]
object.update(object.db_fields(peer_object))
sync = True
dbg = "changed"
- debug_dict['updated'] += 1
else:
sync = False
dbg = None
- debug_dict['unchanged'] += 1
# NOTE(review): diff residue gap -- the line(s) instantiating a brand-new
# local object (presumably 'object = classobj(self.api, ...)' for the
# not-yet-cached case -- TODO confirm against VCS) were dropped with the
# omitted hunk context; the 'added' branch below is orphaned as shown.
# Restore foreign identifier
peer_object[object.primary_key] = peer_object_id
del object[object.primary_key]
sync = True
dbg = "added"
- debug_dict['added'] += 1
-
- xnow=time.time()
- debug_dict['process'] += (xnow-xstart)
- xstart=xnow
if sync:
try:
object.sync(commit = False)
- debug_dict['synced'] += 1
except PLCInvalidArgument, err:
# Skip if validation fails
# XXX Log an event instead of printing to logfile
print >> log, "Warning: Skipping invalid", \
peer['peername'], object.__class__.__name__, \
":", peer_object, ":", err
- debug_dict['screwed'] += 1
continue
synced[peer_object_id] = object
if dbg:
print >> log, peer['peername'], classobj(self.api).__class__.__name__, object[object.primary_key], dbg
- xnow=time.time()
- debug_dict['sync'] += (xnow-xstart)
- xstart=xnow
+ if verbose:
+ print >>log, 'Exiting sync on',classobj(self.api).__class__.__name__
return synced
# NOTE(review): diff residue continues below -- several loop headers and
# variable definitions (e.g. the 'for site ...' loop that sets site['peer_id'],
# the definitions of peer_keys, peer_nodes, old_peer_slices, slices_at_peer)
# were dropped with the omitted unchanged-context lines. Do not read the gaps
# as real control flow; recover the full file from VCS.
# Synchronize foreign sites
#
+ start = time.time()
+
+ print >>log, 'Dealing with Sites'
+
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Sites']:
columns = peer_tables['Sites'][0].keys()
site['peer_id'] = peer_id
site['peer_site_id'] = peer_site_id
- now=time.time()
- timers['site'] = now - start
- start = now
+ timers['site'] = time.time() - start
#
# XXX Synchronize foreign key types
#
+ print >>log, 'Dealing with Keys'
+
key_types = KeyTypes(self.api).dict()
#
# Synchronize foreign keys
#
+ start = time.time()
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Keys']:
#
start = time.time()
- substart = start
+
+ print >>log, 'Dealing with Persons'
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Persons']:
# Keyed on foreign person_id
old_peer_persons = Persons(self.api, {'peer_id': peer_id}, columns).dict('peer_person_id')
+
+ # artificially attach the persons returned by GetPeerData to the new peer
+ # this is because validate_email needs peer_id to be correct when checking for duplicates
+ for person in peer_tables['Persons']:
+ person['peer_id']=peer_id
persons_at_peer = dict([(peer_person['person_id'], peer_person) \
for peer_person in peer_tables['Persons']])
- now=time.time()
- timers [ 'persons-1' ] = now - substart
- substart=now
-
# XXX Do we care about membership in foreign site(s)?
# Synchronize new set (still keyed on foreign person_id)
- yyy={}
- peer_persons = sync(old_peer_persons, persons_at_peer, Person,yyy)
- for key in yyy:
- timers[ 'persons-'+key ] = yyy[key]
-
- now=time.time()
- timers [ 'persons-2' ] = now - substart
- substart=now
- subsubstart=now
-
- for key in ['persons-31','persons-32','persons-33','persons-34','persons-35','persons-36','person3-added']:
- timers[key]=0
-
- # allows to retrieve local_key_id from a peer_key_id, if any
- peer_key_id_from_local_key_id = dict( \
- [ (key['key_id'],peer_key_id) for (peer_key_id,key) in peer_keys.items()])
+ peer_persons = sync(old_peer_persons, persons_at_peer, Person)
- for peer_person_id, person in peer_persons.iteritems():
-
- now=time.time()
- timers [ 'persons-36' ] += (now - subsubstart)
- subsubstart=now
+ # transcoder : retrieve a local key_id from a peer_key_id
+ key_transcoder = dict ( [ (key['key_id'],peer_key_id) \
+ for peer_key_id,key in peer_keys.iteritems()])
+ for peer_person_id, person in peer_persons.iteritems():
# Bind any newly cached users to peer
if peer_person_id not in old_peer_persons:
peer.add_person(person, peer_person_id, commit = False)
person['peer_id'] = peer_id
person['peer_person_id'] = peer_person_id
person['key_ids'] = []
- timers['person3-added'] += 1
-
- now=time.time()
- timers [ 'persons-31' ] += (now - subsubstart)
- subsubstart=now
# User as viewed by peer
peer_person = persons_at_peer[peer_person_id]
# Foreign keys currently belonging to the user
- old_person_keys = dict(filter(lambda (peer_key_id, key): \
- key['key_id'] in person['key_ids'],
- peer_keys.items()))
- print 'old_person_keys',old_person_keys.keys()
-
- old_person_key_ids_set = set(\
- [ peer_key_id_from_local_key_id[local_key_id] for local_key_id in person['key_ids']])
- print 'old_person_keys_set',old_person_key_ids_set
-
-
- now=time.time()
- timers [ 'persons-33' ] += (now - subsubstart)
- subsubstart=now
+ old_person_key_ids = [key_transcoder[key_id] for key_id in person['key_ids'] \
+ if key_transcoder[key_id] in peer_keys]
# Foreign keys that should belong to the user
- person_keys = dict(filter(lambda (peer_key_id, key): \
- peer_key_id in peer_person['key_ids'],
- peer_keys.items()))
- print 'person_keys',person_keys.keys()
-
- person_keys_new = dict( [ (peer_key_id,peer_keys[peer_key_id]) \
- for peer_key_id in peer_person['key_ids'] ])
- print 'person_keys_new',person_keys_new.keys()
-
-
- now=time.time()
- timers [ 'persons-34' ] += (now - subsubstart)
- subsubstart=now
+ # this is basically peer_person['key_ids'], we just check it makes sense
+ # (e.g. we might have failed importing it)
+ person_key_ids = [ key_id for key_id in peer_person['key_ids'] if key_id in peer_keys]
# Remove stale keys from user
- for peer_key_id in (set(old_person_keys.keys()) - set(person_keys.keys())):
-# for peer_key_id in (old_person_key_ids_set - set(person_keys.keys())):
- person.remove_key(old_person_keys[peer_key_id], commit = False)
-
- now=time.time()
- timers [ 'persons-35' ] += (now - subsubstart)
- subsubstart=now
+ for key_id in (set(old_person_key_ids) - set(person_key_ids)):
+ person.remove_key(peer_keys[key_id], commit = False)
+ print >> log, peer['peername'], 'Key', key_id, 'removed from', person['email']
# Add new keys to user
- for peer_key_id in (set(person_keys.keys()) - set(old_person_keys.keys())):
-# for peer_key_id in (set(person_keys.keys()) - old_person_key_ids_set):
- person.add_key(person_keys[peer_key_id], commit = False)
-
- now=time.time()
- timers [ 'persons-36' ] += (now - subsubstart)
- subsubstart=now
-
- now=time.time()
- timers [ 'persons-3' ] = now - substart
- substart=now
+ for key_id in (set(person_key_ids) - set(old_person_key_ids)):
+ person.add_key(peer_keys[key_id], commit = False)
+ print >> log, peer['peername'], 'Key', key_id, 'added into', person['email']
timers['persons'] = time.time() - start
# XXX Synchronize foreign boot states
#
- start = time.time()
-
boot_states = BootStates(self.api).dict()
#
# Synchronize foreign nodes
#
+ start = time.time()
+
+ print >>log, 'Dealing with Nodes'
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Nodes']:
# XXX Synchronize foreign slice instantiation states
#
- start = time.time()
-
slice_instantiations = SliceInstantiations(self.api).dict()
#
# Synchronize foreign slices
#
+ start = time.time()
+
+ print >>log, 'Dealing with Slices'
+
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Slices']:
columns = peer_tables['Slices'][0].keys()
# Synchronize new set
peer_slices = sync(old_peer_slices, slices_at_peer, Slice)
+ # transcoder : retrieve a local node_id from a peer_node_id
+ node_transcoder = dict ( [ (node['node_id'],peer_node_id) \
+ for peer_node_id,node in peer_nodes.iteritems()])
+ person_transcoder = dict ( [ (person['person_id'],peer_person_id) \
+ for peer_person_id,person in peer_persons.iteritems()])
+
for peer_slice_id, slice in peer_slices.iteritems():
# Bind any newly cached foreign slices to peer
if peer_slice_id not in old_peer_slices:
peer_slice = slices_at_peer[peer_slice_id]
# Nodes that are currently part of the slice
- old_slice_nodes = dict(filter(lambda (peer_node_id, node): \
- node['node_id'] in slice['node_ids'],
- peer_nodes.items()))
+ old_slice_node_ids = [ node_transcoder[node_id] for node_id in slice['node_ids'] \
+ if node_transcoder[node_id] in peer_nodes]
# Nodes that should be part of the slice
- slice_nodes = dict(filter(lambda (peer_node_id, node): \
- peer_node_id in peer_slice['node_ids'],
- peer_nodes.items()))
+ slice_node_ids = [ node_id for node_id in peer_slice['node_ids'] if node_id in peer_nodes]
# Remove stale nodes from slice
- for node_id in (set(old_slice_nodes.keys()) - set(slice_nodes.keys())):
- slice.remove_node(old_slice_nodes[node_id], commit = False)
+ for node_id in (set(old_slice_node_ids) - set(slice_node_ids)):
+ slice.remove_node(peer_nodes[node_id], commit = False)
+ print >> log, peer['peername'], 'Node', peer_nodes[node_id]['hostname'], 'removed from', slice['name']
# Add new nodes to slice
- for node_id in (set(slice_nodes.keys()) - set(old_slice_nodes.keys())):
- slice.add_node(slice_nodes[node_id], commit = False)
+ for node_id in (set(slice_node_ids) - set(old_slice_node_ids)):
+ slice.add_node(peer_nodes[node_id], commit = False)
+ print >> log, peer['peername'], 'Node', peer_nodes[node_id]['hostname'], 'added into', slice['name']
# N.B.: Local nodes that may have been added to the slice
# by hand, are removed. In other words, don't do this.
# Foreign users that are currently part of the slice
- old_slice_persons = dict(filter(lambda (peer_person_id, person): \
- person['person_id'] in slice['person_ids'],
- peer_persons.items()))
+ #old_slice_person_ids = [ person_transcoder[person_id] for person_id in slice['person_ids'] \
+ # if person_transcoder[person_id] in peer_persons]
+ # An issue occurred with a user who registered on both sites (same email)
+ # So the remote person could not get cached locally
+ # The one-line map/filter style is nicer but ineffective here
+ old_slice_person_ids = []
+ for person_id in slice['person_ids']:
+ if not person_transcoder.has_key(person_id):
+ print >> log, 'WARNING : person_id %d in %s not transcodable (1) - skipped'%(person_id,slice['name'])
+ elif person_transcoder[person_id] not in peer_persons:
+ print >> log, 'WARNING : person_id %d in %s not transcodable (2) - skipped'%(person_id,slice['name'])
+ else:
+ old_slice_person_ids += [person_transcoder[person_id]]
# Foreign users that should be part of the slice
- slice_persons = dict(filter(lambda (peer_person_id, person): \
- peer_person_id in peer_slice['person_ids'],
- peer_persons.items()))
+ slice_person_ids = [ person_id for person_id in peer_slice['person_ids'] if person_id in peer_persons ]
# Remove stale users from slice
- for peer_person_id in (set(old_slice_persons.keys()) - set(slice_persons.keys())):
- slice.remove_person(old_slice_persons[peer_person_id], commit = False)
+ for person_id in (set(old_slice_person_ids) - set(slice_person_ids)):
+ slice.remove_person(peer_persons[person_id], commit = False)
+ print >> log, peer['peername'], 'User', peer_persons[person_id]['email'], 'removed from', slice['name']
# Add new users to slice
- for peer_person_id in (set(slice_persons.keys()) - set(old_slice_persons.keys())):
- slice.add_person(slice_persons[peer_person_id], commit = False)
+ for person_id in (set(slice_person_ids) - set(old_slice_person_ids)):
+ slice.add_person(peer_persons[person_id], commit = False)
+ print >> log, peer['peername'], 'User', peer_persons[person_id]['email'], 'added into', slice['name']
# N.B.: Local users that may have been added to the slice
# by hand, are not touched.
timers['slices'] = time.time() - start
- start=time.time()
# Update peer itself and commit
# single commit point: all the commit=False work above is flushed here
peer.sync(commit = True)
- timers['sync'] = time.time() - start
-
return timers