#
# Thierry Parmentelat - INRIA
#
+# $Id$
import time
from PLC.SliceInstantiations import SliceInstantiations
from PLC.Slices import Slice, Slices
+verbose=False
+
+# initial version was doing only one final commit
+# * set commit_mode to False to get that behaviour
+# * set commit_mode to True to get everything synced at once
+commit_mode=True
+
+def message (to_print=None,verbose_only=False):
+ if verbose_only and not verbose:
+ return
+ print >> log, time.strftime("%m-%d-%H-%M-%S:"),
+ if to_print:
+ print >>log, to_print
+
+def message_verbose(to_print=None):
+ message(to_print,verbose_only=True)
+
class RefreshPeer(Method):
"""
- Fetches node and slice data from the specified peer and caches it
- locally; also deletes stale entries. Returns 1 if successful,
- faults otherwise.
+ Fetches site, node, slice, person and key data from the specified peer
+ and caches it locally; also deletes stale entries.
+ Upon successful completion, returns a dict reporting various timers.
+ Faults otherwise.
"""
roles = ['admin']
# Get peer data
start = time.time()
+ message('RefreshPeer starting up (commit_mode=%r)'%commit_mode)
+ message('Issuing GetPeerData')
peer_tables = peer.GetPeerData()
+ # for smooth federation with 4.2 - ignore fields that are useless anyway, and rewrite boot_state
+ boot_state_rewrite={'dbg':'safeboot','diag':'safeboot','disable':'disabled',
+ 'inst':'reinstall','rins':'reinstall','new':'reinstall','rcnf':'reinstall'}
+ for node in peer_tables['Nodes']:
+ for key in ['nodenetwork_ids','dummybox_id']:
+ if key in node:
+ del node[key]
+ if node['boot_state'] in boot_state_rewrite: node['boot_state']=boot_state_rewrite[node['boot_state']]
+ for slice in peer_tables['Slices']:
+ for key in ['slice_attribute_ids']:
+ if key in slice:
+ del slice[key]
timers['transport'] = time.time() - start - peer_tables['db_time']
timers['peer_db'] = peer_tables['db_time']
+ message_verbose('GetPeerData returned -> db=%d transport=%d'%(timers['peer_db'],timers['transport']))
def sync(objects, peer_objects, classobj):
"""
keyed on their foreign identifiers.
"""
+ classname=classobj(self.api).__class__.__name__
+ message_verbose('Entering sync on %s'%classname)
+
synced = {}
# Delete stale objects
for peer_object_id, object in objects.iteritems():
if peer_object_id not in peer_objects:
- object.delete(commit = False)
- print classobj(self.api).__class__.__name__, "object %s deleted" % object[object.class_key]
-
+ object.delete(commit = commit_mode)
+ message("%s %s %s deleted"%(peer['peername'],classname, object[object.primary_key]))
+
+ total = len(peer_objects)
+ count=1
+ # set trace_type/trace_ids to something realistic to trace down given objects
+ trace_type="Node"
+ trace_ids=[]
+ def trace (message):
+ if classname == trace_type and peer_object_id in trace_ids:
+ message_verbose('TRACE>>'+message)
+
# Add/update new/existing objects
for peer_object_id, peer_object in peer_objects.iteritems():
+ message_verbose ('DBG %s peer_object_id=%d (%d/%d)'%(classname,peer_object_id,count,total))
+ count += 1
+ if peer_object_id in synced:
+ message("Warning: %s Skipping already added %s: %r"%(
+ peer['peername'], classname, peer_object))
+ continue
+ if classname == 'Node':
+ message_verbose ('DBG>> hostname=%s'%peer_object['hostname'])
+ elif classname == "Slice":
+ message_verbose ('DBG>> slicename=%s'%peer_object['name'])
if peer_object_id in objects:
# Update existing object
object = objects[peer_object_id]
# Must use __eq__() instead of == since
# peer_object may be a raw dict instead of a Peer
# object.
+ trace ("in objects : comparing")
if not object.__eq__(peer_object):
# Only update intrinsic fields
+ trace ("updating")
object.update(object.db_fields(peer_object))
+ trace ("updated")
sync = True
dbg = "changed"
else:
+ trace ("intact")
sync = False
dbg = None
# Restore foreign identifier
peer_object[object.primary_key] = peer_object_id
else:
+ trace ("not in objects -- creating")
# Add new object
object = classobj(self.api, peer_object)
+ trace ("created")
# Replace foreign identifier with new local identifier
del object[object.primary_key]
+ trace ("forced clean id")
sync = True
dbg = "added"
if sync:
+ message_verbose("DBG>> syncing %s %d - commit_mode=%r"%(classname,peer_object_id,commit_mode))
try:
- object.sync(commit = False)
+ object.sync(commit = commit_mode)
except PLCInvalidArgument, err:
# Skip if validation fails
# XXX Log an event instead of printing to logfile
- print >> log, "Warning: Skipping invalid", \
- peer['peername'], object.__class__.__name__, \
- ":", peer_object, ":", err
+ message("Warning: %s Skipping invalid %s %r : %r"%(\
+ peer['peername'], classname, peer_object, err))
continue
synced[peer_object_id] = object
if dbg:
- print >> log, peer['peername'], classobj(self.api).__class__.__name__, \
- object[object.class_key], object[object.primary_key], dbg
+ message("%s: %s %d %s"%(peer['peername'], classname, object[object.primary_key], dbg))
+
+ message_verbose("Exiting sync on %s"%classname)
return synced
start = time.time()
+ message('Dealing with Sites')
+
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Sites']:
columns = peer_tables['Sites'][0].keys()
for peer_site_id, site in peer_sites.iteritems():
# Bind any newly cached sites to peer
if peer_site_id not in old_peer_sites:
- peer.add_site(site, peer_site_id, commit = False)
+ peer.add_site(site, peer_site_id, commit = commit_mode)
site['peer_id'] = peer_id
site['peer_site_id'] = peer_site_id
# XXX Synchronize foreign key types
#
+ message('Dealing with Keys')
+
key_types = KeyTypes(self.api).dict()
#
for peer_key_id, key in keys_at_peer.items():
if key['key_type'] not in key_types:
# XXX Log an event instead of printing to logfile
- print >> log, "Warning: Skipping invalid %s key:" % peer['peername'], \
- key, ": invalid key type", key['key_type']
+ message("Warning: Skipping invalid %s key %r" % ( peer['peername'], key))
del keys_at_peer[peer_key_id]
continue
for peer_key_id, key in peer_keys.iteritems():
# Bind any newly cached keys to peer
if peer_key_id not in old_peer_keys:
- peer.add_key(key, peer_key_id, commit = False)
+ peer.add_key(key, peer_key_id, commit = commit_mode)
key['peer_id'] = peer_id
key['peer_key_id'] = peer_key_id
start = time.time()
+ message('Dealing with Persons')
+
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Persons']:
columns = peer_tables['Persons'][0].keys()
# Keyed on foreign person_id
old_peer_persons = Persons(self.api, {'peer_id': peer_id}, columns).dict('peer_person_id')
+
+ # artificially attach the persons returned by GetPeerData to the new peer
+ # this is because validate_email needs peer_id to be correct when checking for duplicates
+ for person in peer_tables['Persons']:
+ person['peer_id']=peer_id
persons_at_peer = dict([(peer_person['person_id'], peer_person) \
for peer_person in peer_tables['Persons']])
peer_persons = sync(old_peer_persons, persons_at_peer, Person)
# transcoder : retrieve a local key_id from a peer_key_id
- key_transcoder = dict ( [ (key['key_id'],key['peer_key_id']) for key in peer_keys.values()])
+ key_transcoder = dict ( [ (key['key_id'],peer_key_id) \
+ for peer_key_id,key in peer_keys.iteritems()])
for peer_person_id, person in peer_persons.iteritems():
# Bind any newly cached users to peer
if peer_person_id not in old_peer_persons:
- peer.add_person(person, peer_person_id, commit = False)
+ peer.add_person(person, peer_person_id, commit = commit_mode)
person['peer_id'] = peer_id
person['peer_person_id'] = peer_person_id
person['key_ids'] = []
# Remove stale keys from user
for key_id in (set(old_person_key_ids) - set(person_key_ids)):
- person.remove_key(peer_keys[key_id], commit = False)
- print >> log, peer['peername'], 'Key', key_id, 'removed from', person['email']
+ person.remove_key(peer_keys[key_id], commit = commit_mode)
+ message ("%s Key %d removed from person %s"%(peer['peername'], key_id, person['email']))
# Add new keys to user
for key_id in (set(person_key_ids) - set(old_person_key_ids)):
- person.add_key(peer_keys[key_id], commit = False)
- print >> log, peer['peername'], 'Key', key_id, 'added into', person['email']
+ person.add_key(peer_keys[key_id], commit = commit_mode)
+ message ("%s Key %d added into person %s"%(peer['peername'],key_id, person['email']))
timers['persons'] = time.time() - start
start = time.time()
+ message('Dealing with Nodes (1)')
+
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Nodes']:
columns = peer_tables['Nodes'][0].keys()
else:
- columns = None
+ # smooth federation with a 4.2 peer - ignore these fields that are useless anyway
+ columns = Node.fields
+ if 'interface_ids' in columns: columns.remove('interface_ids')
+ if 'dummybox_id' in columns: columns.remove('dummybox_id')
# Keyed on foreign node_id
old_peer_nodes = Nodes(self.api, {'peer_id': peer_id}, columns).dict('peer_node_id')
errors.append("invalid boot state %s" % node['boot_state'])
if errors:
# XXX Log an event instead of printing to logfile
- print >> log, "Warning: Skipping invalid %s node:" % peer['peername'], \
- node, ":", ", ".join(errors)
+ message ("Warning: Skipping invalid %s node %r : " % (peer['peername'], node)\
+ + ", ".join(errors))
del nodes_at_peer[peer_node_id]
continue
else:
for peer_node_id, node in peer_nodes.iteritems():
# Bind any newly cached foreign nodes to peer
if peer_node_id not in old_peer_nodes:
- peer.add_node(node, peer_node_id, commit = False)
+ peer.add_node(node, peer_node_id, commit = commit_mode)
node['peer_id'] = peer_id
node['peer_node_id'] = peer_node_id
#
start = time.time()
+ message('Dealing with Nodes (2)')
# Keyed on local node_id
local_nodes = Nodes(self.api).dict()
start = time.time()
+ message('Dealing with Slices (1)')
+
# Compare only the columns returned by the GetPeerData() call
if peer_tables['Slices']:
columns = peer_tables['Slices'][0].keys()
else:
slice['creator_person_id'] = peer_persons[slice['creator_person_id']]['person_id']
if errors:
- print >> log, "Warning: Skipping invalid %s slice:" % peer['peername'], \
- slice, ":", ", ".join(errors)
+ message("Warning: Skipping invalid %s slice %r : " % (peer['peername'], slice) \
+ + ", ".join(errors))
del slices_at_peer[peer_slice_id]
continue
else:
# Synchronize new set
peer_slices = sync(old_peer_slices, slices_at_peer, Slice)
+ message('Dealing with Slices (2)')
# transcoder : retrieve a local node_id from a peer_node_id
- node_transcoder = dict ( [ (node['node_id'],node['peer_node_id']) for node in peer_nodes.values()])
- person_transcoder = dict ( [ (person['person_id'],person['peer_person_id']) for person in peer_persons.values()])
+ node_transcoder = dict ( [ (node['node_id'],peer_node_id) \
+ for peer_node_id,node in peer_nodes.iteritems()])
+ person_transcoder = dict ( [ (person['person_id'],peer_person_id) \
+ for peer_person_id,person in peer_persons.iteritems()])
for peer_slice_id, slice in peer_slices.iteritems():
# Bind any newly cached foreign slices to peer
if peer_slice_id not in old_peer_slices:
- peer.add_slice(slice, peer_slice_id, commit = False)
+ peer.add_slice(slice, peer_slice_id, commit = commit_mode)
slice['peer_id'] = peer_id
slice['peer_slice_id'] = peer_slice_id
slice['node_ids'] = []
# Nodes that are currently part of the slice
old_slice_node_ids = [ node_transcoder[node_id] for node_id in slice['node_ids'] \
- if node_transcoder[node_id] in peer_nodes]
+ if node_id in node_transcoder and node_transcoder[node_id] in peer_nodes]
# Nodes that should be part of the slice
slice_node_ids = [ node_id for node_id in peer_slice['node_ids'] if node_id in peer_nodes]
# Remove stale nodes from slice
for node_id in (set(old_slice_node_ids) - set(slice_node_ids)):
- slice.remove_node(peer_nodes[node_id], commit = False)
- print >> log, peer['peername'], 'Node', peer_nodes[node_id]['hostname'], 'removed from', slice['name']
+ slice.remove_node(peer_nodes[node_id], commit = commit_mode)
+ message ("%s node %s removed from slice %s"%(peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
# Add new nodes to slice
for node_id in (set(slice_node_ids) - set(old_slice_node_ids)):
- slice.add_node(peer_nodes[node_id], commit = False)
- print >> log, peer['peername'], 'Node', peer_nodes[node_id]['hostname'], 'added into', slice['name']
+ slice.add_node(peer_nodes[node_id], commit = commit_mode)
+ message ("%s node %s added into slice %s"%(peer['peername'], peer_nodes[node_id]['hostname'], slice['name']))
# N.B.: Local nodes that may have been added to the slice
# by hand, are removed. In other words, don't do this.
# Foreign users that are currently part of the slice
- old_slice_person_ids = [ person_transcoder[person_id] for person_id in slice['person_ids'] \
- if person_transcoder[person_id] in peer_persons]
+ #old_slice_person_ids = [ person_transcoder[person_id] for person_id in slice['person_ids'] \
+ # if person_transcoder[person_id] in peer_persons]
+ # An issue occurred with a user who registered on both sites (same email)
+ # So the remote person could not get cached locally
+ # The one-line map/filter style is nicer but ineffective here
+ old_slice_person_ids = []
+ for person_id in slice['person_ids']:
+ if not person_transcoder.has_key(person_id):
+ message ('WARNING : person_id %d in %s not transcodable (1) - skipped'%(person_id,slice['name']))
+ elif person_transcoder[person_id] not in peer_persons:
+ message('WARNING : person_id %d in %s not transcodable (2) - skipped'%(person_id,slice['name']))
+ else:
+ old_slice_person_ids += [person_transcoder[person_id]]
# Foreign users that should be part of the slice
slice_person_ids = [ person_id for person_id in peer_slice['person_ids'] if person_id in peer_persons ]
# Remove stale users from slice
for person_id in (set(old_slice_person_ids) - set(slice_person_ids)):
- slice.remove_person(peer_persons[person_id], commit = False)
- print >> log, peer['peername'], 'User', peer_persons[person_id]['email'], 'removed from', slice['name']
+ slice.remove_person(peer_persons[person_id], commit = commit_mode)
+ message ("%s user %s removed from slice %s"%(peer['peername'],peer_persons[person_id]['email'], slice['name']))
# Add new users to slice
for person_id in (set(slice_person_ids) - set(old_slice_person_ids)):
- slice.add_person(peer_persons[person_id], commit = False)
- print >> log, peer['peername'], 'User', peer_persons[person_id]['email'], 'added into', slice['name']
+ slice.add_person(peer_persons[person_id], commit = commit_mode)
+ message ("%s user %s added into slice %s"%(peer['peername'],peer_persons[person_id]['email'], slice['name']))
# N.B.: Local users that may have been added to the slice
# by hand, are not touched.