import fcntl
import time
-from PLC.Debug import log
+from PLC.Logger import logger
from PLC.Faults import *
from PLC.Method import Method
from PLC.Parameter import Parameter, Mixed
from PLC.Nodes import Node, Nodes
from PLC.SliceInstantiations import SliceInstantiations
from PLC.Slices import Slice, Slices
+from PLC.Roles import Role, Roles
#################### settings
# initial version was doing only one final commit
# compatibility mode is a bit slower but probably safer on the long run
compatibility=True
+#################### debugging
# for verbose output
verbose=False
+# set to a filename for using cached data when debugging
+# WARNING: does not actually connect to the peer in this case
+use_cache=None
# for debugging specific entries - display detailed info on selected objs
focus_type=None # set to e.g. 'Person'
focus_ids=[] # set to a list of ids (e.g. person_ids) - remote or local ids should work
+#### example
+#use_cache="/var/log/peers/getpeerdata.pickle"
#verbose=True
#focus_type='Person'
-#focus_ids=[29103,239578,28825]
+#focus_ids=[621,1088]
#################### helpers
-def message (to_print=None,verbose_only=False):
+def message(to_print=None, verbose_only=False):
if verbose_only and not verbose:
return
- print >> log, time.strftime("%m-%d-%H-%M-%S:"),
- if to_print:
- print >>log, to_print
+ logger.info(to_print)
def message_verbose(to_print=None, header='VERBOSE'):
- message("%s> %r"%(header,to_print),verbose_only=True)
+ message("%s> %r"%(header, to_print), verbose_only=True)
#################### to avoid several instances running at the same time
returns = Parameter(int, "1 if successful")
+ ignore_site_fields=['peer_id', 'peer_site_id','last_updated', 'date_created',
+ 'address_ids', 'node_ids', 'person_ids', 'pcu_ids', 'slice_ids' ]
+ ignore_key_fields=['peer_id','peer_key_id', 'person_id']
+ ignore_person_fields=['peer_id','peer_person_id','last_updated','date_created',
+ 'key_ids','slice_ids','person_tag_ids']
+ ignore_node_fields=['peer_id','peer_node_id','last_updated','last_contact','date_created',
+ 'node_tag_ids', 'interface_ids', 'slice_ids', 'nodegroup_ids','pcu_ids','ports']
+ ignore_slice_fields=['peer_id','peer_slice_id','created',
+ 'person_ids','slice_tag_ids','node_ids',]
+
def call(self, auth, peer_id_or_peername):
ret_val = None
peername = Peers(self.api, [peer_id_or_peername], ['peername'])[0]['peername']
file_lock.unlock()
message("RefreshPeer caught exception - BEG")
import traceback
- traceback.print_exc()
+ traceback.print_exc(file=log)
message("RefreshPeer caught exception - END")
raise Exception, e
file_lock.unlock()
# Get peer data
start = time.time()
message('RefreshPeer starting up (commit_mode=%r)'%commit_mode)
- message('Issuing GetPeerData')
- peer_tables = peer.GetPeerData()
+ if not use_cache:
+ message('Issuing GetPeerData')
+ peer_tables = peer.GetPeerData()
+ else:
+ import pickle
+ if os.path.isfile(use_cache):
+ message("use_cache: WARNING: using cached getpeerdata")
+ peer_tables=pickle.load(file(use_cache,'rb'))
+ else:
+ message("use_cache: issuing getpeerdata")
+ peer_tables = peer.GetPeerData()
+ message("use_cache: saving in cache %s",use_cache)
+ pickle.dump(peer_tables,file(use_cache,'wb'))
+
# for smooth federation with 4.2 - ignore fields that are useless anyway, and rewrite boot_state
boot_state_rewrite={'dbg':'safeboot','diag':'safeboot','disable':'disabled',
'inst':'reinstall','rins':'reinstall','new':'reinstall','rcnf':'reinstall'}
# peer_object_id, peer_object and object are dynamically bound in the loop below...
# (local) object might be None if creating a new one
- def message_focus (message):
- if classname != focus_type: return
- if peer_object_id in focus_ids or \
- (object and primary_key in object and object[primary_key] in focus_ids):
+ def in_focus():
+ if classname != focus_type: return False
+ return peer_object_id in focus_ids or \
+ (object and primary_key in object and object[primary_key] in focus_ids)
+
+ def message_focus(message):
+ if in_focus():
# always show remote
message_verbose("peer_obj : %d [[%r]]"%(peer_object_id,peer_object),
header='FOCUS '+message)
if object: message_verbose("local_obj : <<%r>>"%(object),
header='FOCUS '+message);
+
# the function to compare a local object with its cadidate peer obj
# xxx probably faster when compatibility is False...
def equal_fields (object, peer_object, columns):
# fast version: must use __eq__() instead of == since
# peer_object may be a raw dict instead of a Peer object.
if not compatibility: return object.__eq__(peer_object)
- else:
- for column in columns:
+ elif not verbose:
+ for column in columns:
+# if in_focus(): message ('FOCUS comparing column %s'%column)
if object[column] != peer_object[column]: return False
return True
+ else:
+ result=True
+ for column in columns:
+ test= object[column] == peer_object[column]
+ if not test: result=False
+ return result
# Add/update new/existing objects
for peer_object_id, peer_object in peer_objects.iteritems():
peer_object_name=""
if secondary_key: peer_object_name="(%s)"%peer_object[secondary_key]
- message_verbose ('%s peer_object_id=%d %s (%d/%d)'%(classname,peer_object_id,peer_object_name,count,total))
+ message_verbose('%s peer_object_id=%d %s (%d/%d)'
+ %(classname,peer_object_id,peer_object_name,count,total))
count += 1
if peer_object_id in synced:
message("Warning: %s Skipping already added %s: %r"%(
# comparison.
peer_object[primary_key] = object[primary_key]
- if equal_fields(object,peer_object, columns):
+ if not equal_fields(object,peer_object, columns):
# Only update intrinsic fields
object.update(object.db_fields(peer_object))
- message_focus ("DIFFERENCES : updated / syncing")
+ message_focus("DIFFERENCES : updated / syncing")
sync = True
action = "changed"
else:
- message_focus ("UNCHANGED - left intact / not syncing")
+ message_focus("UNCHANGED - left intact / not syncing")
sync = False
action = None
object = classobj(self.api, peer_object)
# Replace foreign identifier with new local identifier
del object[primary_key]
- message_focus ("NEW -- created with clean id - syncing")
+ message_focus("NEW -- created with clean id - syncing")
sync = True
action = "added"
if sync:
- message_verbose("syncing %s %d - commit_mode=%r"%(classname,peer_object_id,commit_mode))
+ message_verbose("syncing %s %d - commit_mode=%r"
+ %(classname,peer_object_id,commit_mode))
try:
object.sync(commit = commit_mode)
except PLCInvalidArgument, err:
# Skip if validation fails
# XXX Log an event instead of printing to logfile
- message("Warning: %s Skipping invalid %s %r : %r"%(\
- peer['peername'], classname, peer_object, err))
+ message("Warning: %s Skipping invalid %s %r : %r"%
+ (peer['peername'], classname, peer_object, err))
continue
synced[peer_object_id] = object
if action:
- message("%s: %s %d %s %s"%(peer['peername'], classname, object[primary_key], peer_object_name, action))
+ message("%s: (%d/%d) %s %d %s %s"
+ %(peer['peername'], count,total, classname,
+ object[primary_key], peer_object_name, action))
message_verbose("Exiting sync on %s"%classname)
if compatibility: return list (set(l1).intersection(set(l2)))
else: return l1
+ # some fields definitely need to be ignored
+ def ignore (l1,l2):
+ return list (set(l1).difference(set(l2)))
+
#
# Synchronize foreign sites
#
sites_at_peer = dict([(site['site_id'], site) for site in peer_tables['Sites']])
# Synchronize new set (still keyed on foreign site_id)
- peer_sites = sync(old_peer_sites, sites_at_peer, Site, columns)
+ peer_sites = sync(old_peer_sites, sites_at_peer, Site,
+ ignore(columns, RefreshPeer.ignore_site_fields))
for peer_site_id, site in peer_sites.iteritems():
# Bind any newly cached sites to peer
continue
# Synchronize new set (still keyed on foreign key_id)
- peer_keys = sync(old_peer_keys, keys_at_peer, Key, columns)
+ peer_keys = sync(old_peer_keys, keys_at_peer, Key,
+ ignore(columns, RefreshPeer.ignore_key_fields))
for peer_key_id, key in peer_keys.iteritems():
# Bind any newly cached keys to peer
if peer_key_id not in old_peer_keys:
# XXX Do we care about membership in foreign site(s)?
# Synchronize new set (still keyed on foreign person_id)
- peer_persons = sync(old_peer_persons, persons_at_peer, Person, columns)
+ peer_persons = sync(old_peer_persons, persons_at_peer, Person,
+ ignore(columns, RefreshPeer.ignore_person_fields))
# transcoder : retrieve a local key_id from a peer_key_id
key_transcoder = dict ( [ (key['key_id'],peer_key_id) \
person['peer_id'] = peer_id
person['peer_person_id'] = peer_person_id
person['key_ids'] = []
+
# User as viewed by peer
peer_person = persons_at_peer[peer_person_id]
node['site_id'] = peer_sites[node['site_id']]['site_id']
# Synchronize new set
- peer_nodes = sync(old_peer_nodes, nodes_at_peer, Node, columns)
+ peer_nodes = sync(old_peer_nodes, nodes_at_peer, Node, ignore (columns, RefreshPeer.ignore_node_fields))
for peer_node_id, node in peer_nodes.iteritems():
# Bind any newly cached foreign nodes to peer
slice['site_id'] = peer_sites[slice['site_id']]['site_id']
# Synchronize new set
- peer_slices = sync(old_peer_slices, slices_at_peer, Slice, columns)
+ peer_slices = sync(old_peer_slices, slices_at_peer, Slice, ignore (columns, RefreshPeer.ignore_slice_fields))
message('Dealing with Slices (2)')
# transcoder : retrieve a local node_id from a peer_node_id
timers['slices'] = time.time() - start
+
+ #
+ # Persons x Sites
+ #
+ start = time.time()
+
+ message('Dealing Sites X Persons relationship')
+
+ for peer_site_id, site in peer_sites.iteritems():
+ # Site as viewed by peer
+ peer_site = sites_at_peer[peer_site_id]
+
+ # Persons that are currently part of the site
+ old_site_person_ids = [ person_transcoder[person_id] for person_id in site['person_ids'] \
+ if person_id in person_transcoder and person_transcoder[person_id] in peer_persons]
+
+            # Persons that should be part of the site
+ site_person_ids = [ person_id for person_id in peer_site['person_ids'] if person_id in peer_persons]
+
+ # Remove stale persons from site
+ for person_id in (set(old_site_person_ids) - set(site_person_ids)):
+ site.remove_person(peer_persons[person_id], commit = commit_mode)
+ message ("%s person %s removed from site %s"%(peer['peername'], peer_persons[person_id]['email'], site['login_base']))
+
+ # Add new persons to site
+ for person_id in (set(site_person_ids) - set(old_site_person_ids)):
+ site.add_person(peer_persons[person_id], commit = commit_mode)
+ message ("%s person %s added into site %s"%(peer['peername'], peer_persons[person_id]['email'], site['login_base']))
+
+ timers['sites-persons'] = time.time() - start
+
+
+ #
+ # Persons x Roles
+ #
+ start = time.time()
+
+ message('Dealing with Persons Roles relationship')
+
+ roles = Roles(self.api)
+ roles_dict = dict([(role['role_id'], role) for role in roles])
+ for peer_person_id, person in peer_persons.iteritems():
+ # Person as viewed by peer
+ peer_person = persons_at_peer[peer_person_id]
+
+            # Roles that are currently attributed to the person
+ old_person_role_ids = [ role_id for role_id in person['role_ids'] ]
+
+ # Roles that should be attributed to the person
+ person_role_ids = [ role_id for role_id in peer_person['role_ids'] ]
+
+ # Remove stale roles
+ for role_id in (set(old_person_role_ids) - set(person_role_ids)):
+ person.remove_role(roles_dict[role_id], commit = commit_mode)
+ message ("%s role %s removed from person %s"%(peer['peername'], roles_dict[role_id]['name'], person['email']))
+
+ # Add new roles to person
+ for role_id in (set(person_role_ids) - set(old_person_role_ids)):
+ person.add_role(roles_dict[role_id], commit = commit_mode)
+ message ("%s role %s added from person %s"%(peer['peername'], roles_dict[role_id]['name'], person['email']))
+
+ timers['persons-roles'] = time.time() - start
+
# Update peer itself and commit
peer.sync(commit = True)