from PLC.Table import Row, Table
# module-level switch controlling the verbose() debug helper below
verbose_flag=False;
# NOTE(review): stray diff '+' marker below -- the patched version apparently
# turns verbose output on; confirm which assignment should survive the merge
+verbose_flag=True;
def verbose (*args):
    """Print the raw argument tuple, but only when verbose_flag is set."""
    # guard-clause form: bail out early when debugging is off
    if not verbose_flag:
        return
    print (args)
# NOTE(review): diff fragment -- the enclosing 'def class_attributes' line is
# not visible in this chunk; this is the per-class descriptor dict it returns
return {'row_class':row_class,
'table_class':table_class,
# NOTE(review): diff residue -- the 'class_id' entry is replaced by
# 'primary_key' in the patched version
- 'class_id': row_class.__dict__['class_id'],
+ 'primary_key': row_class.__dict__['primary_key'],
'class_key': row_class.__dict__['class_key'],
'foreign_fields': row_class.__dict__['foreign_fields'],
'foreign_xrefs': row_class.__dict__['foreign_xrefs'],
# an attempt to provide genericity in the caching algorithm
- # the Peer object we are syncing with
def __init__ (self, api, peer_id, peer_server, auth):
    """
    Cache worker bound to one peer.

    api         -- the local PLCAPI instance
    peer_id     -- primary key of the peer we are syncing with
                   (NOTE(review): the pre-patch code took a PLC.Peers.Peer
                   object and asserted its type; the patched version stores
                   the scalar id only)
    peer_server -- server proxy used to reach the remote peer's API
    auth        -- auth struct passed on every call through peer_server
    """
    self.api = api
    self.peer_id = peer_id
    self.peer_server = peer_server
    self.auth = auth
def __init__ (self, api, classname, alien_objects):
    """
    Transcoder for one object class.

    Builds two indexes used by transcode():
      - alien_objects_byid : alien primary_key -> alien object
      - local_objects_byname : class_key (name) -> local object

    classname     -- e.g. 'Node'; looked up through class_attributes()
    alien_objects -- list of dicts fetched from the peer
    """
    self.api = api
    attrs = class_attributes (classname)
    self.primary_key = attrs['primary_key']
    self.class_key = attrs['class_key']
    # cannot use dict, it's acquired by xmlrpc and is untyped
    self.alien_objects_byid = dict( [ (x[self.primary_key],x) for x in alien_objects ] )
    # retrieve local objects
    local_objects = attrs['table_class'] (api)
    self.local_objects_byname = local_objects.dict(self.class_key)

    verbose ('Transcoder init :',classname,
             self.alien_objects_byid.keys(),
             self.local_objects_byname.keys())
def transcode (self, alien_id):
    """
    Transform an alien (peer-side) id into a local one.

    Looks the alien object up by its id, extracts its class_key name,
    finds the local object with that same name, and returns the local
    primary key.  Raises KeyError when either lookup fails (callers
    such as update_slice_attributes catch this and skip the object).
    """
    # locate alien obj from alien_id
    verbose ('.entering transcode with alien_id',alien_id,)
    alien_object=self.alien_objects_byid[alien_id]
    verbose ('..located alien_obj',)
    # the human-readable name is the join key between the two PLCs
    name = alien_object [self.class_key]
    verbose ('...got name',name,)
    local_object=self.local_objects_byname[name]
    verbose ('....found local obj')
    local_id=local_object[self.primary_key]
    verbose ('.....and local_id',local_id)
    return local_id
# classname: the type of objects we are talking about; e.g. 'Slice'
# peer_object_list list of objects at a given peer - e.g. peer.GetSlices()
# alien_xref_objs_dict : a dict {'classname':alien_obj_list} e.g. {'Node':peer.GetNodes()}
+ # this must match the keys in xref_specs
# lambda_ignore : the alien objects are ignored if this returns true
# NOTE(review): this whole method is unified-diff residue ('-'/'+' markers,
# several hunks' context lines missing); kept byte-for-byte, comments only.
# It cannot be reconstructed safely from this chunk alone.
def update_table (self,
classname,
alien_object_list,
alien_xref_objs_dict = {},
- lambda_ignore=lambda x:False):
+ lambda_ignore=lambda x:False,
+ report_name_conflicts = True):
# NOTE(review): patched version keys off the scalar peer_id stored by __init__
- peer = self.peer
- peer_id = peer['peer_id']
+ peer_id=self.peer_id
attrs = class_attributes (classname)
row_class = attrs['row_class']
table_class = attrs['table_class']
- class_id = attrs['class_id']
+ primary_key = attrs['primary_key']
class_key = attrs['class_key']
foreign_fields = attrs['foreign_fields']
foreign_xrefs = attrs['foreign_xrefs']
## allocate transcoders and xreftables once, for each item in foreign_xrefs
- accessories={}
- for xref_classname,xref_spec in foreign_xrefs.iteritems():
- d={}
- d['transcoder']=Cache.Transcoder (self.api,xref_classname,alien_xref_objs_dict[xref_classname])
- d['xref_table'] =Cache.XrefTable (self.api,xref_spec['table'],classname,xref_classname)
- accessories[xref_classname]=d
+ # create a dict 'classname' -> {'transcoder' : ..., 'xref_table' : ...}
+ accessories = dict(
+ [ (xref_classname,
+ {'transcoder':Cache.Transcoder (self.api,xref_classname,alien_xref_objs_dict[xref_classname]),
+ 'xref_table':Cache.XrefTable (self.api,xref_spec['table'],classname,xref_classname)})
+ for xref_classname,xref_spec in foreign_xrefs.iteritems()])
### get current local table
# get ALL local objects so as to cope with
local_objects = table_class (self.api)
### index upon class_key for future searches
local_objects_index = local_objects.dict(class_key)
+
verbose ('update_table',classname,local_objects_index.keys())
### mark entries for this peer outofdate
+ new_count=0
old_count=0;
for local_object in local_objects:
if local_object['peer_id'] == peer_id:
# NOTE(review): missing hunk -- the body of this branch (presumably marking
# the entry out-of-date and bumping old_count) is not visible in this chunk
else:
local_object.uptodate=True
- new_count=0
# scan the peer's local objects
for alien_object in alien_object_list:
+ object_name = alien_object[class_key]
+
### ignore, e.g. system-wide slices
if lambda_ignore(alien_object):
+ verbose('Ignoring',object_name)
continue
- object_name = alien_object[class_key]
- verbose ('update_table - Considering',object_name)
+ verbose ('update_table (%s) - Considering'%classname,object_name)
# create or update
try:
### We know about this object already
local_object = local_objects_index[object_name]
if local_object ['peer_id'] is None:
- print 'We are in trouble here'
- print 'The %s object named %s is natively defined twice'%(classname,object_name)
- print 'Once on this PLC and once on peer %d'%peer_id
- print 'We dont raise an exception so that the remaining updates can still take place'
+ if report_name_conflicts:
+ ### xxx send e-mail
+ print '==================== We are in trouble here'
+ print 'The %s object named %s is natively defined twice'%(classname,object_name)
+ print 'Once on this PLC and once on peer %d'%peer_id
+ print 'We dont raise an exception so that the remaining updates can still take place'
continue
if local_object['peer_id'] != peer_id:
### the object has changed its plc,
# NOTE(review): missing hunk -- the create-on-miss path and the loop header
# over foreign_xrefs (which binds xref_classname/xref_spec/xref_table) are
# not visible; the lines below reference those names
field=xref_spec['field']
alien_xref_obj_list = alien_xref_objs_dict[xref_classname]
alien_value = alien_object[field]
+ transcoder = accessories[xref_classname]['transcoder']
if isinstance (alien_value,list):
verbose ('update_table list-transcoding ',xref_classname,' aliens=',alien_value,)
- transcoder = accessories[xref_classname]['transcoder']
local_values=[]
for a in alien_value:
# NOTE(review): missing hunk -- per-item transcode/append body not visible
try:
former_xrefs=local_object[xref_spec['field']]
except:
former_xrefs=[]
- xref_table.update_item (local_object[class_id],
+ xref_table.update_item (local_object[primary_key],
former_xrefs,
local_values)
elif isinstance (alien_value,int):
+ verbose ('update_table atom-transcoding ',xref_classname,' aliens=',alien_value,)
new_value = transcoder.transcode(alien_value)
local_object[field] = new_value
local_object.sync()
### return delta in number of objects
return new_count-old_count
# slice attributes exhibit a special behaviour
# because there is no name we can use to retrieve/check for equality
# this object is like a 3-part xref, linking slice_attribute_type, slice,
# and potentially node, together with a value that can change over time.
# extending the generic model to support a lambda rather than class_key
# would clearly become overkill
def update_slice_attributes (self,
                             alien_slice_attributes,
                             alien_nodes,
                             alien_slices):
    """
    Refresh cached slice attributes from the peer.

    alien_slice_attributes -- the peer's native slice attributes
    alien_nodes, alien_slices -- the peer's node/slice lists, used to
        build Transcoders so node_id/slice_id can be mapped locally

    Returns the delta (new_count - old_count) in cached objects;
    entries not seen in this pass are deleted.
    """
    from PLC.SliceAttributeTypes import SliceAttributeTypes
    from PLC.SliceAttributes import SliceAttribute, SliceAttributes

    # init
    peer_id = self.peer_id

    # create transcoders
    node_xcoder = Cache.Transcoder (self.api, 'Node', alien_nodes)
    slice_xcoder= Cache.Transcoder (self.api, 'Slice', alien_slices)
    # no need to transcode SliceAttributeTypes, we have a name in the result
    local_sat_dict = SliceAttributeTypes(self.api).dict('name')

    # load local objects cached from this peer
    local_objects = SliceAttributes (self.api,{'peer_id':peer_id})

    ### mark entries for this peer outofdate
    new_count = 0
    old_count=len(local_objects)
    for local_object in local_objects:
        local_object.uptodate=False

    for alien_object in alien_slice_attributes:

        verbose('----- update_slice_attributes: considering ...')
        verbose(' ',alien_object)

        # locate local slice -- skip attributes whose slice is unknown here
        try:
            slice_id = slice_xcoder.transcode(alien_object['slice_id'])
        except:
            verbose('update_slice_attributes: unable to locate slice',
                    alien_object['slice_id'])
            continue
        # locate slice_attribute_type by name
        try:
            sat_id = local_sat_dict[alien_object['name']]['attribute_type_id']
        except:
            verbose('update_slice_attributes: unable to locate slice attribute type',
                    alien_object['name'])
            continue
        # locate local node if specified (node_id may legitimately be None)
        try:
            alien_node_id = alien_object['node_id']
            if alien_node_id is not None:
                node_id = node_xcoder.transcode(alien_node_id)
            else:
                node_id=None
        except:
            verbose('update_slice_attributes: unable to locate node',
                    alien_object['node_id'])
            continue

        # locate the local SliceAttribute if any
        try:
            verbose ('searching name=', alien_object['name'],
                     'slice_id',slice_id, 'node_id',node_id)
            local_object = SliceAttributes (self.api,
                                            {'name':alien_object['name'],
                                             'slice_id':slice_id,
                                             'node_id':node_id})[0]

            # a natively-defined attribute with the same coordinates: hands off
            if local_object['peer_id'] != peer_id:
                verbose ('FOUND local sa - skipped')
                continue
            verbose('FOUND already cached sa')
            local_object['value'] = alien_object['value']
        # create it if missing
        except:
            local_object = SliceAttribute(self.api,
                                          {'peer_id':peer_id,
                                           'slice_id':slice_id,
                                           'node_id':node_id,
                                           'attribute_type_id':sat_id,
                                           'value':alien_object['value']})
            verbose('CREATED new sa')
        local_object.uptodate=True
        new_count += 1
        local_object.sync()

    # garbage-collect entries the peer no longer reports
    for local_object in local_objects:
        if not local_object.uptodate:
            local_object.delete()

    self.api.db.commit()
    ### return delta in number of objects
    return new_count-old_count
def get_locals (self, list):
    """
    Return only the objects native to the remote peer, i.e. those
    whose 'peer_id' is None in the peer's own database.

    NOTE: the parameter name 'list' shadows the builtin; kept as-is
    for signature compatibility with existing callers.
    """
    return [x for x in list if x['peer_id'] is None]
def refresh_peer (self):
    """
    Pull everything from the peer in one GetPeerData call and refresh
    the local cache tables in dependency order: sites, keys, nodes,
    persons, slice attribute types, slices, slice attributes.

    Returns a dict -- passed back as-is by RefreshPeer -- mapping
    'new_<class>' to the delta in cached object count, plus 'plcname'.
    """
    # so as to minimize the number of requests
    # we get all objects in a single call and sort afterwards
    # xxx ideally get objects either local or the ones attached here
    # requires to know remote peer's peer_id for ourselves, mmhh..
    # does not make any difference in a 2-peer deployment though

    ### uses GetPeerData to gather all info in a single xmlrpc request

    # xxx see also GetPeerData - peer_id arg unused yet
    all_data = self.peer_server.GetPeerData (self.auth,0)

    # refresh sites
    all_sites = all_data['Sites']
    plocal_sites = self.get_locals (all_sites)
    nb_new_sites = self.update_table('Site', plocal_sites)

    # refresh keys
    all_keys = all_data['Keys']
    plocal_keys = self.get_locals (all_keys)
    nb_new_keys = self.update_table('Key', plocal_keys)

    # refresh nodes -- sites needed to resolve the site xref
    all_nodes = all_data['Nodes']
    plocal_nodes = self.get_locals(all_nodes)
    nb_new_nodes = self.update_table('Node', plocal_nodes,
                                     { 'Site' : all_sites } )

    # refresh persons -- xref'ed to both keys and sites
    all_persons = all_data['Persons']
    plocal_persons = self.get_locals(all_persons)
    nb_new_persons = self.update_table ('Person', plocal_persons,
                                        { 'Key': all_keys, 'Site' : all_sites } )

    # refresh slice attribute types
    # NOTE(review): 'SliceAttibuteTypes' (missing 'r') presumably matches a
    # typo in GetPeerData's result keys -- confirm against the peer API
    # before "fixing" the spelling here
    all_slice_attribute_types = all_data ['SliceAttibuteTypes']
    plocal_slice_attribute_types = self.get_locals(all_slice_attribute_types)
    # attribute types are commonly defined on both sides, so name
    # conflicts are expected and not worth reporting
    nb_new_slice_attribute_types = self.update_table ('SliceAttributeType',
                                                      plocal_slice_attribute_types,
                                                      report_name_conflicts = False)

    # refresh slices
    all_slices = all_data['Slices']
    plocal_slices = self.get_locals(all_slices)

    # xxx use 'system' flag for finding system slices
    def is_system_slice (slice):
        return slice['creator_person_id'] == 1

    nb_new_slices = self.update_table ('Slice', plocal_slices,
                                       {'Node': all_nodes, 'Person': all_persons},
                                       is_system_slice)

    # refresh slice attributes
    all_slice_attributes = all_data ['SliceAttributes']
    plocal_slice_attributes = self.get_locals(all_slice_attributes)
    nb_new_slice_attributes = self.update_slice_attributes (plocal_slice_attributes,
                                                            all_nodes,
                                                            all_slices)

    ### returned as-is by RefreshPeer
    return {'plcname':self.api.config.PLC_NAME,
            'new_sites':nb_new_sites,
            'new_keys':nb_new_keys,
            'new_nodes':nb_new_nodes,
            'new_persons':nb_new_persons,
            'new_slice_attribute_types':nb_new_slice_attribute_types,
            'new_slices':nb_new_slices,
            'new_slice_attributes':nb_new_slice_attributes,
            }