1 from PLC.Faults import *
2 from PLC.Parameter import Parameter
3 from PLC.Filter import Filter
4 from PLC.Table import Row, Table
12 def class_attributes (classname):
13 """ locates various attributes defined in the row class """
# Dynamically imports PLC.<classname>s (e.g. PLC.Nodes) and bundles the
# row class (Node), the table class (Nodes) and the row class's declared
# key metadata into a single dict.
# __import__ on a dotted name returns the TOP-LEVEL package ('PLC'), so
# the submodule has to be fetched from its __dict__ below.
14 topmodule = __import__ ('PLC.%ss'%classname)
15 module = topmodule.__dict__['%ss'%classname]
16 # local row-like class, e.g. Node
17 row_class = module.__dict__['%s'%classname]
18 # local tab-like class, e.g. Nodes
19 table_class = module.__dict__['%ss'%classname]
# primary_key / class_key / foreign_fields / foreign_xrefs are class-level
# attributes that each row class is expected to declare.
21 return {'row_class':row_class,
22 'table_class':table_class,
23 'primary_key': row_class.__dict__['primary_key'],
24 'class_key': row_class.__dict__['class_key'],
25 'foreign_fields': row_class.__dict__['foreign_fields'],
26 'foreign_xrefs': row_class.__dict__['foreign_xrefs'],
# NOTE(review): the closing of this dict literal (original lines 27+) is
# elided from this listing.
31 # an attempt to provide genericity in the caching algorithm
# Constructor for the cache of one remote peer.
# NOTE(review): the enclosing 'class Cache:' header (around original line
# 32) is elided from this listing, as is the storage of 'api' and 'auth'
# — both are used later as self.api / self.auth by refresh_peer.
33 def __init__ (self, api, peer_id, peer_server, auth):
# remember which peer we cache for, and the xmlrpc proxy to reach it
36 self.peer_id = peer_id
37 self.peer_server = peer_server
# NOTE(review): the 'class Transcoder:' header (around original line 41)
# is elided from this listing.
# Builds the two indexes that transcode() needs: the peer's objects by
# their (alien) primary key, and the local objects by their class_key
# (human-readable name), so alien ids can be mapped to local ids by name.
42 def __init__ (self, api, classname, alien_objects):
44 attrs = class_attributes (classname)
45 self.primary_key = attrs['primary_key']
46 self.class_key = attrs['class_key']
48 # cannot use dict, it's acquired by xmlrpc and is untyped
49 self.alien_objects_byid = dict( [ (x[self.primary_key],x) for x in alien_objects ] )
51 # retrieve local objects
52 local_objects = attrs['table_class'] (api)
53 self.local_objects_byname = local_objects.dict(self.class_key)
55 verbose ('Transcoder init :',classname,
56 self.alien_objects_byid.keys(),
57 self.local_objects_byname.keys())
59 def transcode (self, alien_id):
60 """ transforms an alien id into a local one """
# Chain: alien id -> alien object -> its class_key name -> local object
# -> local primary key. Raises KeyError at either dict lookup when the
# object cannot be resolved on one side.
61 # locate alien obj from alien_id
62 verbose ('.entering transcode with alien_id',alien_id,)
63 alien_object=self.alien_objects_byid[alien_id]
64 verbose ('..located alien_obj',)
65 name = alien_object [self.class_key]
66 verbose ('...got name',name,)
67 local_object=self.local_objects_byname[name]
68 verbose ('....found local obj')
69 local_id=local_object[self.primary_key]
70 verbose ('.....and local_id',local_id)
# NOTE(review): the trailing 'return local_id' (original line ~71) is
# elided from this listing.
74 # for handling simple n-to-n relation tables, like e.g. slice_node
# NOTE(review): the 'class XrefTable:' header (around original line 76)
# is elided from this listing.
# Keeps the join-table name plus the two lowercased class names, which
# the SQL-building methods below use to derive the <class>_id columns.
77 def __init__ (self, api, tablename, class1, class2):
79 self.tablename = tablename
80 self.lowerclass1 = class1.lower()
81 self.lowerclass2 = class2.lower()
# NOTE(review): 'api' is presumably stored as well (needed to execute the
# SQL built by the other methods) — that line is elided from this listing.
83 def delete_old_items (self, id1, id2_set):
# Delete the join-table rows that link id1 (a <lowerclass1>_id) to every
# id in id2_set (<lowerclass2>_id values).
# NOTE(review): the initialization of 'sql' (and presumably a guard for
# an empty id2_set, original lines 84-85) plus the closing paren and the
# statement execution (original lines 89+) are elided from this listing.
86 sql += "DELETE FROM %s WHERE %s_id=%d"%(self.tablename,self.lowerclass1,id1)
87 sql += " AND %s_id IN ("%self.lowerclass2
88 sql += ",".join([str(i) for i in id2_set])
92 def insert_new_items (self, id1, id2_set):
# Insert one join-table row per id in id2_set, linking it to id1.
93 ### xxx needs to be optimized
94 ### tried to figure a way to use a single sql statement
95 ### like: insert into table (x,y) values (1,2),(3,4);
96 ### but apparently this is not supported under postgresql
# NOTE(review): multi-row VALUES lists ARE supported since PostgreSQL 8.2;
# the comment above probably predates that release.
# NOTE(review): the loop header binding 'id2' over id2_set (original line
# ~97) and the statement execution (original lines 99+) are elided from
# this listing — as shown, 'id2' would be unbound.
98 sql = "INSERT INTO %s VALUES (%d,%d)"%(self.tablename,id1,id2)
def update_item (self, id1, old_id2s, new_id2s):
    """Reconcile the xref rows attached to id1.

    Given the ids currently linked (old_id2s) and the ids that should be
    linked (new_id2s), drop the stale links first and then create the
    missing ones, via delete_old_items / insert_new_items.
    """
    wanted = set (new_id2s)
    current = set (old_id2s)
    # purge links no longer wanted, then add the newly required ones
    self.delete_old_items (id1, current - wanted)
    self.insert_new_items (id1, wanted - current)
110 # classname: the type of objects we are talking about; e.g. 'Slice'
111 # peer_object_list list of objects at a given peer - e.g. peer.GetSlices()
112 # alien_xref_objs_dict : a dict {'classname':alien_obj_list} e.g. {'Node':peer.GetNodes()}
113 # we need an entry for each class mentioned in the class's foreign_xrefs
114 # lambda_ignore : the alien objects are ignored if this returns true
# Generic one-class synchronization: make the local table for a class
# mirror the peer's object list, transcoding cross-references through
# alien_xref_objs_dict, and return the delta in number of objects.
# NOTE(review): this listing elides several original lines (the embedded
# numbering skips); in particular the leading parameters (presumably
# 'classname' and the peer's object list, original lines 116-117) and the
# binding of 'peer_id' are not shown, although those names are used below.
115 def update_table (self,
118 alien_xref_objs_dict = {},
119 lambda_ignore=lambda x:False,
120 report_name_conflicts = True):
122 verbose ("============================== entering update_table on",classname)
# fetch row/table classes and key metadata for this class
125 attrs = class_attributes (classname)
126 row_class = attrs['row_class']
127 table_class = attrs['table_class']
128 primary_key = attrs['primary_key']
129 class_key = attrs['class_key']
130 foreign_fields = attrs['foreign_fields']
131 foreign_xrefs = attrs['foreign_xrefs']
133 ## allocate transcoders and xreftables once, for each item in foreign_xrefs
134 # create a dict 'classname' -> {'transcoder' : ..., 'xref_table' : ...}
# NOTE(review): the key part of each (key, value) pair (original line
# 136, presumably xref['field']) is elided from this listing.
135 xref_accessories = dict(
137 {'transcoder' : Cache.Transcoder (self.api,xref['class'],alien_xref_objs_dict[xref['class']]),
138 'xref_table' : Cache.XrefTable (self.api,xref['table'],classname,xref['class'])})
139 for xref in foreign_xrefs ])
141 # the fields that are direct references, like e.g. site_id in Node
142 # determined lazily, we need an alien_object to do that, and we may have none here
143 direct_ref_fields = None
145 ### get current local table
146 # get ALL local objects so as to cope with
147 # (*) potential moves between plcs
148 # (*) or naming conflicts
149 local_objects = table_class (self.api)
150 ### index upon class_key for future searches
151 local_objects_index = local_objects.dict(class_key)
153 verbose ('update_table',classname,local_objects_index.keys())
155 ### mark entries for this peer outofdate
# NOTE(review): the old_count/new_count initializations (consumed by the
# final return statement) are elided from this listing.
158 for local_object in local_objects:
159 if local_object['peer_id'] == peer_id:
160 local_object.uptodate=False
# NOTE(review): the 'else:' guarding the next line (original lines
# 161-162) is elided — rows owned by other peers (or local ones) stay
# uptodate so the garbage-collection pass below never deletes them.
163 local_object.uptodate=True
165 # scan the peer's local objects
166 for alien_object in alien_object_list:
168 object_name = alien_object[class_key]
170 ### ignore, e.g. system-wide slices
171 if lambda_ignore(alien_object):
172 verbose('Ignoring',object_name)
# NOTE(review): the 'continue' ending this ignore branch is elided.
175 verbose ('update_table (%s) - Considering'%classname,object_name)
# NOTE(review): the try/except around this index lookup — which decides
# between the FOUND path here and the CREATED path further down — is
# elided from this listing.
179 ### We know about this object already
180 local_object = local_objects_index[object_name]
# peer_id None means the row is natively local: same name defined both
# locally and on the peer is a genuine conflict
181 if local_object ['peer_id'] is None:
182 if report_name_conflicts:
184 print '!!!!!!!!!! We are in trouble here'
185 print 'The %s object named %s is natively defined twice, '%(classname,object_name),
186 print 'once on this PLC and once on peer %d'%peer_id
187 print 'We dont raise an exception so that the remaining updates can still take place'
190 if local_object['peer_id'] != peer_id:
191 ### the object has changed its plc,
192 ### Note, this is not problematic here because both definitions are remote
193 ### we can assume the object just moved
194 ### needs to update peer_id though
195 local_object['peer_id'] = peer_id
196 # update all fields as per foreign_fields
197 for field in foreign_fields:
198 local_object[field]=alien_object[field]
199 verbose ('update_table FOUND',object_name)
# NOTE(review): the except-branch header is elided — the path below
# creates a brand-new local row for a previously unknown alien object.
201 ### create a new entry
202 local_object = row_class(self.api,
203 {class_key :object_name,'peer_id':peer_id})
# NOTE(review): indexing by 'class_key' (the literal field name) rather
# than by 'object_name' looks suspicious — confirm against upstream.
205 local_objects_index[class_key]=local_object
206 verbose ('update_table CREATED',object_name)
207 # update all fields as per foreign_fields
208 for field in foreign_fields:
209 local_object[field]=alien_object[field]
210 # this is tricky; at this point we may have primary_key unspecified,
211 # but we need it for handling xrefs below, so we'd like to sync to get one
212 # on the other hand some required fields may be still missing so
213 # the DB would refuse to sync in this case (e.g. site_id in Node)
214 # so let's fill them with 1 so we can sync, this will be overridden below
215 # lazily determine this set of fields now
216 if direct_ref_fields is None:
# NOTE(review): the 'direct_ref_fields=[]' initialization and the binding
# of 'field' (presumably from xref) are elided from this listing.
218 for xref in foreign_xrefs:
220 verbose('checking field %s for direct_ref'%field)
# an int-valued xref field is a direct reference (e.g. site_id),
# as opposed to a list-valued one (e.g. node_ids)
221 if isinstance(alien_object[field],int):
222 direct_ref_fields.append(field)
223 verbose("FOUND DIRECT REFS",direct_ref_fields)
# placeholder value so the early sync can satisfy NOT NULL constraints;
# overridden by the real transcoded values below
224 for field in direct_ref_fields:
225 local_object[field]=1
226 verbose('Early sync on',local_object)
# NOTE(review): the early local_object.sync() call is elided here.
228 verbose('Early syncing of %s, reloading'%object_name)
229 # sigh: now we have to reload it because of side-effects, like e.g. on Slice.expires
230 local_object=table_class(self.api, {class_key:object_name})[0]
231 verbose('After reload',local_object)
233 # this row is now valid
234 local_object.uptodate=True
# NOTE(review): a new_count increment is presumably elided around here.
238 for xref in foreign_xrefs:
# NOTE(review): the binding of 'field' from xref (original line ~239) is
# elided from this listing.
240 alien_xref_obj_list = alien_xref_objs_dict[xref['class']]
241 alien_value = alien_object[field]
242 transcoder = xref_accessories[xref['field']]['transcoder']
# list-valued xref (e.g. node_ids on a Slice): transcode each alien id
# then rewrite the join table
243 if isinstance (alien_value,list):
244 verbose ('update_table list-transcoding ',xref['class'],' aliens=',alien_value,)
# NOTE(review): the 'local_values=[]' initialization and the try/except
# around transcode (skipping ids from unknown peers) are elided.
246 for a in alien_value:
248 local_values.append(transcoder.transcode(a))
250 # could not transcode - might be from another peer that we dont know about..
252 verbose (" transcoded as ",local_values)
253 xref_table = xref_accessories[xref['field']]['xref_table']
254 # newly created objects dont have xref fields set yet
# NOTE(review): the guard providing a default former_xrefs for
# newly-created rows is elided from this listing.
256 former_xrefs=local_object[xref['field']]
259 xref_table.update_item (local_object[primary_key],
# NOTE(review): the remaining update_item arguments (former_xrefs and
# local_values, original lines 260-261) are elided.
# scalar xref (e.g. site_id on a Node): transcode and assign in place
262 elif isinstance (alien_value,int):
263 verbose ('update_table atom-transcoding ',xref['class'],' aliens=',alien_value,)
264 new_value = transcoder.transcode(alien_value)
265 local_object[field] = new_value
267 ### this object is completely updated, let's save it
268 verbose('FINAL sync on %s:'%object_name,local_object)
# NOTE(review): the final local_object.sync() call is elided here.
272 ### delete entries that are not uptodate
273 for local_object in local_objects:
274 if not local_object.uptodate:
275 local_object.delete()
279 ### return delta in number of objects
280 return new_count-old_count
282 # slice attributes exhibit a special behaviour
283 # because there is no name we can use to retrieve/check for equality
284 # this object is like a 3-part xref, linking slice_attribute_type, slice,
285 # and potentially node, together with a value that can change over time.
286 # extending the generic model to support a lambda rather than class_key
287 # would clearly become overkill
# Specialized sync for slice attributes, which have no usable name key
# (see the comment block above): match on (name, slice, node) instead.
# NOTE(review): the remaining parameters (presumably alien_nodes and
# alien_slices, original lines 290-292) are elided from this listing,
# although both names are used for the transcoders below.
288 def update_slice_attributes (self,
289 alien_slice_attributes,
# deferred imports — presumably to avoid an import cycle at module load;
# TODO confirm
293 from PLC.SliceAttributeTypes import SliceAttributeTypes
294 from PLC.SliceAttributes import SliceAttribute, SliceAttributes
297 peer_id = self.peer_id
# transcoders for the two id spaces a slice attribute can point at
300 node_xcoder = Cache.Transcoder (self.api, 'Node', alien_nodes)
301 slice_xcoder= Cache.Transcoder (self.api, 'Slice', alien_slices)
302 # no need to transcode SliceAttributeTypes, we have a name in the result
303 local_sat_dict = SliceAttributeTypes(self.api).dict('name')
# only this peer's cached attributes are candidates for update/removal
306 local_objects = SliceAttributes (self.api,{'peer_id':peer_id})
308 ### mark entries for this peer outofdate
310 old_count=len(local_objects)
311 for local_object in local_objects:
312 local_object.uptodate=False
314 for alien_object in alien_slice_attributes:
316 verbose('----- update_slice_attributes: considering ...')
317 verbose(' ',alien_object)
# NOTE(review): the try/except blocks around the three lookups below
# (each presumably skipping the attribute via 'continue' on failure) are
# elided from this listing; only the failure verbose() calls remain.
# locate the local slice this attribute belongs to
321 slice_id = slice_xcoder.transcode(alien_object['slice_id'])
323 verbose('update_slice_attributes: unable to locate slice',
324 alien_object['slice_id'])
326 # locate slice_attribute_type
328 sat_id = local_sat_dict[alien_object['name']]['attribute_type_id']
330 verbose('update_slice_attributes: unable to locate slice attribute type',
331 alien_object['name'])
333 # locate local node if specified
335 alien_node_id = alien_object['node_id']
336 if alien_node_id is not None:
337 node_id = node_xcoder.transcode(alien_node_id)
# NOTE(review): the 'node_id = None' default branch is elided.
341 verbose('update_slice_attributes: unable to locate node',
342 alien_object['node_id'])
345 # locate the local SliceAttribute if any
347 verbose ('searching name=', alien_object['name'],
348 'slice_id',slice_id, 'node_id',node_id)
# NOTE(review): the try/except around this lookup (an IndexError meaning
# the attribute does not exist locally yet) and the 'slice_id' filter key
# (original line 351) are elided from this listing.
349 local_object = SliceAttributes (self.api,
350 {'name':alien_object['name'],
352 'node_id':node_id})[0]
# a natively local attribute with the same coordinates must not be
# clobbered by the peer's copy
354 if local_object['peer_id'] != peer_id:
355 verbose ('FOUND local sa - skipped')
# NOTE(review): the 'continue' and the 'else:' introducing the cached
# branch are elided here.
357 verbose('FOUND already cached sa')
358 local_object['value'] = alien_object['value']
359 # create it if missing
# NOTE(review): the opening of the constructor dict (peer_id, slice_id,
# node_id keys, original lines 362-364) is elided from this listing.
361 local_object = SliceAttribute(self.api,
365 'attribute_type_id':sat_id,
366 'value':alien_object['value']})
367 verbose('CREATED new sa')
368 local_object.uptodate=True
# garbage-collect this peer's attributes not seen in this pass
372 for local_object in local_objects:
373 if not local_object.uptodate:
374 local_object.delete()
377 ### return delta in number of objects
# NOTE(review): the initialization of new_count is elided from this
# listing.
378 return new_count-old_count
def get_locals (self, objs):
    """Return the subset of objs that are native to the remote peer.

    objs is a list of dict-like rows as returned by GetPeerData; a row
    whose 'peer_id' is None is natively defined on that peer (not itself
    cached from somewhere else), and only those are worth caching here.
    """
    # the parameter used to be named 'list', shadowing the builtin;
    # renamed — all callers in this file pass it positionally
    return [x for x in objs if x['peer_id'] is None]
# Pull all of the remote peer's data in one GetPeerData call and cache
# each class in dependency order (sites, keys, nodes, persons, slice
# attribute types, slices, slice attributes); returns per-class deltas
# keyed for RefreshPeer's result.
383 def refresh_peer (self):
385 # so as to minimize the number of requests
386 # we get all objects in a single call and sort afterwards
387 # xxx ideally get objects either local or the ones attached here
388 # requires to know remote peer's peer_id for ourselves, mmhh..
389 # does not make any difference in a 2-peer deployment though
391 ### uses GetPeerData to gather all info in a single xmlrpc request
393 # xxx see also GetPeerData - peer_id arg unused yet
394 all_data = self.peer_server.GetPeerData (self.auth,0)
# refresh sites first — nodes and persons reference them
397 all_sites = all_data['Sites']
398 plocal_sites = self.get_locals (all_sites)
399 nb_new_sites = self.update_table('Site', plocal_sites)
# refresh keys
402 all_keys = all_data['Keys']
403 plocal_keys = self.get_locals (all_keys)
404 nb_new_keys = self.update_table('Key', plocal_keys)
# refresh nodes; sites are passed along so node.site_id can be transcoded
407 all_nodes = all_data['Nodes']
408 plocal_nodes = self.get_locals(all_nodes)
409 nb_new_nodes = self.update_table('Node', plocal_nodes,
410 { 'Site' : all_sites } )
# refresh persons, which xref both keys and sites
413 all_persons = all_data['Persons']
414 plocal_persons = self.get_locals(all_persons)
415 nb_new_persons = self.update_table ('Person', plocal_persons,
416 { 'Key': all_keys, 'Site' : all_sites } )
418 # refresh slice attribute types
# NOTE(review): 'SliceAttibuteTypes' looks misspelled — confirm whether
# the server-side GetPeerData really exports this exact key before
# changing it; both ends must agree.
419 all_slice_attribute_types = all_data ['SliceAttibuteTypes']
420 plocal_slice_attribute_types = self.get_locals(all_slice_attribute_types)
421 nb_new_slice_attribute_types = self.update_table ('SliceAttributeType',
422 plocal_slice_attribute_types,
423 report_name_conflicts = False)
# refresh slices
426 all_slices = all_data['Slices']
427 plocal_slices = self.get_locals(all_slices)
# system slices (created by person 1) are not cached
429 def is_system_slice (slice):
430 return slice['creator_person_id'] == 1
432 nb_new_slices = self.update_table ('Slice', plocal_slices,
433 {'Node': all_nodes, 'Person': all_persons, 'Site': all_sites},
# NOTE(review): the lambda_ignore=is_system_slice argument (original
# lines 434-435) is elided from this listing.
436 # refresh slice attributes
437 all_slice_attributes = all_data ['SliceAttributes']
438 plocal_slice_attributes = self.get_locals(all_slice_attributes)
439 nb_new_slice_attributes = self.update_slice_attributes (plocal_slice_attributes,
# NOTE(review): the remaining arguments (presumably all_nodes and
# all_slices, original lines 440-441) are elided from this listing.
443 ### returned as-is by RefreshPeer
444 return {'plcname':self.api.config.PLC_NAME,
445 'new_sites':nb_new_sites,
446 'new_keys':nb_new_keys,
447 'new_nodes':nb_new_nodes,
448 'new_persons':nb_new_persons,
449 'new_slice_attribute_types':nb_new_slice_attribute_types,
450 'new_slices':nb_new_slices,
451 'new_slice_attributes':nb_new_slice_attributes,
# NOTE(review): the closing of this return dict (original line 452+) is
# elided from this listing.