local_object = local_objects_index[object_name]
if local_object ['peer_id'] is None:
### xxx send e-mail
- print 'We are in trouble here'
+ print '==================== We are in trouble here'
print 'The %s object named %s is natively defined twice'%(classname,object_name)
print 'Once on this PLC and once on peer %d'%peer_id
print 'We dont raise an exception so that the remaining updates can still take place'
### return delta in number of objects
return new_count-old_count
- def refresh_nodes (self, peer_get_nodes):
- """
- refreshes the foreign_nodes and peer_node tables
- expected input is the current list of local nodes
- as returned from the peer by GetNodes {'peer_id':None}
+ def get_locals (self, list):
+ return [x for x in list if x['peer_id'] is None]
- returns the number of new nodes (can be negative)
- """
-
- return self.update_table ('Node', peer_get_nodes)
-
- def refresh_slices (self, peer_get_slices, peer_foreign_nodes):
- """
- refreshes the foreign_slices and peer_slice tables
- expected input is the current list of slices as returned by GetSlices
-
- returns the number of new slices on this peer (can be negative)
- """
-
- # xxx use 'system' flag for finding system slices
- return self.update_table ('Slice', peer_get_slices,
- {'Node':peer_foreign_nodes},
- lambda x: x['creator_person_id']==1)
-
def refresh_peer (self):
- peer_local_slices = self.peer_server.GetSlices(self.auth,{'peer_id':None})
+ # so as to minimize the number of requests
+ # we get all objects in a single call and sort afterwards
+ # xxx ideally get objects either local or the ones attached here
+ # requires to know remote peer's peer_id for ourselves, mmhh..
+ # does not make any difference in a 2-peer deployment though
# refresh keys
- peer_local_keys = self.peer_server.GetKeys(self.auth,{'peer_id':None})
- nb_new_keys = self.update_table('Key', peer_local_keys)
+ all_keys = self.peer_server.GetKeys(self.auth)
+ local_keys = self.get_locals (all_keys)
+ nb_new_keys = self.update_table('Key', local_keys)
# refresh nodes
- peer_local_nodes = self.peer_server.GetNodes(self.auth,{'peer_id':None})
- nb_new_nodes = self.update_table('Node', peer_local_nodes)
+ all_nodes = self.peer_server.GetNodes(self.auth)
+ local_nodes = self.get_locals(all_nodes)
+ nb_new_nodes = self.update_table('Node', local_nodes)
# refresh persons
- peer_local_persons = self.peer_server.GetPersons(self.auth,{'peer_id':None})
- # xxx ideally get our own persons only
- # requires to know remote peer's peer_id for ourselves, mmhh
- peer_all_keys = peer_local_keys + self.peer_server.GetKeys(self.auth,{'~peer_id':None})
- nb_new_persons = self.update_table ('Person', peer_local_persons,
- { 'Key': peer_all_keys} )
+ all_persons = self.peer_server.GetPersons(self.auth)
+ local_persons = self.get_locals(all_persons)
+ nb_new_persons = self.update_table ('Person', local_persons,
+ { 'Key': all_keys} )
# refresh slices
+ local_slices = self.peer_server.GetSlices(self.auth,{'peer_id':None})
+
def is_system_slice (slice):
return slice['creator_person_id'] == 1
- # xxx would ideally get our own nodes only,
- peer_all_nodes = peer_local_nodes+self.peer_server.GetNodes(self.auth,{'~peer_id':None})
- nb_new_slices = self.update_table ('Slice', peer_local_slices,
- {'Node':peer_all_nodes},
+ nb_new_slices = self.update_table ('Slice', local_slices,
+ {'Node': all_nodes,
+ 'Person': all_persons},
is_system_slice)
-
return {'plcname':self.api.config.PLC_NAME,
'new_keys':nb_new_keys,
'new_nodes':nb_new_nodes,
import getopt
import sys
+import time
## we use indexes 1 and 2
try:
####################
# set initial conditions
-def define_test (keys,persons,nodes,slices):
- global number_keys, number_persons, number_nodes, number_slices
+def define_test (keys,persons,nodes,slices,fast_mode):
+ global number_keys, number_persons, number_nodes, number_slices, fast_flag
number_keys=keys
number_persons=persons
number_nodes=nodes
number_slices=slices
+ fast_flag=fast_mode
def fast():
- define_test(1,1,1,1)
+ define_test(1,1,1,1,True)
-define_test (keys=4,persons=2,nodes=5,slices=3)
+define_test (keys=4,persons=2,nodes=5,slices=3,fast_mode=False)
# predefined stuff
# number of 'system' persons
print "====================",
print args
+##########
+def timer_start ():
+ global epoch
+ epoch = time.time()
+ print '+++ timer start'
+
+def timer_show ():
+ print '+++ %d seconds ellapsed'%(time.time()-epoch)
+
####################
def test00_init (args=[1,2]):
global plc,s,a,aa
def test01_refresh (message,args=[1,2]):
print '=== refresh',message
+ timer_show()
for i in args:
print '%02d:== Refreshing peer'%(i),
retcod=s[i].RefreshPeer(a[i],get_peer_id(i))
print 'got ',retcod
+ timer_show()
####################
# retrieves node_id from hostname - checks for local nodes only
'instanciation':'plc-instantiated',
})
print '%02d:== created slice %d - max nodes=%d'%(i,slice_id,max_nodes)
+ for np in myrange(number_persons):
+ email = person_name (i,np)
+ retcod = s[i].AddPersonToSlice (a[i], email, slicename)
+ print '%02d:== Attached person %s to slice %s'%(i,email,slicename)
def test04_node_slice (is_local, add_if_true, args=[1,2]):
check_nodes(number_nodes,0)
test01_refresh ('after node creation')
check_nodes(number_nodes,number_nodes)
- message ("2 extra del/add cycles on plc2 for different indexes")
- test02_delnode([2])
- test02_node ([2])
- test02_delnode([2])
- test02_node ([2])
test02_delnode([2])
+ if not fast_flag:
+ message ("2 extra del/add cycles on plc2 for different indexes")
+ test02_node ([2])
+ test02_delnode([2])
+ test02_node ([2])
+ test02_delnode([2])
check_nodes(0,number_nodes,[2])
test01_refresh('after deletion on plc2')
check_nodes(number_nodes,0,[1])
def test_all_persons ():
test05_del_person()
- check_keys(0,0)
- check_persons(system_persons,0)
test01_refresh ('before persons&keys creation')
check_keys(0,0)
check_persons(system_persons,system_persons_cross)
- message ("Creating persons&keys - 1 extra del/add cycle for unique indexes")
+ message ("Creating persons&keys")
test05_person ()
- test05_del_person([2])
- test05_person([2])
+ if not fast_flag:
+ message ("1 extra del/add cycle for unique indexes")
+ test05_del_person([2])
+ test05_person([2])
check_keys(number_persons*number_keys,0)
check_persons(system_persons+number_persons,system_persons_cross)
test01_refresh ('after persons&keys creation')
def test_all ():
test_all_init ()
+ timer_show()
test_all_persons ()
+ timer_show()
test_all_nodes ()
+ timer_show()
test_all_slices ()
+ timer_show()
### ad hoc test sequences
def populate ():
+ test05_person()
test02_node()
test03_slice([1])
test01_refresh ("populate: refreshing peer 1",[1])
print "Usage: %s [-n] [-f]"%sys.argv[0]
print " -f runs faster (1 node - 1 slice)"
print " -n runs test_now instead of test_all"
+ print " -p runs populate instead of test_all"
sys.exit(1)
def main ():
try:
- (o,a) = getopt.getopt(sys.argv[1:], "fn")
+ (o,a) = getopt.getopt(sys.argv[1:], "fnp")
except:
usage()
- now_opt = False;
+ func = test_all
for (opt,val) in o:
if opt=='-f':
fast()
elif opt=='-n':
- now_opt=True
+ print 'Running test_now'
+ func = test_now
+ elif opt=='-p':
+ print 'Running populate'
+ func = populate
else:
usage()
if a:
usage()
print '%d nodes & %d slices'%(number_nodes,number_slices)
- if now_opt:
- print 'Running test_now'
- test_now()
- else:
- test_all()
+ timer_start()
+ func()
if __name__ == '__main__':
main()
5 nodes & 3 slices
++++ timer start
==================== ('INIT',)
initializing s[1] https://lurch.cs.princeton.edu:443/PLCAPI/
initialized aa[1] {'Username': 'root@plc1.org', 'AuthMethod': 'password', 'Role': 'admin', 'AuthString': 'root'}
-> system returns 0
01:== Created site 2 with max_slices=3
02:== Created site 2 with max_slices=3
-01: Checking keys: got 0 local (e=0) & 0 foreign (e=0)
-02: Checking keys: got 0 local (e=0) & 0 foreign (e=0)
-01: Checking persons: got 4 local (e=4) & 0 foreign (e=0)
-02: Checking persons: got 4 local (e=4) & 0 foreign (e=0)
++++ 17 seconds ellapsed
=== refresh before persons&keys creation
++++ 19 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 1, 'new_nodes': 0}
++++ 23 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 1, 'new_nodes': 0}
++++ 27 seconds ellapsed
01: Checking keys: got 0 local (e=0) & 0 foreign (e=0)
02: Checking keys: got 0 local (e=0) & 0 foreign (e=0)
01: Checking persons: got 4 local (e=4) & 1 foreign (e=1)
02: Checking persons: got 4 local (e=4) & 1 foreign (e=1)
-==================== ('Creating persons&keys - 1 extra del/add cycle for unique indexes',)
+==================== ('Creating persons&keys',)
01:== created user account 6, user1-1@plc1.org - password1
01:== added key ssh-rsa 1111111111111111 user1-key1 to person user1-1@plc1.org
01:== added key ssh-rsa 1111111111111111 user1-key2 to person user1-1@plc1.org
02:== added key ssh-rsa 2222222222222222 user2-key2 to person user2-2@plc2.org
02:== added key ssh-rsa 2222222222222222 user2-key3 to person user2-2@plc2.org
02:== added key ssh-rsa 2222222222222222 user2-key4 to person user2-2@plc2.org
+==================== ('1 extra del/add cycle for unique indexes',)
02:== deleted person_id 6
02:== deleted person_id 7
02:== created user account 8, user2-1@plc2.org - password2
01: Checking persons: got 6 local (e=6) & 1 foreign (e=1)
02: Checking persons: got 6 local (e=6) & 1 foreign (e=1)
=== refresh after persons&keys creation
++++ 47 seconds ellapsed
01:== Refreshing peer got {'new_keys': 8, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 2, 'new_nodes': 0}
++++ 52 seconds ellapsed
02:== Refreshing peer got {'new_keys': 8, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 2, 'new_nodes': 0}
++++ 56 seconds ellapsed
01: Checking keys: got 8 local (e=8) & 8 foreign (e=8)
02: Checking keys: got 8 local (e=8) & 8 foreign (e=8)
01: Checking persons: got 6 local (e=6) & 3 foreign (e=3)
02: Checking persons: got 6 local (e=6) & 3 foreign (e=3)
++++ 58 seconds ellapsed
==================== ('RESETTING NODES',)
01:== Cleaning all nodes
02:== Cleaning all nodes
=== refresh cleaned nodes
++++ 59 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 0}
++++ 64 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 68 seconds ellapsed
01: Checking nodes: got 0 local (e=0) & 0 foreign (e=0)
02: Checking nodes: got 0 local (e=0) & 0 foreign (e=0)
==================== ('CREATING NODES',)
01: Checking nodes: got 5 local (e=5) & 0 foreign (e=0)
02: Checking nodes: got 5 local (e=5) & 0 foreign (e=0)
=== refresh after node creation
++++ 81 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 5}
++++ 86 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 5}
++++ 90 seconds ellapsed
01: Checking nodes: got 5 local (e=5) & 5 foreign (e=5)
02: Checking nodes: got 5 local (e=5) & 5 foreign (e=5)
-==================== ('2 extra del/add cycles on plc2 for different indexes',)
02:== Deleted node 1, returns 1
02:== Deleted node 2, returns 1
02:== Deleted node 3, returns 1
02:== Deleted node 4, returns 1
02:== Deleted node 5, returns 1
+==================== ('2 extra del/add cycles on plc2 for different indexes',)
02:== Added node 11 n202.plc2.org
02:== Added node 12 n202.plc2.org
02:== Added node 13 n202.plc2.org
02:== Deleted node 20, returns 1
02: Checking nodes: got 0 local (e=0) & 5 foreign (e=5)
=== refresh after deletion on plc2
++++ 101 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': -5}
++++ 106 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 110 seconds ellapsed
01: Checking nodes: got 5 local (e=5) & 0 foreign (e=0)
02: Checking nodes: got 0 local (e=0) & 5 foreign (e=5)
==================== ('ADD on plc2 for different indexes',)
01: Checking nodes: got 5 local (e=5) & 0 foreign (e=0)
02: Checking nodes: got 5 local (e=5) & 5 foreign (e=5)
=== refresh after re-creation on plc2
++++ 114 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 5}
++++ 119 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 124 seconds ellapsed
01: Checking nodes: got 5 local (e=5) & 5 foreign (e=5)
02: Checking nodes: got 5 local (e=5) & 5 foreign (e=5)
++++ 125 seconds ellapsed
==================== ('RESETTING SLICES TEST',)
01:== Cleaning all nodes
01:==== Cleaning node 1
02:== Cleaning all slices
02:==== Cleaning slice 3
=== refresh After slices init
++++ 144 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 0}
++++ 149 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 154 seconds ellapsed
==================== ('CREATING SLICES on plc1',)
01:== created slice 4 - max nodes=5
+01:== Attached person user1-1@plc1.org to slice one_s101
+01:== Attached person user1-2@plc1.org to slice one_s101
01:== created slice 5 - max nodes=5
+01:== Attached person user1-1@plc1.org to slice one_s102
+01:== Attached person user1-2@plc1.org to slice one_s102
01:== created slice 6 - max nodes=5
+01:== Attached person user1-1@plc1.org to slice one_s103
+01:== Attached person user1-2@plc1.org to slice one_s103
01: Checking slices: got 5 local (e=5) & 0 foreign (e=0)
02: Checking slices: got 2 local (e=2) & 0 foreign (e=0)
=== refresh after slice created on plc1
++++ 167 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 0}
++++ 172 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 3, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 177 seconds ellapsed
01: Checking slices: got 5 local (e=5) & 0 foreign (e=0)
02: Checking slices: got 2 local (e=2) & 3 foreign (e=3)
01: local slice one_s101 (e=0) on nodes []
02: foreign slice one_s102 (e=0) on nodes []
02: foreign slice one_s103 (e=0) on nodes []
=== refresh After local nodes were added on plc1
++++ 197 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 0}
++++ 202 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 207 seconds ellapsed
01: local slice one_s101 (e=5) on nodes [16, 17, 18, 19, 20]
[LOC:5] : n101.plc1.org n102.plc1.org n103.plc1.org n104.plc1.org n105.plc1.org
01: local slice one_s102 (e=5) on nodes [16, 17, 18, 19, 20]
02: foreign slice one_s103 (e=5) on nodes [8, 9, 10, 6, 7]
[FOR:5] : n101.plc1.org n102.plc1.org n103.plc1.org n104.plc1.org n105.plc1.org
=== refresh After foreign nodes were added in plc1
++++ 227 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 0}
++++ 232 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 237 seconds ellapsed
01: local slice one_s101 (e=10) on nodes [16, 17, 18, 19, 20, 11, 12, 13, 14, 15]
[LOC:5] : n101.plc1.org n102.plc1.org n103.plc1.org n104.plc1.org n105.plc1.org
[FOR:5] : n201.plc2.org n202.plc2.org n203.plc2.org n204.plc2.org n205.plc2.org
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'}],
'name': 'one_s101',
'slice_id': 4}
>>slivername = one_s102
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'}],
'name': 'one_s102',
'slice_id': 5}
>>slivername = one_s103
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'}],
'name': 'one_s103',
'slice_id': 6}
02: 3 slivers (exp. 3) in GetSlivers for node n201.plc2.org
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'}],
'name': 'one_s101',
'slice_id': 4}
>>slivername = one_s102
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'}],
'name': 'one_s102',
'slice_id': 5}
>>slivername = one_s103
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'}],
'name': 'one_s103',
'slice_id': 6}
==================== ('DELETING FOREIGN NODES FROM SLICES',)
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'}],
'name': 'one_s101',
'slice_id': 4}
>>slivername = one_s102
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'}],
'name': 'one_s102',
'slice_id': 5}
>>slivername = one_s103
{ 'attributes': [],
'expires': normalized,
'instantiation': 'plc-instantiated',
- 'keys': [],
+ 'keys': [ { 'key': 'ssh-rsa 1111111111111111 user1-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user1-key4',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key1',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key2',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key3',
+ 'key_type': 'ssh'},
+ { 'key': 'ssh-rsa 1111111111111111 user2-key4',
+ 'key_type': 'ssh'}],
'name': 'one_s103',
'slice_id': 6}
=== refresh After foreign nodes were removed on plc1
++++ 259 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 0}
++++ 264 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 269 seconds ellapsed
01: local slice one_s101 (e=5) on nodes [16, 17, 18, 19, 20]
[LOC:5] : n101.plc1.org n102.plc1.org n103.plc1.org n104.plc1.org n105.plc1.org
01: local slice one_s102 (e=5) on nodes [16, 17, 18, 19, 20]
02: foreign slice one_s103 (e=5) on nodes [8, 9, 10, 6, 7]
[FOR:5] : n101.plc1.org n102.plc1.org n103.plc1.org n104.plc1.org n105.plc1.org
=== refresh After local nodes were removed on plc1
++++ 287 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 0}
++++ 292 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 297 seconds ellapsed
01: local slice one_s101 (e=0) on nodes []
01: local slice one_s102 (e=0) on nodes []
01: local slice one_s103 (e=0) on nodes []
01: Checking slices: got 2 local (e=2) & 0 foreign (e=0)
02: Checking slices: got 2 local (e=2) & 3 foreign (e=3)
=== refresh After slices clenaup
++++ 309 seconds ellapsed
01:== Refreshing peer got {'new_keys': 0, 'new_slices': 0, 'plcname': 'Thierry plc1', 'new_persons': 0, 'new_nodes': 0}
++++ 315 seconds ellapsed
02:== Refreshing peer got {'new_keys': 0, 'new_slices': -3, 'plcname': 'Thierry plc2 on devbox', 'new_persons': 0, 'new_nodes': 0}
++++ 320 seconds ellapsed
01: Checking slices: got 2 local (e=2) & 0 foreign (e=0)
02: Checking slices: got 2 local (e=2) & 0 foreign (e=0)
++++ 322 seconds ellapsed