From 927c8faba297921a2ba494d0c3feb4c02eaa8b2f Mon Sep 17 00:00:00 2001
From: Sandrine Avakian
Date: Tue, 19 Jun 2012 13:50:19 +0200
Subject: [PATCH] Debugging OARrestapi - modifications because of the migration to OAR 2.5. Side effects.

Side effects of the OAR 2.5 migration handled here:
* the OAR REST API is now reached on port 8800 instead of 80;
* nodes returned by OAR are keyed on 'network_address' (hostname) instead of
  'resource_id' in ParseNodes/ParseSites;
* GetPersons is called with full records instead of hrns in slabdriver and in
  slabslices.verify_person;
* print>>sys.stderr debugging is replaced by logger.debug calls;
* SlabDB takes a debug flag controlling the SQLAlchemy echo options;
* missing posx/posy keys and empty OAR job lists no longer raise.
---
 sfa/senslab/LDAPapi.py       |  2 +-
 sfa/senslab/OARrestapi.py    | 84 +++++++++++++++++++++---------------
 sfa/senslab/slabaggregate.py | 11 +++--
 sfa/senslab/slabdriver.py    | 23 ++++++----
 sfa/senslab/slabpostgres.py  | 10 ++++-
 sfa/senslab/slabslices.py    | 65 ++++++++++++++--------------
 6 files changed, 112 insertions(+), 83 deletions(-)

diff --git a/sfa/senslab/LDAPapi.py b/sfa/senslab/LDAPapi.py
index c9b2a390..28b2b063 100644
--- a/sfa/senslab/LDAPapi.py
+++ b/sfa/senslab/LDAPapi.py
@@ -90,7 +90,7 @@ class ldap_co:
 
 class LDAPapi :
     def __init__(self):
-        logger.setLevelDebug()
+        #logger.setLevelDebug()
         #SFA related config
         self.senslabauth=Hierarchy()
         config=Config()
diff --git a/sfa/senslab/OARrestapi.py b/sfa/senslab/OARrestapi.py
index 88c87528..3622038b 100644
--- a/sfa/senslab/OARrestapi.py
+++ b/sfa/senslab/OARrestapi.py
@@ -11,7 +11,7 @@
 from sfa.util.plxrn import PlXrn
 from sfa.util.xrn import hrn_to_urn, get_authority,Xrn,get_leaf
 from sfa.util.config import Config
-
+from sfa.util.sfalogging import logger
 
 #OARIP='192.168.0.109'
 #OARIP='akila.inrialpes.fr'
@@ -53,10 +53,10 @@ class OARrestapi:
     def __init__(self):
         self.oarserver= {}
         self.oarserver['ip'] = OARIP
-        self.oarserver['port'] = 80
+        self.oarserver['port'] = 8800
         self.oarserver['uri'] = None
         self.oarserver['postformat'] = 'json'
 
-
+        #logger.setLevelDebug()
         self.jobstates = ['Terminated','Hold','Waiting','toLaunch','toError',\
                             'toAckReservation','Launching','Finishing',\
@@ -69,9 +69,10 @@ class OARrestapi:
         self.oarserver['uri'] = OARrequests_get_uri_dict[request]
         headers = {}
         data = json.dumps({})
+        logger.debug("OARrestapi \tGETRequestToOARRestAPI %s" %(request))
         if strval:
             self.oarserver['uri'] = self.oarserver['uri'].replace("id",str(strval))
-            print>>sys.stderr, "\r\n \r\n GETRequestToOARRestAPI replace : self.oarserver['uri'] %s", self.oarserver['uri']
+            logger.debug("OARrestapi: \t GETRequestToOARRestAPI self.oarserver['uri'] %s" %( self.oarserver['uri']))
         if username:
             headers['X-REMOTE_IDENT'] = username
         try :
@@ -84,9 +85,9 @@ class OARrestapi:
             #conn.endheaders()
             #conn.putrequest("GET",self.oarserver['uri'] )
             conn = httplib.HTTPConnection(self.oarserver['ip'],self.oarserver['port'])
-
             conn.request("GET",self.oarserver['uri'],data , headers )
             resp = ( conn.getresponse()).read()
+            #logger.debug("OARrestapi: \t GETRequestToOARRestAPI resp %s" %( resp))
             conn.close()
         except:
             raise ServerError("GET_OAR_SRVR : Could not reach OARserver")
@@ -166,16 +167,17 @@ class OARrestapi:
 
 
 class OARGETParser:
-    #Insert a new node into the dictnode dictionary
-    def AddNodeId(self,dictnode,value):
-        #Inserts new key. The value associated is a tuple list.
-        node_id = int(value)
-        dictnode[node_id] = [('node_id',node_id) ]
-        return node_id
-
-    def AddNodeNetworkAddr(self,tuplelist,value):
-        tuplelist.append(('hostname',str(value)))
-
+
+    #def AddNodeNetworkAddr(self,tuplelist,value):
+        #tuplelist.append(('hostname',str(value)))
+
+    def AddNodeNetworkAddr(self,dictnode,value):
+        #Inserts new key. The value associated is a tuple list
+        node_id = value
+
+        dictnode[node_id] = [('node_id',node_id),('hostname',node_id) ]
+
+        return node_id
 
     def AddNodeSite(self,tuplelist,value):
         tuplelist.append(('site',str(value)))
 
@@ -185,10 +187,10 @@ class OARGETParser:
     def AddNodeRadio(self,tuplelist,value):
         tuplelist.append(('radio',str(value)))
 
-    def AddMobility(self,tuplelist,value): 
+    def AddMobility(self,tuplelist,value):
         if value :
             tuplelist.append(('mobile',int(value)))
-        return 0
+
 
 
     def AddPosX(self,tuplelist,value):
@@ -199,7 +201,15 @@ class OARGETParser:
         tuplelist.append(('posy',value))
 
     def AddBootState(self,tuplelist,value):
-        tuplelist.append(('boot_state',str(value)))
+        tuplelist.append(('boot_state',str(value)))
+
+    #Insert a new node into the dictnode dictionary
+    def AddNodeId(self,dictnode,value):
+        #Inserts new key. The value associated is a tuple list
+        node_id = int(value)
+
+        dictnode[node_id] = [('node_id',node_id) ]
+        return node_id
 
     def ParseVersion(self) :
         #print self.raw_json
@@ -291,7 +301,6 @@ class OARGETParser:
 
 
     resources_fulljson_dict= {
-        'resource_id' : AddNodeId,
         'network_address' : AddNodeNetworkAddr,
         'site': AddNodeSite,
         'radio': AddNodeRadio,
@@ -299,6 +308,7 @@ class OARGETParser:
         'posx': AddPosX,
         'posy': AddPosY,
         'state':AddBootState,
+        #'id' : AddNodeId,
         }
 
 
@@ -307,35 +317,39 @@ class OARGETParser:
     #of the node properties and properties'values.
     def ParseNodes(self):
        node_id = None
+       keys = self.resources_fulljson_dict.keys()
+       keys.sort()
+       #print >>sys.stderr, " \r\n \r\n \t\t OARrestapi.py ParseNodes self.raw_json %s" %(self.raw_json)
        for dictline in self.raw_json:
-           #print >>sys.stderr, " \r\n \r\n \t\t OARrestapi.py ParseNodes dictline %s hey" %(dictline)
-           for k in dictline:
-               if k in self.resources_fulljson_dict:
-                   # dictionary is empty and/or a new node has to be inserted
-                   if node_id is None :
-                       node_id = self.resources_fulljson_dict[k](self,self.node_dictlist, dictline[k])
-                   else:
-                       ret = self.resources_fulljson_dict[k](self,self.node_dictlist[node_id], dictline[k])
+           node_id = None
+           # dictionary is empty and/or a new node has to be inserted
+           node_id = self.resources_fulljson_dict['network_address'](self,self.node_dictlist, dictline['network_address'])
+           for k in keys:
+               if k in dictline:
+                   if k == 'network_address':
+                       continue
+
-                       #If last property has been inserted in the property tuple list, reset node_id
-                       if ret == 0:
-                           #Turn the property tuple list (=dict value) into a dictionary
-                           self.node_dictlist[node_id] = dict(self.node_dictlist[node_id])
-                           node_id = None
+                   self.resources_fulljson_dict[k](self,self.node_dictlist[node_id], dictline[k])
+
+           #The last property has been inserted in the property tuple list, reset node_id
+           #Turn the property tuple list (=dict value) into a dictionary
+           self.node_dictlist[node_id] = dict(self.node_dictlist[node_id])
+           node_id = None
 
-               else:
-                   pass
 
    def hostname_to_hrn(self, root_auth, login_base, hostname):
-       return PlXrn(auth=root_auth,hostname=login_base+'_'+hostname).get_hrn()
+       return PlXrn(auth=root_auth,hostname=login_base + '_' +hostname).get_hrn()
 
    #Retourne liste de dictionnaires contenant attributs des sites
    def ParseSites(self):
        nodes_per_site = {}
        config = Config()
+       logger.debug(" OARrestapi.py \t ParseSites self.node_dictlist %s"%(self.node_dictlist))
        # Create a list of nodes per site_id
        for node_id in self.node_dictlist.keys():
            node = self.node_dictlist[node_id]
+
            if node['site'] not in nodes_per_site:
                nodes_per_site[node['site']] = []
            nodes_per_site[node['site']].append(node['node_id'])
diff --git a/sfa/senslab/slabaggregate.py b/sfa/senslab/slabaggregate.py
index 040c54f1..1b190e7d 100644
--- a/sfa/senslab/slabaggregate.py
+++ b/sfa/senslab/slabaggregate.py
@@ -24,7 +24,7 @@ from sfa.rspecs.version_manager import VersionManager
 from sfa.util.sfatime import datetime_to_epoch
 
 def hostname_to_hrn(root_auth,login_base,hostname):
-    return PlXrn(auth=root_auth,hostname=login_base+'_'+hostname).get_hrn()
+    return PlXrn(auth=root_auth,hostname=login_base + '_'+hostname).get_hrn()
 
 class SlabAggregate:
 
@@ -175,9 +175,12 @@ class SlabAggregate:
         # assumes that sites, interfaces and tags have already been prepared.
         #site = sites_dict[node['site_id']]
 
-        if node['posx'] and node['posy']:
-            location = Location({'longitude':node['posx'], 'latitude': node['posy']})
-            rspec_node['location'] = location
+        try:
+            if node['posx'] and node['posy']:
+                location = Location({'longitude':node['posx'], 'latitude': node['posy']})
+                rspec_node['location'] = location
+        except KeyError:
+            pass
 
         #rspec_node['interfaces'] = []
         #if_count=0
         #for if_id in node['interface_ids']:
diff --git a/sfa/senslab/slabdriver.py b/sfa/senslab/slabdriver.py
index d4fac712..e0d97843 100644
--- a/sfa/senslab/slabdriver.py
+++ b/sfa/senslab/slabdriver.py
@@ -53,7 +53,7 @@ class SlabDriver(Driver):
         self.oar = OARrestapi()
         self.ldap = LDAPapi()
         self.time_format = "%Y-%m-%d %H:%M:%S"
-        self.db = SlabDB(config)
+        self.db = SlabDB(config,debug = True)
         self.cache=None
 
 
@@ -282,8 +282,9 @@ class SlabDriver(Driver):
             else:
                 pointer = slices[0]['slice_id']
 
-        elif type == 'user':
-            persons = self.GetPersons([sfa_record['hrn']])
+        elif type == 'user':
+            persons = self.GetPersons([sfa_record])
+            #persons = self.GetPersons([sfa_record['hrn']])
             if not persons:
                 pointer = self.AddPerson(dict(sfa_record))
                 #add in LDAP
@@ -365,8 +366,9 @@ class SlabDriver(Driver):
         record_id= sfa_record['record_id']
         if type == 'user':
             username = hrn.split(".")[len(hrn.split(".")) -1]
-            #get user in ldap
-            persons = self.GetPersons(username)
+            #get user in ldap
+            persons = self.GetPersons(sfa_record)
+            #persons = self.GetPersons(username)
             # only delete this person if he has site ids. if he doesnt, it probably means
             # he was just removed from a site, not actually deleted
             if persons and persons[0]['site_ids']:
@@ -548,6 +550,9 @@ class SlabDriver(Driver):
         #jobs=self.oar.parser.SendRequest("GET_reserved_nodes")
         jobs=self.oar.parser.SendRequest("GET_jobs_details")
         nodes=[]
+        if jobs['total'] == 0:
+            return []
+
         for j in jobs :
             nodes=j['assigned_network_address']+nodes
         return nodes
@@ -836,7 +841,7 @@ class SlabDriver(Driver):
         from_zone = tz.gettz(slot['timezone'])
         date = str(slot['date']) + " " + str(slot['start_time'])
 
-        user_datetime = datetime.datetime.strptime(date, self.time_format)
+        user_datetime = datetime.strptime(date, self.time_format)
         user_datetime = user_datetime.replace(tzinfo = from_zone)
 
         #Convert to UTC zone
@@ -858,7 +863,7 @@ class SlabDriver(Driver):
         s_tz=tz.gettz(server_tz)
         UTC_zone = tz.gettz("UTC")
         #weird... datetime.fromtimestamp should work since we do from datetime import datetime
-        utc_server= datetime.datetime.fromtimestamp(float(server_timestamp)+20,UTC_zone)
+        utc_server= datetime.fromtimestamp(float(server_timestamp)+20,UTC_zone)
         server_localtime=utc_server.astimezone(s_tz)
 
         print>>sys.stderr, "\r\n \r\n \t\tLaunchExperimentOnOAR server_timestamp %s server_tz %s slice_name %s added_nodes %s username %s reqdict %s " %(server_timestamp,server_tz,slice_name,added_nodes,slice_user, reqdict )
@@ -984,8 +989,8 @@ class SlabDriver(Driver):
                             'person_ids':[recslice['record_id_user']]})
 
                 #GetPersons takes [] as filters
-                user_slab = self.GetPersons([{'hrn':recuser.hrn}])
-
+                #user_slab = self.GetPersons([{'hrn':recuser.hrn}])
+                user_slab = self.GetPersons([record])
 
             recslice.update({'type':'slice','hrn':recslice['slice_hrn']})
             record.update(user_slab[0])
diff --git a/sfa/senslab/slabpostgres.py b/sfa/senslab/slabpostgres.py
index ae7f1157..c3270248 100644
--- a/sfa/senslab/slabpostgres.py
+++ b/sfa/senslab/slabpostgres.py
@@ -94,9 +94,15 @@ class SliceSenslab (SlabBase):
         #return result
 
 class SlabDB:
-    def __init__(self,config):
+    def __init__(self,config, debug = False):
         self.sl_base = SlabBase
         dbname="slab_sfa"
+        if debug == True :
+            l_echo_pool = True
+            l_echo=True
+        else :
+            l_echo_pool = False
+            l_echo = False
         # will be created lazily on-demand
         self.slab_session = None
         # the former PostgreSQL.py used the psycopg2 directly and was doing
@@ -114,7 +120,7 @@ class SlabDB:
                 (config.SFA_DB_USER,config.SFA_DB_PASSWORD,config.SFA_DB_HOST,config.SFA_DB_PORT,dbname)
         for url in [ unix_url, tcp_url ] :
             try:
-                self.slab_engine = create_engine (url,echo_pool=True,echo=True)
+                self.slab_engine = create_engine (url,echo_pool = l_echo_pool, echo = l_echo)
                 self.check()
                 self.url=url
                 return
diff --git a/sfa/senslab/slabslices.py b/sfa/senslab/slabslices.py
index 49c226e0..08fc5ff9 100644
--- a/sfa/senslab/slabslices.py
+++ b/sfa/senslab/slabslices.py
@@ -338,7 +338,7 @@ class SlabSlices:
                 users_by_hrn[user['hrn']] = user
             users_dict[user['hrn']] = {'person_id':user['person_id'], 'hrn':user['hrn']}
 
-        print>>sys.stderr, " \r\n \r\n \t slabslices.py verify_person users_dict %s \r\n user_by_hrn %s \r\n \tusers_by_id %s " %( users_dict,users_by_hrn, users_by_id)
+        logger.debug( "\r\n \r\n SLABSLICE.PY \tverify_person users_dict %s \r\n user_by_hrn %s \r\n \tusers_by_id %s " %( users_dict,users_by_hrn, users_by_id) )
 
         existing_user_ids = []
         existing_user_hrns = []
@@ -350,7 +350,9 @@ class SlabSlices:
             #Construct the list of filters for GetPersons
             filter_user = []
             for hrn in users_by_hrn:
-                filter_user.append ( {'hrn':hrn})
+                #filter_user.append ( {'hrn':hrn})
+                filter_user.append (users_by_hrn[hrn])
+            logger.debug(" SLABSLICE.PY \tverify_person filter_user %s " %(filter_user) )
             existing_users = self.driver.GetPersons(filter_user)
             #existing_users = self.driver.GetPersons({'hrn': users_by_hrn.keys()})
             #existing_users = self.driver.GetPersons({'hrn': users_by_hrn.keys()},
@@ -368,7 +370,7 @@ class SlabSlices:
 
                 else:
                     if isinstance(users,list):
-                        ldap_reslt = self.driver.ldap.LdapSearch(users[0]) 
+                        ldap_reslt = self.driver.ldap.LdapSearch(users[0])
                     else:
                         ldap_reslt = self.driver.ldap.LdapSearch(users)
                     if ldap_result:
@@ -378,41 +380,40 @@ class SlabSlices:
 
                     else:
                         #User not existing in LDAP
-                        print>>sys.stderr, " \r\n \r\n \t slabslices.py verify_person users HUMHUMHUMHUM ... %s \r\n \t ldap_reslt %s " %(users, ldap_reslt)
+                        logger.debug(" SLABSLICE.PY \tverify_person users HUMHUMHUMHUM ... %s \r\n \t ldap_reslt %s " %(users, ldap_reslt))
 
         # requested slice users
         requested_user_ids = users_by_id.keys()
         requested_user_hrns = users_by_hrn.keys()
-        print>>sys.stderr, " \r\n \r\n \t slabslices.py verify_person requested_user_ids %s user_by_hrn %s " %( requested_user_ids,users_by_hrn)
+        logger.debug(" SLABSLICE.PY \tverify_person requested_user_ids %s user_by_hrn %s " %( requested_user_ids,users_by_hrn))
 
         # existing slice users
-        existing_slice_users_filter = {'hrn': slice_record['PI'][0]}
-        print>>sys.stderr, " \r\n \r\n slices.py verify_person requested_user_ids %s existing_slice_users_filter %s slice_record %s" %(requested_user_ids,existing_slice_users_filter,slice_record)
+
+        #existing_slice_users_filter = {'hrn': slice_record['PI'][0]}
+        #logger.debug(" SLABSLICE.PY \tverify_person requested_user_ids %s existing_slice_users_filter %s slice_record %s" %(requested_user_ids,existing_slice_users_filter,slice_record))
 
-        existing_slice_users = self.driver.GetPersons([existing_slice_users_filter])
+        #existing_slice_users = self.driver.GetPersons([existing_slice_users_filter])
         #existing_slice_users = self.driver.GetPersons(existing_slice_users_filter,['hrn','pkey'])
-        print>>sys.stderr, " \r\n \r\n slices.py verify_person existing_slice_users %s " %(existing_slice_users)
-
-        existing_slice_user_hrns = [user['hrn'] for user in existing_slice_users]
+        #logger.debug(" SLABSLICE.PY \tverify_person existing_slice_users %s " %(existing_slice_users))
+        #Check that the user of the slice in the slice record
+        #matches the existing users
+        try:
+            if slice_record['record_id_user'] in requested_user_ids and slice_record['PI'][0] in requested_user_hrns:
+                logger.debug(" SLABSLICE.PY \tverify_person requested_user_ids %s = slice_record['record_id_user'] %s" %(requested_user_ids,slice_record['record_id_user']))
+
+        except KeyError:
+            pass
+
+        #existing_slice_user_hrns = [user['hrn'] for user in existing_slice_users]
 
         # users to be added, removed or updated
+        #One user in one senslab slice : there should be no need
+        #to remove/ add any user from/to a slice.
+        #However a user from SFA which is not registered in Senslab yet
+        #should be added to the LDAP.
 
         added_user_hrns = set(requested_user_hrns).difference(set(existing_user_hrns))
-        added_slice_user_hrns = set(requested_user_hrns).difference(existing_slice_user_hrns)
-
-        removed_user_hrns = set(existing_slice_user_hrns).difference(requested_user_hrns)
-
-
-        updated_user_hrns = set(existing_slice_user_hrns).intersection(requested_user_hrns)
-        # Remove stale users (only if we are not appending)
-        append = options.get('append', True)
-        if append == False:
-            for removed_user_hrn in removed_user_hrns:
-                self.driver.DeletePersonFromSlice(removed_user_hrn, slice_record['name'])
-        # update_existing users
-        updated_users_list = [user for user in existing_slice_users if user['hrn'] in \
-          updated_user_hrns]
+
         #self.verify_keys(existing_slice_users, updated_users_list, peer, append)
 
         added_persons = []
@@ -421,12 +422,12 @@ class SlabSlices:
             added_user = users_dict[added_user_hrn]
             #hrn, type = urn_to_hrn(added_user['urn'])
             person = {
-                #'first_name': added_user.get('first_name', hrn),
-                #'last_name': added_user.get('last_name', hrn),
+                'first_name': added_user.get('first_name', hrn),
+                'last_name': added_user.get('last_name', hrn),
                 'person_id': added_user['person_id'],
-                #'peer_person_id': None,
-                #'keys': [],
-                #'key_ids': added_user.get('key_ids', []),
+                'peer_person_id': None,
+                'keys': [],
+                'key_ids': added_user.get('key_ids', []),
             }
 
             person['person_id'] = self.driver.AddPerson(person)
@@ -450,8 +451,8 @@ class SlabSlices:
                 #peer_dict = {'type': 'user', 'hrn': hrn, 'peer_authority': sfa_peer, \
                 #'pointer': person['person_id']}
                 #self.registry.register_peer_object(self.credential, peer_dict)
-        for added_slice_user_hrn in added_slice_user_hrns.union(added_user_hrns):
-            self.driver.AddPersonToSlice(added_slice_user_hrn, slice_record['name'])
+        #for added_slice_user_hrn in added_slice_user_hrns.union(added_user_hrns):
+            #self.driver.AddPersonToSlice(added_slice_user_hrn, slice_record['name'])
         #for added_slice_user_id in added_slice_user_ids.union(added_user_ids):
             # add person to the slice
             #self.driver.AddPersonToSlice(added_slice_user_id, slice_record['name'])
-- 
2.47.0