def ParseJobsIds(self):
    """Extract a summary of a single job from ``self.raw_json``.

    Returns a dict restricted to the keys listed in ``job_resources``
    (wanted_resources, name, id, start_time, state, owner, walltime,
    message), or None if any of those keys is missing from the raw
    JSON (a warning is written to stderr in that case).
    """
    # Subset of the job fields we expose to callers.  (The original
    # code also carried the full field list returned by the OAR API in
    # an unused local; it has been dropped.)
    job_resources = ['wanted_resources', 'name', 'id', 'start_time',
                     'state', 'owner', 'walltime', 'message']

    job_info = self.raw_json
    try:
        # Build {field: value} for the summary fields only.
        return dict((k, job_info[k]) for k in job_resources)
    except KeyError:
        sys.stderr.write(" \r\n \t ParseJobsIds Key Error\n")
+
def ParseJobsIdResources(self):
    """Return the raw JSON describing the resources of one job.

    The payload is logged for debugging and returned unmodified.
    """
    logger.debug("OARESTAPI ParseJobsIdResources %s" % (self.raw_json))
    # Python-2-only ``print>>sys.stderr`` replaced with an explicit
    # write, which behaves identically on Python 2 and 3.
    sys.stderr.write("ParseJobsIdResources\n")
    return self.raw_json
+
def ParseResources(self):
    """Unwrap the resource list from its JSON envelope and parse it.

    The OAR API nests the resources under the 'items' key; keep only
    that list and delegate per-node parsing to ParseNodes().
    """
    sys.stderr.write(" \r\n \t\t\t ParseResources__________________________ \n")
    # Resources are listed inside the 'items' list of the JSON payload.
    self.raw_json = self.raw_json['items']
    self.ParseNodes()
+
def ParseReservedNodes(self):
    """Return the raw JSON listing the currently reserved nodes.

    Unlike ParseResources, the payload is returned as-is; the caller
    is expected to pick the 'items' list itself.
    """
    sys.stderr.write(" \r\n \t\t\t ParseReservedNodes__________________________ \n")
    return self.raw_json
+
def ParseDeleteJobs(self):
    """No-op parser: a DELETE on a job yields no payload to parse."""
    return None
+
def ParseResourcesFull(self):
    """Parse the full resource listing and return the node dictionary.

    apilib versions other than 0.2.10 wrap the resources in an 'items'
    list; unwrap it first, then build self.node_dictlist (ParseNodes)
    and self.site_dict (ParseSites).
    """
    sys.stderr.write(" \r\n \t\t\t ParseResourcesFull_____________________________ \n")
    # Only apilib 0.2.10 returns the resource list at the top level;
    # every other version nests it under 'items'.
    if self.version_json_dict['apilib_version'] != "0.2.10":
        self.raw_json = self.raw_json['items']
    self.ParseNodes()
    self.ParseSites()
    return self.node_dictlist
+
def ParseResourcesFullSites(self):
    """Same parsing as ParseResourcesFull, but return the site dict."""
    needs_unwrap = self.version_json_dict['apilib_version'] != "0.2.10"
    if needs_unwrap:
        # Newer apilib versions nest the resources under 'items'.
        self.raw_json = self.raw_json['items']
    self.ParseNodes()
    self.ParseSites()
    return self.site_dict
+
+
# Dispatch table: OAR resource property name -> handler that records
# that property on a node entry.  ParseNodes invokes each handler as
# handler(self, target, value).
# NOTE(review): the Add* callables are defined elsewhere in this file
# (outside this chunk) — verify their exact signatures there.
resources_fulljson_dict= {
    'network_address' : AddNodeNetworkAddr,
    'site': AddNodeSite,
    'radio': AddNodeRadio,
    'mobile': AddMobility,
    'posx': AddPosX,
    'posy': AddPosY,
    'state':AddBootState,
    #'id' : AddNodeId,
    }
+
+
# Parse node properties from OAR.
# Put them into a dictionary with key = node id and value a dictionary
# of the node properties and the properties' values.
def ParseNodes(self):
    """Populate self.node_dictlist from the raw JSON resource list.

    Each entry of self.raw_json describes one node as a flat dict of
    OAR properties.  The 'network_address' handler creates the node's
    entry (a list of (property, value) tuples) and returns its key;
    the remaining handlers from resources_fulljson_dict append to that
    list.  The tuple list is finally converted into a plain dict, so
    self.node_dictlist maps node id -> {property: value}.
    """
    # Deterministic property-processing order.  Bug fix: the original
    # used ``keys = d.keys(); keys.sort()``, which is Python-2-only
    # (dict views have no .sort()); sorted() works on both.
    property_names = sorted(self.resources_fulljson_dict)

    for resource in self.raw_json:
        # 'network_address' creates/locates the node entry and yields
        # the key used for all subsequent property insertions.
        node_id = self.resources_fulljson_dict['network_address'](
            self, self.node_dictlist, resource['network_address'])

        for prop in property_names:
            # network_address was already consumed above.
            if prop == 'network_address' or prop not in resource:
                continue
            self.resources_fulljson_dict[prop](
                self, self.node_dictlist[node_id], resource[prop])

        # All properties inserted: turn the (property, value) tuple
        # list into a plain dictionary.
        self.node_dictlist[node_id] = dict(self.node_dictlist[node_id])
+
def hostname_to_hrn(self, root_auth, login_base, hostname):
    """Build the HRN for *hostname* under *root_auth* / *login_base*."""
    qualified_name = login_base + '_' + hostname
    return PlXrn(auth=root_auth, hostname=qualified_name).get_hrn()
# Builds the dictionary of site records (attributes of each site).
def ParseSites(self):
    """Build self.site_dict from the already-parsed self.node_dictlist.

    First groups node ids per site, then attaches an 'hrn' to every
    node and creates one site record per distinct site, combining the
    grouped node ids with mostly hard-coded senslab-testbed metadata.
    """
    nodes_per_site = {}
    config = Config()
    logger.debug(" OARrestapi.py \t ParseSites self.node_dictlist %s" % (self.node_dictlist))

    # Create a list of node ids per site.  (The original duplicated
    # the append in two branches; setdefault collapses them.)
    for node in self.node_dictlist.values():
        site_nodes = nodes_per_site.setdefault(node['site'], [])
        if node['node_id'] not in site_nodes:
            site_nodes.append(node['node_id'])

    # Create a site dictionary keyed by site name; the value is a
    # dictionary of properties, including the list of node ids.
    for node_id in list(self.node_dictlist.keys()):
        node = self.node_dictlist[node_id]
        node.update({'hrn': self.hostname_to_hrn(self.interface_hrn,
                                                 node['site'],
                                                 node['hostname'])})
        self.node_dictlist.update({node_id: node})
        if node['site'] not in self.site_dict:
            self.site_dict[node['site']] = {
                'site': node['site'],
                'node_ids': nodes_per_site[node['site']],
                # Hard-coded senslab site metadata (single physical site).
                'latitude': "48.83726",
                'longitude': "- 2.10336",
                'name': config.SFA_REGISTRY_ROOT_AUTH,
                'pcu_ids': [], 'max_slices': None, 'ext_consortium_id': None,
                'max_slivers': None, 'is_public': True, 'peer_site_id': None,
                'abbreviated_name': "senslab", 'address_ids': [],
                # NOTE(review): 'http,//...' looks like a typo for
                # 'http://...' but is preserved as-is (runtime data
                # other code may compare against) — confirm upstream.
                'url': "http,//www.senslab.info", 'person_ids': [],
                'site_tag_ids': [], 'enabled': True, 'slice_ids': [],
                'date_created': None, 'peer_id': None}
+
+
+
+
# Request dispatch table: symbolic request name -> REST URI plus the
# parser applied to the JSON answer (used by SendRequest).
# NOTE(review): ParseVersion, ParseTimezone, ParseJobs, ParseJobsTable
# and ParseJobsDetails are defined elsewhere in this file (outside this
# chunk).
OARrequests_uri_dict = {
    'GET_version': {'uri':'/oarapi/version.json', 'parse_func': ParseVersion},
    'GET_timezone':{'uri':'/oarapi/timezone.json' ,'parse_func': ParseTimezone },
    'GET_jobs': {'uri':'/oarapi/jobs.json','parse_func': ParseJobs},
    'GET_jobs_id': {'uri':'/oarapi/jobs/id.json','parse_func': ParseJobsIds},
    'GET_jobs_id_resources': {'uri':'/oarapi/jobs/id/resources.json','parse_func': ParseJobsIdResources},
    'GET_jobs_table': {'uri':'/oarapi/jobs/table.json','parse_func': ParseJobsTable},
    'GET_jobs_details': {'uri':'/oarapi/jobs/details.json','parse_func': ParseJobsDetails},
    'GET_reserved_nodes':{'uri':'/oarapi/jobs/details.json?state=Running,Waiting,Launching','parse_func':ParseReservedNodes},
    'GET_resources_full': {'uri':'/oarapi/resources/full.json','parse_func': ParseResourcesFull},
    'GET_sites':{'uri':'/oarapi/resources/full.json','parse_func': ParseResourcesFullSites},
    'GET_resources':{'uri':'/oarapi/resources.json' ,'parse_func': ParseResources},
    'DELETE_jobs_id':{'uri':'/oarapi/jobs/id.json' ,'parse_func': ParseDeleteJobs}
    }
+
+
def __init__(self, srv):
    """Initialise parser state and fetch the OAR API version via *srv*."""
    # Templates for the JSON answers expected from the API.
    self.version_json_dict = {
        'api_version': None, 'apilib_version': None,
        'api_timezone': None, 'api_timestamp': None, 'oar_version': None,
    }
    self.timezone_json_dict = {'timezone': None, 'api_timestamp': None, }
    self.jobs_json_dict = {
        'total': None, 'links': [], 'offset': None, 'items': [],
    }
    # The table/details answers share the same envelope shape.
    self.jobs_table_json_dict = self.jobs_json_dict
    self.jobs_details_json_dict = self.jobs_json_dict

    self.config = Config()
    self.interface_hrn = self.config.SFA_INTERFACE_HRN
    self.server = srv
    self.node_dictlist = {}
    self.site_dict = {}
    # Prime self.version_json_dict from the live server.
    self.SendRequest("GET_version")
+
def SendRequest(self, request, strval=None, username=None):
    """Issue *request* against the OAR REST API and parse the answer.

    request  -- a key of OARrequests_uri_dict (e.g. 'GET_jobs').
    strval   -- optional value substituted into the URI by the server
                wrapper (e.g. a job id).
    username -- optional user the request is issued for.

    Returns the parsed result, or None (with a message on stderr) for
    an unknown request name.
    """
    # Bug fix: the original tested membership in the undefined name
    # 'OARrequests_get_uri_dict' (a NameError at runtime); the actual
    # dispatch table is self.OARrequests_uri_dict.
    if request in self.OARrequests_uri_dict:
        self.raw_json = self.server.GETRequestToOARRestAPI(request, strval, username)
        return self.OARrequests_uri_dict[request]['parse_func'](self)
    else:
        sys.stderr.write("\r\n OARGetParse __init__ : ERROR_REQUEST %s\n" % request)
+
+
+