+"""
+This API is adapted for OpenLDAP.
+The file contains all LDAP classes and methods needed to:
+ - Load the LDAP connection configuration file (login, address..) with
+ LdapConfig
+ - Connect to LDAP with ldap_co
+ - Create a unique LDAP login and password for a user based on his email or
+ last name and first name with LoginPassword.
+ - Manage entries in LDAP using SFA records with LDAPapi
+ (Search, Add, Delete, Modify)
+
+"""
import random
from passlib.hash import ldap_salted_sha1 as lssha
+
from sfa.util.xrn import get_authority
-import ldap
+from sfa.util.sfalogging import logger
from sfa.util.config import Config
-
+import ldap
import ldap.modlist as modlist
-from sfa.util.sfalogging import logger
-import os.path
-#API for OpenLDAP
+import os.path
class LdapConfig():
self.config_path = os.path.dirname(config_file)
except IOError:
raise IOError, "Could not find or load the configuration file: %s" \
- % config_file
+ % config_file
class ldap_co:
""" Binding method.
:returns: dictionary with the bind status. True if Successful,
- False if not and in this case the error message( {'bool', 'message'} )
+ False if not and in this case the error message({'bool','message'})
:rtype: dict
"""
# Bind/authenticate with a user with appropriate
#rights to add objects
- self.ldapserv.simple_bind_s(self.ldapAdminDN, \
- self.ldapAdminPassword)
+ self.ldapserv.simple_bind_s(self.ldapAdminDN,
+ self.ldapAdminPassword)
except ldap.LDAPError, error:
return {'bool': False, 'message': error}
""" Close the LDAP connection.
Can throw an exception if the unbinding fails.
+ :returns: on failure, a dictionary with the unbind status:
+ False, along with the error message ({'bool', 'message'})
+ :rtype: dict or None
"""
try:
try:
login = \
lower_first_name[0:index] + \
- lower_last_name[0:self.login_pwd.login_max_length-index]
+ lower_last_name[0:
+ self.login_pwd.login_max_length
+ - index]
login_filter = '(uid=' + login + ')'
except KeyError:
print "lower_first_name - lower_last_name too short"
- logger.debug("LDAP.API \t LdapGenerateUniqueLogin login %s" % (login))
+ logger.debug("LDAP.API \t LdapGenerateUniqueLogin login %s"
+ % (login))
return login
except ldap.LDAPError, error:
+"""
+File used to handle issuing request to OAR and parse OAR's JSON responses.
+Contains the following classes:
+- JsonPage : handles multiple pages OAR answers.
+- OARRestapi : handles issuing POST or GET requests to OAR.
+- ParsingResourcesFull : dedicated to parsing OAR's answer to a get resources
+full request.
+- OARGETParser : handles parsing the Json answers to different GET requests.
+
+"""
from httplib import HTTPConnection, HTTPException, NotConnected
import json
from sfa.util.config import Config
from sfa.util.sfalogging import logger
import os.path
-import sys
class JsonPage:
except IOError:
raise IOError, "Could not find or load the configuration file: %s" \
- % config_file
+ % config_file
#logger.setLevelDebug()
self.oarserver['ip'] = self.OAR_IP
self.oarserver['port'] = self.OAR_PORT
def GETRequestToOARRestAPI(self, request, strval=None,
- next_page=None, username=None):
+ next_page=None, username=None):
"""Makes a GET request to OAR.
logger.debug("OARrestapi \tGETRequestToOARRestAPI %s" % (request))
if strval:
self.oarserver['uri'] = self.oarserver['uri'].\
- replace("id",str(strval))
+ replace("id", str(strval))
if next_page:
self.oarserver['uri'] += next_page
headers['X-REMOTE_IDENT'] = username
logger.debug("OARrestapi: \t GETRequestToOARRestAPI \
- self.oarserver['uri'] %s strval %s" \
- %(self.oarserver['uri'], strval))
- try :
+ self.oarserver['uri'] %s strval %s"
+ % (self.oarserver['uri'], strval))
+ try:
#seems that it does not work if we don't add this
headers['content-length'] = '0'
- conn = HTTPConnection(self.oarserver['ip'], \
- self.oarserver['port'])
+ conn = HTTPConnection(self.oarserver['ip'],
+ self.oarserver['port'])
conn.request("GET", self.oarserver['uri'], data, headers)
- resp = ( conn.getresponse()).read()
+ resp = (conn.getresponse()).read()
conn.close()
- except HTTPException, error :
- logger.log_exc("GET_OAR_SRVR : Problem with OAR server : %s " \
- %(error))
+ except HTTPException, error:
+ logger.log_exc("GET_OAR_SRVR : Problem with OAR server : %s "
+ % (error))
#raise ServerError("GET_OAR_SRVR : Could not reach OARserver")
try:
js_dict = json.loads(resp)
return js_dict
except ValueError, error:
- logger.log_exc("Failed to parse Server Response: %s ERROR %s"\
- %(js_dict, error))
+ logger.log_exc("Failed to parse Server Response: %s ERROR %s"
+ % (js_dict, error))
#raise ServerError("Failed to parse Server Response:" + js)
class OARGETParser:
- # resources_fulljson_dict = {
- # 'network_address': AddNodeNetworkAddr,
- # 'site': AddNodeSite,
- # # 'radio': AddNodeRadio,
- # 'mobile': AddMobility,
- # 'x': AddPosX,
- # 'y': AddPosY,
- # 'z':AddPosZ,
- # 'archi':AddHardwareType,
- # 'state':AddBootState,
- # 'id' : AddOarNodeId,
- # }
-
-
- def __init__(self, srv) :
+ """Class providing parsing methods associated to specific GET requests.
+
+ """
+
+ def __init__(self, srv):
self.version_json_dict = {
- 'api_version' : None , 'apilib_version' :None,\
- 'api_timezone': None, 'api_timestamp': None, 'oar_version': None ,}
+ 'api_version': None, 'apilib_version': None,
+ 'api_timezone': None, 'api_timestamp': None, 'oar_version': None}
self.config = Config()
self.interface_hrn = self.config.SFA_INTERFACE_HRN
self.timezone_json_dict = {
self.json_page = JsonPage()
self.parsing_resourcesfull = ParsingResourcesFull()
self.site_dict = {}
+ self.jobs_list = []
self.SendRequest("GET_version")
-
-
-
- def ParseVersion(self) :
+ def ParseVersion(self):
"""Parses the OAR answer to the GET_version ( /oarapi/version.json.)
Finds the OAR apilib version currently used. Has an impact on the json
"""
- if 'oar_version' in self.json_page.raw_json :
- self.version_json_dict.update(api_version =
- self.json_page.raw_json['api_version'],
- apilib_version=self.json_page.raw_json['apilib_version'],
- api_timezone=self.json_page.raw_json['api_timezone'],
- api_timestamp=self.json_page.raw_json['api_timestamp'],
- oar_version=self.json_page.raw_json['oar_version'])
+ if 'oar_version' in self.json_page.raw_json:
+ self.version_json_dict.update(
+ api_version=self.json_page.raw_json['api_version'],
+ apilib_version=self.json_page.raw_json['apilib_version'],
+ api_timezone=self.json_page.raw_json['api_timezone'],
+ api_timestamp=self.json_page.raw_json['api_timestamp'],
+ oar_version=self.json_page.raw_json['oar_version'])
else:
- self.version_json_dict.update(api_version =
- self.json_page.raw_json['api'],
- apilib_version=self.json_page.raw_json['apilib'],
- api_timezone=self.json_page.raw_json['api_timezone'],
- api_timestamp=self.json_page.raw_json['api_timestamp'],
- oar_version=self.json_page.raw_json['oar'])
+ self.version_json_dict.update(
+ api_version=self.json_page.raw_json['api'],
+ apilib_version=self.json_page.raw_json['apilib'],
+ api_timezone=self.json_page.raw_json['api_timezone'],
+ api_timestamp=self.json_page.raw_json['api_timestamp'],
+ oar_version=self.json_page.raw_json['oar'])
print self.version_json_dict['apilib_version']
"""
job_resources = ['wanted_resources', 'name', 'id', 'start_time',
- 'state', 'owner', 'walltime', 'message']
-
+ 'state', 'owner', 'walltime', 'message']
- job_resources_full = ['launching_directory', 'links',
+ # Unused variable providing the contents of the json dict returned from
+ # get job resources full request
+ job_resources_full = [
+ 'launching_directory', 'links',
'resubmit_job_id', 'owner', 'events', 'message',
- 'scheduled_start', 'id', 'array_id', 'exit_code',
- 'properties', 'state','array_index', 'walltime',
+ 'scheduled_start', 'id', 'array_id', 'exit_code',
+ 'properties', 'state', 'array_index', 'walltime',
'type', 'initial_request', 'stop_time', 'project',
- 'start_time', 'dependencies','api_timestamp','submission_time',
+ 'start_time', 'dependencies', 'api_timestamp', 'submission_time',
'reservation', 'stdout_file', 'types', 'cpuset_name',
- 'name', 'wanted_resources','queue','stderr_file','command']
+ 'name', 'wanted_resources', 'queue', 'stderr_file', 'command']
job_info = self.json_page.raw_json
return job_resources
- def ParseResources(self) :
+ def ParseResources(self):
""" Parses the json produced by a get_resources request on oar."""
#logger.debug("OARESTAPI \tParseResources " )
self.ParseNodes()
def ParseReservedNodes(self):
- """ Returns an array containing the list of the reserved nodes """
+ """ Returns an array containing the list of the jobs scheduled
+ with the reserved nodes if available.
+
+ :returns: list of job dicts, each dict containing the following keys:
+ t_from, t_until, resource_ids (of the reserved nodes for this job).
+ If the information is not available, default values will be set for
+ these keys. The other keys are : state, lease_id and user.
+ :rtype: list
+
+ """
#resources are listed inside the 'items' list from the json
reservation_list = []
job = {}
#Parse resources info
- for json_element in self.json_page.raw_json['items']:
+ for json_element in self.json_page.raw_json['items']:
#In case it is a real reservation (not asap case)
if json_element['scheduled_start']:
job['t_from'] = json_element['scheduled_start']
job['t_until'] = int(json_element['scheduled_start']) + \
- int(json_element['walltime'])
+ int(json_element['walltime'])
#Get resources id list for the job
- job['resource_ids'] = [ node_dict['id'] for node_dict
- in json_element['resources']]
+ job['resource_ids'] = [node_dict['id'] for node_dict
+ in json_element['resources']]
else:
job['t_from'] = "As soon as possible"
job['t_until'] = "As soon as possible"
job['resource_ids'] = ["Undefined"]
-
job['state'] = json_element['state']
job['lease_id'] = json_element['id']
-
job['user'] = json_element['owner']
#logger.debug("OARRestapi \tParseReservedNodes job %s" %(job))
reservation_list.append(job)
""" Gets the list of nodes currently in use from the attributes of the
running jobs.
+ :returns: list of hostnames, the nodes that are currently involved in
+ running jobs.
+ :rtype: list
+
+
"""
- logger.debug("OARESTAPI \tParseRunningJobs__________________________ ")
+ logger.debug("OARESTAPI \tParseRunningJobs_________________ ")
#resources are listed inside the 'items' list from the json
nodes = []
- for job in self.json_page.raw_json['items']:
+ for job in self.json_page.raw_json['items']:
for node in job['nodes']:
nodes.append(node['network_address'])
return nodes
def ChangeRawJsonDependingOnApilibVersion(self):
+ """
+ Check if the OAR apilib version is different from 0.2.10, in which case
+ the Json answer is a dict instead of a plain list.
+
+ .. warning:: the whole code is assuming the json contains a 'items' key
+ .. seealso:: ConcatenateJsonPages, ParseJobs, ParseReservedNodes,
+ ParseJobsIdResources, ParseResources, ParseRunningJobs
+ .. todo:: Clean the whole code. Either suppose the apilib will always
+ provide the 'items' key, or handle different options.
+ """
if self.version_json_dict['apilib_version'] != "0.2.10":
self.json_page.raw_json = self.json_page.raw_json['items']
"""
return
- def ParseResourcesFull(self) :
+ def ParseResourcesFull(self):
""" This method is responsible for parsing all the attributes
of all the nodes returned by OAR when issuing a get resources full.
The information from the nodes and the sites are separated.
Updates the node_dictlist so that the dictionary of the platform's
nodes is available afterwards.
+ :returns: node_dictlist, a list of dictionaries about the nodes and
+ their properties.
+ :rtype: list
+
"""
- logger.debug("OARRESTAPI ParseResourcesFull________________________ ")
+ logger.debug("OARRESTAPI ParseResourcesFull___________ ")
#print self.json_page.raw_json[1]
#resources are listed inside the 'items' list from the json
self.ChangeRawJsonDependingOnApilibVersion()
self.ParseSites()
return self.node_dictlist
- def ParseResourcesFullSites(self) :
- """ UNUSED. Originally used to get information from the sites.
- ParseResourcesFull is used instead.
+ def ParseResourcesFullSites(self):
+ """ Called by GetSites which is unused.
+ Originally used to get information from the sites, with for each site
+ the list of nodes it has, along with their properties.
+
+ :return: site_dict, dictionary of sites
+ :rtype: dict
+
+ .. warning:: unused
+ .. seealso:: GetSites (IotlabTestbedAPI)
"""
self.ChangeRawJsonDependingOnApilibVersion()
for dictline in self.json_page.raw_json:
node_id = None
# dictionary is empty and/or a new node has to be inserted
- node_id = _resources_fulljson_dict['network_address'](\
- self.node_dictlist, dictline['network_address'])
+ node_id = _resources_fulljson_dict['network_address'](
+ self.node_dictlist, dictline['network_address'])
for k in keys:
if k in dictline:
if k == 'network_address':
continue
- _resources_fulljson_dict[k](\
- self.node_dictlist[node_id], dictline[k])
+ _resources_fulljson_dict[k](
+ self.node_dictlist[node_id], dictline[k])
#The last property has been inserted in the property tuple list,
#reset node_id
"""
return root_auth + '.' + hostname
-
-
def ParseSites(self):
""" Returns a list of dictionnaries containing the sites' attributes."""
#%(self.node_dictlist))
# Create a list of nodes per site_id
for node_id in self.node_dictlist:
- node = self.node_dictlist[node_id]
+ node = self.node_dictlist[node_id]
if node['site'] not in nodes_per_site:
nodes_per_site[node['site']] = []
# (name of the site) and value is a dictionary of properties,
# including the list of the node_ids
for node_id in self.node_dictlist:
- node = self.node_dictlist[node_id]
- node.update({'hrn':self.iotlab_hostname_to_hrn(self.interface_hrn,
- node['hostname'])})
- self.node_dictlist.update({node_id:node})
+ node = self.node_dictlist[node_id]
+ node.update({'hrn': self.iotlab_hostname_to_hrn(self.interface_hrn,
+ node['hostname'])})
+ self.node_dictlist.update({node_id: node})
if node['site'] not in self.site_dict:
self.site_dict[node['site']] = {
- 'site':node['site'],
- 'node_ids':nodes_per_site[node['site']],
- 'latitude':"48.83726",
- 'longitude':"- 2.10336",
+ 'site': node['site'],
+ 'node_ids': nodes_per_site[node['site']],
+ 'latitude': "48.83726",
+ 'longitude': "- 2.10336",
'name': config.SFA_REGISTRY_ROOT_AUTH,
- 'pcu_ids':[], 'max_slices':None, 'ext_consortium_id':None,
- 'max_slivers':None, 'is_public':True, 'peer_site_id': None,
- 'abbreviated_name':"iotlab", 'address_ids': [],
- 'url':"http,//www.senslab.info", 'person_ids':[],
- 'site_tag_ids':[], 'enabled': True, 'slice_ids':[],
- 'date_created': None, 'peer_id': None }
-
+ 'pcu_ids': [], 'max_slices': None,
+ 'ext_consortium_id': None,
+ 'max_slivers': None, 'is_public': True,
+ 'peer_site_id': None,
+ 'abbreviated_name': "iotlab", 'address_ids': [],
+ 'url': "https://portal.senslab.info", 'person_ids': [],
+ 'site_tag_ids': [], 'enabled': True, 'slice_ids': [],
+ 'date_created': None, 'peer_id': None
+ }
OARrequests_uri_dict = {
'GET_version':
- {'uri':'/oarapi/version.json', 'parse_func': ParseVersion},
+ {'uri': '/oarapi/version.json', 'parse_func': ParseVersion},
+
'GET_timezone':
- {'uri':'/oarapi/timezone.json' ,'parse_func': ParseTimezone },
+ {'uri': '/oarapi/timezone.json', 'parse_func': ParseTimezone},
+
'GET_jobs':
- {'uri':'/oarapi/jobs.json','parse_func': ParseJobs},
+ {'uri': '/oarapi/jobs.json', 'parse_func': ParseJobs},
+
'GET_jobs_id':
- {'uri':'/oarapi/jobs/id.json','parse_func': ParseJobsIds},
+ {'uri': '/oarapi/jobs/id.json', 'parse_func': ParseJobsIds},
+
'GET_jobs_id_resources':
- {'uri':'/oarapi/jobs/id/resources.json',\
- 'parse_func': ParseJobsIdResources},
+ {'uri': '/oarapi/jobs/id/resources.json',
+ 'parse_func': ParseJobsIdResources},
+
'GET_jobs_table':
- {'uri':'/oarapi/jobs/table.json','parse_func': ParseJobsTable},
+ {'uri': '/oarapi/jobs/table.json', 'parse_func': ParseJobsTable},
+
'GET_jobs_details':
- {'uri':'/oarapi/jobs/details.json',\
- 'parse_func': ParseJobsDetails},
- 'GET_reserved_nodes':
- {'uri':
- '/oarapi/jobs/details.json?state=Running,Waiting,Launching',\
- 'owner':'&user=',
- 'parse_func':ParseReservedNodes},
+ {'uri': '/oarapi/jobs/details.json', 'parse_func': ParseJobsDetails},
+ 'GET_reserved_nodes':
+ {'uri':
+ '/oarapi/jobs/details.json?state=Running,Waiting,Launching',
+ 'owner': '&user=', 'parse_func': ParseReservedNodes},
'GET_running_jobs':
- {'uri':'/oarapi/jobs/details.json?state=Running',\
- 'parse_func':ParseRunningJobs},
+ {'uri': '/oarapi/jobs/details.json?state=Running',
+ 'parse_func': ParseRunningJobs},
+
'GET_resources_full':
- {'uri':'/oarapi/resources/full.json',\
- 'parse_func': ParseResourcesFull},
+ {'uri': '/oarapi/resources/full.json',
+ 'parse_func': ParseResourcesFull},
+
'GET_sites':
- {'uri':'/oarapi/resources/full.json',\
- 'parse_func': ParseResourcesFullSites},
- 'GET_resources':
- {'uri':'/oarapi/resources.json' ,'parse_func': ParseResources},
- 'DELETE_jobs_id':
- {'uri':'/oarapi/jobs/id.json' ,'parse_func': ParseDeleteJobs}
- }
+ {'uri': '/oarapi/resources/full.json',
+ 'parse_func': ParseResourcesFullSites},
+ 'GET_resources':
+ {'uri': '/oarapi/resources.json', 'parse_func': ParseResources},
+ 'DELETE_jobs_id':
+ {'uri': '/oarapi/jobs/id.json', 'parse_func': ParseDeleteJobs}}
- def SendRequest(self, request, strval = None , username = None):
+ def SendRequest(self, request, strval=None, username=None):
""" Connects to OAR , sends the valid GET requests and uses
the appropriate json parsing functions.
+ :returns: calls to the appropriate parsing function, associated with the
+ GET request
+ :rtype: depends on the parsing function called.
+
+ .. seealso:: OARrequests_uri_dict
"""
save_json = None
self.json_page.ResetNextPage()
save_json = []
- if request in self.OARrequests_uri_dict :
+ if request in self.OARrequests_uri_dict:
while self.json_page.next_page:
- self.json_page.raw_json = self.server.GETRequestToOARRestAPI(\
- request, \
- strval, \
- self.json_page.next_offset, \
- username)
+ self.json_page.raw_json = self.server.GETRequestToOARRestAPI(
+ request,
+ strval,
+ self.json_page.next_offset,
+ username)
self.json_page.FindNextPage()
if self.json_page.concatenate:
save_json.append(self.json_page.raw_json)
- if self.json_page.concatenate and self.json_page.end :
+ if self.json_page.concatenate and self.json_page.end:
self.json_page.raw_json = \
self.json_page.ConcatenateJsonPages(save_json)
return self.OARrequests_uri_dict[request]['parse_func'](self)
else:
- logger.error("OARRESTAPI OARGetParse __init__ : ERROR_REQUEST " \
- %(request))
+ logger.error("OARRESTAPI OARGetParse __init__ : ERROR_REQUEST "
+ % (request))
+"""
+File providing methods to generate valid RSpecs for the Iotlab testbed.
+Contains methods to get information on slice, slivers, nodes and leases,
+formatting them and turn it into a RSpec.
+"""
from sfa.util.xrn import hrn_to_urn, urn_to_hrn, get_authority
from sfa.rspecs.rspec import RSpec
return (slices, slivers)
-
def get_nodes(self, slices=None, slivers=[], options=None):
"""Returns the nodes in the slice using the rspec format, with all the
nodes' properties.
:rtype: dict
.. note::There is no filtering of leases within a given time frame.
- All the running or scheduled leases are returned. options
- removed SA 15/05/2013
+ All the running or scheduled leases are returned. options
+ removed SA 15/05/2013
"""
+"""
+File containing the IotlabTestbedAPI, used to interact with nodes, users,
+slices, leases and keys, as well as the dedicated iotlab database and table,
+holding information about which slice is running which job.
+
+"""
from datetime import datetime
from sfa.util.sfalogging import logger
from sfa.storage.alchemy import dbsession
from sqlalchemy.orm import joinedload
from sfa.storage.model import RegRecord, RegUser, RegSlice, RegKey
-# from sfa.iotlab.iotlabpostgres import iotlab_dbsession, IotlabXP
from sfa.iotlab.iotlabpostgres import IotlabDB, IotlabXP
-from sfa.iotlab.OARrestapi import OARrestapi
+from sfa.iotlab.OARrestapi import OARrestapi
from sfa.iotlab.LDAPapi import LDAPapi
from sfa.util.xrn import Xrn, hrn_to_urn, get_authority
class IotlabTestbedAPI():
""" Class enabled to use LDAP and OAR api calls. """
+ _MINIMUM_DURATION = 600
+
def __init__(self, config):
"""Creates an instance of OARrestapi and LDAPapi which will be used to
issue calls to OAR or LDAP methods.
self.ldap = LDAPapi()
self.time_format = "%Y-%m-%d %H:%M:%S"
self.root_auth = config.SFA_REGISTRY_ROOT_AUTH
- self.grain = 1 # 10 mins lease minimum, 1 sec granularity
+ self.grain = 1 # 10 mins lease minimum, 1 sec granularity
#import logging, logging.handlers
#from sfa.util.sfalogging import _SfaLogger
#sql_logger = _SfaLogger(loggername = 'sqlalchemy.engine', \
#level=logging.DEBUG)
return
-
@staticmethod
def GetMinExperimentDurationInSec():
- return 600
+ """ Returns the minimum allowed duration for an experiment on the
+ testbed. In seconds.
+
+ """
+ return IotlabTestbedAPI._MINIMUM_DURATION
@staticmethod
def GetPeers (peer_filter=None ):
:rtype: dict
"""
- logger.debug("IOTLABDRIVER \tDeleteJobs jobid %s username %s "\
- %(job_id, username))
+ logger.debug("IOTLABDRIVER \tDeleteJobs jobid %s username %s "
+ % (job_id, username))
if not job_id or job_id is -1:
return
reqdict['method'] = "delete"
reqdict['strval'] = str(job_id)
-
- answer = self.oar.POSTRequestToOARRestAPI('DELETE_jobs_id', \
- reqdict,username)
+ answer = self.oar.POSTRequestToOARRestAPI('DELETE_jobs_id',
+ reqdict, username)
if answer['status'] == 'Delete request registered':
- ret = {job_id : True }
+ ret = {job_id: True}
else:
- ret = {job_id :False }
+ ret = {job_id: False}
logger.debug("IOTLABDRIVER \tDeleteJobs jobid %s \r\n answer %s \
- username %s" %(job_id, answer, username))
+ username %s" % (job_id, answer, username))
return ret
dbsession.add(sfa_record)
dbsession.commit()
#Update the reg-researcher dependance table
- sfa_record.reg_researchers = [user_record]
+ sfa_record.reg_researchers = [user_record]
dbsession.commit()
return
- def GetSites(self, site_filter_name_list = None, return_fields_list = None):
+ def GetSites(self, site_filter_name_list=None, return_fields_list=None):
+ """Returns the list of Iotlab's sites with the associated nodes and
+ their properties as dictionaries.
+
+ Uses the OAR request GET_sites to find the Iotlab's sites.
+
+ :param site_filter_name_list: used to specify specific sites
+ :param return_fields_list: field that has to be returned
+ :type site_filter_name_list: list
+ :type return_fields_list: list
+
+ .. warning:: unused
+ """
site_dict = self.oar.parser.SendRequest("GET_sites")
#site_dict : dict where the key is the site name
return_site_list = []
- if not ( site_filter_name_list or return_fields_list):
+ if not (site_filter_name_list or return_fields_list):
return_site_list = site_dict.values()
return return_site_list
try:
tmp[field] = site_dict[site_filter_name][field]
except KeyError:
- logger.error("GetSites KeyError %s "%(field))
+ logger.error("GetSites KeyError %s " % (field))
return None
return_site_list.append(tmp)
else:
- return_site_list.append( site_dict[site_filter_name])
-
+ return_site_list.append(site_dict[site_filter_name])
return return_site_list
-
-
-
#TODO : Check rights to delete person
def DeletePerson(self, person_record):
"""Disable an existing account in iotlab LDAP.
:rtype: dict
"""
- logger.debug("IOTLABDRIVER \t DeleteSliceFromNodese %s " %(slice_record))
+ logger.debug("IOTLABDRIVER \t DeleteSliceFromNodes %s "
+ % (slice_record))
if isinstance(slice_record['oar_job_id'], list):
oar_bool_answer = {}
oar_bool_answer.update(ret)
else:
- oar_bool_answer = [self.DeleteJobs(slice_record['oar_job_id'], \
- slice_record['user'])]
+ oar_bool_answer = [self.DeleteJobs(slice_record['oar_job_id'],
+ slice_record['user'])]
return oar_bool_answer
# return
-
def GetLeases(self, lease_filter_dict=None, login=None):
"""
reservation_list = []
#Find the slice associated with this user iotlab ldap uid
logger.debug(" IOTLABDRIVER.PY \tGetLeases login %s\
- unfiltered_reservation_list %s " %(login, unfiltered_reservation_list))
+ unfiltered_reservation_list %s "
+ % (login, unfiltered_reservation_list))
#Create user dict first to avoid looking several times for
#the same user in LDAP SA 27/07/12
job_oar_list = []
jobs_psql_query = self.iotlab_db.iotlab_session.query(IotlabXP).all()
- jobs_psql_dict = dict([(row.job_id, row.__dict__ ) for row in jobs_psql_query ])
+ jobs_psql_dict = dict([(row.job_id, row.__dict__)
+ for row in jobs_psql_query])
#jobs_psql_dict = jobs_psql_dict)
- logger.debug("IOTLABDRIVER \tGetLeases jobs_psql_dict %s"\
- %(jobs_psql_dict))
- jobs_psql_id_list = [ row.job_id for row in jobs_psql_query ]
-
-
+ logger.debug("IOTLABDRIVER \tGetLeases jobs_psql_dict %s"
+ % (jobs_psql_dict))
+ jobs_psql_id_list = [row.job_id for row in jobs_psql_query]
for resa in unfiltered_reservation_list:
- logger.debug("IOTLABDRIVER \tGetLeases USER %s"\
- %(resa['user']))
+ logger.debug("IOTLABDRIVER \tGetLeases USER %s"
+ % (resa['user']))
#Construct list of jobs (runing, waiting..) in oar
job_oar_list.append(resa['lease_id'])
#If there is information on the job in IOTLAB DB ]
#(slice used and job id)
if resa['lease_id'] in jobs_psql_dict:
job_info = jobs_psql_dict[resa['lease_id']]
- logger.debug("IOTLABDRIVER \tGetLeases job_info %s"\
- %(job_info))
+ logger.debug("IOTLABDRIVER \tGetLeases job_info %s"
+ % (job_info))
resa['slice_hrn'] = job_info['slice_hrn']
resa['slice_id'] = hrn_to_urn(resa['slice_hrn'], 'slice')
#otherwise, assume it is a iotlab slice:
else:
- resa['slice_id'] = hrn_to_urn(self.root_auth+'.'+ \
- resa['user'] +"_slice" , 'slice')
+ resa['slice_id'] = hrn_to_urn(self.root_auth + '.' +
+ resa['user'] + "_slice", 'slice')
resa['slice_hrn'] = Xrn(resa['slice_id']).get_hrn()
resa['component_id_list'] = []
if lease_filter_dict:
logger.debug("IOTLABDRIVER \tGetLeases resa_ %s \
- \r\n leasefilter %s" %(resa, lease_filter_dict))
+ \r\n leasefilter %s" % (resa, lease_filter_dict))
if lease_filter_dict['name'] == resa['slice_hrn']:
reservation_list.append(resa)
if lease_filter_dict is None:
reservation_list = unfiltered_reservation_list
-
self.iotlab_db.update_jobs_in_iotlabdb(job_oar_list, jobs_psql_id_list)
- logger.debug(" IOTLABDRIVER.PY \tGetLeases reservation_list %s"\
- %(reservation_list))
+ logger.debug(" IOTLABDRIVER.PY \tGetLeases reservation_list %s"
+ % (reservation_list))
return reservation_list
key_dict = {}
for key in keys:
- key_dict[key.key] = {'key_id': key.key_id, 'key': key.key, \
- 'email': key.reg_user.email, 'hrn':key.reg_user.hrn}
+ key_dict[key.key] = {'key_id': key.key_id, 'key': key.key,
+ 'email': key.reg_user.email,
+ 'hrn': key.reg_user.hrn}
#ldap_rslt = self.ldap.LdapSearch({'enabled']=True})
#user_by_email = dict((user[1]['mail'][0], user[1]['sshPublicKey']) \
#for user in ldap_rslt)
- logger.debug("IOTLABDRIVER GetKeys -key_dict %s \r\n " %(key_dict))
+ logger.debug("IOTLABDRIVER GetKeys -key_dict %s \r\n " % (key_dict))
return key_dict
#TODO : test
"""
all_user_keys = user_record['keys']
all_user_keys.remove(key_string)
- new_attributes = {'sshPublicKey':all_user_keys}
+ new_attributes = {'sshPublicKey':all_user_keys}
ret = self.ldap.LdapModifyUser(user_record, new_attributes)
- logger.debug("IOTLABDRIVER DeleteKey %s- "%(ret))
+ logger.debug("IOTLABDRIVER DeleteKey %s- " % (ret))
return ret['bool']
#Only one entry for one user = one slice in iotlab_xp table
#slicerec = dbsession.query(RegRecord).filter_by(hrn = slice_filter).first()
- raw_slicerec = dbsession.query(RegSlice).options(joinedload('reg_researchers')).filter_by(hrn = slice_filter).first()
+ raw_slicerec = dbsession.query(RegSlice).options(joinedload('reg_researchers')).filter_by(hrn=slice_filter).first()
#raw_slicerec = dbsession.query(RegRecord).filter_by(hrn = slice_filter).first()
if raw_slicerec:
#load_reg_researcher
#raw_slicerec.reg_researchers
raw_slicerec = raw_slicerec.__dict__
logger.debug(" IOTLABDRIVER \t get_slice_info slice_filter %s \
- raw_slicerec %s"%(slice_filter, raw_slicerec))
+ raw_slicerec %s" % (slice_filter, raw_slicerec))
slicerec = raw_slicerec
#only one researcher per slice so take the first one
#slicerec['reg_researchers'] = raw_slicerec['reg_researchers']
return None
@staticmethod
- def _sql_get_slice_info_from_user(slice_filter ):
+ def _sql_get_slice_info_from_user(slice_filter):
"""
Get the slice record based on the user recordid by using a joinedload
on the relationship reg_slices_as_researcher. Format the sql record
:rtype:dict or None..
"""
#slicerec = dbsession.query(RegRecord).filter_by(record_id = slice_filter).first()
- raw_slicerec = dbsession.query(RegUser).options(joinedload('reg_slices_as_researcher')).filter_by(record_id = slice_filter).first()
+ raw_slicerec = dbsession.query(RegUser).options(joinedload('reg_slices_as_researcher')).filter_by(record_id=slice_filter).first()
#raw_slicerec = dbsession.query(RegRecord).filter_by(record_id = slice_filter).first()
#Put it in correct order
- user_needed_fields = ['peer_authority', 'hrn', 'last_updated', 'classtype', 'authority', 'gid', 'record_id', 'date_created', 'type', 'email', 'pointer']
- slice_needed_fields = ['peer_authority', 'hrn', 'last_updated', 'classtype', 'authority', 'gid', 'record_id', 'date_created', 'type', 'pointer']
+ user_needed_fields = ['peer_authority', 'hrn', 'last_updated',
+ 'classtype', 'authority', 'gid', 'record_id',
+ 'date_created', 'type', 'email', 'pointer']
+ slice_needed_fields = ['peer_authority', 'hrn', 'last_updated',
+ 'classtype', 'authority', 'gid', 'record_id',
+ 'date_created', 'type', 'pointer']
if raw_slicerec:
#raw_slicerec.reg_slices_as_researcher
raw_slicerec = raw_slicerec.__dict__
slicerec = {}
slicerec = \
- dict([(k, raw_slicerec['reg_slices_as_researcher'][0].__dict__[k]) \
- for k in slice_needed_fields])
- slicerec['reg_researchers'] = dict([(k, raw_slicerec[k]) \
- for k in user_needed_fields])
+ dict([(k, raw_slicerec[
+ 'reg_slices_as_researcher'][0].__dict__[k])
+ for k in slice_needed_fields])
+ slicerec['reg_researchers'] = dict([(k, raw_slicerec[k])
+ for k in user_needed_fields])
#TODO Handle multiple slices for one user SA 10/12/12
#for now only take the first slice record associated to the rec user
##slicerec = raw_slicerec['reg_slices_as_researcher'][0].__dict__
else:
return None
- def _get_slice_records(self, slice_filter = None, \
- slice_filter_type = None):
+ def _get_slice_records(self, slice_filter=None,
+ slice_filter_type=None):
"""
Get the slice record depending on the slice filter and its type.
:param slice_filter: Can be either the slice hrn or the user's record
slicerec = self._sql_get_slice_info(slice_filter)
if slicerec is None:
- return None
+ return None
#return login, None
#Get slice based on user id
#If several jobs for one slice , put the slice record into
# each lease information dict
- for lease in leases_list :
+ for lease in leases_list:
slicerec_dict = {}
logger.debug("IOTLABDRIVER.PY \tGetSlices slice_filter %s \
- \ lease['slice_hrn'] %s"
- % (slice_filter, lease['slice_hrn']))
+ \t lease['slice_hrn'] %s"
+ % (slice_filter, lease['slice_hrn']))
if lease['slice_hrn'] == slice_hrn:
slicerec_dict['slice_hrn'] = lease['slice_hrn']
slicerec_dict['hrn'] = lease['slice_hrn']
slicerec_dict['oar_job_id'] = lease['lease_id']
slicerec_dict.update(
{'list_node_ids':
- {'hostname': lease['reserved_nodes']}
- })
+ {'hostname': lease['reserved_nodes']}})
slicerec_dict.update({'node_ids': lease['reserved_nodes']})
#Update lease dict with the slice record
if fixed_slicerec_dict:
fixed_slicerec_dict['oar_job_id'] = []
fixed_slicerec_dict['oar_job_id'].append(
- slicerec_dict['oar_job_id'])
+ slicerec_dict['oar_job_id'])
slicerec_dict.update(fixed_slicerec_dict)
#slicerec_dict.update({'hrn':\
#str(fixed_slicerec_dict['slice_hrn'])})
return_slicerec_dictlist.append(slicerec_dict)
logger.debug("IOTLABDRIVER.PY \tGetSlices \
- OHOHOHOH %s" %(return_slicerec_dictlist ))
+ OHOHOHOH %s" %(return_slicerec_dictlist))
logger.debug("IOTLABDRIVER.PY \tGetSlices \
slicerec_dict %s return_slicerec_dictlist %s \
lease['reserved_nodes'] \
- %s" %(slicerec_dict, return_slicerec_dictlist, \
- lease['reserved_nodes'] ))
+ %s" % (slicerec_dict, return_slicerec_dictlist,
+ lease['reserved_nodes']))
logger.debug("IOTLABDRIVER.PY \tGetSlices RETURN \
- return_slicerec_dictlist %s" \
- %(return_slicerec_dictlist))
+ return_slicerec_dictlist %s"
+ % (return_slicerec_dictlist))
return return_slicerec_dictlist
#Get all slices from the iotlab sfa database ,
#put them in dict format
#query_slice_list = dbsession.query(RegRecord).all()
- query_slice_list = dbsession.query(RegSlice).options(joinedload('reg_researchers')).all()
+ query_slice_list = \
+ dbsession.query(RegSlice).options(joinedload('reg_researchers')).all()
for record in query_slice_list:
tmp = record.__dict__
#Get all the jobs reserved nodes
leases_list = self.GetReservedNodes()
-
for fixed_slicerec_dict in return_slicerec_dictlist:
slicerec_dict = {}
#Check if the slice belongs to a iotlab user
if fixed_slicerec_dict['peer_authority'] is None:
- owner = fixed_slicerec_dict['hrn'].split(".")[1].split("_")[0]
+ owner = fixed_slicerec_dict['hrn'].split(
+ ".")[1].split("_")[0]
else:
owner = None
for lease in leases_list:
slicerec_dict['oar_job_id'] = lease['lease_id']
#for reserved_node in lease['reserved_nodes']:
- logger.debug("IOTLABDRIVER.PY \tGetSlices lease %s "\
- %(lease ))
-
- slicerec_dict.update({'node_ids':lease['reserved_nodes']})
- slicerec_dict.update({'list_node_ids':{'hostname':lease['reserved_nodes']}})
+ logger.debug("IOTLABDRIVER.PY \tGetSlices lease %s "
+ % (lease))
+
+ slicerec_dict.update({'node_ids':
+ lease['reserved_nodes']})
+ slicerec_dict.update({'list_node_ids':
+ {'hostname':
+ lease['reserved_nodes']}})
slicerec_dict.update(fixed_slicerec_dict)
#slicerec_dict.update({'hrn':\
#str(fixed_slicerec_dict['slice_hrn'])})
+"""
+Implements what a driver should provide for SFA to work.
+"""
from sfa.util.faults import SliverDoesNotExist, UnknownSfaType
from sfa.util.sfalogging import logger
from sfa.storage.alchemy import dbsession
from sfa.util.xrn import Xrn, hrn_to_urn, get_authority
-
-from sfa.iotlab.iotlabpostgres import IotlabDB
-
from sfa.iotlab.iotlabaggregate import IotlabAggregate, iotlab_xrn_to_hostname
from sfa.iotlab.iotlabslices import IotlabSlices
:type config: Config object
"""
- Driver.__init__ (self, config)
+ Driver.__init__(self, config)
self.config = config
-
- # self.db = IotlabDB(config, debug = False)
self.iotlab_api = IotlabTestbedAPI(config)
self.cache = None
- def augment_records_with_testbed_info (self, record_list ):
+ def augment_records_with_testbed_info(self, record_list):
"""
Adds specific testbed info to the records.
:rtype: list
"""
- return self.fill_record_info (record_list)
+ return self.fill_record_info(record_list)
def fill_record_info(self, record_list):
"""
#information is in the Iotlab's DB.
if str(record['type']) == 'slice':
if 'reg_researchers' in record and \
- isinstance(record['reg_researchers'], list) :
+ isinstance(record['reg_researchers'], list):
record['reg_researchers'] = \
record['reg_researchers'][0].__dict__
- record.update({'PI':[record['reg_researchers']['hrn']],
- 'researcher': [record['reg_researchers']['hrn']],
- 'name':record['hrn'],
- 'oar_job_id':[],
- 'node_ids': [],
- 'person_ids': [record['reg_researchers']
- ['record_id']],
- # For client_helper.py compatibility
- 'geni_urn': '',
- # For client_helper.py compatibility
- 'keys': '',
- # For client_helper.py compatibility
- 'key_ids': ''})
+ record.update(
+ {'PI': [record['reg_researchers']['hrn']],
+ 'researcher': [record['reg_researchers']['hrn']],
+ 'name': record['hrn'],
+ 'oar_job_id': [],
+ 'node_ids': [],
+ 'person_ids': [record['reg_researchers']
+ ['record_id']],
+ # For client_helper.py compatibility
+ 'geni_urn': '',
+ # For client_helper.py compatibility
+ 'keys': '',
+ # For client_helper.py compatibility
+ 'key_ids': ''})
#Get iotlab slice record and oar job id if any.
recslice_list = self.iotlab_api.GetSlices(
% (rec['oar_job_id']))
record['node_ids'] = [self.iotlab_api.root_auth +
- hostname for hostname in
- rec['node_ids']]
+ hostname for hostname in
+ rec['node_ids']]
except KeyError:
pass
#Will update PIs and researcher for the slice
recuser = recslice_list[0]['reg_researchers']
- logger.debug( "IOTLABDRIVER.PY \t fill_record_info USER \
+ logger.debug("IOTLABDRIVER.PY \t fill_record_info USER \
recuser %s \r\n \r\n" % (recuser))
recslice = {}
recslice = recslice_list[0]
- recslice.update({'PI':[recuser['hrn']],
- 'researcher': [recuser['hrn']],
- 'name':record['hrn'],
- 'node_ids': [],
- 'oar_job_id': [],
- 'person_ids':[recuser['record_id']]})
+ recslice.update(
+ {'PI': [recuser['hrn']],
+ 'researcher': [recuser['hrn']],
+ 'name': record['hrn'],
+ 'node_ids': [],
+ 'oar_job_id': [],
+ 'person_ids': [recuser['record_id']]})
try:
for rec in recslice_list:
recslice['oar_job_id'].append(rec['oar_job_id'])
except KeyError:
pass
- recslice.update({'type':'slice', \
- 'hrn':recslice_list[0]['hrn']})
-
+ recslice.update({'type': 'slice',
+ 'hrn': recslice_list[0]['hrn']})
#GetPersons takes [] as filters
user_iotlab = self.iotlab_api.GetPersons([record])
-
record.update(user_iotlab[0])
#For client_helper.py compatibility
- record.update( { 'geni_urn':'',
- 'keys':'',
- 'key_ids':'' })
+ record.update(
+ {'geni_urn': '',
+ 'keys': '',
+ 'key_ids': ''})
record_list.append(recslice)
logger.debug("IOTLABDRIVER.PY \t \
fill_record_info ADDING SLICE\
INFO TO USER records %s" % (record_list))
-
except TypeError, error:
- logger.log_exc("IOTLABDRIVER \t fill_record_info EXCEPTION %s"\
- % (error))
+ logger.log_exc("IOTLABDRIVER \t fill_record_info EXCEPTION %s"
+ % (error))
return record_list
-
def sliver_status(self, slice_urn, slice_hrn):
"""
-
Receive a status request for slice named urn/hrn
urn:publicid:IDN+iotlab+nturro_slice hrn iotlab.nturro_slice
shall return a structure as described in
"""
-
#First get the slice with the slice hrn
slice_list = self.iotlab_api.GetSlices(slice_filter=slice_hrn,
slice_filter_type='slice_hrn')
#Used for fetching the user info witch comes along the slice info
one_slice = slice_list[0]
-
#Make a list of all the nodes hostnames in use for this slice
slice_nodes_list = []
- #for single_slice in slice_list:
- #for node in single_slice['node_ids']:
- #slice_nodes_list.append(node['hostname'])
- #for node in one_slice:
- #slice_nodes_list.append(node['hostname'])
slice_nodes_list = one_slice['node_ids']
#Get all the corresponding nodes details
- nodes_all = self.iotlab_api.GetNodes({'hostname':slice_nodes_list},
- ['node_id', 'hostname','site','boot_state'])
- nodeall_byhostname = dict([(one_node['hostname'], one_node) \
- for one_node in nodes_all])
-
-
+ nodes_all = self.iotlab_api.GetNodes(
+ {'hostname': slice_nodes_list},
+ ['node_id', 'hostname', 'site', 'boot_state'])
+ nodeall_byhostname = dict([(one_node['hostname'], one_node)
+ for one_node in nodes_all])
for single_slice in slice_list:
-
#For compatibility
top_level_status = 'empty'
result = {}
result.fromkeys(
- ['geni_urn','geni_error', 'iotlab_login','geni_status',
- 'geni_resources'], None)
+ ['geni_urn', 'geni_error', 'iotlab_login', 'geni_status',
+ 'geni_resources'], None)
# result.fromkeys(\
# ['geni_urn','geni_error', 'pl_login','geni_status',
# 'geni_resources'], None)
# result['pl_login'] = one_slice['reg_researchers'][0].hrn
result['iotlab_login'] = one_slice['user']
logger.debug("Slabdriver - sliver_status Sliver status \
- urn %s hrn %s single_slice %s \r\n " \
- %(slice_urn, slice_hrn, single_slice))
+ urn %s hrn %s single_slice %s \r\n "
+ % (slice_urn, slice_hrn, single_slice))
if 'node_ids' not in single_slice:
#No job in the slice
res = {}
res['iotlab_hostname'] = node_hostname
res['iotlab_boot_state'] = \
- nodeall_byhostname[node_hostname]['boot_state']
+ nodeall_byhostname[node_hostname]['boot_state']
#res['pl_hostname'] = node['hostname']
#res['pl_boot_state'] = \
#nodeall_byhostname[node['hostname']]['boot_state']
#res['pl_last_contact'] = strftime(self.time_format, \
#gmtime(float(timestamp)))
- sliver_id = Xrn(slice_urn, type='slice', \
- id=nodeall_byhostname[node_hostname]['node_id']).urn
+ sliver_id = Xrn(
+ slice_urn, type='slice',
+ id=nodeall_byhostname[node_hostname]['node_id']).urn
res['geni_urn'] = sliver_id
#node_name = node['hostname']
result['geni_status'] = top_level_status
result['geni_resources'] = resources
- logger.debug("IOTLABDRIVER \tsliver_statusresources %s res %s "\
- %(resources,res))
+ logger.debug("IOTLABDRIVER \tsliver_statusresources %s res %s "
+ % (resources, res))
return result
@staticmethod
:rtype: RegUser
"""
- return dbsession.query(RegRecord).filter_by(hrn = hrn).first()
-
+ return dbsession.query(RegRecord).filter_by(hrn=hrn).first()
- def testbed_name (self):
+ def testbed_name(self):
"""
Returns testbed's name.
-
+ :returns: testbed authority name.
:rtype: string
"""
return self.hrn
# 'geni_request_rspec_versions' and 'geni_ad_rspec_versions' are mandatory
- def aggregate_version (self):
+ def aggregate_version(self):
"""
- Returns the testbed's supported rspec advertisement and
- request versions.
-
+ Returns the testbed's supported rspec advertisement and request
+ versions.
+ :returns: rspec versions supported as a dictionary.
:rtype: dict
"""
if rspec_version.content_type in ['*', 'request']:
request_rspec_versions.append(rspec_version.to_dict())
return {
- 'testbed':self.testbed_name(),
+ 'testbed': self.testbed_name(),
'geni_request_rspec_versions': request_rspec_versions,
- 'geni_ad_rspec_versions': ad_rspec_versions,
- }
-
-
+ 'geni_ad_rspec_versions': ad_rspec_versions}
def _get_requested_leases_list(self, rspec):
"""
if not lease.get('lease_id'):
if get_authority(lease['component_id']) == \
- self.iotlab_api.root_auth:
+ self.iotlab_api.root_auth:
single_requested_lease['hostname'] = \
- iotlab_xrn_to_hostname(\
- lease.get('component_id').strip())
+ iotlab_xrn_to_hostname(\
+ lease.get('component_id').strip())
single_requested_lease['start_time'] = \
- lease.get('start_time')
+ lease.get('start_time')
single_requested_lease['duration'] = lease.get('duration')
#Check the experiment's duration is valid before adding
#the lease to the requested leases list
duration_in_seconds = \
- int(single_requested_lease['duration'])
- if duration_in_seconds >= self.iotlab_api.GetMinExperimentDurationInSec() :
+ int(single_requested_lease['duration'])
+ if duration_in_seconds >= self.iotlab_api.GetMinExperimentDurationInSec():
requested_lease_list.append(single_requested_lease)
return requested_lease_list
if isinstance(lease['hostname'], str):
lease['hostname'] = [lease['hostname']]
-
requested_job_dict[lease['start_time']] = lease
else:
job_lease = requested_job_dict[lease['start_time']]
- if lease['duration'] == job_lease['duration'] :
+ if lease['duration'] == job_lease['duration']:
job_lease['hostname'].append(lease['hostname'])
return requested_job_dict
"""
requested_lease_list = self._get_requested_leases_list(rspec)
- logger.debug("IOTLABDRIVER _process_requested_jobs requested_lease_list \
- %s"%(requested_lease_list))
- job_dict = self._group_leases_by_start_time(requested_lease_list)
+ logger.debug("IOTLABDRIVER _process_requested_jobs \
+ requested_lease_list %s" % (requested_lease_list))
+ job_dict = self._group_leases_by_start_time(requested_lease_list)
logger.debug("IOTLABDRIVER _process_requested_jobs job_dict\
- %s"%(job_dict))
+ %s" % (job_dict))
return job_dict
- def create_sliver (self, slice_urn, slice_hrn, creds, rspec_string, \
- users, options):
+ def create_sliver(self, slice_urn, slice_hrn, creds, rspec_string,
+ users, options):
"""Answer to CreateSliver.
Creates the leases and slivers for the users from the information
if users:
slice_record = users[0].get('slice_record', {})
logger.debug("IOTLABDRIVER.PY \t ===============create_sliver \t\
- creds %s \r\n \r\n users %s" \
- %(creds, users))
- slice_record['user'] = {'keys':users[0]['keys'], \
- 'email':users[0]['email'], \
- 'hrn':slice_record['reg-researchers'][0]}
+ creds %s \r\n \r\n users %s"
+ % (creds, users))
+ slice_record['user'] = {'keys': users[0]['keys'],
+ 'email': users[0]['email'],
+ 'hrn': slice_record['reg-researchers'][0]}
# parse rspec
rspec = RSpec(rspec_string)
logger.debug("IOTLABDRIVER.PY \t create_sliver \trspec.version \
- %s slice_record %s users %s" \
- %(rspec.version,slice_record, users))
-
+ %s slice_record %s users %s"
+ % (rspec.version, slice_record, users))
# ensure site record exists?
# ensure slice record exists
- #Removed options to verify_slice SA 14/08/12
- sfa_slice = slices.verify_slice(slice_hrn, slice_record, peer, \
- sfa_peer)
+ #Removed options in verify_slice SA 14/08/12
+ #Removed peer record in verify_slice SA 18/07/13
+ sfa_slice = slices.verify_slice(slice_hrn, slice_record, sfa_peer)
# ensure person records exists
- #verify_persons returns added persons but since the return value
+ #verify_persons returns added persons but the return value
#is not used
- slices.verify_persons(slice_hrn, sfa_slice, users, peer, \
- sfa_peer, options=options)
+ #Removed peer record and sfa_peer in verify_persons SA 18/07/13
+ slices.verify_persons(slice_hrn, sfa_slice, users, options=options)
#requested_attributes returned by rspec.version.get_slice_attributes()
#unused, removed SA 13/08/12
#rspec.version.get_slice_attributes()
- logger.debug("IOTLABDRIVER.PY create_sliver slice %s " %(sfa_slice))
+ logger.debug("IOTLABDRIVER.PY create_sliver slice %s " % (sfa_slice))
# add/remove slice from nodes
#requested_slivers = [node.get('component_id') \
- #for node in rspec.version.get_nodes_with_slivers()\
- #if node.get('authority_id') is self.iotlab_api.root_auth]
+ #for node in rspec.version.get_nodes_with_slivers()\
+ #if node.get('authority_id') is self.iotlab_api.root_auth]
#l = [ node for node in rspec.version.get_nodes_with_slivers() ]
#logger.debug("SLADRIVER \tcreate_sliver requested_slivers \
#requested_slivers %s listnodes %s" \
#verify_slice_nodes returns nodes, but unused here. Removed SA 13/08/12.
#slices.verify_slice_nodes(sfa_slice, requested_slivers, peer)
-
requested_job_dict = self._process_requested_jobs(rspec)
-
- logger.debug("IOTLABDRIVER.PY \tcreate_sliver requested_job_dict %s "\
- %(requested_job_dict))
+ logger.debug("IOTLABDRIVER.PY \tcreate_sliver requested_job_dict %s "
+ % (requested_job_dict))
#verify_slice_leases returns the leases , but the return value is unused
#here. Removed SA 13/08/12
- slices.verify_slice_leases(sfa_slice, \
- requested_job_dict, peer)
+ slices.verify_slice_leases(sfa_slice,
+ requested_job_dict, peer)
- return aggregate.get_rspec(slice_xrn=slice_urn, \
- login=sfa_slice['login'], version=rspec.version)
+ return aggregate.get_rspec(slice_xrn=slice_urn,
+ login=sfa_slice['login'],
+ version=rspec.version)
-
- def delete_sliver (self, slice_urn, slice_hrn, creds, options):
+ def delete_sliver(self, slice_urn, slice_hrn, creds, options):
"""
Deletes the lease associated with the slice hrn and the credentials
if the slice belongs to iotlab. Answer to DeleteSliver.
+ :param slice_urn: urn of the slice
+ :param slice_hrn: name of the slice
+ :param creds: slice credentials
+ :type slice_urn: string
+ :type slice_hrn: string
+ :type creds: ? unused
+
:returns: 1 if the slice to delete was not found on iotlab,
True if the deletion was successful, False otherwise otherwise.
.. note:: Should really be named delete_leases because iotlab does
not have any slivers, but only deals with leases. However,
- SFA api only have delete_sliver define so far. SA 13.05/2013
+ SFA api only has delete_sliver defined so far. SA 13/05/2013
+ .. note:: creds are unused, and are not used either in the dummy driver
+ delete_sliver.
"""
- sfa_slice_list = self.iotlab_api.GetSlices(slice_filter = slice_hrn, \
- slice_filter_type = 'slice_hrn')
+ sfa_slice_list = self.iotlab_api.GetSlices(
+ slice_filter=slice_hrn,
+ slice_filter_type='slice_hrn')
if not sfa_slice_list:
return 1
#Delete all leases in the slice
for sfa_slice in sfa_slice_list:
-
-
- logger.debug("IOTLABDRIVER.PY delete_sliver slice %s" %(sfa_slice))
+ logger.debug("IOTLABDRIVER.PY delete_sliver slice %s" % (sfa_slice))
slices = IotlabSlices(self)
# determine if this is a peer slice
peer = slices.get_peer(slice_hrn)
logger.debug("IOTLABDRIVER.PY delete_sliver peer %s \
- \r\n \t sfa_slice %s " %(peer, sfa_slice))
+ \r\n \t sfa_slice %s " % (peer, sfa_slice))
try:
-
self.iotlab_api.DeleteSliceFromNodes(sfa_slice)
return True
- except :
+ except:
return False
-
def list_resources (self, slice_urn, slice_hrn, creds, options):
"""
nodes in a rspec format. Answer to ListResources.
Caching unused.
+ :param slice_urn: urn of the slice
+ :param slice_hrn: name of the slice
+ :param creds: slice credentials
+ :type slice_urn: string
+ :type slice_hrn: string
+ :type creds: ? unused
:param options: options used when listing resources (list_leases, info,
geni_available)
:returns: rspec string in xml
:rtype: string
+ .. note:: creds are unused
"""
#cached_requested = options.get('cached', True)
version_manager = VersionManager()
# get the rspec's return format from options
rspec_version = \
- version_manager.get_version(options.get('geni_rspec_version'))
+ version_manager.get_version(options.get('geni_rspec_version'))
version_string = "rspec_%s" % (rspec_version)
#panos adding the info option to the caching key (can be improved)
if options.get('info'):
version_string = version_string + "_" + \
- options.get('info', 'default')
+ options.get('info', 'default')
# Adding the list_leases option to the caching key
if options.get('list_leases'):
#panos: passing user-defined options
aggregate = IotlabAggregate(self)
- rspec = aggregate.get_rspec(slice_xrn=slice_urn, \
- version=rspec_version, options=options)
+ rspec = aggregate.get_rspec(slice_xrn=slice_urn,
+ version=rspec_version, options=options)
# cache the result
#if self.cache and not slice_hrn:
return rspec
- def list_slices (self, creds, options):
+ def list_slices(self, creds, options):
"""Answer to ListSlices.
List slices belonging to iotlab, returns slice urns list.
:returns: slice urns list
:rtype: list
+ .. note:: creds are unused
"""
# look in cache first
#if self.cache:
# get data from db
slices = self.iotlab_api.GetSlices()
- logger.debug("IOTLABDRIVER.PY \tlist_slices hrn %s \r\n \r\n" %(slices))
+ logger.debug("IOTLABDRIVER.PY \tlist_slices hrn %s \r\n \r\n"
+ % (slices))
slice_hrns = [iotlab_slice['hrn'] for iotlab_slice in slices]
- slice_urns = [hrn_to_urn(slice_hrn, 'slice') \
- for slice_hrn in slice_hrns]
+ slice_urns = [hrn_to_urn(slice_hrn, 'slice')
+ for slice_hrn in slice_hrns]
# cache the result
#if self.cache:
return slice_urns
- def register (self, sfa_record, hrn, pub_key):
+ def register(self, sfa_record, hrn, pub_key):
"""
Adding new user, slice, node or site should not be handled
by SFA.
:param sfa_record: record provided by the client of the
Register API call.
:type sfa_record: dict
+ :param pub_key: public key of the user
+ :type pub_key: string
+
+ .. note:: DOES NOTHING. Returns -1.
"""
return -1
- def update (self, old_sfa_record, new_sfa_record, hrn, new_key):
+ def update(self, old_sfa_record, new_sfa_record, hrn, new_key):
"""
No site or node record update allowed in Iotlab.
The only modifications authorized here are key deletion/addition
:param old_sfa_record: what is in the db for this hrn
:param new_sfa_record: what was passed to the Update call
+ :param new_key: the new user's public key
+ :param hrn: the user's sfa hrn
+ :type old_sfa_record: dictionary
+ :type new_sfa_record: dictionary
+ :type pub_key: string
+ :type hrn: string
+ TODO: needs review
.. seealso::: update in driver.py.
"""
-
pointer = old_sfa_record['pointer']
old_sfa_record_type = old_sfa_record['type']
# new_key implemented for users only
- if new_key and old_sfa_record_type not in [ 'user' ]:
+ if new_key and old_sfa_record_type not in ['user']:
raise UnknownSfaType(old_sfa_record_type)
-
if old_sfa_record_type == "user":
update_fields = {}
all_fields = new_sfa_record
if key in ['key', 'password']:
update_fields[key] = all_fields[key]
-
if new_key:
# must check this key against the previous one if it exists
persons = self.iotlab_api.GetPersons([old_sfa_record])
#remove all the other keys
for key in keys_dict:
self.iotlab_api.DeleteKey(person, key)
- self.iotlab_api.AddPersonKey(person, \
- {'sshPublicKey': person['pkey']},{'sshPublicKey': new_key} )
- #self.iotlab_api.AddPersonKey(person, {'key_type': 'ssh', \
- #'key': new_key})
+ self.iotlab_api.AddPersonKey(
+ person, {'sshPublicKey': person['pkey']},
+ {'sshPublicKey': new_key})
return True
-
- def remove (self, sfa_record):
+ def remove(self, sfa_record):
"""
Removes users only. Mark the user as disabled in
LDAP. The user and his slice are then deleted from the
db by running an import on the registry.
-
-
:param sfa_record: record is the existing sfa record in the db
:type sfa_record: dict
..warning::As fas as the slice is concerned, here only the leases are
removed from the slice. The slice is record itself is not removed
from the db.
+ TODO: needs review
TODO : REMOVE SLICE FROM THE DB AS WELL? SA 14/05/2013,
#No registering at a given site in Iotlab.
#Once registered to the LDAP, all iotlab sites are
#accesible.
- if person :
+ if person:
#Mark account as disabled in ldap
return self.iotlab_api.DeletePerson(sfa_record)
elif sfa_record_type == 'slice':
- if self.iotlab_api.GetSlices(slice_filter = hrn, \
- slice_filter_type = 'slice_hrn'):
+ if self.iotlab_api.GetSlices(slice_filter=hrn,
+ slice_filter_type='slice_hrn'):
ret = self.iotlab_api.DeleteSlice(sfa_record)
-
-
-
- return True
\ No newline at end of file
+ return True
+"""
+File defining classes to handle the table in the iotlab dedicated database.
+"""
+
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
-from sfa.util.config import Config
+# from sfa.util.config import Config
from sfa.util.sfalogging import logger
from sqlalchemy import Column, Integer, String
from sqlalchemy import Table, MetaData
from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.dialects import postgresql
+# from sqlalchemy.dialects import postgresql
from sqlalchemy.exc import NoSuchTableError
#Dict holding the columns names of the table as keys
#and their type, used for creation of the table
slice_table = {'record_id_user': 'integer PRIMARY KEY references X ON DELETE \
-CASCADE ON UPDATE CASCADE','oar_job_id':'integer DEFAULT -1', \
-'record_id_slice':'integer', 'slice_hrn':'text NOT NULL'}
+ CASCADE ON UPDATE CASCADE', 'oar_job_id': 'integer DEFAULT -1',
+ 'record_id_slice': 'integer', 'slice_hrn': 'text NOT NULL'}
#Dict with all the specific iotlab tables
tablenames_dict = {'iotlab_xp': slice_table}
IotlabBase = declarative_base()
-
class IotlabXP (IotlabBase):
- """ SQL alchemy class to manipulate slice_iotlab table in
- iotlab_sfa database.
+ """ SQL alchemy class to manipulate the rows of the slice_iotlab table in
+ iotlab_sfa database. Handles the records representation and creates the
+ table if it does not exist yet.
"""
__tablename__ = 'iotlab_xp'
-
slice_hrn = Column(String)
- job_id = Column(Integer, primary_key = True)
- end_time = Column(Integer, nullable = False)
-
+ job_id = Column(Integer, primary_key=True)
+ end_time = Column(Integer, nullable=False)
- #oar_job_id = Column( Integer,default = -1)
- #node_list = Column(postgresql.ARRAY(String), nullable =True)
-
- def __init__ (self, slice_hrn =None, job_id=None, end_time=None):
+ def __init__(self, slice_hrn=None, job_id=None, end_time=None):
"""
Defines a row of the slice_iotlab table
"""
if slice_hrn:
self.slice_hrn = slice_hrn
- if job_id :
+ if job_id:
self.job_id = job_id
if end_time:
self.end_time = end_time
-
def __repr__(self):
"""Prints the SQLAlchemy record to the format defined
by the function.
"""
result = "<iotlab_xp : slice_hrn = %s , job_id %s end_time = %s" \
- %(self.slice_hrn, self.job_id, self.end_time)
+ % (self.slice_hrn, self.job_id, self.end_time)
result += ">"
return result
-
class IotlabDB(object):
""" SQL Alchemy connection class.
From alchemy.py
"""
# Stores the unique Singleton instance-
_connection_singleton = None
- _dbname = "iotlab_sfa"
-
+ # defines the database name
+ dbname = "iotlab_sfa"
class Singleton:
"""
- Class used with this Python singleton design pattern
+ Class used with this Python singleton design pattern to allow the
+ definition of one single instance of iotlab db session in the whole
+ code. Wherever a connection to the database is needed, this class
+ returns the same instance every time. Removes the need for global
+ variables throughout the code.
"""
- def __init__(self, config, debug = False):
+ def __init__(self, config, debug=False):
self.iotlab_engine = None
self.iotlab_session = None
+ self.url = None
self.create_iotlab_engine(config, debug)
self.session()
- def create_iotlab_engine(self, config, debug = False):
-
+ def create_iotlab_engine(self, config, debug=False):
+ """Creates the SQLAlchemy engine, which is the starting point
+ for any SQLAlchemy application.
+ :param config: configuration object created by SFA based on the
+ configuration file in /etc
+ :param debug: if set to true, echo and echo pool will be set to true
+ as well. If echo is True, all statements as well as a repr()
+ of their parameter lists are logged to the engine's logger, which defaults
+ to sys.stdout. If echo_pool is True, the connection pool will
+ log all checkouts/checkins to the logging stream. A python
+ logger can be used to configure this logging directly but
+ so far it has not been configured. Refer to sql alchemy engine
+ documentation.
+ :type config: Config instance (sfa.util.config)
+ :type debug: bool
+ """
- if debug == True:
+ if debug is True:
l_echo_pool = True
l_echo = True
else:
# this setting is unset, it might be an angle to tweak that if need
# be try a unix socket first
# - omitting the hostname does the trick
- unix_url = "postgresql+psycopg2://%s:%s@:%s/%s"% \
- (config.SFA_DB_USER, config.SFA_DB_PASSWORD, \
- config.SFA_DB_PORT, IotlabDB._dbname)
+ unix_url = "postgresql+psycopg2://%s:%s@:%s/%s" \
+ % (config.SFA_DB_USER, config.SFA_DB_PASSWORD,
+ config.SFA_DB_PORT, IotlabDB.dbname)
# the TCP fallback method
- tcp_url = "postgresql+psycopg2://%s:%s@%s:%s/%s"% \
- (config.SFA_DB_USER, config.SFA_DB_PASSWORD, config.SFA_DB_HOST, \
- config.SFA_DB_PORT, IotlabDB._dbname)
+ tcp_url = "postgresql+psycopg2://%s:%s@%s:%s/%s" \
+ % (config.SFA_DB_USER, config.SFA_DB_PASSWORD,
+ config.SFA_DB_HOST, config.SFA_DB_PORT, IotlabDB.dbname)
- for url in [ unix_url, tcp_url ] :
+ for url in [unix_url, tcp_url]:
try:
- self.iotlab_engine = create_engine (url, echo_pool =
- l_echo_pool, echo = l_echo)
+ self.iotlab_engine = create_engine(
+ url, echo_pool=l_echo_pool, echo=l_echo)
self.check()
self.url = url
return
pass
self.iotlab_engine = None
+ raise Exception("Could not connect to database")
- raise Exception, "Could not connect to database"
-
- def check (self):
+ def check(self):
""" Check if a table exists by trying a selection
on the table.
"""
- self.iotlab_engine.execute ("select 1").scalar()
+ self.iotlab_engine.execute("select 1").scalar()
- def session (self):
+ def session(self):
"""
Creates a SQLalchemy session. Once the session object is created
it should be used throughout the code for all the operations on
"""
if self.iotlab_session is None:
Session = sessionmaker()
- self.iotlab_session = Session(bind = self.iotlab_engine)
+ self.iotlab_session = Session(bind=self.iotlab_engine)
return self.iotlab_session
def close_session(self):
def update_jobs_in_iotlabdb(self, job_oar_list, jobs_psql):
""" Cleans the iotlab db by deleting expired and cancelled jobs.
+
Compares the list of job ids given by OAR with the job ids that
are already in the database, deletes the jobs that are no longer in
the OAR job id list.
+
:param job_oar_list: list of job ids coming from OAR
:type job_oar_list: list
- :param job_psql: list of job ids cfrom the database.
+ :param job_psql: list of job ids from the database.
type job_psql: list
+
"""
#Turn the list into a set
set_jobs_psql = set(jobs_psql)
kept_jobs = set(job_oar_list).intersection(set_jobs_psql)
- logger.debug ( "\r\n \t\ update_jobs_in_iotlabdb jobs_psql %s \r\n \t \
- job_oar_list %s kept_jobs %s "%(set_jobs_psql, job_oar_list, kept_jobs))
+ logger.debug("\r\n \t update_jobs_in_iotlabdb jobs_psql %s \r\n \
+ job_oar_list %s kept_jobs %s "
+ % (set_jobs_psql, job_oar_list, kept_jobs))
deleted_jobs = set_jobs_psql.difference(kept_jobs)
deleted_jobs = list(deleted_jobs)
if len(deleted_jobs) > 0:
self.iotlab_session.query(IotlabXP).filter(IotlabXP.job_id.in_(deleted_jobs)).delete(synchronize_session='fetch')
self.iotlab_session.commit()
-
return
-
-
- def __init__(self, config, debug = False):
+ def __init__(self, config, debug=False):
self.sl_base = IotlabBase
# Check whether we already have an instance
# Store instance reference as the only member in the handle
self._EventHandler_singleton = IotlabDB._connection_singleton
-
-
def __getattr__(self, aAttr):
"""
Delegate access to implementation.
- :param attr: Attribute wanted.
- :return: Attribute
+ :param aAttr: Attribute wanted.
+ :returns: Attribute
"""
return getattr(self._connection_singleton, aAttr)
# """
# return setattr(self._connection_singleton, aAttr, aValue)
-
-
-
-
def exists(self, tablename):
"""
Checks if the table specified as tablename exists.
+ :param tablename: name of the table in the db that has to be checked.
+ :type tablename: string
+ :returns: True if the table exists, False otherwise.
+ :rtype: bool
"""
-
+ metadata = MetaData(bind=self.iotlab_engine)
try:
- metadata = MetaData (bind=self.iotlab_engine)
- table = Table (tablename, metadata, autoload=True)
+ table = Table(tablename, metadata, autoload=True)
return True
except NoSuchTableError:
- logger.log_exc("SLABPOSTGRES tablename %s does not exists" \
- %(tablename))
+ logger.log_exc("SLABPOSTGRES tablename %s does not exist"
+ % (tablename))
return False
-
def createtable(self):
"""
Creates all the table sof the engine.
"""
- logger.debug("SLABPOSTGRES createtable IotlabBase.metadata.sorted_tables \
- %s \r\n engine %s" %(IotlabBase.metadata.sorted_tables , self.iotlab_engine))
+ logger.debug("SLABPOSTGRES createtable \
+ IotlabBase.metadata.sorted_tables %s \r\n engine %s"
+ % (IotlabBase.metadata.sorted_tables, self.iotlab_engine))
IotlabBase.metadata.create_all(self.iotlab_engine)
return
-
-
-
-# iotlab_alchemy = IotlabDB(Config())
-# iotlab_engine = iotlab_alchemy.iotlab_engine
-# iotlab_dbsession = iotlab_alchemy.session()
+"""
+This file defines the IotlabSlices class by which all the slice checks
+upon lease creation are done.
+"""
from sfa.util.xrn import get_authority, urn_to_hrn
from sfa.util.sfalogging import logger
MAXINT = 2L**31-1
-class IotlabSlices:
+class IotlabSlices:
+ """
+ This class is responsible for checking the slice when creating a
+ lease or a sliver. Those checks include verifying that the user is valid,
+ that the slice is known from the testbed or from our peers, that the list
+ of nodes involved has not changed (in this case the lease is modified
+ accordingly).
+ """
rspec_to_slice_tag = {'max_rate': 'net_max_rate'}
-
def __init__(self, driver):
"""
Get the reference to the driver here.
site_authority = get_authority(slice_authority).lower()
# get this site's authority (sfa root authority or sub authority)
- logger.debug("IOTLABSLICES \ get_peer slice_authority %s \
+ logger.debug("IOTLABSLICES \t get_peer slice_authority %s \
site_authority %s hrn %s"
- % (slice_authority, site_authority, hrn))
+ % (slice_authority, site_authority, hrn))
# check if we are already peered with this site_authority
#if so find the peer record
peers = self.driver.iotlab_api.GetPeers(peer_filter=site_authority)
for peer_record in peers:
-
if site_authority == peer_record.hrn:
peer = peer_record
- logger.debug(" IOTLABSLICES \tget_peer peer %s " %(peer))
+ logger.debug(" IOTLABSLICES \tget_peer peer %s " % (peer))
return peer
def get_sfa_peer(self, xrn):
return sfa_peer
-
def verify_slice_leases(self, sfa_slice, requested_jobs_dict, peer):
"""
Compare requested leases with the leases already scheduled/
:param sfa_slice: sfa slice record
:param requested_jobs_dict: dictionary of requested leases
- :param peer: sfa peer
+ :param peer: sfa peer record
:type sfa_slice: dict
:type requested_jobs_dict: dict
- :type peer:
+ :type peer: dict
:returns: leases list of dictionary
:rtype: list
:returns: list requested nodes hostnames
:rtype: list
+ .. warning:: UNUSED SQA 24/07/13
.. seealso:: DeleteSliceFromNodes
.. todo:: check what to do with the peer? Can not remove peer nodes from
slice here. Anyway, in this case, the peer should have gotten the
#try:
##if peer:
#person = persondict[user['email']]
- #self.driver.iotlab_api.UnBindObjectFromPeer('person',
- #person['person_id'], peer['shortname'])
- ret = self.driver.iotlab_api.AddPersonKey(\
- user['email'], key)
+ #self.driver.iotlab_api.UnBindObjectFromPeer(
+ # 'person',person['person_id'],
+ # peer['shortname'])
+ ret = self.driver.iotlab_api.AddPersonKey(
+ user['email'], key)
#if peer:
#key_index = user_keys.index(key['key'])
#remote_key_id = user['key_ids'][key_index]
# remove old keys (only if we are not appending)
append = options.get('append', True)
- if append == False:
+ if append is False:
removed_keys = set(existing_keys).difference(requested_keys)
for key in removed_keys:
#if peer: