from sfa.util.xrn import get_authority
import ldap
from sfa.util.config import Config
-from sfa.trust.hierarchy import Hierarchy
+
import ldap.modlist as modlist
from sfa.util.sfalogging import logger
def __init__(self):
logger.setLevelDebug()
#SFA related config
- self.senslabauth = Hierarchy()
+
config = Config()
self.authname = config.SFA_REGISTRY_ROOT_AUTH
self.lengthPassword = 8
self.baseDN = self.conn.ldapPeopleDN
- #authinfo=self.senslabauth.get_auth_info(self.authname)
+
- self.charsPassword = [ '!','$','(',')','*','+',',','-','.', \
- '0','1','2','3','4','5','6','7','8','9', \
- 'A','B','C','D','E','F','G','H','I','J', \
- 'K','L','M','N','O','P','Q','R','S','T', \
- 'U','V','W','X','Y','Z','_','a','b','c', \
- 'd','e','f','g','h','i','j','k','l','m', \
- 'n','o','p','q','r','s','t','u','v','w', \
- 'x','y','z','\'']
+ self.charsPassword = [ '!', '$', '(',')', '*', '+', ',', '-', '.', \
+ '0', '1', '2', '3', '4', '5', '6', '7', '8', \
+ '9', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', \
+ 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', \
+ 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', \
+ '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', \
+ 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p' ,'q', \
+ 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', \
+ '\'']
self.ldapShell = '/bin/bash'
lower_last_name = None
#Assume there is first name and last name in email
#if there is a separator
- separator_list = ['.','_','-']
+ separator_list = ['.', '_', '-']
for sep in separator_list:
if sep in email:
mail = email.split(sep)
#Otherwise just take the part before the @ as the
#lower_first_name and lower_last_name
if lower_first_name is None:
- lower_first_name = email
- lower_last_name = email
+ lower_first_name = email
+ lower_last_name = email
length_last_name = len(lower_last_name)
login_max_length = 8
"""
return 'A REMPLIR '
-
- def make_ldap_filters_from_record(self, record=None):
+
+ @staticmethod
+ def make_ldap_filters_from_record( record=None):
"""TODO Handle OR filtering in the ldap query when
dealing with a list of records instead of doing a for loop in GetPersons
Helper function to make LDAP filter requests out of SFA records.
attrs['cn'] = attrs['givenName'] + ' ' + attrs['sn']
attrs['gecos'] = attrs['givenName'] + ' ' + attrs['sn']
- except:
+ except KeyError:
attrs['givenName'] = attrs['uid']
attrs['sn'] = attrs['uid']
attrs['cn'] = attrs['uid']
try:
ldif = modlist.addModlist(user_ldap_attrs)
logger.debug("LDAPapi.py add attrs %s \r\n ldif %s"\
- %(user_ldap_attrs,ldif) )
- self.conn.ldapserv.add_s(dn,ldif)
+ %(user_ldap_attrs, ldif) )
+ self.conn.ldapserv.add_s(dn, ldif)
logger.info("Adding user %s login %s in LDAP" \
- %(user_ldap_attrs['cn'] ,user_ldap_attrs['uid']))
+ %(user_ldap_attrs['cn'] , user_ldap_attrs['uid']))
except ldap.LDAPError, error:
Deletes a SFA person in LDAP, based on the user's hrn.
"""
#Find uid of the person
- person = self.LdapFindUser(record_filter,[])
+ person = self.LdapFindUser(record_filter, [])
logger.debug("LDAPapi.py \t LdapDeleteUser record %s person %s" \
%(record_filter, person))
if person:
- dn = 'uid=' + person['uid'] + "," +self.baseDN
+ dn = 'uid=' + person['uid'] + "," + self.baseDN
else:
return {'bool': False}
def LdapModify(self, dn, old_attributes_dict, new_attributes_dict):
""" Modifies a LDAP entry """
- ldif = modlist.modifyModlist(old_attributes_dict,new_attributes_dict)
+ ldif = modlist.modifyModlist(old_attributes_dict, new_attributes_dict)
# Connect and bind/authenticate
result = self.conn.connect()
if (result['bool']):
try:
- self.conn.ldapserv.modify_s(dn,ldif)
+ self.conn.ldapserv.modify_s(dn, ldif)
self.conn.close()
return {'bool' : True }
except ldap.LDAPError, error:
#Get all the attributes of the user_uid_login
#person = self.LdapFindUser(record_filter,[])
req_ldap = self.make_ldap_filters_from_record(user_record)
- person_list = self.LdapSearch(req_ldap,[])
+ person_list = self.LdapSearch(req_ldap, [])
logger.debug("LDAPapi.py \t LdapModifyUser person_list : %s" \
%(person_list))
if person_list and len(person_list) > 1 :
# The dn of our existing entry/object
#One result only from ldapSearch
person = person_list[0][1]
- dn = 'uid=' + person['uid'][0] + "," +self.baseDN
+ dn = 'uid=' + person['uid'][0] + "," + self.baseDN
if new_attributes_dict:
old = {}
old[k] = person[k]
logger.debug(" LDAPapi.py \t LdapModifyUser new_attributes %s"\
%( new_attributes_dict))
- result = self.LdapModify(dn, old,new_attributes_dict)
+ result = self.LdapModify(dn, old, new_attributes_dict)
return result
else:
logger.error("LDAP \t LdapModifyUser No new attributes given. ")
return ret
- def LdapResetPassword(self,record):
+ def LdapResetPassword(self, record):
"""
Resets password for the user whose record is the parameter and changes
the corresponding entry in the LDAP.
return_fields_list = []
if expected_fields == None :
- return_fields_list = ['mail','givenName', 'sn', 'uid', \
+ return_fields_list = ['mail', 'givenName', 'sn', 'uid', \
'sshPublicKey', 'shadowExpire']
else :
return_fields_list = expected_fields
#Get all the results matching the search from ldap in one
#shot (1 value)
result_type, result_data = \
- self.conn.ldapserv.result(msg_id,1)
+ self.conn.ldapserv.result(msg_id, 1)
self.conn.close()
return result_data
- except ldap.LDAPError,error :
+ except ldap.LDAPError, error :
logger.log_exc("LDAP LdapSearch Error %s" %error)
return []
req_ldap = self.make_ldap_filters_from_record(custom_record)
return_fields_list = []
if expected_fields == None :
- return_fields_list = ['mail','givenName', 'sn', 'uid', \
+ return_fields_list = ['mail', 'givenName', 'sn', 'uid', \
'sshPublicKey']
else :
return_fields_list = expected_fields
logger.debug(" LDAP.py LdapFindUser ldapentry name : %s " \
%(ldapentry[1]['uid'][0]))
tmpname = ldapentry[1]['uid'][0]
- hrn=self.authname+"."+ tmpname
+ hrn = self.authname + "." + tmpname
tmpemail = ldapentry[1]['mail'][0]
if ldapentry[1]['mail'][0] == "unknown":
parent_hrn = get_authority(hrn)
- parent_auth_info = self.senslabauth.get_auth_info(parent_hrn)
+
try:
results.append( {
'type': 'user',
'pointer' : -1,
'hrn': hrn,
} )
- except KeyError,error:
+ except KeyError, error:
logger.log_exc("LDAPapi.PY \t LdapFindUser EXCEPTION %s" \
%(error))
return
#import sys
-from httplib import HTTPConnection, HTTPException
+from httplib import HTTPConnection, HTTPException, NotConnected
import json
#import datetime
#from time import gmtime, strftime
+class JsonPage:
+ """Class used to manipulate jsopn pages given by OAR."""
+ def __init__(self):
+ #All are boolean variables
+ self.concatenate = False
+ #Indicates end of data, no more pages to be loaded.
+ self.end = False
+ self.next_page = False
+ #Next query address
+ self.next_offset = None
+ #Json page
+ self.raw_json = None
+ def FindNextPage(self):
+ """ Gets next data page from OAR when the query's results
+ are too big to be transmitted in a single page.
+ Uses the "links' item in the json returned to check if
+ an additionnal page has to be loaded.
+ Returns : next page , next offset query
+ """
+ if "links" in self.raw_json:
+ for page in self.raw_json['links']:
+ if page['rel'] == 'next':
+ self.concatenate = True
+ self.next_page = True
+ self.next_offset = "?" + page['href'].split("?")[1]
+ print>>sys.stderr, "\r\n \t FindNextPage NEXT LINK"
+ return
+
+ if self.concatenate :
+ self.end = True
+ self.next_page = False
+ self.next_offset = None
+ return
+
+ #Otherwise, no next page and no concatenate, must be a single page
+ #Concatenate the single page and get out of here.
+ else:
+ self.next_page = False
+ self.concatenate = True
+ self.next_offset = None
+ return
+
+ @staticmethod
+ def ConcatenateJsonPages(saved_json_list):
+ #reset items list
+
+ tmp = {}
+ tmp['items'] = []
+
+ for page in saved_json_list:
+ tmp['items'].extend(page['items'])
+ return tmp
+
+
+ def ResetNextPage(self):
+ self.next_page = True
+ self.next_offset = None
+ self.concatenate = False
+ self.end = False
+
+
class OARrestapi:
def __init__(self, config_file = '/etc/sfa/oar_config.py'):
self.oarserver = {}
self.parser = OARGETParser(self)
- def GETRequestToOARRestAPI(self, request, strval=None ,next_page=None, username = None ):
+ def GETRequestToOARRestAPI(self, request, strval=None, next_page=None, username = None ):
self.oarserver['uri'] = \
OARGETParser.OARrequests_uri_dict[request]['uri']
#Get job details with username
if 'owner' in OARGETParser.OARrequests_uri_dict[request] and username:
- self.oarserver['uri'] += OARGETParser.OARrequests_uri_dict[request]['owner'] + username
+ self.oarserver['uri'] += OARGETParser.OARrequests_uri_dict[request]['owner'] + username
headers = {}
data = json.dumps({})
logger.debug("OARrestapi \tGETRequestToOARRestAPI %s" %(request))
if username:
headers['X-REMOTE_IDENT'] = username
- print>>sys.stderr, " \r\n \t OARrestapi \tGETRequestToOARRestAPI %s" %( self.oarserver['uri'])
logger.debug("OARrestapi: \t GETRequestToOARRestAPI \
self.oarserver['uri'] %s strval %s" \
%(self.oarserver['uri'], strval))
conn.request("GET", self.oarserver['uri'], data, headers)
resp = ( conn.getresponse()).read()
conn.close()
+
except HTTPException, error :
logger.log_exc("GET_OAR_SRVR : Problem with OAR server : %s " \
%(error))
self.interface_hrn = self.config.SFA_INTERFACE_HRN
self.timezone_json_dict = {
'timezone': None, 'api_timestamp': None, }
- self.jobs_json_dict = {
- 'total' : None, 'links' : [],\
- 'offset':None , 'items' : [], }
- self.jobs_table_json_dict = self.jobs_json_dict
- self.jobs_details_json_dict = self.jobs_json_dict
+ #self.jobs_json_dict = {
+ #'total' : None, 'links' : [],\
+ #'offset':None , 'items' : [], }
+ #self.jobs_table_json_dict = self.jobs_json_dict
+ #self.jobs_details_json_dict = self.jobs_json_dict
self.server = srv
self.node_dictlist = {}
- self.raw_json = None
+
+ self.json_page = JsonPage()
+
self.site_dict = {}
self.SendRequest("GET_version")
def ParseVersion(self) :
- #print self.raw_json
- #print >>sys.stderr, self.raw_json
- if 'oar_version' in self.raw_json :
+ #print self.json_page.raw_json
+ #print >>sys.stderr, self.json_page.raw_json
+ if 'oar_version' in self.json_page.raw_json :
self.version_json_dict.update(api_version = \
- self.raw_json['api_version'],
- apilib_version = self.raw_json['apilib_version'],
- api_timezone = self.raw_json['api_timezone'],
- api_timestamp = self.raw_json['api_timestamp'],
- oar_version = self.raw_json['oar_version'] )
+ self.json_page.raw_json['api_version'],
+ apilib_version = self.json_page.raw_json['apilib_version'],
+ api_timezone = self.json_page.raw_json['api_timezone'],
+ api_timestamp = self.json_page.raw_json['api_timestamp'],
+ oar_version = self.json_page.raw_json['oar_version'] )
else :
- self.version_json_dict.update(api_version = self.raw_json['api'] ,
- apilib_version = self.raw_json['apilib'],
- api_timezone = self.raw_json['api_timezone'],
- api_timestamp = self.raw_json['api_timestamp'],
- oar_version = self.raw_json['oar'] )
+ self.version_json_dict.update(api_version = \
+ self.json_page.raw_json['api'] ,
+ apilib_version = self.json_page.raw_json['apilib'],
+ api_timezone = self.json_page.raw_json['api_timezone'],
+ api_timestamp = self.json_page.raw_json['api_timestamp'],
+ oar_version = self.json_page.raw_json['oar'] )
print self.version_json_dict['apilib_version']
def ParseTimezone(self) :
- api_timestamp = self.raw_json['api_timestamp']
- api_tz = self.raw_json['timezone']
+ api_timestamp = self.json_page.raw_json['api_timestamp']
+ api_tz = self.json_page.raw_json['timezone']
return api_timestamp, api_tz
def ParseJobs(self) :
self.jobs_list = []
print " ParseJobs "
- return self.raw_json
+ return self.json_page.raw_json
def ParseJobsTable(self) :
print "ParseJobsTable"
        # Currently, this function is not used much, so it is unclear
        # what would be useful to parse here;
        # returning the full json. NT
- #logger.debug("ParseJobsDetails %s " %(self.raw_json))
- return self.raw_json
+ #logger.debug("ParseJobsDetails %s " %(self.json_page.raw_json))
+ return self.json_page.raw_json
def ParseJobsIds(self):
'name', 'wanted_resources','queue','stderr_file','command']
- job_info = self.raw_json
- #logger.debug("OARESTAPI ParseJobsIds %s" %(self.raw_json))
+ job_info = self.json_page.raw_json
+ #logger.debug("OARESTAPI ParseJobsIds %s" %(self.json_page.raw_json))
values = []
try:
for k in job_resources:
"""
job_resources = []
- for resource in self.raw_json['items']:
+ for resource in self.json_page.raw_json['items']:
job_resources.append(resource['id'])
- #logger.debug("OARESTAPI \tParseJobsIdResources %s" %(self.raw_json))
+ #logger.debug("OARESTAPI \tParseJobsIdResources %s" %(self.json_page.raw_json))
return job_resources
def ParseResources(self) :
#logger.debug("OARESTAPI \tParseResources " )
#resources are listed inside the 'items' list from the json
- self.raw_json = self.raw_json['items']
+ self.json_page.raw_json = self.json_page.raw_json['items']
self.ParseNodes()
def ParseReservedNodes(self):
#resources are listed inside the 'items' list from the json
reservation_list = []
- print "ParseReservedNodes_%s" %(self.raw_json['items'])
job = {}
#Parse resources info
- for json_element in self.raw_json['items']:
+ for json_element in self.json_page.raw_json['items']:
#In case it is a real reservation (not asap case)
if json_element['scheduled_start']:
job['t_from'] = json_element['scheduled_start']
job['t_until'] = int(json_element['scheduled_start']) + \
- int(json_element['walltime'])
+ int(json_element['walltime'])
#Get resources id list for the job
job['resource_ids'] = \
- [ node_dict['id'] for node_dict in json_element['resources'] ]
+ [ node_dict['id'] for node_dict in json_element['resources']]
else:
job['t_from'] = "As soon as possible"
job['t_until'] = "As soon as possible"
logger.debug("OARESTAPI \tParseRunningJobs__________________________ ")
#resources are listed inside the 'items' list from the json
nodes = []
- for job in self.raw_json['items']:
+ for job in self.json_page.raw_json['items']:
for node in job['nodes']:
nodes.append(node['network_address'])
return nodes
"""
logger.debug("OARRESTAPI ParseResourcesFull________________________ ")
- #print self.raw_json[1]
+ #print self.json_page.raw_json[1]
#resources are listed inside the 'items' list from the json
if self.version_json_dict['apilib_version'] != "0.2.10" :
- self.raw_json = self.raw_json['items']
+ self.json_page.raw_json = self.json_page.raw_json['items']
self.ParseNodes()
self.ParseSites()
return self.node_dictlist
"""
if self.version_json_dict['apilib_version'] != "0.2.10" :
- self.raw_json = self.raw_json['items']
+ self.json_page.raw_json = self.json_page.raw_json['items']
self.ParseNodes()
self.ParseSites()
return self.site_dict
keys = self.resources_fulljson_dict.keys()
keys.sort()
- for dictline in self.raw_json:
+ for dictline in self.json_page.raw_json:
node_id = None
# dictionary is empty and/or a new node has to be inserted
node_id = self.resources_fulljson_dict['network_address'](\
#Turn the property tuple list (=dict value) into a dictionary
self.node_dictlist[node_id] = dict(self.node_dictlist[node_id])
node_id = None
-
- def slab_hostname_to_hrn(self, root_auth, hostname):
+
+ @staticmethod
+ def slab_hostname_to_hrn( root_auth, hostname):
return root_auth + '.'+ hostname
}
- def FindNextPage(self):
- if "links" in self.raw_json:
- for page in self.raw_json['links']:
- if page['rel'] == 'next':
- self.concatenate = True
- print>>sys.stderr, " \r\n \t\t FindNextPage self.concatenate %s" %(self.concatenate )
- return True, "?"+page['href'].split("?")[1]
- if self.concatenate :
- self.end = True
- print>>sys.stderr, " \r\n \t\t END FindNextPage self.concatenate %s" %(self.concatenate )
- return False, None
-
- def ConcatenateJsonPages (self, saved_json_list):
- #reset items list
-
- tmp = {}
- tmp['items'] = []
- print >>sys.stderr, " \r\n ConcatenateJsonPages saved_json_list len ", len(saved_json_list)
- for page in saved_json_list:
- #for node in page['items']:
- #self.raw_json['items'].append(node)
- print>>sys.stderr, " \r\n ConcatenateJsonPages page['items']len ", len(page['items'])
- tmp['items'].extend(page['items'])
- #print>>sys.stderr, " \r\n ConcatenateJsonPages len ", len(self.raw_json['items'])
- #print>>sys.stderr, " \r\n ConcatenateJsonPages self.raw_json['items']", self.raw_json['items']
- return tmp
+
def SendRequest(self, request, strval = None , username = None):
""" Connects to OAR , sends the valid GET requests and uses
the appropriate json parsing functions.
"""
- self.raw_json = None
- next_page = True
- next_offset = None
save_json = None
- self.concatenate = False
- self.end = False
- a = 0
+
+ self.json_page.ResetNextPage()
save_json = []
- self.raw_json_list = []
+
if request in self.OARrequests_uri_dict :
- while next_page:
- self.raw_json = self.server.GETRequestToOARRestAPI(request, \
- strval, \
- next_offset, \
- username)
-
- next_page , next_offset = self.FindNextPage()
- if self.concatenate:
- #self.raw_json_list.append(self.raw_json)
- save_json.append(self.raw_json)
- if self.concatenate and self.end :
- #self.raw_json = self.ConcatenateJsonPages(self.raw_json_list)
- self.raw_json = self.ConcatenateJsonPages(save_json)
+ while self.json_page.next_page:
+ self.json_page.raw_json = self.server.GETRequestToOARRestAPI(\
+ request, \
+ strval, \
+ self.json_page.next_offset, \
+ username)
+ self.json_page.FindNextPage()
+ if self.json_page.concatenate:
+ save_json.append(self.json_page.raw_json)
+
+ if self.json_page.concatenate and self.json_page.end :
+ self.json_page.raw_json = \
+ self.json_page.ConcatenateJsonPages(save_json)
return self.OARrequests_uri_dict[request]['parse_func'](self)
else: