From 6d8f4a104dbe6e8925dec1f2a89fb3e152491d4a Mon Sep 17 00:00:00 2001 From: Sandrine Avakian Date: Thu, 8 Dec 2011 15:57:45 +0100 Subject: [PATCH] First debugging steps for creating a slice. --- sfa/client/client_helper.py | 20 +- sfa/client/sfi.py | 11 +- sfa/managers/aggregate_manager_slab.py | 17 +- sfa/managers/driver.py | 3 +- sfa/managers/registry_manager_slab.py | 1 + sfa/methods/CreateSliver.py | 7 +- sfa/plc/slices.py | 26 +- sfa/senslab/LDAPapi.py | 8 + sfa/senslab/OARrestapi.py | 25 +- sfa/senslab/SenslabImportUsers.py | 2 +- sfa/senslab/parsing.py | 74 ++- sfa/senslab/slab-import.py | 5 +- sfa/senslab/slabdriver.py | 55 +- sfa/senslab/slices.py | 662 +++++++++++++++++++++++++ sfa/trust/auth.py | 13 +- sfa/trust/credential.py | 25 +- sfa/util/xrn.py | 10 +- 17 files changed, 877 insertions(+), 87 deletions(-) create mode 100644 sfa/senslab/slices.py diff --git a/sfa/client/client_helper.py b/sfa/client/client_helper.py index 8f9876ca..b33911fe 100644 --- a/sfa/client/client_helper.py +++ b/sfa/client/client_helper.py @@ -1,4 +1,4 @@ - +import sys def pg_users_arg(records): users = [] for record in records: @@ -11,19 +11,25 @@ def pg_users_arg(records): def sfa_users_arg(records, slice_record): users = [] + print>>sys.stderr, " \r\n \r\n \t CLIENT_HELPER.PY sfa_users_arg slice_record %s \r\n records %s"%(slice_record,records) for record in records: if record['type'] != 'user': continue - user = {'urn': record['geni_urn'], # - 'keys': record['keys'], - 'email': record['email'], # needed for MyPLC - 'person_id': record['person_id'], # needed for MyPLC + user = {#'urn': record['geni_urn'], + #'keys': record['keys'], + #'email': record['email'], # needed for MyPLC + 'person_id': record['record_id'], + 'hrn': record['hrn'], + 'type': record['type'], + 'authority' : record['authority'], + 'gid' : record['gid'], #'first_name': record['first_name'], # needed for MyPLC #'last_name': record['last_name'], # needed for MyPLC 'slice_record': slice_record, # needed for legacy refresh peer - 'key_ids': record['key_ids'] # needed for legacy refresh peer + #'key_ids': record['key_ids'] # needed for legacy refresh peer } - users.append(user) + users.append(user) + print>>sys.stderr, " \r\n \r\n \t CLIENT_HELPER.PY sfa_users_arg user %s",user return users def sfa_to_pg_users_arg(users): diff --git a/sfa/client/sfi.py b/sfa/client/sfi.py index 845b2b13..b0fa53ca 100755 --- a/sfa/client/sfi.py +++ b/sfa/client/sfi.py @@ -543,6 +543,7 @@ class Sfi: gid = self.get_cached_gid(gidfile) if not gid: user_cred = self.get_user_cred() + print>>sys.stderr, " \r\n \t SFI.PY _get_gid " records = self.registry.Resolve(hrn, user_cred.save_to_string(save_parents=True)) if not records: raise RecordNotFound(args[0]) @@ -1003,7 +1004,9 @@ class Sfi: delegated_cred = None else: # delegate the cred to the callers root authority - delegated_cred = self.delegate_cred(slice_cred, get_authority(self.authority)+'.slicemanager') + print>>sys.stderr, " \r\n \r\n \t SFI.PY get_authority(self.authority+'.slicemanager') %s self.authority %s slice_cred \t %s " %(get_authority(self.authority+'.slicemanager'), self.authority, slice_cred) + delegated_cred = self.delegate_cred(slice_cred, get_authority(self.authority+'.slicemanager')) + #delegated_cred = self.delegate_cred(slice_cred, get_authority(slice_hrn)) #creds.append(delegated_cred) @@ -1016,11 +1019,13 @@ class Sfi: # keys: [, ] # }] users = [] + print>>sys.stderr, " \r\n SFI.PY create slice_urn ", slice_urn slice_records = self.registry.Resolve(slice_urn, 
[user_cred.save_to_string(save_parents=True)]) if slice_records and 'researcher' in slice_records[0] and slice_records[0]['researcher']!=[]: slice_record = slice_records[0] user_hrns = slice_record['researcher'] user_urns = [hrn_to_urn(hrn, 'user') for hrn in user_hrns] + print>>sys.stderr, " \r\n SFI.PY create user_urns %s \r\n \t slice_records %s"%( user_urns,slice_records) user_records = self.registry.Resolve(user_urns, [user_cred.save_to_string(save_parents=True)]) if 'sfa' not in server_version: @@ -1030,6 +1035,7 @@ class Sfi: rspec = RSpecConverter.to_pg_rspec(rspec.toxml(), content_type='request') creds = [slice_cred] else: + print >>sys.stderr, "\r\n \r\n \r\n WOOOOOO" users = sfa_users_arg(user_records, slice_record) creds = [slice_cred] if delegated_cred: @@ -1037,7 +1043,8 @@ class Sfi: call_args = [slice_urn, creds, rspec, users] if self.server_supports_options_arg(server): options = {'call_id': unique_call_id()} - call_args.append(options) + call_args.append(options) + print>>sys.stderr, " \r\n SFI.PY create user" ,users result = server.CreateSliver(*call_args) value = ReturnValue.get_value(result) if opts.file is None: diff --git a/sfa/managers/aggregate_manager_slab.py b/sfa/managers/aggregate_manager_slab.py index 7090df4a..8b1d6d44 100644 --- a/sfa/managers/aggregate_manager_slab.py +++ b/sfa/managers/aggregate_manager_slab.py @@ -19,7 +19,7 @@ from sfa.server.sfaapi import SfaApi from sfa.senslab.OARrspec import OARrspec import sfa.plc.peers as peers #from sfa.plc.aggregate import Aggregate -from sfa.plc.slices import Slices +from sfa.senslab.slices import Slices class AggregateManager: @@ -166,8 +166,8 @@ class AggregateManager: """ call_id = options.get('call_id') if Callids().already_handled(call_id): return "" - - aggregate = Aggregate(api) + aggregate = OARrspec(api) + #aggregate = Aggregate(api) slices = Slices(api) (hrn, _) = urn_to_hrn(slice_xrn) peer = slices.get_peer(hrn) @@ -175,6 +175,7 @@ class AggregateManager: slice_record=None if users: slice_record = users[0].get('slice_record', {}) + print >>sys.stderr, " \r\n \t AGGREGATESLAB.PY Slice slice_record : ", slice_record # parse rspec rspec = RSpec(rspec_string) @@ -183,18 +184,22 @@ class AggregateManager: # ensure site record exists site = slices.verify_site(hrn, slice_record, peer, sfa_peer) # ensure slice record exists + print>>sys.stderr, " \r\n \t AGGREGATESLAB.PY Slice users : ", users slice = slices.verify_slice(hrn, slice_record, peer, sfa_peer) + print >>sys.stderr, " \r\n \t AGGREGATESLAB.PY Slice slice : ", slice # ensure person records exists - persons = slices.verify_persons(hrn, slice, users, peer, sfa_peer) + persons = slices.verify_persons(hrn, slice, users) + #persons = slices.verify_persons(hrn, slice, users, peer, sfa_peer) # ensure slice attributes exists - slices.verify_slice_attributes(slice, requested_attributes) + #slices.verify_slice_attributes(slice, requested_attributes) # add/remove slice from nodes requested_slivers = [node.get('component_name') for node in rspec.version.get_nodes_with_slivers()] + print >>sys.stderr, " \r\n \t AGGREGATESLAB.PY Slice requested_slivers : ", requested_slivers slices.verify_slice_nodes(slice, requested_slivers, peer) # add/remove links links - slices.verify_slice_links(slice, rspec.version.get_link_requests(), aggregate) + #slices.verify_slice_links(slice, rspec.version.get_link_requests(), aggregate) # handle MyPLC peer association. # only used by plc and ple. 
diff --git a/sfa/managers/driver.py b/sfa/managers/driver.py index 7d4b5b23..b12cd710 100644 --- a/sfa/managers/driver.py +++ b/sfa/managers/driver.py @@ -2,7 +2,7 @@ # an attempt to document what a driver class should provide, # and implement reasonable defaults # - +import sys class Driver: def __init__ (self): pass @@ -19,6 +19,7 @@ class Driver: # testbed-specific info as well # this at minima should fill in the 'researcher' field for slice records def augment_records_with_testbed_info (self, sfa_records): + print >>sys.stderr, " \r\n \r\n DRIVER.PY augment_records_with_testbed_info sfa_records ",sfa_records return sfa_records # incoming record, as provided by the client to the Register API call diff --git a/sfa/managers/registry_manager_slab.py b/sfa/managers/registry_manager_slab.py index ed9c2c6d..c35175e2 100644 --- a/sfa/managers/registry_manager_slab.py +++ b/sfa/managers/registry_manager_slab.py @@ -108,6 +108,7 @@ def Resolve(api, xrns, type=None, full=True): # load all known registry names into a prefix tree and attempt to find # the longest matching prefix + print >>sys.stderr , '\t\t REGISTRY MANAGER : resolve=========api ', api print >>sys.stderr , '\t\t REGISTRY MANAGER : resolve=========xrns ', xrns if not isinstance(xrns, types.ListType): if not type: diff --git a/sfa/methods/CreateSliver.py b/sfa/methods/CreateSliver.py index fb1a2920..8a325f2a 100644 --- a/sfa/methods/CreateSliver.py +++ b/sfa/methods/CreateSliver.py @@ -2,7 +2,7 @@ from sfa.util.faults import SfaInvalidArgument from sfa.util.xrn import urn_to_hrn from sfa.util.method import Method from sfa.util.sfatablesRuntime import run_sfatables - +import sys from sfa.trust.credential import Credential from sfa.storage.parameter import Parameter, Mixed @@ -36,7 +36,7 @@ class CreateSliver(Method): hrn, type = urn_to_hrn(slice_xrn) self.api.logger.info("interface: %s\ttarget-hrn: %s\tmethod-name: %s"%(self.api.interface, hrn, self.name)) - + print >>sys.stderr, " \r\n \r\n Createsliver.py call %s\ttarget-hrn: %s\tmethod-name: %s "%(self.api.interface, hrn, self.name) # Find the valid credentials valid_creds = self.api.auth.checkCredentials(creds, 'createsliver', hrn) origin_hrn = Credential(string=valid_creds[0]).get_gid_caller().get_hrn() @@ -53,7 +53,8 @@ class CreateSliver(Method): chain_name = 'FORWARD-INCOMING' self.api.logger.debug("CreateSliver: sfatables on chain %s"%chain_name) rspec = run_sfatables(chain_name, hrn, origin_hrn, rspec) - slivers = RSpec(rspec).version.get_nodes_with_slivers() + slivers = RSpec(rspec).version.get_nodes_with_slivers() + print >>sys.stderr, " \r\n \r\n Createsliver.py call users : ", users if slivers: result = self.api.manager.CreateSliver(self.api, slice_xrn, creds, rspec, users, options) else: diff --git a/sfa/plc/slices.py b/sfa/plc/slices.py index 9e188d79..dc317935 100644 --- a/sfa/plc/slices.py +++ b/sfa/plc/slices.py @@ -1,6 +1,6 @@ from types import StringTypes from collections import defaultdict - +import sys from sfa.util.xrn import get_leaf, get_authority, urn_to_hrn from sfa.util.plxrn import hrn_to_pl_slicename from sfa.util.policy import Policy @@ -135,19 +135,19 @@ class Slices: # slice belongs to out local plc or a myplc peer. 
We will assume it # is a local site, unless we find out otherwise peer = None - + print>>sys.stderr, " \r\n \r\n \t slices.py get_peer slice_authority " # get this slice's authority (site) slice_authority = get_authority(hrn) # get this site's authority (sfa root authority or sub authority) site_authority = get_authority(slice_authority).lower() - + print>>sys.stderr, " \r\n \r\n \t slices.py get_peer slice_authority %s site_authority %s" %(slice_authority,site_authority) # check if we are already peered with this site_authority, if so - peers = self.api.driver.GetPeers({}, ['peer_id', 'peername', 'shortname', 'hrn_root']) - for peer_record in peers: - names = [name.lower() for name in peer_record.values() if isinstance(name, StringTypes)] - if site_authority in names: - peer = peer_record + #peers = self.api.driver.GetPeers({}, ['peer_id', 'peername', 'shortname', 'hrn_root']) + #for peer_record in peers: + #names = [name.lower() for name in peer_record.values() if isinstance(name, StringTypes)] + #if site_authority in names: + #peer = peer_record return peer @@ -308,10 +308,12 @@ class Slices: return site def verify_slice(self, slice_hrn, slice_record, peer, sfa_peer): - slicename = hrn_to_pl_slicename(slice_hrn) - parts = slicename.split("_") + #slicename = hrn_to_pl_slicename(slice_hrn) + parts = hrn_to_pl_slicename(slice_hrn).split("_") login_base = parts[0] + slicename = slice_hrn slices = self.api.driver.GetSlices([slicename]) + print>>sys.stderr, " \r\n \r\rn Slices.py verify_slice slicename %s slices %s slice_record %s"%(slicename ,slices, slice_record) if not slices: slice = {'name': slicename, 'url': slice_record.get('url', slice_hrn), @@ -334,8 +336,8 @@ class Slices: # unbind from peer so we can modify if necessary. Will bind back later self.api.driver.UnBindObjectFromPeer('slice', slice['slice_id'], peer['shortname']) #Update existing record (e.g. expires field) it with the latest info. 
- if slice_record and slice['expires'] != slice_record['expires']: - self.api.driver.UpdateSlice( slice['slice_id'], {'expires' : slice_record['expires']}) + #if slice_record and slice['expires'] != slice_record['expires']: + #self.api.driver.UpdateSlice( slice['slice_id'], {'expires' : slice_record['expires']}) return slice diff --git a/sfa/senslab/LDAPapi.py b/sfa/senslab/LDAPapi.py index fc312294..d84f6f8b 100644 --- a/sfa/senslab/LDAPapi.py +++ b/sfa/senslab/LDAPapi.py @@ -18,6 +18,14 @@ class LDAPapi : self.auth=Auth() gid=authinfo.get_gid_object() + self.ldapdictlist = ['type', + 'pkey', + 'uid', + 'serial', + 'authority', + 'peer_authority', + 'pointer' , + 'hrn'] def ldapFind(self, record_filter = None, columns=None): diff --git a/sfa/senslab/OARrestapi.py b/sfa/senslab/OARrestapi.py index 0ec528c9..71e4abdc 100644 --- a/sfa/senslab/OARrestapi.py +++ b/sfa/senslab/OARrestapi.py @@ -165,6 +165,7 @@ class OARGETParser: if self.version_json_dict['apilib_version'] != "0.2.10" : self.raw_json = self.raw_json['items'] self.ParseNodes() + self.ParseSites() @@ -193,29 +194,29 @@ class OARGETParser: #Retourne liste de dictionnaires contenant attributs des sites def ParseSites(self): nodes_per_site = {} + # Create a list of nodes per site_id for node_id in self.node_dictlist.keys(): node = self.node_dictlist[node_id] - if node['site_id'] not in nodes_per_site.keys(): - nodes_per_site[node['site_id']] = [] - nodes_per_site[node['site_id']].append(node['node_id']) + if node['site_login_base'] not in nodes_per_site.keys(): + nodes_per_site[node['site_login_base']] = [] + nodes_per_site[node['site_login_base']].append(node['node_id']) else: - if node['node_id'] not in nodes_per_site[node['site_id']]: - nodes_per_site[node['site_id']].append(node['node_id']) + if node['node_id'] not in nodes_per_site[node['site_login_base']]: + nodes_per_site[node['site_login_base']].append(node['node_id']) #Create a site dictionary with key is site_login_base (name of the site) # and value is a dictionary of properties, including the list of the node_ids for node_id in self.node_dictlist.keys(): node = self.node_dictlist[node_id] - if node['site_id'] not in self.site_dict.keys(): - self.site_dict[node['site_login_base']] = [('site_id',node['site_id']),\ - ('login_base', node['site_login_base']),\ - ('node_ids',nodes_per_site[node['site_id']]),\ + if node['site_login_base'] not in self.site_dict.keys(): + self.site_dict[node['site_login_base']] = [('login_base', node['site_login_base']),\ + ('node_ids',nodes_per_site[node['site_login_base']]),\ ('latitude',"48.83726"),\ - ('longitude',"- 2.10336"),('name',"demolab"),\ + ('longitude',"- 2.10336"),('name',"senslab"),\ ('pcu_ids', []), ('max_slices', None), ('ext_consortium_id', None),\ ('max_slivers', None), ('is_public', True), ('peer_site_id', None),\ - ('abbreviated_name', "demolab"), ('address_ids', []),\ - ('url', "http,//www.sdemolab.fr"), ('person_ids', []),\ + ('abbreviated_name', "senslab"), ('address_ids', []),\ + ('url', "http,//www.senslab.info"), ('person_ids', []),\ ('site_tag_ids', []), ('enabled', True), ('slice_ids', []),\ ('date_created', None), ('peer_id', None),] self.site_dict[node['site_login_base']] = dict(self.site_dict[node['site_login_base']]) diff --git a/sfa/senslab/SenslabImportUsers.py b/sfa/senslab/SenslabImportUsers.py index 9a0c39bc..d1ee40d7 100644 --- a/sfa/senslab/SenslabImportUsers.py +++ b/sfa/senslab/SenslabImportUsers.py @@ -45,7 +45,7 @@ class SenslabImportUsers: def_slice.update(dflt_slice) 
self.slices_list.append(def_slice) - print>>sys.stderr, "InitSlices SliceLIST", self.slices_list + #print>>sys.stderr, "InitSlices SliceLIST", self.slices_list def InitPersons(self): persons_per_site = {} diff --git a/sfa/senslab/parsing.py b/sfa/senslab/parsing.py index 21d9a06f..0a5092a9 100644 --- a/sfa/senslab/parsing.py +++ b/sfa/senslab/parsing.py @@ -3,11 +3,11 @@ import sys import httplib import json - +from collections import defaultdict def strip_dictionnary (dict_to_strip): stripped_filter = [] - stripped_filterdict = {} + stripped_filterdict = {} for f in dict_to_strip : stripped_filter.append(str(f).strip('|')) @@ -18,22 +18,22 @@ def strip_dictionnary (dict_to_strip): def filter_return_fields( dict_to_filter, return_fields): filtered_dict = {} - #print>>sys.stderr, " \r\n \t \tfilter_return_fields return fields %s " %(return_fields) + print>>sys.stderr, " \r\n \t \tfilter_return_fields return fields %s " %(return_fields) for field in return_fields: #print>>sys.stderr, " \r\n \t \tfield %s " %(field) if field in dict_to_filter: filtered_dict[field] = dict_to_filter[field] - #print>>sys.stderr, " \r\n \t\t filter_return_fields filtered_dict %s " %(filtered_dict) + print>>sys.stderr, " \r\n \t\t filter_return_fields filtered_dict %s " %(filtered_dict) return filtered_dict def parse_filter(list_to_filter, param_filter, type_of_list, return_fields=None) : - list_type = { 'persons': {'str': 'email','int':'person_id'},\ + list_type = { 'persons': {'str': 'hrn','int':'record_id'},\ 'keys':{'int':'key_id'},\ 'site':{'str':'login_base','int':'site_id'},\ 'node':{'str':'hostname','int':'node_id'},\ - 'slice':{'str':'name','int':'slice_id'}} + 'slice':{'str':'slice_hrn','int':'record_id_slice'}} print>>sys.stderr, " \r\n ___ parse_filter param_filter %s type %s return fields %s " %(param_filter,type_of_list, return_fields) if param_filter is None and return_fields is None: @@ -63,7 +63,8 @@ def parse_filter(list_to_filter, param_filter, type_of_list, return_fields=None) #print>>sys.stderr, " \r\n 1tmp_item",tmp_item if type(p_filter) is str: - if item[list_type[type_of_list]['str']] == p_filter : + if item[list_type[type_of_list]['str']] == str(p_filter) : + print>>sys.stderr, " \r\n p_filter %s \t item %s "%(p_filter,item[list_type[type_of_list]['str']]) if return_fields: tmp_item = filter_return_fields(item,return_fields) else: @@ -73,26 +74,49 @@ def parse_filter(list_to_filter, param_filter, type_of_list, return_fields=None) elif type(param_filter) is dict: - stripped_filterdict = strip_dictionnary(param_filter) - - tmp_copy = {} - tmp_copy = item.copy() + #stripped_filterdict = strip_dictionnary(param_filter) + #tmp_copy = {} + #tmp_copy = item.copy() #print>>sys.stderr, " \r\n \t\t ________tmp_copy %s " %(tmp_copy) - key_list = tmp_copy.keys() - for key in key_list: - print>>sys.stderr, " \r\n \t\t key %s " %(key) - if key not in stripped_filterdict.keys(): - del tmp_copy[key] - + #key_list = tmp_copy.keys() + #for key in key_list: + #print>>sys.stderr, " \r\n \t\t key %s " %(key) + #if key not in stripped_filterdict: + #del tmp_copy[key] + + #rif the item matches the filter, returns it + founditem = [] + check = [ True for k in param_filter.keys() if 'id' in k ] + if check : + dflt= defaultdict(str,param_filter) + + else: + dflt= defaultdict(str,param_filter) + + + + #founditem = [ item for k in dflt if item[k] in dflt[k]] + for k in dflt: + if item[k] in dflt[k]: + founditem = [item] + + if founditem: + if return_fields: + print>>sys.stderr, " \r\n \r\n parsing.py param_filter 
dflt %s founditem %s " %(dflt, founditem) + tmp_item = filter_return_fields(founditem[0],return_fields) + else: + tmp_item = founditem[0] + return_filtered_list.append(tmp_item) #print>>sys.stderr, " \r\n tmp_copy %s param_filter %s cmp = %s " %(tmp_copy, param_filter,cmp(tmp_copy, stripped_filterdict)) - if cmp(tmp_copy, stripped_filterdict) == 0: - if return_fields: - tmp_item = filter_return_fields(item,return_fields) - else: + #if cmp(tmp_copy, stripped_filterdict) == 0: + #if return_fields: + #tmp_item = filter_return_fields(item,return_fields) + #else: - tmp_item = item - return_filtered_list.append(tmp_item) - - return return_filtered_list \ No newline at end of file + #tmp_item = item + #return_filtered_list.append(tmp_item) + if return_filtered_list : + return return_filtered_list + \ No newline at end of file diff --git a/sfa/senslab/slab-import.py b/sfa/senslab/slab-import.py index 432c284a..8ff065e5 100644 --- a/sfa/senslab/slab-import.py +++ b/sfa/senslab/slab-import.py @@ -17,6 +17,7 @@ from sfa.trust.gid import create_uuid from sfa.trust.trustedroots import TrustedRoots config = Config() +interface_hrn = config.SFA_INTERFACE_HRN TrustedR = TrustedRoots(Config.get_trustedroots_dir(config)) AuthHierarchy = Hierarchy() table = SfaTable() @@ -111,7 +112,7 @@ def import_node(hrn, node): node_record['date_created'] = int(time.mktime(extime.timetuple())) existing_records = table.find({'hrn': hrn, 'type': 'node', 'pointer': node['node_id']}) if not existing_records: - print>>sys.stderr, " \r\n \t slab-import : node record %s inserted" %(node_record ) + print>>sys.stderr, " \r\n \t slab-import : node record %s inserted" %(node['node_id']) table.insert(node_record) else: existing_record = existing_records[0] @@ -172,7 +173,7 @@ def delete_record( hrn, type): table.remove(record) def hostname_to_hrn(root_auth,login_base,hostname): - return PlXrn(auth=auth,hostname=login_base+'_'+hostname).get_hrn() + return PlXrn(auth=root_auth,hostname=login_base+'_'+hostname).get_hrn() def main(): diff --git a/sfa/senslab/slabdriver.py b/sfa/senslab/slabdriver.py index 5a92a590..998ce30e 100644 --- a/sfa/senslab/slabdriver.py +++ b/sfa/senslab/slabdriver.py @@ -55,10 +55,19 @@ class SlabDriver (): def GetPersons(self, person_filter=None, return_fields=None): - + person_list = self.ldap.ldapFind({'authority': self.root_auth }) + + #check = False + #if person_filter and isinstance(person_filter, dict): + #for k in person_filter.keys(): + #if k in person_list[0].keys(): + #check = True + return_person_list = parse_filter(person_list,person_filter ,'persons', return_fields) - return return_person_list + if return_person_list: + print>>sys.stderr, " \r\n GetPersons person_filter %s return_fields %s return_person_list %s " %(person_filter,return_fields,return_person_list) + return return_person_list def GetNodes(self,node_filter= None, return_fields=None): @@ -73,6 +82,32 @@ class SlabDriver (): return_node_list= parse_filter(node_dict.values(),node_filter ,'node', return_fields) return return_node_list + def GetSites(self, auth, site_filter = None, return_fields=None): + self.oar.parser.SendRequest("GET_resources_full") + site_dict = self.oar.parser.GetSitesFromOARParse() + return_site_list = [] + site = site_dict.values()[0] + if not (site_filter or return_fields): + return_site_list = site_dict.values() + return return_site_list + + return_site_list = parse_filter(site_dict.values(),site_filter ,'site', return_fields) + return return_site_list + + def GetSlices(self,slice_filter = None, 
return_fields=None): + db = SlabDB() + return_slice_list =[] + sliceslist = db.find('slice',columns = ['slice_hrn', 'record_id_slice','record_id_user']) + print >>sys.stderr, " \r\n \r\n SLABDRIVER.PY GetSlices slices %s" %(sliceslist) + #slicesdict = sliceslist[0] + if not (slice_filter or return_fields): + return_slice_list = sliceslist + return return_slice_list + + return_slice_list = parse_filter(sliceslist, slice_filter,'slice', return_fields) + print >>sys.stderr, " \r\n \r\n SLABDRIVER.PY GetSlices return_slice_list %s" %(return_slice_list) + return return_slice_list + ## # Convert SFA fields to PLC fields for use when registering up updating # registry record in the PLC database @@ -208,7 +243,15 @@ class SlabDriver (): records = self.fill_record_hrns(records) return records - + + + + def AddSliceToNodes(self, slice_name, added_nodes): + return + + def DeleteSliceFromNodes(self, slice_name, deleted_nodes): + return + def fill_record_hrns(self, records): """ convert pl ids to hrns @@ -408,10 +451,10 @@ class SlabDriver (): Given a SFA record, fill in the senslab specific and SFA specific fields in the record. """ - + print >>sys.stderr, "\r\n \t\t BEFORE fill_record_pl_info %s" %(records) if isinstance(records, list): records = records[0] - print >>sys.stderr, "\r\n \t\t BEFORE fill_record_pl_info %s" %(records) + #print >>sys.stderr, "\r\n \t\t BEFORE fill_record_pl_info %s" %(records) if records['type'] == 'slice': @@ -429,6 +472,8 @@ class SlabDriver (): records.update({'PI':[recuser['hrn']], 'researcher': [recuser['hrn']], 'name':records['hrn'], 'oar_job_id':recslice['oar_job_id'], + + 'node_ids': [], 'person_ids':[recslice['record_id_user']]}) #self.fill_record_pl_info(records) diff --git a/sfa/senslab/slices.py b/sfa/senslab/slices.py new file mode 100644 index 00000000..6cd46172 --- /dev/null +++ b/sfa/senslab/slices.py @@ -0,0 +1,662 @@ +from types import StringTypes +from collections import defaultdict +import sys +from sfa.util.xrn import get_leaf, get_authority, urn_to_hrn +from sfa.util.plxrn import hrn_to_pl_slicename +from sfa.util.policy import Policy +from sfa.rspecs.rspec import RSpec +from sfa.plc.vlink import VLink +from sfa.util.xrn import Xrn + +MAXINT = 2L**31-1 + +class Slices: + + rspec_to_slice_tag = {'max_rate':'net_max_rate'} + + def __init__(self, api, ttl = .5, origin_hrn=None): + self.api = api + #filepath = path + os.sep + filename + self.policy = Policy(self.api) + self.origin_hrn = origin_hrn + self.registry = api.registries[api.hrn] + self.credential = api.getCredential() + self.nodes = [] + self.persons = [] + + #def get_slivers(self, xrn, node=None): + #hrn, type = urn_to_hrn(xrn) + + #slice_name = hrn_to_pl_slicename(hrn) + ## XX Should we just call PLCAPI.GetSliceTicket(slice_name) instead + ## of doing all of this? 
+ ##return self.api.driver.GetSliceTicket(self.auth, slice_name) + + ## from PLCAPI.GetSlivers.get_slivers() + #slice_fields = ['slice_id', 'name', 'instantiation', 'expires', 'person_ids', 'slice_tag_ids'] + #slices = self.api.driver.GetSlices(slice_name, slice_fields) + ## Build up list of users and slice attributes + #person_ids = set() + #all_slice_tag_ids = set() + #for slice in slices: + #person_ids.update(slice['person_ids']) + #all_slice_tag_ids.update(slice['slice_tag_ids']) + #person_ids = list(person_ids) + #all_slice_tag_ids = list(all_slice_tag_ids) + ## Get user information + #all_persons_list = self.api.driver.GetPersons({'person_id':person_ids,'enabled':True}, ['person_id', 'enabled', 'key_ids']) + #all_persons = {} + #for person in all_persons_list: + #all_persons[person['person_id']] = person + + ## Build up list of keys + #key_ids = set() + #for person in all_persons.values(): + #key_ids.update(person['key_ids']) + #key_ids = list(key_ids) + ## Get user account keys + #all_keys_list = self.api.driver.GetKeys(key_ids, ['key_id', 'key', 'key_type']) + #all_keys = {} + #for key in all_keys_list: + #all_keys[key['key_id']] = key + ## Get slice attributes + #all_slice_tags_list = self.api.driver.GetSliceTags(all_slice_tag_ids) + #all_slice_tags = {} + #for slice_tag in all_slice_tags_list: + #all_slice_tags[slice_tag['slice_tag_id']] = slice_tag + + #slivers = [] + #for slice in slices: + #keys = [] + #for person_id in slice['person_ids']: + #if person_id in all_persons: + #person = all_persons[person_id] + #if not person['enabled']: + #continue + #for key_id in person['key_ids']: + #if key_id in all_keys: + #key = all_keys[key_id] + #keys += [{'key_type': key['key_type'], + #'key': key['key']}] + #attributes = [] + ## All (per-node and global) attributes for this slice + #slice_tags = [] + #for slice_tag_id in slice['slice_tag_ids']: + #if slice_tag_id in all_slice_tags: + #slice_tags.append(all_slice_tags[slice_tag_id]) + ## Per-node sliver attributes take precedence over global + ## slice attributes, so set them first. + ## Then comes nodegroup slice attributes + ## Followed by global slice attributes + #sliver_attributes = [] + + #if node is not None: + #for sliver_attribute in filter(lambda a: a['node_id'] == node['node_id'], slice_tags): + #sliver_attributes.append(sliver_attribute['tagname']) + #attributes.append({'tagname': sliver_attribute['tagname'], + #'value': sliver_attribute['value']}) + + ## set nodegroup slice attributes + #for slice_tag in filter(lambda a: a['nodegroup_id'] in node['nodegroup_ids'], slice_tags): + ## Do not set any nodegroup slice attributes for + ## which there is at least one sliver attribute + ## already set. + #if slice_tag not in slice_tags: + #attributes.append({'tagname': slice_tag['tagname'], + #'value': slice_tag['value']}) + + #for slice_tag in filter(lambda a: a['node_id'] is None, slice_tags): + ## Do not set any global slice attributes for + ## which there is at least one sliver attribute + ## already set. 
+ #if slice_tag['tagname'] not in sliver_attributes: + #attributes.append({'tagname': slice_tag['tagname'], + #'value': slice_tag['value']}) + + ## XXX Sanity check; though technically this should be a system invariant + ## checked with an assertion + #if slice['expires'] > MAXINT: slice['expires']= MAXINT + + #slivers.append({ + #'hrn': hrn, + #'name': slice['name'], + #'slice_id': slice['slice_id'], + #'instantiation': slice['instantiation'], + #'expires': slice['expires'], + #'keys': keys, + #'attributes': attributes + #}) + + #return slivers + + def get_peer(self, xrn): + hrn, type = urn_to_hrn(xrn) + # Becaues of myplc federation, we first need to determine if this + # slice belongs to out local plc or a myplc peer. We will assume it + # is a local site, unless we find out otherwise + peer = None + print>>sys.stderr, " \r\n \r\n \t slices.py get_peer slice_authority " + # get this slice's authority (site) + slice_authority = get_authority(hrn) + + # get this site's authority (sfa root authority or sub authority) + site_authority = get_authority(slice_authority).lower() + print>>sys.stderr, " \r\n \r\n \t slices.py get_peer slice_authority %s site_authority %s" %(slice_authority,site_authority) + # check if we are already peered with this site_authority, if so + #peers = self.api.driver.GetPeers({}, ['peer_id', 'peername', 'shortname', 'hrn_root']) + #for peer_record in peers: + #names = [name.lower() for name in peer_record.values() if isinstance(name, StringTypes)] + #if site_authority in names: + #peer = peer_record + + return peer + + def get_sfa_peer(self, xrn): + hrn, type = urn_to_hrn(xrn) + + # return the authority for this hrn or None if we are the authority + sfa_peer = None + slice_authority = get_authority(hrn) + site_authority = get_authority(slice_authority) + + if site_authority != self.api.hrn: + sfa_peer = site_authority + + return sfa_peer + + def verify_slice_nodes(self, slice, requested_slivers, peer): + current_slivers = [] + if slice['node_ids']: + nodes = self.api.driver.GetNodes(slice['node_ids'], ['hostname']) + current_slivers = [node['hostname'] for node in nodes] + + # remove nodes not in rspec + deleted_nodes = list(set(current_slivers).difference(requested_slivers)) + + # add nodes from rspec + added_nodes = list(set(requested_slivers).difference(current_slivers)) + print>>sys.stderr , "\r\n \r\n \t slices.py verify_slice_nodes added_nodes" , added_nodes + try: + if peer: + self.api.driver.UnBindObjectFromPeer('slice', slice['slice_id'], peer['shortname']) + self.api.driver.AddSliceToNodes(slice['name'], added_nodes) + if deleted_nodes: + self.api.driver.DeleteSliceFromNodes(slice['name'], deleted_nodes) + + except: + self.api.logger.log_exc('Failed to add/remove slice from nodes') + + def free_egre_key(self): + used = set() + for tag in self.api.driver.GetSliceTags({'tagname': 'egre_key'}): + used.add(int(tag['value'])) + + for i in range(1, 256): + if i not in used: + key = i + break + else: + raise KeyError("No more EGRE keys available") + + return str(key) + + def verify_slice_links(self, slice, links, aggregate): + + return + + + + + + def handle_peer(self, site, slice, persons, peer): + if peer: + # bind site + try: + if site: + self.api.driver.BindObjectToPeer('site', site['site_id'], peer['shortname'], slice['site_id']) + except Exception,e: + self.api.driver.DeleteSite(site['site_id']) + raise e + + # bind slice + try: + if slice: + self.api.driver.BindObjectToPeer('slice', slice['slice_id'], peer['shortname'], slice['slice_id']) + except 
Exception,e: + self.api.driver.DeleteSlice(slice['slice_id']) + raise e + + # bind persons + for person in persons: + try: + self.api.driver.BindObjectToPeer('person', + person['person_id'], peer['shortname'], person['peer_person_id']) + + for (key, remote_key_id) in zip(person['keys'], person['key_ids']): + try: + self.api.driver.BindObjectToPeer( 'key', key['key_id'], peer['shortname'], remote_key_id) + except: + self.api.driver.DeleteKey(key['key_id']) + self.api.logger("failed to bind key: %s to peer: %s " % (key['key_id'], peer['shortname'])) + except Exception,e: + self.api.driver.DeletePerson(person['person_id']) + raise e + + return slice + + def verify_site(self, slice_xrn, slice_record={}, peer=None, sfa_peer=None): + (slice_hrn, type) = urn_to_hrn(slice_xrn) + site_hrn = get_authority(slice_hrn) + # login base can't be longer than 20 characters + slicename = hrn_to_pl_slicename(slice_hrn) + authority_name = slicename.split('_')[0] + login_base = authority_name[:20] + sites = self.api.driver.GetSites(login_base) + if not sites: + # create new site record + site = {'name': 'geni.%s' % authority_name, + 'abbreviated_name': authority_name, + 'login_base': login_base, + 'max_slices': 100, + 'max_slivers': 1000, + 'enabled': True, + 'peer_site_id': None} + if peer: + site['peer_site_id'] = slice_record.get('site_id', None) + site['site_id'] = self.api.driver.AddSite(site) + # exempt federated sites from monitor policies + self.api.driver.AddSiteTag(site['site_id'], 'exempt_site_until', "20200101") + + # is this still necessary? + # add record to the local registry + if sfa_peer and slice_record: + peer_dict = {'type': 'authority', 'hrn': site_hrn, \ + 'peer_authority': sfa_peer, 'pointer': site['site_id']} + self.registry.register_peer_object(self.credential, peer_dict) + else: + site = sites[0] + if peer: + # unbind from peer so we can modify if necessary. Will bind back later + self.api.driver.UnBindObjectFromPeer('site', site['site_id'], peer['shortname']) + + return site + + def verify_slice(self, slice_hrn, slice_record, peer, sfa_peer): + #slicename = hrn_to_pl_slicename(slice_hrn) + parts = hrn_to_pl_slicename(slice_hrn).split("_") + login_base = parts[0] + slicename = slice_hrn + slices = self.api.driver.GetSlices([slicename]) + print>>sys.stderr, " \r\n \r\rn Slices.py verify_slice slicename %s slices %s slice_record %s"%(slicename ,slices, slice_record) + if not slices: + slice = {'name': slicename, + 'url': slice_record.get('url', slice_hrn), + #'description': slice_record.get('description', slice_hrn) + } + # add the slice + slice['slice_id'] = self.api.driver.AddSlice(slice) + slice['node_ids'] = [] + slice['person_ids'] = [] + if peer: + slice['peer_slice_id'] = slice_record.get('slice_id', None) + # mark this slice as an sfa peer record + if sfa_peer: + peer_dict = {'type': 'slice', 'hrn': slice_hrn, + 'peer_authority': sfa_peer, 'pointer': slice['slice_id']} + self.registry.register_peer_object(self.credential, peer_dict) + else: + slice = slices[0] + slice.update(slice_record) + del slice['last_updated'] + del slice['date_created'] + if peer: + slice['peer_slice_id'] = slice_record.get('slice_id', None) + # unbind from peer so we can modify if necessary. Will bind back later + self.api.driver.UnBindObjectFromPeer('slice', slice['slice_id'], peer['shortname']) + #Update existing record (e.g. expires field) it with the latest info. 
+ #if slice_record and slice['expires'] != slice_record['expires']: + #self.api.driver.UpdateSlice( slice['slice_id'], {'expires' : slice_record['expires']}) + + return slice + + #def get_existing_persons(self, users): + def verify_persons(self, slice_hrn, slice_record, users, append=True): + users_by_id = {} + users_by_hrn = {} + users_dict = {} + + for user in users: + if 'person_id' in user and 'hrn' in user: + users_by_id[user['person_id']] = user + users_dict[user['person_id']] = {'person_id':user['person_id'], 'hrn':user['hrn']} + + #hrn, type = urn_to_hrn(user['urn']) + #username = get_leaf(hrn) + #login_base = get_leaf(get_authority(user['urn'])) + #user['username'] = username + #users_by_site[login_base].append(user) + users_by_hrn[user['hrn']] = user + users_dict[user['hrn']] = {'person_id':user['person_id'], 'hrn':user['hrn']} + + existing_user_ids = [] + existing_users= [] + if users_by_hrn: + # get existing users by email + + existing_users = self.api.driver.GetPersons({'hrn': users_by_hrn.keys()}, + ['hrn']) + #print>>sys.stderr, " \r\n \r\n \t slices.py HEEEEEEEEY===========verify_person existing_users %s users_dict %s " %(existing_users, users_dict) + #existing_user_ids = [(users_dict[user['hrn']]['hrn'],users_dict[user['hrn']]['person_id'] ) for user in existing_users] + for user in existing_users : + for k in users_dict[user['hrn']] : + existing_user_ids.append (users_dict[user['hrn']][k]) + + #print>>sys.stderr, " \r\n \r\n slices.py verify_person existing_user_ids %s " %(existing_user_ids) + #if users_by_id: + #existing_user_ids.extend([user for user in users_by_id]) + #if users_by_site: + ## get a list of user sites (based on requeste user urns + #site_list = self.api.driver.GetSites(users_by_site.keys(), \ + #['site_id', 'login_base', 'person_ids']) + #sites = {} + #site_user_ids = [] + + ## get all existing users at these sites + #for site in site_list: + #sites[site['site_id']] = site + #site_user_ids.extend(site['person_ids']) + + #existing_site_persons_list = self.api.driver.GetPersons(site_user_ids, + #['person_id', 'key_ids', 'email', 'site_ids']) + + ## all requested users are either existing users or new (added) users + #for login_base in users_by_site: + #requested_site_users = users_by_site[login_base] + #for requested_user in requested_site_users: + #user_found = False + #for existing_user in existing_site_persons_list: + #for site_id in existing_user['site_ids']: + #site = sites[site_id] + #if login_base == site['login_base'] and \ + #existing_user['email'].startswith(requested_user['username']): + #existing_user_ids.append(existing_user['email']) + #users_dict[existing_user['email']] = requested_user + #user_found = True + #break + #if user_found: + #break + + #if user_found == False: + #fake_email = requested_user['username'] + '@geni.net' + #users_dict[fake_email] = requested_user + + + # requested slice users + requested_user_ids = users_dict.keys() + # existing slice users + existing_slice_users_filter = {'hrn': slice_record.get('PI', [])} + #print>>sys.stderr, " \r\n \r\n slices.py verify_person requested_user_ids %s existing_slice_users_filter %s slice_record %s" %(requested_user_ids,existing_slice_users_filter,slice_record) + + existing_slice_users = self.api.driver.GetPersons(existing_slice_users_filter,['hrn']) + existing_slice_user_ids = [] + for user in existing_slice_users : + for k in users_dict[user['hrn']] : + existing_slice_user_ids.append (users_dict[user['hrn']][k]) + #existing_slice_user_ids = [user['hrn'] for user in 
existing_slice_users] + + #print>>sys.stderr, " \r\n \r\n slices.py verify_person requested_user_ids %s existing_slice_user_ids%s " %(requested_user_ids,existing_slice_user_ids) + # users to be added, removed or updated + added_user_ids = set(requested_user_ids).difference(set(existing_user_ids)) + added_slice_user_ids = set(requested_user_ids).difference(existing_slice_user_ids) + removed_user_ids = set(existing_slice_user_ids).difference(requested_user_ids) + #print>>sys.stderr, " \r\n \r\n slices.py verify_persons existing_slice_user_ids %s requested_user_ids %s " %(existing_slice_user_ids,requested_user_ids) + updated_user_ids = set(existing_slice_user_ids).intersection(requested_user_ids) + #print>>sys.stderr, " \r\n \r\n slices.py verify_persons added_user_ids %s added_slice_user_ids %s " %(added_user_ids,added_slice_user_ids) + #print>>sys.stderr, " \r\n \r\n slices.py verify_persons removed_user_ids %s updated_user_ids %s " %(removed_user_ids,updated_user_ids) + # Remove stale users (only if we are not appending). + if append == False: + for removed_user_id in removed_user_ids: + self.api.driver.DeletePersonFromSlice(removed_user_id, slice_record['name']) + # update_existing users + updated_users_list = [user for user in existing_slice_users if user['hrn'] in \ + updated_user_ids] + #self.verify_keys(existing_slice_users, updated_users_list, peer, append) + + added_persons = [] + # add new users + for added_user_id in added_user_ids: + added_user = users_dict[added_user_id] + #hrn, type = urn_to_hrn(added_user['urn']) + person = { + #'first_name': added_user.get('first_name', hrn), + #'last_name': added_user.get('last_name', hrn), + 'person_id': added_user_id, + #'peer_person_id': None, + #'keys': [], + #'key_ids': added_user.get('key_ids', []), + + } + #print>>sys.stderr, " \r\n \r\n slices.py verify_persons added_user_ids %s " %(added_user_ids) + person['person_id'] = self.api.driver.AddPerson(person) + if peer: + person['peer_person_id'] = added_user['person_id'] + added_persons.append(person) + + # enable the account + self.api.driver.UpdatePerson(person['person_id'], {'enabled': True}) + + # add person to site + #self.api.driver.AddPersonToSite(added_user_id, login_base) + + #for key_string in added_user.get('keys', []): + #key = {'key':key_string, 'key_type':'ssh'} + #key['key_id'] = self.api.driver.AddPersonKey(person['person_id'], key) + #person['keys'].append(key) + + # add the registry record + #if sfa_peer: + #peer_dict = {'type': 'user', 'hrn': hrn, 'peer_authority': sfa_peer, \ + #'pointer': person['person_id']} + #self.registry.register_peer_object(self.credential, peer_dict) + + for added_slice_user_id in added_slice_user_ids.union(added_user_ids): + # add person to the slice + self.api.driver.AddPersonToSlice(added_slice_user_id, slice_record['name']) + # if this is a peer record then it should already be bound to a peer. 
+ # no need to return worry about it getting bound later + + return added_persons + + + def verify_keys(self, persons, users, peer, append=True): + # existing keys + key_ids = [] + for person in persons: + key_ids.extend(person['key_ids']) + keylist = self.api.driver.GetKeys(key_ids, ['key_id', 'key']) + keydict = {} + for key in keylist: + keydict[key['key']] = key['key_id'] + existing_keys = keydict.keys() + persondict = {} + for person in persons: + persondict[person['email']] = person + + # add new keys + requested_keys = [] + updated_persons = [] + for user in users: + user_keys = user.get('keys', []) + updated_persons.append(user) + for key_string in user_keys: + requested_keys.append(key_string) + if key_string not in existing_keys: + key = {'key': key_string, 'key_type': 'ssh'} + try: + if peer: + person = persondict[user['email']] + self.api.driver.UnBindObjectFromPeer('person', person['person_id'], peer['shortname']) + key['key_id'] = self.api.driver.AddPersonKey(user['email'], key) + if peer: + key_index = user_keys.index(key['key']) + remote_key_id = user['key_ids'][key_index] + self.api.driver.BindObjectToPeer('key', key['key_id'], peer['shortname'], remote_key_id) + + finally: + if peer: + self.api.driver.BindObjectToPeer('person', person['person_id'], peer['shortname'], user['person_id']) + + # remove old keys (only if we are not appending) + if append == False: + removed_keys = set(existing_keys).difference(requested_keys) + for existing_key_id in keydict: + if keydict[existing_key_id] in removed_keys: + try: + if peer: + self.api.driver.UnBindObjectFromPeer('key', existing_key_id, peer['shortname']) + self.api.driver.DeleteKey(existing_key_id) + except: + pass + + def verify_slice_attributes(self, slice, requested_slice_attributes, append=False, admin=False): + # get list of attributes users ar able to manage + filter = {'category': '*slice*'} + if not admin: + filter['|roles'] = ['user'] + slice_attributes = self.api.driver.GetTagTypes(filter) + valid_slice_attribute_names = [attribute['tagname'] for attribute in slice_attributes] + + # get sliver attributes + added_slice_attributes = [] + removed_slice_attributes = [] + ignored_slice_attribute_names = [] + existing_slice_attributes = self.api.driver.GetSliceTags({'slice_id': slice['slice_id']}) + + # get attributes that should be removed + for slice_tag in existing_slice_attributes: + if slice_tag['tagname'] in ignored_slice_attribute_names: + # If a slice already has a admin only role it was probably given to them by an + # admin, so we should ignore it. 
+ ignored_slice_attribute_names.append(slice_tag['tagname']) + else: + # If an existing slice attribute was not found in the request it should + # be removed + attribute_found=False + for requested_attribute in requested_slice_attributes: + if requested_attribute['name'] == slice_tag['tagname'] and \ + requested_attribute['value'] == slice_tag['value']: + attribute_found=True + break + + if not attribute_found and not append: + removed_slice_attributes.append(slice_tag) + + # get attributes that should be added: + for requested_attribute in requested_slice_attributes: + # if the requested attribute wasn't found we should add it + if requested_attribute['name'] in valid_slice_attribute_names: + attribute_found = False + for existing_attribute in existing_slice_attributes: + if requested_attribute['name'] == existing_attribute['tagname'] and \ + requested_attribute['value'] == existing_attribute['value']: + attribute_found=True + break + if not attribute_found: + added_slice_attributes.append(requested_attribute) + + + # remove stale attributes + for attribute in removed_slice_attributes: + try: + self.api.driver.DeleteSliceTag(attribute['slice_tag_id']) + except Exception, e: + self.api.logger.warn('Failed to remove sliver attribute. name: %s, value: %s, node_id: %s\nCause:%s'\ + % (name, value, node_id, str(e))) + + # add requested_attributes + for attribute in added_slice_attributes: + try: + self.api.driver.AddSliceTag(slice['name'], attribute['name'], attribute['value'], attribute.get('node_id', None)) + except Exception, e: + self.api.logger.warn('Failed to add sliver attribute. name: %s, value: %s, node_id: %s\nCause:%s'\ + % (name, value, node_id, str(e))) + + def create_slice_aggregate(self, xrn, rspec): + hrn, type = urn_to_hrn(xrn) + # Determine if this is a peer slice + peer = self.get_peer(hrn) + sfa_peer = self.get_sfa_peer(hrn) + + spec = RSpec(rspec) + # Get the slice record from sfa + slicename = hrn_to_pl_slicename(hrn) + slice = {} + slice_record = None + registry = self.api.registries[self.api.hrn] + credential = self.api.getCredential() + + site_id, remote_site_id = self.verify_site(registry, credential, hrn, peer, sfa_peer) + slice = self.verify_slice(registry, credential, hrn, site_id, remote_site_id, peer, sfa_peer) + + # find out where this slice is currently running + nodelist = self.api.driver.GetNodes(slice['node_ids'], ['hostname']) + hostnames = [node['hostname'] for node in nodelist] + + # get netspec details + nodespecs = spec.getDictsByTagName('NodeSpec') + + # dict in which to store slice attributes to set for the nodes + nodes = {} + for nodespec in nodespecs: + if isinstance(nodespec['name'], list): + for nodename in nodespec['name']: + nodes[nodename] = {} + for k in nodespec.keys(): + rspec_attribute_value = nodespec[k] + if (self.rspec_to_slice_tag.has_key(k)): + slice_tag_name = self.rspec_to_slice_tag[k] + nodes[nodename][slice_tag_name] = rspec_attribute_value + elif isinstance(nodespec['name'], StringTypes): + nodename = nodespec['name'] + nodes[nodename] = {} + for k in nodespec.keys(): + rspec_attribute_value = nodespec[k] + if (self.rspec_to_slice_tag.has_key(k)): + slice_tag_name = self.rspec_to_slice_tag[k] + nodes[nodename][slice_tag_name] = rspec_attribute_value + + for k in nodespec.keys(): + rspec_attribute_value = nodespec[k] + if (self.rspec_to_slice_tag.has_key(k)): + slice_tag_name = self.rspec_to_slice_tag[k] + nodes[nodename][slice_tag_name] = rspec_attribute_value + + node_names = nodes.keys() + # remove nodes not in rspec + 
deleted_nodes = list(set(hostnames).difference(node_names)) + # add nodes from rspec + added_nodes = list(set(node_names).difference(hostnames)) + + try: + if peer: + self.api.driver.UnBindObjectFromPeer('slice', slice['slice_id'], peer) + + self.api.driver.AddSliceToNodes(slicename, added_nodes) + + # Add recognized slice tags + for node_name in node_names: + node = nodes[node_name] + for slice_tag in node.keys(): + value = node[slice_tag] + if (isinstance(value, list)): + value = value[0] + + self.api.driver.AddSliceTag(slicename, slice_tag, value, node_name) + + self.api.driver.DeleteSliceFromNodes(slicename, deleted_nodes) + finally: + if peer: + self.api.driver.BindObjectToPeer('slice', slice['slice_id'], peer, slice['peer_slice_id']) + + return 1 + diff --git a/sfa/trust/auth.py b/sfa/trust/auth.py index 43af7403..8e86eb42 100644 --- a/sfa/trust/auth.py +++ b/sfa/trust/auth.py @@ -40,6 +40,7 @@ class Auth: valid = [] if not isinstance(creds, list): creds = [creds] + #print>>sys.stderr, "\r\n \r\n \t AUTH.PY checkCredentials hrn %s" %(hrn) logger.debug("Auth.checkCredentials with %d creds"%len(creds)) for cred in creds: try: @@ -68,7 +69,7 @@ class Auth: self.client_cred = Credential(string = cred) self.client_gid = self.client_cred.get_gid_caller() self.object_gid = self.client_cred.get_gid_object() - + #print>>sys.stderr, " \r\n \r\n \t AUTH.PY check client_gid %s hrn %s object_gid %s" %(self.client_gid.get_hrn(),hrn, self.object_gid.get_hrn()) # make sure the client_gid is not blank if not self.client_gid: raise MissingCallerGID(self.client_cred.get_subject()) @@ -78,19 +79,25 @@ class Auth: self.verifyPeerCert(self.peer_cert, self.client_gid) # make sure the client is allowed to perform the operation - if operation: + if operation: + #print>>sys.stderr, " \r\n \r\n \t AUTH.PY check operation %s trusted_cert_list %s " %(operation,self.trusted_cert_list) if not self.client_cred.can_perform(operation): + #print>>sys.stderr, " \r\n \r\n \t AUTH.PY InsufficientRights(operation)" raise InsufficientRights(operation) if self.trusted_cert_list: self.client_cred.verify(self.trusted_cert_file_list, self.config.SFA_CREDENTIAL_SCHEMA) + #print>>sys.stderr, " \r\n \r\n \t AUTH.PY check trusted_cert_file_list %s self.config.SFA_CREDENTIAL_SCHEMA %s" %(self.trusted_cert_file_list, self.config.SFA_CREDENTIAL_SCHEMA) + else: raise MissingTrustedRoots(self.config.get_trustedroots_dir()) # Make sure the credential's target matches the specified hrn. 
# This check does not apply to trusted peers trusted_peers = [gid.get_hrn() for gid in self.trusted_cert_list] + #print>>sys.stderr, " \r\n \r\n \t AUTH.PY check trusted_peers ", trusted_peers if hrn and self.client_gid.get_hrn() not in trusted_peers: + target_hrn = self.object_gid.get_hrn() if not hrn == target_hrn: raise PermissionError("Target hrn: %s doesn't match specified hrn: %s " % \ @@ -233,7 +240,7 @@ class Auth: return #if name.startswith(get_authority(name)): #return - print>>sys.stderr, " \r\n \t AUTH.PY verify_object_permission GROSECHECDELENFER " + #print>>sys.stderr, " \r\n \t AUTH.PY verify_object_permission GROSECHECDELENFER " raise PermissionError(name) def determine_user_rights(self, caller_hrn, record): diff --git a/sfa/trust/credential.py b/sfa/trust/credential.py index 8fd11e8e..7f347574 100644 --- a/sfa/trust/credential.py +++ b/sfa/trust/credential.py @@ -26,7 +26,7 @@ # Credentials are signed XML files that assign a subject gid privileges to an object gid ## -import os +import os,sys from types import StringTypes import datetime from StringIO import StringIO @@ -160,8 +160,10 @@ class Signature(object): def get_refid(self): + #print>>sys.stderr," \r\n \r\n credential.py Signature get_refid\ self.refid %s " %(self.refid) if not self.refid: self.decode() + #print>>sys.stderr," \r\n \r\n credential.py Signature get_refid self.refid %s " %(self.refid) return self.refid def get_xml(self): @@ -588,18 +590,23 @@ class Credential(object): def updateRefID(self): if not self.parent: - self.set_refid('ref0') + self.set_refid('ref0') + #print>>sys.stderr, " \r\n \r\n updateRefID next_cred ref0 " return [] refs = [] next_cred = self.parent + while next_cred: + refs.append(next_cred.get_refid()) if next_cred.parent: next_cred = next_cred.parent + #print>>sys.stderr, " \r\n \r\n updateRefID next_cred " else: next_cred = None + #print>>sys.stderr, " \r\n \r\n updateRefID next_cred NONE" # Find a unique refid for this credential @@ -804,10 +811,12 @@ class Credential(object): # Failures here include unreadable files # or non PEM files trusted_cert_objects.append(GID(filename=f)) + #print>>sys.stderr, " \r\n \t\t\t credential.py verify trusted_certs %s" %(GID(filename=f).get_hrn()) ok_trusted_certs.append(f) except Exception, exc: logger.error("Failed to load trusted cert from %s: %r", f, exc) trusted_certs = ok_trusted_certs + #print>>sys.stderr, " \r\n \t\t\t credential.py verify trusted_certs elemnebts %s" %(len(trusted_certs)) # Use legacy verification if this is a legacy credential if self.legacy: @@ -833,7 +842,8 @@ class Credential(object): # Verify the gids of this cred and of its parents for cur_cred in self.get_credential_list(): cur_cred.get_gid_object().verify_chain(trusted_cert_objects) - cur_cred.get_gid_caller().verify_chain(trusted_cert_objects) + cur_cred.get_gid_caller().verify_chain(trusted_cert_objects) + #print>>sys.stderr, " \r\n \t\t\t credential.py verify cur_cred get_gid_object hrn %s get_gid_caller %s" %(cur_cred.get_gid_object().get_hrn(),cur_cred.get_gid_caller().get_hrn()) refs = [] refs.append("Sig_%s" % self.get_refid()) @@ -841,7 +851,7 @@ class Credential(object): parentRefs = self.updateRefID() for ref in parentRefs: refs.append("Sig_%s" % ref) - + #print>>sys.stderr, " \r\n \t\t\t credential.py verify trusted_certs refs", ref for ref in refs: # If caller explicitly passed in None that means skip xmlsec1 validation. 
# Strange and not typical @@ -852,6 +862,7 @@ class Credential(object): # (self.xmlsec_path, ref, cert_args, filename) verified = os.popen('%s --verify --node-id "%s" %s %s 2>&1' \ % (self.xmlsec_path, ref, cert_args, filename)).read() + #print>>sys.stderr, " \r\n \t\t\t credential.py verify filename %s verified %s " %(filename,verified) if not verified.strip().startswith("OK"): # xmlsec errors have a msg= which is the interesting bit. mstart = verified.find("msg=") @@ -862,11 +873,12 @@ class Credential(object): msg = verified[mstart:mend] raise CredentialNotVerifiable("xmlsec1 error verifying cred %s using Signature ID %s: %s %s" % (self.get_summary_tostring(), ref, msg, verified.strip())) os.remove(filename) - + + #print>>sys.stderr, " \r\n \t\t\t credential.py HUMMM parents %s", self.parent # Verify the parents (delegation) if self.parent: self.verify_parent(self.parent) - + #print>>sys.stderr, " \r\n \t\t\t credential.py verify trusted_certs parents" # Make sure the issuer is the target's authority, and is # itself a valid GID self.verify_issuer(trusted_cert_objects) @@ -969,6 +981,7 @@ class Credential(object): # . The expiry time on the child must be no later than the parent # . The signer of the child must be the owner of the parent def verify_parent(self, parent_cred): + #print>>sys.stderr, " \r\n\r\n \t verify_parent parent_cred.get_gid_caller().save_to_string(False) %s self.get_signature().get_issuer_gid().save_to_string(False) %s" %(parent_cred.get_gid_caller().get_hrn(),self.get_signature().get_issuer_gid().get_hrn()) # make sure the rights given to the child are a subset of the # parents rights (and check delegate bits) if not parent_cred.get_privileges().is_superset(self.get_privileges()): diff --git a/sfa/util/xrn.py b/sfa/util/xrn.py index 1f506289..26795129 100644 --- a/sfa/util/xrn.py +++ b/sfa/util/xrn.py @@ -22,7 +22,7 @@ #---------------------------------------------------------------------- import re - +import sys from sfa.util.faults import SfaAPIError # for convenience and smoother translation - we should get rid of these functions eventually @@ -116,16 +116,19 @@ class Xrn: # provide either urn, or (hrn + type) def __init__ (self, xrn, type=None): if not xrn: xrn = "" + # user has specified xrn : guess if urn or hrn if xrn.startswith(Xrn.URN_PREFIX): self.hrn=None self.urn=xrn self.urn_to_hrn() + #print>>sys.stderr, " \r\n \r\n \t XRN.PY init xrn.startswith(Xrn.URN_PREFIX) hrn %s urn %s type %s" %( self.hrn, self.urn, self.type) else: self.urn=None self.hrn=xrn self.type=type self.hrn_to_urn() + #print>>sys.stderr, " \r\n \r\n \t XRN.PY init ELSE hrn %s urn %s type %s" %( self.hrn, self.urn, self.type) # happens all the time .. # if not type: # debug_logger.debug("type-less Xrn's are not safe") @@ -136,13 +139,16 @@ class Xrn: def get_hrn_type(self): return (self.hrn, self.type) def _normalize(self): + #print>>sys.stderr, " \r\n \r\n \t XRN.PY _normalize self.hrn %s ",self.hrn if self.hrn is None: raise SfaAPIError, "Xrn._normalize" if not hasattr(self,'leaf'): self.leaf=Xrn.hrn_split(self.hrn)[-1] # self.authority keeps a list if not hasattr(self,'authority'): self.authority=Xrn.hrn_auth_list(self.hrn) - + #print>>sys.stderr, " \r\n \r\n \t XRN.PY _normalize self.hrn %s leaf %s authority %s"%(self.hrn, self.leaf, self.authority) + + def get_leaf(self): self._normalize() return self.leaf -- 2.47.0
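
Note (not part of the patch): the reworked `parse_filter()` in `sfa/senslab/parsing.py` above moves from a `cmp()`-based dictionary comparison to matching each record against the keys of the filter dict (via `defaultdict(str)` and an `in` test), then trimming matches with `filter_return_fields()`. The following standalone sketch illustrates that general idea only; the helper name and simplified signature are illustrative assumptions, and it uses plain equality per key where the patch's code uses an `in` (membership/substring) test.

```python
# Illustrative sketch of dict-filter record matching (not the patch's code):
# keep the records whose fields match every key of the filter, then optionally
# reduce each match to the requested return fields.

def match_records(records, record_filter=None, return_fields=None):
    """Return the records matching record_filter, reduced to return_fields."""
    def trim(record):
        if not return_fields:
            return record
        # keep only the requested fields the record actually carries
        return dict((field, record[field]) for field in return_fields
                    if field in record)

    if not record_filter:
        return [trim(record) for record in records]

    matches = []
    for record in records:
        # a record matches when every filter key is present and equal
        if all(record.get(key) == value for key, value in record_filter.items()):
            matches.append(trim(record))
    return matches


if __name__ == '__main__':
    slices = [
        {'slice_hrn': 'senslab.slice1', 'record_id_slice': 1, 'record_id_user': 10},
        {'slice_hrn': 'senslab.slice2', 'record_id_slice': 2, 'record_id_user': 11},
    ]
    # hypothetical usage: filter on slice_hrn, return only record_id_slice
    print(match_records(slices, {'slice_hrn': 'senslab.slice1'}, ['record_id_slice']))
    # -> [{'record_id_slice': 1}]
```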