X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=sfa%2Fmanagers%2Fslice_manager_pl.py;h=c6531d2975bf2f6ce93f5128f608622ed94b7ad2;hb=4ff67c801ceeb1d0c7ca2863c2b7bf8152182b8f;hp=03bd994174bc7023f25546f369918d579c945c4d;hpb=ea995a055eba04aedff577e86652abaaa5e881aa;p=sfa.git

diff --git a/sfa/managers/slice_manager_pl.py b/sfa/managers/slice_manager_pl.py
index 03bd9941..c6531d29 100644
--- a/sfa/managers/slice_manager_pl.py
+++ b/sfa/managers/slice_manager_pl.py
@@ -1,6 +1,4 @@
-### $Id: slices.py 15842 2009-11-22 09:56:13Z anil $
-### $URL: https://svn.planet-lab.org/svn/sfa/trunk/sfa/plc/slices.py $
-
+#
 import sys
 import time,datetime
 from StringIO import StringIO
@@ -11,7 +9,8 @@ from lxml import etree
 
 from sfa.util.sfalogging import sfa_logger
 from sfa.util.rspecHelper import merge_rspecs
-from sfa.util.namespace import *
+from sfa.util.xrn import Xrn, urn_to_hrn, hrn_to_urn
+from sfa.util.plxrn import hrn_to_pl_slicename
 from sfa.util.rspec import *
 from sfa.util.specdict import *
 from sfa.util.faults import *
@@ -23,21 +22,42 @@ from sfa.trust.credential import Credential
 from sfa.util.threadmanager import ThreadManager
 import sfa.util.xmlrpcprotocol as xmlrpcprotocol
 import sfa.plc.peers as peers
+from sfa.util.version import version_core
+from sfa.util.callids import Callids
+
+# XX FIX ME: should merge result from multiple aggregates instead of
+# calling aggregate implementation
+from sfa.managers.aggregate_manager_pl import slice_status
+
+# we have specialized xmlrpclib.ServerProxy to remember the input url
+# OTOH it's not clear if we're only dealing with XMLRPCServerProxy instances
+def get_serverproxy_url (server):
+    try:
+        return server.url
+    except:
+        sfa_logger().warning("GetVersion, falling back to xmlrpclib.ServerProxy internals")
+        return server._ServerProxy__host + server._ServerProxy__handler
+
+def GetVersion(api):
+    # peers explicitly in aggregates.xml
+    peers =dict ([ (peername,get_serverproxy_url(v)) for (peername,v) in api.aggregates.iteritems()
+                   if peername != api.hrn])
+    xrn=Xrn (api.hrn)
+    sm_version=version_core({'interface':'slicemgr',
+                             'hrn' : xrn.get_hrn(),
+                             'urn' : xrn.get_urn(),
+                             'peers': peers,
+                             })
+    # local aggregate if present needs to have localhost resolved
+    if api.hrn in api.aggregates:
+        local_am_url=get_serverproxy_url(api.aggregates[api.hrn])
+        sm_version['peers'][api.hrn]=local_am_url.replace('localhost',sm_version['hostname'])
+    return sm_version
+
+def CreateSliver(api, xrn, creds, rspec, users, call_id):
+
+    if Callids().already_handled(call_id): return ""
 
-def get_version():
-    version = {}
-    version['geni_api'] = 1
-    version['sfa'] = 1
-    return version
-
-def slice_status(api, slice_xrn, creds ):
-    result = {}
-    result['geni_urn'] = slice_xrn
-    result['geni_status'] = 'unknown'
-    result['geni_resources'] = {}
-    return result
-
-def create_slice(api, xrn, creds, rspec, users):
     hrn, type = urn_to_hrn(xrn)
 
     # Validate the RSpec against PlanetLab's schema --disabled for now
@@ -76,7 +96,7 @@ def create_slice(api, xrn, creds, rspec, users):
 
         # Just send entire RSpec to each aggregate
         server = api.aggregates[aggregate]
-        threads.run(server.CreateSliver, xrn, credential, rspec, users)
+        threads.run(server.CreateSliver, xrn, credential, rspec, users, call_id)
 
     results = threads.get_results()
     merged_rspec = merge_rspecs(results)
@@ -86,7 +106,7 @@ def renew_slice(api, xrn, creds, expiration_time):
     hrn, type = urn_to_hrn(xrn)
 
     # get the callers hrn
-    valid_cred = api.auth.checkCredentials(creds, 'renewesliver', hrn)[0]
+    valid_cred = api.auth.checkCredentials(creds, 'renewsliver', hrn)[0]
     caller_hrn = Credential(string=valid_cred).get_gid_caller().get_hrn()
 
     # attempt to use delegated credential first
@@ -101,7 +121,7 @@ def renew_slice(api, xrn, creds, expiration_time):
             continue
 
         server = api.aggregates[aggregate]
-        threads.run(server.RenewSliver, xrn, credential, expiration_time)
+        threads.run(server.RenewSliver, xrn, [credential], expiration_time)
     threads.get_results()
     return 1
 
@@ -124,7 +144,7 @@ def get_ticket(api, xrn, creds, rspec, users):
     if not credential:
         credential = api.getCredential()
     threads = ThreadManager()
-    for aggregate, aggregate_rspec in aggregate_rspecs.items():
+    for (aggregate, aggregate_rspec) in aggregate_rspecs.iteritems():
         # prevent infinite loop. Dont send request back to caller
         # unless the caller is the aggregate's SM
         if caller_hrn == aggregate and aggregate != api.hrn:
@@ -304,27 +324,35 @@ def get_slices(api, creds):
         api.cache.add('slices', slices)
 
     return slices
-
-def get_rspec(api, creds, options):
-
+
+
+# Thierry : caching at the slicemgr level makes sense to some extent
+caching=True
+#caching=False
+def ListResources(api, creds, options, call_id):
+
+    if Callids().already_handled(call_id):
+        api.logger.info("%d received ListResources with known call_id %s"%(api.interface,call_id))
+        return ""
+
     # get slice's hrn from options
-    xrn = options.get('geni_slice_urn', None)
-    hrn, type = urn_to_hrn(xrn)
+    xrn = options.get('geni_slice_urn', '')
+    (hrn, type) = urn_to_hrn(xrn)
 
     # get hrn of the original caller
     origin_hrn = options.get('origin_hrn', None)
     if not origin_hrn:
-        origin_hrn = Credential(string=creds[0]).get_gid_caller().get_hrn()
+        if isinstance(creds, list):
+            origin_hrn = Credential(string=creds[0]).get_gid_caller().get_hrn()
+        else:
+            origin_hrn = Credential(string=creds).get_gid_caller().get_hrn()
 
     # look in cache first
-    if api.cache and not xrn:
+    if caching and api.cache and not xrn:
         rspec = api.cache.get('nodes')
         if rspec:
             return rspec
 
-    hrn, type = urn_to_hrn(xrn)
-    rspec = None
-
     # get the callers hrn
     valid_cred = api.auth.checkCredentials(creds, 'listnodes', hrn)[0]
     caller_hrn = Credential(string=valid_cred).get_gid_caller().get_hrn()
@@ -343,41 +371,23 @@ def get_rspec(api, creds, options):
         server = api.aggregates[aggregate]
         my_opts = copy(options)
         my_opts['geni_compressed'] = False
-        threads.run(server.ListResources, credential, my_opts)
+        threads.run(server.ListResources, credential, my_opts, call_id)
         #threads.run(server.get_resources, cred, xrn, origin_hrn)
 
     results = threads.get_results()
 
-    # combine the rspecs into a single rspec
-    for agg_rspec in results:
-        try:
-            tree = etree.parse(StringIO(agg_rspec))
-        except etree.XMLSyntaxError:
-            message = str(agg_rspec) + ": " + str(sys.exc_info()[1])
-            raise InvalidRSpec(message)
+    merged_rspec = merge_rspecs(results)
 
-        root = tree.getroot()
-        if root.get("type") in ["SFA"]:
-            if rspec == None:
-                rspec = root
-            else:
-                for network in root.iterfind("./network"):
-                    rspec.append(deepcopy(network))
-                for request in root.iterfind("./request"):
-                    rspec.append(deepcopy(request))
-
-    sfa_logger().debug('get_rspec: rspec=%r'%rspec)
-    rspec = etree.tostring(rspec, xml_declaration=True, pretty_print=True)
     # cache the result
-    if api.cache and not xrn:
-        api.cache.add('nodes', rspec)
+    if caching and api.cache and not xrn:
+        api.cache.add('nodes', merged_rspec)
 
-    return rspec
+    return merged_rspec
 
 def main():
     r = RSpec()
     r.parseFile(sys.argv[1])
     rspec = r.toDict()
-    create_slice(None,'plc.princeton.tmacktestslice',rspec)
+    CreateSliver(None,'plc.princeton.tmacktestslice',rspec,'create-slice-tmacktestslice')
 if __name__ == "__main__":
     main()
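
Note on the call_id plumbing introduced above: CreateSliver and ListResources now return early when Callids().already_handled(call_id) is true, and the call_id is forwarded to every aggregate so a retried XML-RPC request is not executed twice. The sketch below only illustrates what such a duplicate-call guard can look like; the class name, locking and expiry policy are assumptions made for illustration, not the actual sfa.util.callids implementation.

import threading
import time

class DuplicateCallGuard:
    """Illustrative stand-in for a Callids-style helper: remember call ids
    that have already been served so a retried request becomes a no-op."""
    _seen = {}               # call_id -> timestamp of first sighting (shared across instances)
    _lock = threading.Lock()
    _max_age = 600           # assumed policy: forget ids after ten minutes

    def already_handled(self, call_id):
        if not call_id:
            return False     # calls without an id are never deduplicated
        now = time.time()
        with self._lock:
            # drop stale entries so the table does not grow without bound
            for cid, seen_at in list(self._seen.items()):
                if now - seen_at > self._max_age:
                    del self._seen[cid]
            if call_id in self._seen:
                return True
            self._seen[call_id] = now
            return False

# usage mirroring the manager code above:
#   if DuplicateCallGuard().already_handled(call_id): return ""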
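
The other recurring change is that the hand-rolled lxml merging loop removed from get_rspec is replaced by a single merge_rspecs(results) call, which CreateSliver also relies on. Based on the inline code deleted in this patch, the helper is expected to do something along these lines; treat it as a sketch of the operation, not as the actual sfa.util.rspecHelper.merge_rspecs implementation, which may differ in error handling and supported rspec types.

from StringIO import StringIO
from copy import deepcopy
from lxml import etree

def merge_rspecs_sketch(rspecs):
    """Graft the <network> and <request> elements of every aggregate's rspec
    onto the first parseable one, as the removed inline code did."""
    merged = None
    for rspec in rspecs:
        try:
            root = etree.parse(StringIO(rspec)).getroot()
        except etree.XMLSyntaxError:
            # the removed code raised InvalidRSpec here; skipping keeps the sketch standalone
            continue
        if root.get("type") not in ["SFA"]:
            continue
        if merged is None:
            merged = root    # first well-formed rspec becomes the base document
        else:
            for network in root.iterfind("./network"):
                merged.append(deepcopy(network))
            for request in root.iterfind("./request"):
                merged.append(deepcopy(request))
    if merged is None:
        return ""
    return etree.tostring(merged, xml_declaration=True, pretty_print=True)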