-### $Id: slices.py 15842 2009-11-22 09:56:13Z anil $
-### $URL: https://svn.planet-lab.org/svn/sfa/trunk/sfa/plc/slices.py $
-
+#
import sys
import time,datetime
from StringIO import StringIO
from sfa.util.sfalogging import sfa_logger
from sfa.util.rspecHelper import merge_rspecs
-from sfa.util.xrn import urn_to_hrn, hrn_to_urn
+from sfa.util.xrn import Xrn, urn_to_hrn, hrn_to_urn
+from sfa.util.plxrn import hrn_to_pl_slicename
from sfa.util.rspec import *
from sfa.util.specdict import *
from sfa.util.faults import *
from sfa.util.threadmanager import ThreadManager
import sfa.util.xmlrpcprotocol as xmlrpcprotocol
import sfa.plc.peers as peers
+from sfa.util.version import version_core
+from sfa.util.callids import Callids
+
+# FIXME: should merge the results from multiple aggregates instead of
+# delegating to a single aggregate implementation
+from sfa.managers.aggregate_manager_pl import slice_status
+
+# We have specialized xmlrpclib.ServerProxy to remember the input URL;
+# however, it is not clear whether we only ever deal with XMLRPCServerProxy instances.
+def get_serverproxy_url (server):
+    # Return the URL a server proxy was built from: prefer the .url attribute
+    # set by our specialized proxy; otherwise fall back to the private
+    # xmlrpclib.ServerProxy internals (host + handler path).
+    try:
+        return server.url
+    # narrow to Exception: a bare "except:" would also swallow
+    # KeyboardInterrupt / SystemExit
+    except Exception:
+        sfa_logger().warning("GetVersion, falling back to xmlrpclib.ServerProxy internals")
+        return server._ServerProxy__host + server._ServerProxy__handler
+
+def GetVersion(api):
+    # Report this slice manager's version information (via version_core),
+    # including the URLs of its peer aggregates.
+    # peers explicitly in aggregates.xml; exclude ourselves from the peer list
+    peers =dict ([ (peername,get_serverproxy_url(v)) for (peername,v) in api.aggregates.iteritems()
+                   if peername != api.hrn])
+    xrn=Xrn (api.hrn)
+    # base version record for a 'slicemgr' interface, carrying hrn/urn/peers
+    sm_version=version_core({'interface':'slicemgr',
+                             'hrn' : xrn.get_hrn(),
+                             'urn' : xrn.get_urn(),
+                             'peers': peers,
+                             })
+    # local aggregate if present needs to have localhost resolved
+    # (replace 'localhost' in its URL with this host's real name so the
+    # advertised peer URL is usable by remote callers)
+    if api.hrn in api.aggregates:
+        local_am_url=get_serverproxy_url(api.aggregates[api.hrn])
+        sm_version['peers'][api.hrn]=local_am_url.replace('localhost',sm_version['hostname'])
+    return sm_version
+
+def CreateSliver(api, xrn, creds, rspec, users, call_id):
+
+ if Callids().already_handled(call_id): return ""
-def get_version():
- version = {}
- version['geni_api'] = 1
- version['sfa'] = 1
- return version
-
-def slice_status(api, slice_xrn, creds ):
- hrn, type = urn_to_hrn(slice_xrn)
- # find out where this slice is currently running
- api.logger.info(hrn)
- slicename = hrn_to_pl_slicename(hrn)
- api.logger.info("Checking status for %s" % slicename)
- slices = api.plshell.GetSlices(api.plauth, [slicename], ['node_ids','person_ids','name','expires'])
- if len(slices) == 0:
- raise Exception("Slice %s not found (used %s as slicename internally)" % (slice_xrn, slicename))
- slice = slices[0]
-
- nodes = api.plshell.GetNodes(api.plauth, slice['node_ids'],
- ['hostname', 'boot_state', 'last_contact'])
- api.logger.info(slice)
- api.logger.info(nodes)
-
- result = {}
- result['geni_urn'] = slice_xrn
- result['geni_status'] = 'unknown'
- result['pl_login'] = slice['name']
- result['pl_expires'] = slice['expires']
-
- resources = []
-
- for node in nodes:
- res = {}
- res['pl_hostname'] = node['hostname']
- res['pl_boot_state'] = node['boot_state']
- res['pl_last_contact'] = node['last_contact']
- res['geni_urn'] = ''
- res['geni_status'] = 'unknown'
- res['geni_error'] = ''
-
- resources.append(res)
-
- result['geni_resources'] = resources
- return result
-
-def create_slice(api, xrn, creds, rspec, users):
hrn, type = urn_to_hrn(xrn)
# Validate the RSpec against PlanetLab's schema --disabled for now
# Just send entire RSpec to each aggregate
server = api.aggregates[aggregate]
- threads.run(server.CreateSliver, xrn, credential, rspec, users)
+ threads.run(server.CreateSliver, xrn, credential, rspec, users, call_id)
results = threads.get_results()
merged_rspec = merge_rspecs(results)
hrn, type = urn_to_hrn(xrn)
# get the callers hrn
- valid_cred = api.auth.checkCredentials(creds, 'renewesliver', hrn)[0]
+ valid_cred = api.auth.checkCredentials(creds, 'renewsliver', hrn)[0]
caller_hrn = Credential(string=valid_cred).get_gid_caller().get_hrn()
# attempt to use delegated credential first
continue
server = api.aggregates[aggregate]
- threads.run(server.RenewSliver, xrn, credential, expiration_time)
+ threads.run(server.RenewSliver, xrn, [credential], expiration_time)
threads.get_results()
return 1
if not credential:
credential = api.getCredential()
threads = ThreadManager()
- for aggregate, aggregate_rspec in aggregate_rspecs.items():
+ for (aggregate, aggregate_rspec) in aggregate_rspecs.iteritems():
# prevent infinite loop. Dont send request back to caller
# unless the caller is the aggregate's SM
if caller_hrn == aggregate and aggregate != api.hrn:
api.cache.add('slices', slices)
return slices
-
-def get_rspec(api, creds, options):
-
+
+
+# Thierry : caching at the slicemgr level makes sense to some extent
+caching=True
+#caching=False
+def ListResources(api, creds, options, call_id):
+
+ if Callids().already_handled(call_id):
+ api.logger.info("%d received ListResources with known call_id %s"%(api.interface,call_id))
+ return ""
+
# get slice's hrn from options
xrn = options.get('geni_slice_urn', '')
- hrn, type = urn_to_hrn(xrn)
+ (hrn, type) = urn_to_hrn(xrn)
# get hrn of the original caller
origin_hrn = options.get('origin_hrn', None)
if not origin_hrn:
- origin_hrn = Credential(string=creds[0]).get_gid_caller().get_hrn()
+ if isinstance(creds, list):
+ origin_hrn = Credential(string=creds[0]).get_gid_caller().get_hrn()
+ else:
+ origin_hrn = Credential(string=creds).get_gid_caller().get_hrn()
# look in cache first
- if api.cache and not xrn:
+ if caching and api.cache and not xrn:
rspec = api.cache.get('nodes')
if rspec:
return rspec
- hrn, type = urn_to_hrn(xrn)
- rspec = None
-
# get the callers hrn
valid_cred = api.auth.checkCredentials(creds, 'listnodes', hrn)[0]
caller_hrn = Credential(string=valid_cred).get_gid_caller().get_hrn()
server = api.aggregates[aggregate]
my_opts = copy(options)
my_opts['geni_compressed'] = False
- threads.run(server.ListResources, credential, my_opts)
+ threads.run(server.ListResources, credential, my_opts, call_id)
#threads.run(server.get_resources, cred, xrn, origin_hrn)
results = threads.get_results()
- # combine the rspecs into a single rspec
- for agg_rspec in results:
- try:
- tree = etree.parse(StringIO(agg_rspec))
- except etree.XMLSyntaxError:
- message = str(agg_rspec) + ": " + str(sys.exc_info()[1])
- raise InvalidRSpec(message)
+ merged_rspec = merge_rspecs(results)
- root = tree.getroot()
- if root.get("type") in ["SFA"]:
- if rspec == None:
- rspec = root
- else:
- for network in root.iterfind("./network"):
- rspec.append(deepcopy(network))
- for request in root.iterfind("./request"):
- rspec.append(deepcopy(request))
-
- sfa_logger().debug('get_rspec: rspec=%r'%rspec)
- rspec = etree.tostring(rspec, xml_declaration=True, pretty_print=True)
# cache the result
- if api.cache and not xrn:
- api.cache.add('nodes', rspec)
+ if caching and api.cache and not xrn:
+ api.cache.add('nodes', merged_rspec)
- return rspec
+ return merged_rspec
def main():
    r = RSpec()
    r.parseFile(sys.argv[1])
    rspec = r.toDict()
-    create_slice(None,'plc.princeton.tmacktestslice',rspec)
+    # smoke-test entry point. CreateSliver takes six arguments
+    # (api, xrn, creds, rspec, users, call_id); pass empty creds/users
+    # so the call matches the new signature.
+    CreateSliver(None,'plc.princeton.tmacktestslice',[],rspec,[],'create-slice-tmacktestslice')
if __name__ == "__main__":
    main()