+$(RSYNC) --relative ./sfa/ --exclude migrations $(SSHURL)/usr/lib\*/python2.\*/site-packages/
synclibdeb: synccheck
+$(RSYNC) --relative ./sfa/ --exclude migrations $(SSHURL)/usr/share/pyshared/
+syncmigrations:
+ +$(RSYNC) ./sfa/storage/migrations/versions/*.py $(SSHURL)/usr/share/sfa/migrations/versions/
syncbin: synccheck
+$(RSYNC) $(BINS) $(SSHURL)/usr/bin/
syncinit: synccheck
if keys: print tab * (counter) + "(children: %s)" % (",".join(keys))
+#
+# this code probably is obsolete
+# RSpec is not imported, it does not have a toDict() method anyway
+# plus, getNodes.py is not exposed in packaging
+#
def main():
parser = create_parser();
(options, args) = parser.parse_args()
if not options.infile:
print "RSpec file not specified"
return
-
+
rspec = RSpec()
try:
rspec.parseFile(options.infile)
%define name sfa
%define version 3.1
-%define taglevel 15
+%define taglevel 17
%define release %{taglevel}%{?pldistro:.%{pldistro}}%{?date:.%{date}}
%global python_sitearch %( python -c "from distutils.sysconfig import get_python_lib; print get_python_lib(1)" )
#[ "$1" -ge "1" ] && service sfa-cm restart || :
%changelog
+* Fri Jun 05 2015 Thierry Parmentelat <thierry.parmentelat@sophia.inria.fr> - sfa-3.1-17
+- workaround for 'name' not being exposed properly by List() on authority objects
+- fix a corner case in PL importer
+- trashed module registry_manager_openstack
+
+* Thu Jun 04 2015 Thierry Parmentelat <thierry.parmentelat@sophia.inria.fr> - sfa-3.1-16
+- added a new builtin column 'name' for authorities in the sfa registry
+- this is kept in sync with MyPLC's site names when relevant
+- sfa update -t authority thus now has a new -n/--name option
+- sfi register or update can specify record type on only 2 characters (au, us, no, or sl)
+- reviewed Describe and Allocate wrt slice tags for a PL AM:
+- Describe now exposes all slice tags with a 'scope' being 'sliver' or 'slice'
+- Allocate now by default ignores incoming slice tags
+- Allocate's options can mention 'pltags' among 'ignore', 'append', 'sync'
+- default being 'ignore'
+- in 'ignore' mode, slice tags are unchanged in the PL db
+- in 'append' mode, slice tags from the rspec are added to the db unless
+- they are already present
+- in 'sync' mode, the code attempts to leave the PL db in sync with the tags
+- provided in rspec; this can be dangerous and is thus no longer the default
+- behaviour
+
* Thu Apr 23 2015 Thierry Parmentelat <thierry.parmentelat@sophia.inria.fr> - sfa-3.1-15
- major rework of the iotlab driver, that uses an IoT-lab REST API
- and so does not need to interact with LDAP and OAR directly
print "%s (%s)" % (record['hrn'], record['type'])
def terminal_render_user (record, options):
print "%s (User)"%record['hrn'],
- if record.get('reg-pi-authorities',None): print " [PI at %s]"%(" and ".join(record['reg-pi-authorities'])),
- if record.get('reg-slices',None): print " [IN slices %s]"%(" and ".join(record['reg-slices'])),
+ if options.verbose and record.get('email', None):
+ print "email='{}'".format(record['email']),
+ if record.get('reg-pi-authorities', None):
+ print " [PI at %s]"%(" and ".join(record['reg-pi-authorities'])),
+ if record.get('reg-slices', None):
+ print " [IN slices %s]"%(" and ".join(record['reg-slices'])),
user_keys=record.get('reg-keys',[])
if not options.verbose:
print " [has %s]"%(terminal_render_plural(len(user_keys),"key"))
def terminal_render_slice (record, options):
print "%s (Slice)"%record['hrn'],
- if record.get('reg-researchers',None): print " [USERS %s]"%(" and ".join(record['reg-researchers'])),
+ if record.get('reg-researchers', None):
+ print " [USERS %s]"%(" and ".join(record['reg-researchers'])),
# print record.keys()
print ""
def terminal_render_authority (record, options):
print "%s (Authority)"%record['hrn'],
- if record.get('reg-pis',None): print " [PIS %s]"%(" and ".join(record['reg-pis'])),
+ if options.verbose and record.get('name'):
+ print "name='{}'".format(record['name'])
+ if record.get('reg-pis', None):
+ print " [PIS %s]"%(" and ".join(record['reg-pis'])),
print ""
def terminal_render_node (record, options):
print "%s (Node)"%record['hrn']
### used in sfi list
-def terminal_render (records,options):
+def terminal_render (records, options):
# sort records by type
- grouped_by_type={}
+ grouped_by_type = {}
for record in records:
- type=record['type']
- if type not in grouped_by_type: grouped_by_type[type]=[]
+ type = record['type']
+ if type not in grouped_by_type:
+ grouped_by_type[type]=[]
grouped_by_type[type].append(record)
- group_types=grouped_by_type.keys()
+ group_types = grouped_by_type.keys()
group_types.sort()
for type in group_types:
- group=grouped_by_type[type]
+ group = grouped_by_type[type]
# print 20 * '-', type
- try: renderer=eval('terminal_render_'+type)
- except: renderer=terminal_render_default
- for record in group: renderer(record,options)
-
+ try: renderer = eval('terminal_render_' + type)
+ except: renderer = terminal_render_default
+ for record in group:
+ renderer(record, options)
####################
def filter_records(type, records):
from sfa.client.candidates import Candidates
from sfa.client.manifolduploader import ManifoldUploader
-CM_PORT=12346
+CM_PORT = 12346
+DEFAULT_RSPEC_VERSION = "GENI 3"
from sfa.client.common import optparse_listvalue_callback, optparse_dictvalue_callback, \
terminal_render, filter_records
record.dump(sort=True)
else:
info = record.getdict()
- print "%s (%s)" % (info['hrn'], info['type'])
+ print "{} ({})".format(info['hrn'], info['type'])
return
result += credential.pretty_cred()
result += "\n"
rights = credential.get_privileges()
- result += "type=%s\n" % credential.type
- result += "version=%s\n" % credential.version
- result += "rights=%s\n" % rights
+ result += "type={}\n".format(credential.type)
+ result += "version={}\n".format(credential.version)
+ result += "rights={}\n".format(rights)
return result
def show_credentials (cred_s):
if not isinstance (cred_s,list): cred_s = [cred_s]
for cred in cred_s:
- print "Using Credential %s"%credential_printable(cred)
+ print "Using Credential {}".format(credential_printable(cred))
-# save methods
-def save_raw_to_file(var, filename, format="text", banner=None):
- if filename == "-":
- # if filename is "-", send it to stdout
- f = sys.stdout
+########## save methods
+
+### raw
+def save_raw_to_file(var, filename, format='text', banner=None):
+    if filename == '-':
+        _save_raw_to_file(var, sys.stdout, format, banner)
     else:
-        f = open(filename, "w")
-        if banner:
-            f.write(banner+"\n")
+        # mode must be the string 'w' — a bare w would be an undefined name (NameError)
+        with open(filename, 'w') as fileobj:
+            _save_raw_to_file(var, fileobj, format, banner)
+        print "(Over)wrote {}".format(filename)
+
+def _save_raw_to_file(var, f, format, banner):
if format == "text":
- f.write(str(var))
+ if banner: f.write(banner+"\n")
+ f.write("{}".format(var))
+ if banner: f.write('\n'+banner+"\n")
elif format == "pickled":
f.write(pickle.dumps(var))
elif format == "json":
- if hasattr(json, "dumps"):
- f.write(json.dumps(var)) # python 2.6
- else:
- f.write(json.write(var)) # python 2.5
+ f.write(json.dumps(var)) # python 2.6
else:
# this should never happen
print "unknown output format", format
- if banner:
- f.write('\n'+banner+"\n")
+###
def save_rspec_to_file(rspec, filename):
if not filename.endswith(".rspec"):
filename = filename + ".rspec"
- f = open(filename, 'w')
- f.write("%s"%rspec)
- f.close()
- return
+ with open(filename, 'w') as f:
+ f.write("{}".format(rspec))
+ print "(Over)wrote {}".format(filename)
+
+def save_record_to_file(filename, record_dict):
+ record = Record(dict=record_dict)
+ xml = record.save_as_xml()
+ with codecs.open(filename, encoding='utf-8',mode="w") as f:
+ f.write(xml)
+ print "(Over)wrote {}".format(filename)
def save_records_to_file(filename, record_dicts, format="xml"):
if format == "xml":
- index = 0
- for record_dict in record_dicts:
- if index > 0:
- save_record_to_file(filename + "." + str(index), record_dict)
- else:
- save_record_to_file(filename, record_dict)
- index = index + 1
+ for index, record_dict in enumerate(record_dicts):
+ save_record_to_file(filename + "." + str(index), record_dict)
elif format == "xmllist":
- f = open(filename, "w")
- f.write("<recordlist>\n")
- for record_dict in record_dicts:
- record_obj=Record(dict=record_dict)
- f.write('<record hrn="' + record_obj.hrn + '" type="' + record_obj.type + '" />\n')
- f.write("</recordlist>\n")
- f.close()
+ with open(filename, "w") as f:
+ f.write("<recordlist>\n")
+ for record_dict in record_dicts:
+ record_obj = Record(dict=record_dict)
+ f.write('<record hrn="' + record_obj.hrn + '" type="' + record_obj.type + '" />\n')
+ f.write("</recordlist>\n")
+ print "(Over)wrote {}".format(filename)
+
elif format == "hrnlist":
- f = open(filename, "w")
- for record_dict in record_dicts:
- record_obj=Record(dict=record_dict)
- f.write(record_obj.hrn + "\n")
- f.close()
+ with open(filename, "w") as f:
+ for record_dict in record_dicts:
+ record_obj = Record(dict=record_dict)
+ f.write(record_obj.hrn + "\n")
+ print "(Over)wrote {}".format(filename)
+
else:
# this should never happen
print "unknown output format", format
-def save_record_to_file(filename, record_dict):
- record = Record(dict=record_dict)
- xml = record.save_as_xml()
- f=codecs.open(filename, encoding='utf-8',mode="w")
- f.write(xml)
- f.close()
- return
-
# minimally check a key argument
def check_ssh_key (key):
good_ssh_key = r'^.*(?:ssh-dss|ssh-rsa)[ ]+[A-Za-z0-9+/=]+(?: .*)?$'
return re.match(good_ssh_key, key, re.IGNORECASE)
# load methods
+def normalize_type (type):
+ if type.startswith('au'):
+ return 'authority'
+ elif type.startswith('us'):
+ return 'user'
+ elif type.startswith('sl'):
+ return 'slice'
+ elif type.startswith('no'):
+ return 'node'
+ elif type.startswith('ag'):
+ return 'aggregate'
+ elif type.startswith('al'):
+ return 'all'
+ else:
+ print 'unknown type {} - should start with one of au|us|sl|no|ag|al'.format(type)
+ return None
+
def load_record_from_opts(options):
record_dict = {}
if hasattr(options, 'xrn') and options.xrn:
record_dict['reg-researchers'] = options.reg_researchers
if hasattr(options, 'email') and options.email:
record_dict['email'] = options.email
+ # authorities can have a name for standalone deployment
+ if hasattr(options, 'name') and options.name:
+ record_dict['name'] = options.name
if hasattr(options, 'reg_pis') and options.reg_pis:
record_dict['reg-pis'] = options.reg_pis
return Record(dict=record_dict)
def load_record_from_file(filename):
- f=codecs.open(filename, encoding="utf-8", mode="r")
- xml_string = f.read()
- f.close()
- return Record(xml=xml_string)
-
+ with codecs.open(filename, encoding="utf-8", mode="r") as f:
+ xml_str = f.read()
+ return Record(xml=xml_str)
import uuid
def unique_call_id(): return uuid.uuid4().urn
format3offset=47
line=80*'-'
if not verbose:
- print format3%("command","cmd_args","description")
+ print format3%("command", "cmd_args", "description")
print line
else:
print line
if verbose:
print line
if command==canonical:
- doc=doc.replace("\n","\n"+format3offset*' ')
- print format3%(command,args_string,doc)
+ doc = doc.replace("\n", "\n" + format3offset * ' ')
+ print format3 % (command,args_string,doc)
if verbose:
self.create_parser_command(command).print_help()
else:
- print format3%(command,"<<alias for %s>>"%canonical,"")
+ print format3 % (command,"<<alias for %s>>"%canonical,"")
### now if a known command was found we can be more verbose on that one
def print_help (self):
print "==================== Generic sfi usage"
self.sfi_parser.print_help()
- (doc,_,example,canonical)=commands_dict[self.command]
+ (doc, _, example, canonical) = commands_dict[self.command]
if canonical != self.command:
- print "\n==================== NOTE: %s is an alias for genuine %s"%(self.command,canonical)
- self.command=canonical
- print "\n==================== Purpose of %s"%self.command
+ print "\n==================== NOTE: {} is an alias for genuine {}"\
+ .format(self.command, canonical)
+ self.command = canonical
+ print "\n==================== Purpose of {}".format(self.command)
print doc
- print "\n==================== Specific usage for %s"%self.command
+ print "\n==================== Specific usage for {}".format(self.command)
self.command_parser.print_help()
if example:
- print "\n==================== %s example(s)"%self.command
+ print "\n==================== {} example(s)".format(self.command)
print example
def create_parser_global(self):
# Generate command line parser
parser = OptionParser(add_help_option=False,
usage="sfi [sfi_options] command [cmd_options] [cmd_args]",
- description="Commands: %s"%(" ".join(commands_list)))
+ description="Commands: {}".format(" ".join(commands_list)))
parser.add_option("-r", "--registry", dest="registry",
help="root registry", metavar="URL", default=None)
parser.add_option("-s", "--sliceapi", dest="sm", default=None, metavar="URL",
(_, args_string, __,canonical) = commands_dict[command]
parser = OptionParser(add_help_option=False,
- usage="sfi [sfi_options] %s [cmd_options] %s"
- % (command, args_string))
+ usage="sfi [sfi_options] {} [cmd_options] {}"\
+ .format(command, args_string))
parser.add_option ("-h","--help",dest='help',action='store_true',default=False,
help="Summary of one command usage")
if canonical in ("register", "update"):
parser.add_option('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn (mandatory)')
- parser.add_option('-t', '--type', dest='type', metavar='<type>', help='object type', default=None)
+ parser.add_option('-t', '--type', dest='type', metavar='<type>', help='object type (2 first chars is enough)', default=None)
parser.add_option('-e', '--email', dest='email', default="", help="email (mandatory for users)")
+ parser.add_option('-n', '--name', dest='name', default="", help="name (optional for authorities)")
parser.add_option('-k', '--key', dest='key', metavar='<key>', help='public key string or file',
default=None)
parser.add_option('-s', '--slices', dest='slices', metavar='<slices>', help='Set/replace slice xrns',
help="renew as long as possible")
# registy filter option
if canonical in ("list", "show", "remove"):
- parser.add_option("-t", "--type", dest="type", type="choice",
- help="type filter ([all]|user|slice|authority|node|aggregate)",
- choices=("all", "user", "slice", "authority", "node", "aggregate"),
- default="all")
+ parser.add_option("-t", "--type", dest="type", metavar="<type>",
+ default="all",
+ help="type filter - 2 first chars is enough ([all]|user|slice|authority|node|aggregate)")
if canonical in ("show"):
parser.add_option("-k","--key",dest="keys",action="append",default=[],
help="specify specific keys to be displayed from record")
help="call Resolve without the 'details' option")
if canonical in ("resources", "describe"):
# rspec version
- parser.add_option("-r", "--rspec-version", dest="rspec_version", default="GENI 3",
- help="schema type and version of resulting RSpec")
+ parser.add_option("-r", "--rspec-version", dest="rspec_version", default=DEFAULT_RSPEC_VERSION,
+ help="schema type and version of resulting RSpec (default:{})".format(DEFAULT_RSPEC_VERSION))
# disable/enable cached rspecs
parser.add_option("-c", "--current", dest="current", default=False,
action="store_true",
#panos: a new option to define the type of information about resources a user is interested in
parser.add_option("-i", "--info", dest="info",
help="optional component information", default=None)
- # a new option to retreive or not reservation-oriented RSpecs (leases)
+ # a new option to retrieve or not reservation-oriented RSpecs (leases)
parser.add_option("-l", "--list_leases", dest="list_leases", type="choice",
- help="Retreive or not reservation-oriented RSpecs ([resources]|leases|all )",
+ help="Retrieve or not reservation-oriented RSpecs ([resources]|leases|all)",
choices=("all", "resources", "leases"), default="resources")
(doc, args_string, example, canonical) = commands_dict[command]
method=getattr(self, canonical, None)
if not method:
- print "sfi: unknown command %s"%command
- raise SystemExit,"Unknown command %s"%command
+ print "sfi: unknown command {}".format(command)
+ raise SystemExit("Unknown command {}".format(command))
+        # print help when explicitly requested on a sub-command;
+        # match exact flags only — a substring test ('help' in arg) would also
+        # fire on legitimate arguments such as a slice named 'helper'
+        for arg in command_args:
+            if arg in ('-h', '--help'):
+                self.print_help()
+                sys.exit(1)
return method(command_options, command_args)
def main(self):
sys.exit(1)
self.command_options = command_options
+        # allow incoming types on 2 characters only;
+        # guard against None: register/update declare -t with default=None,
+        # and normalize_type(None) would crash on .startswith — a missing type
+        # is legal here (it may come from the record file instead)
+        if hasattr(command_options, 'type') and command_options.type is not None:
+            command_options.type = normalize_type(command_options.type)
+            if not command_options.type:
+                sys.exit(1)
+
self.read_config ()
self.bootstrap ()
- self.logger.debug("Command=%s" % self.command)
+ self.logger.debug("Command={}".format(self.command))
try:
retcod = self.dispatch(command, command_options, command_args)
except SystemExit:
return 1
except:
- self.logger.log_exc ("sfi command %s failed"%command)
+ self.logger.log_exc ("sfi command {} failed".format(command))
return 1
return retcod
config.save(config_file)
except:
- self.logger.critical("Failed to read configuration file %s"%config_file)
+ self.logger.critical("Failed to read configuration file {}".format(config_file))
self.logger.info("Make sure to remove the export clauses and to add quotes")
if self.options.verbose==0:
self.logger.info("Re-run with -v for more details")
else:
- self.logger.log_exc("Could not read config file %s"%config_file)
+ self.logger.log_exc("Could not read config file {}".format(config_file))
sys.exit(1)
self.config_instance=config
elif hasattr(config, "SFI_SM"):
self.sm_url = config.SFI_SM
else:
- self.logger.error("You need to set e.g. SFI_SM='http://your.slicemanager.url:12347/' in %s" % config_file)
+ self.logger.error("You need to set e.g. SFI_SM='http://your.slicemanager.url:12347/' in {}".format(config_file))
errors += 1
# Set Registry URL
elif hasattr(config, "SFI_REGISTRY"):
self.reg_url = config.SFI_REGISTRY
else:
- self.logger.error("You need to set e.g. SFI_REGISTRY='http://your.registry.url:12345/' in %s" % config_file)
+ self.logger.error("You need to set e.g. SFI_REGISTRY='http://your.registry.url:12345/' in {}".format(config_file))
errors += 1
# Set user HRN
elif hasattr(config, "SFI_USER"):
self.user = config.SFI_USER
else:
- self.logger.error("You need to set e.g. SFI_USER='plc.princeton.username' in %s" % config_file)
+ self.logger.error("You need to set e.g. SFI_USER='plc.princeton.username' in {}".format(config_file))
errors += 1
# Set authority HRN
elif hasattr(config, "SFI_AUTH"):
self.authority = config.SFI_AUTH
else:
- self.logger.error("You need to set e.g. SFI_AUTH='plc.princeton' in %s" % config_file)
+ self.logger.error("You need to set e.g. SFI_AUTH='plc.princeton' in {}".format(config_file))
errors += 1
self.config_file=config_file
if not os.path.isfile(client_bootstrap.private_key_filename()):
self.logger.info ("private key not found, trying legacy name")
try:
- legacy_private_key = os.path.join (self.options.sfi_dir, "%s.pkey"%Xrn.unescape(get_leaf(self.user)))
- self.logger.debug("legacy_private_key=%s"%legacy_private_key)
+ legacy_private_key = os.path.join (self.options.sfi_dir, "{}.pkey"
+ .format(Xrn.unescape(get_leaf(self.user))))
+ self.logger.debug("legacy_private_key={}"
+ .format(legacy_private_key))
client_bootstrap.init_private_key_if_missing (legacy_private_key)
- self.logger.info("Copied private key from legacy location %s"%legacy_private_key)
+ self.logger.info("Copied private key from legacy location {}"
+ .format(legacy_private_key))
except:
self.logger.log_exc("Can't find private key ")
sys.exit(1)
object_hrn = object_gid.get_hrn()
if not object_cred.get_privileges().get_all_delegate():
- self.logger.error("Object credential %s does not have delegate bit set"%object_hrn)
+ self.logger.error("Object credential {} does not have delegate bit set"
+ .format(object_hrn))
return
# the delegating user's gid
def registry (self):
# cache the result
if not hasattr (self, 'registry_proxy'):
- self.logger.info("Contacting Registry at: %s"%self.reg_url)
- self.registry_proxy = SfaServerProxy(self.reg_url, self.private_key, self.my_gid,
- timeout=self.options.timeout, verbose=self.options.debug)
+ self.logger.info("Contacting Registry at: {}".format(self.reg_url))
+ self.registry_proxy \
+ = SfaServerProxy(self.reg_url, self.private_key, self.my_gid,
+ timeout=self.options.timeout, verbose=self.options.debug)
return self.registry_proxy
def sliceapi (self):
records = self.registry().Resolve(node_hrn, self.my_credential_string)
records = filter_records('node', records)
if not records:
- self.logger.warning("No such component:%r"% opts.component)
+ self.logger.warning("No such component:{}".format(opts.component))
record = records[0]
- cm_url = "http://%s:%d/"%(record['hostname'],CM_PORT)
+ cm_url = "http://{}:{}/".format(record['hostname'], CM_PORT)
self.sliceapi_proxy=SfaServerProxy(cm_url, self.private_key, self.my_gid)
else:
# otherwise use what was provided as --sliceapi, or SFI_SM in the config
if not self.sm_url.startswith('http://') or self.sm_url.startswith('https://'):
self.sm_url = 'http://' + self.sm_url
- self.logger.info("Contacting Slice Manager at: %s"%self.sm_url)
- self.sliceapi_proxy = SfaServerProxy(self.sm_url, self.private_key, self.my_gid,
- timeout=self.options.timeout, verbose=self.options.debug)
+ self.logger.info("Contacting Slice Manager at: {}".format(self.sm_url))
+ self.sliceapi_proxy \
+ = SfaServerProxy(self.sm_url, self.private_key, self.my_gid,
+ timeout=self.options.timeout, verbose=self.options.debug)
return self.sliceapi_proxy
def get_cached_server_version(self, server):
cache = Cache(cache_file)
except IOError:
cache = Cache()
- self.logger.info("Local cache not found at: %s" % cache_file)
+ self.logger.info("Local cache not found at: {}".format(cache_file))
if cache:
version = cache.get(cache_key)
version= ReturnValue.get_value(result)
# cache version for 20 minutes
cache.add(cache_key, version, ttl= 60*20)
- self.logger.info("Updating cache file %s" % cache_file)
+ self.logger.info("Updating cache file {}".format(cache_file))
cache.save_to_file(cache_file)
return version
if (os.path.isfile(file)):
return file
else:
- self.logger.critical("No such rspec file %s"%rspec)
+ self.logger.critical("No such rspec file {}".format(rspec))
sys.exit(1)
def get_record_file(self, record):
if (os.path.isfile(file)):
return file
else:
- self.logger.critical("No such registry record file %s"%record)
+ self.logger.critical("No such registry record file {}".format(record))
sys.exit(1)
@declare_command("","")
def config (self, options, args):
"Display contents of current config"
- print "# From configuration file %s"%self.config_file
+ print "# From configuration file {}".format(self.config_file)
flags=[ ('sfi', [ ('registry','reg_url'),
('auth','authority'),
('user','user'),
flags.append ( ('myslice', ['backend', 'delegate', 'platform', 'username'] ) )
for (section, tuples) in flags:
- print "[%s]"%section
+ print "[{}]".format(section)
try:
for (external_name, internal_name) in tuples:
- print "%-20s = %s"%(external_name,getattr(self,internal_name))
+ print "{:-20} = {}".format(external_name, getattr(self, internal_name))
except:
for name in tuples:
- varname="%s_%s"%(section.upper(),name.upper())
- value=getattr(self.config_instance,varname)
- print "%-20s = %s"%(name,value)
+ varname = "{}_{}".format(section.upper(), name.upper())
+ value = getattr(self.config_instance,varname)
+ print "{:-20} = {}".format(name, value)
# xxx should analyze result
return 0
record_dicts = self.registry().Resolve(hrn, self.my_credential_string, resolve_options)
record_dicts = filter_records(options.type, record_dicts)
if not record_dicts:
- self.logger.error("No record of type %s"% options.type)
+ self.logger.error("No record of type {}".format(options.type))
return
# user has required to focus on some keys
if options.keys:
try:
record_filepath = args[0]
rec_file = self.get_record_file(record_filepath)
- record_dict.update(load_record_from_file(rec_file).todict())
+ record_dict.update(load_record_from_file(rec_file).record_to_dict())
except:
- print "Cannot load record file %s"%record_filepath
+ print "Cannot load record file {}".format(record_filepath)
sys.exit(1)
if options:
- record_dict.update(load_record_from_opts(options).todict())
+ record_dict.update(load_record_from_opts(options).record_to_dict())
# we should have a type by now
if 'type' not in record_dict :
self.print_help()
if len(args) > 0:
record_filepath = args[0]
rec_file = self.get_record_file(record_filepath)
- record_dict.update(load_record_from_file(rec_file).todict())
+ record_dict.update(load_record_from_file(rec_file).record_to_dict())
if options:
- record_dict.update(load_record_from_opts(options).todict())
+ record_dict.update(load_record_from_opts(options).record_to_dict())
# at the very least we need 'type' here
- if 'type' not in record_dict:
+ if 'type' not in record_dict or record_dict['type'] is None:
self.print_help()
sys.exit(1)
elif record_dict['type'] in ['node']:
cred = self.my_authority_credential_string()
else:
- raise "unknown record type" + record_dict['type']
+ raise Exception("unknown record type {}".format(record_dict['type']))
if options.show_credential:
show_credentials(cred)
update = self.registry().Update(record_dict, cred)
# ==================================================================
# show rspec for named slice
- @declare_command("","")
+ @declare_command("","",['discover'])
def resources(self, options, args):
"""
discover available resources (ListResources)
"""
server = self.sliceapi()
server_version = self.get_cached_server_version(server)
+ if len(args) != 2:
+ self.print_help()
+ sys.exit(1)
slice_hrn = args[0]
+ rspec_file = self.get_rspec_file(args[1])
+
slice_urn = Xrn(slice_hrn, type='slice').get_urn()
# credentials
show_credentials(creds)
# rspec
- rspec_file = self.get_rspec_file(args[1])
- rspec = open(rspec_file).read()
api_options = {}
api_options ['call_id'] = unique_call_id()
# users
api_options['sfa_users'] = sfa_users
api_options['geni_users'] = geni_users
- allocate = server.Allocate(slice_urn, creds, rspec, api_options)
+ with open(rspec_file) as rspec:
+ rspec_xml = rspec.read()
+ allocate = server.Allocate(slice_urn, creds, rspec_xml, api_options)
value = ReturnValue.get_value(allocate)
if self.options.raw:
save_raw_to_file(allocate, self.options.raw, self.options.rawformat, self.options.rawbanner)
if options.file:
filename = options.file
else:
- filename = os.sep.join([self.options.sfi_dir, '%s.gid' % target_hrn])
- self.logger.info("writing %s gid to %s" % (target_hrn, filename))
+ filename = os.sep.join([self.options.sfi_dir, '{}.gid'.format(target_hrn)])
+ self.logger.info("writing {} gid to {}".format(target_hrn, filename))
GID(string=gid).save_to_file(filename)
# xxx should analyze result
return 0
to_hrn = args[0]
# support for several delegations in the same call
# so first we gather the things to do
- tuples=[]
+ tuples = []
for slice_hrn in options.delegate_slices:
- message="%s.slice"%slice_hrn
+ message = "{}.slice".format(slice_hrn)
original = self.slice_credential_string(slice_hrn)
tuples.append ( (message, original,) )
if options.delegate_pi:
my_authority=self.authority
- message="%s.pi"%my_authority
+ message = "{}.pi".format(my_authority)
original = self.my_authority_credential_string()
tuples.append ( (message, original,) )
for auth_hrn in options.delegate_auths:
- message="%s.auth"%auth_hrn
- original=self.authority_credential_string(auth_hrn)
+ message = "{}.auth".format(auth_hrn)
+ original = self.authority_credential_string(auth_hrn)
tuples.append ( (message, original, ) )
# if nothing was specified at all at this point, let's assume -u
if not tuples: options.delegate_user=True
# this user cred
if options.delegate_user:
- message="%s.user"%self.user
+ message = "{}.user".format(self.user)
original = self.my_credential_string
tuples.append ( (message, original, ) )
for (message,original) in tuples:
delegated_string = self.client_bootstrap.delegate_credential_string(original, to_hrn, to_type)
delegated_credential = Credential (string=delegated_string)
- filename = os.path.join ( self.options.sfi_dir,
- "%s_for_%s.%s.cred"%(message,to_hrn,to_type))
+ filename = os.path.join(self.options.sfi_dir,
+ "{}_for_{}.{}.cred".format(message, to_hrn, to_type))
delegated_credential.save_to_file(filename, save_parents=True)
- self.logger.info("delegated credential for %s to %s and wrote to %s"%(message,to_hrn,filename))
+ self.logger.info("delegated credential for {} to {} and wrote to {}"
+ .format(message, to_hrn, filename))
####################
@declare_command("","""$ less +/myslice sfi_config
else:
full_key="MYSLICE_" + key.upper()
value=getattr(self.config_instance,full_key,None)
- if value: myslice_dict[key]=value
- else: print "Unsufficient config, missing key %s in [myslice] section of sfi_config"%key
+        if value:
+            myslice_dict[key]=value
+        else:
+            # 'Insufficient' — fix spelling in the newly added message
+            print "Insufficient config, missing key {} in [myslice] section of sfi_config"\
+                .format(key)
if len(myslice_dict) != len(myslice_keys):
sys.exit(1)
# (b) figure whether we are PI for the authority where we belong
- self.logger.info("Resolving our own id %s"%self.user)
+ self.logger.info("Resolving our own id {}".format(self.user))
my_records=self.registry().Resolve(self.user,self.my_credential_string)
- if len(my_records)!=1: print "Cannot Resolve %s -- exiting"%self.user; sys.exit(1)
- my_record=my_records[0]
+ if len(my_records) != 1:
+ print "Cannot Resolve {} -- exiting".format(self.user)
+ sys.exit(1)
+ my_record = my_records[0]
my_auths_all = my_record['reg-pi-authorities']
- self.logger.info("Found %d authorities that we are PI for"%len(my_auths_all))
- self.logger.debug("They are %s"%(my_auths_all))
+ self.logger.info("Found {} authorities that we are PI for".format(len(my_auths_all)))
+ self.logger.debug("They are {}".format(my_auths_all))
my_auths = my_auths_all
if options.delegate_auths:
my_auths = list(set(my_auths_all).intersection(set(options.delegate_auths)))
- self.logger.debug("Restricted to user-provided auths"%(my_auths))
+ self.logger.debug("Restricted to user-provided auths {}".format(my_auths))
# (c) get the set of slices that we are in
my_slices_all=my_record['reg-slices']
- self.logger.info("Found %d slices that we are member of"%len(my_slices_all))
- self.logger.debug("They are: %s"%(my_slices_all))
+ self.logger.info("Found {} slices that we are member of".format(len(my_slices_all)))
+ self.logger.debug("They are: {}".format(my_slices_all))
my_slices = my_slices_all
# if user provided slices, deal only with these - if they are found
if options.delegate_slices:
my_slices = list(set(my_slices_all).intersection(set(options.delegate_slices)))
- self.logger.debug("Restricted to user-provided slices: %s"%(my_slices))
+ self.logger.debug("Restricted to user-provided slices: {}".format(my_slices))
# (d) make sure we have *valid* credentials for all these
hrn_credentials=[]
delegated_credential = self.client_bootstrap.delegate_credential_string (credential, delegatee_hrn, delegatee_type)
# save these so user can monitor what she's uploaded
filename = os.path.join ( self.options.sfi_dir,
- "%s.%s_for_%s.%s.cred"%(hrn,htype,delegatee_hrn,delegatee_type))
+ "{}.{}_for_{}.{}.cred"\
+ .format(hrn, htype, delegatee_hrn, delegatee_type))
with file(filename,'w') as f:
f.write(delegated_credential)
- self.logger.debug("(Over)wrote %s"%filename)
+ self.logger.debug("(Over)wrote {}".format(filename))
hrn_delegated_credentials.append ((hrn, htype, delegated_credential, filename, ))
# (f) and finally upload them to manifold server
# xxx todo add an option so the password can be set on the command line
# (but *NOT* in the config file) so other apps can leverage this
- self.logger.info("Uploading on backend at %s"%myslice_dict['backend'])
+ self.logger.info("Uploading on backend at {}".format(myslice_dict['backend']))
uploader = ManifoldUploader (logger=self.logger,
url=myslice_dict['backend'],
platform=myslice_dict['platform'],
# inspect
inspect=Credential(string=delegated_credential)
expire_datetime=inspect.get_expiration()
- message="%s (%s) [exp:%s]"%(hrn,htype,expire_datetime)
+ message="{} ({}) [exp:{}]".format(hrn, htype, expire_datetime)
if uploader.upload(delegated_credential,message=message):
count_success+=1
count_all+=1
- self.logger.info("Successfully uploaded %d/%d credentials"%(count_success,count_all))
+ self.logger.info("Successfully uploaded {}/{} credentials"
+ .format(count_success, count_all))
# at first I thought we would want to save these,
# like 'sfi delegate does' but on second thought
gid = GID(string=trusted_cert)
gid.dump()
cert = Certificate(string=trusted_cert)
- self.logger.debug('Sfi.trusted -> %r'%cert.get_subject())
- print "Certificate:\n%s\n\n"%trusted_cert
+ self.logger.debug('Sfi.trusted -> {}'.format(cert.get_subject()))
+ print "Certificate:\n{}\n\n".format(trusted_cert)
# xxx should analyze result
return 0
def __init__ (self, auth_hierarchy, logger):
self.auth_hierarchy = auth_hierarchy
- self.logger=logger
+ self.logger = logger
def add_options (self, parser):
# we don't have any options for now
def remember_record_by_hrn (self, record):
tuple = (record.type, record.hrn)
if tuple in self.records_by_type_hrn:
- self.logger.warning ("PlImporter.remember_record_by_hrn: duplicate (%s,%s)"%tuple)
+ self.logger.warning ("PlImporter.remember_record_by_hrn: duplicate {}".format(tuple))
return
self.records_by_type_hrn [ tuple ] = record
return
tuple = (record.type, record.pointer)
if tuple in self.records_by_type_pointer:
- self.logger.warning ("PlImporter.remember_record_by_pointer: duplicate (%s,%s)"%tuple)
+ self.logger.warning ("PlImporter.remember_record_by_pointer: duplicate {}".format(tuple))
return
self.records_by_type_pointer [ ( record.type, record.pointer,) ] = record
auth_record.just_created()
global_dbsession.add(auth_record)
global_dbsession.commit()
- self.logger.info("PlImporter: Imported authority (vini site) %s"%auth_record)
+ self.logger.info("PlImporter: Imported authority (vini site) {}".format(auth_record))
self.remember_record ( site_record )
def run (self, options):
if record.pointer != -1] )
# initialize record.stale to True by default, then mark stale=False on the ones that are in use
- for record in all_records: record.stale=True
+ for record in all_records:
+ record.stale = True
######## retrieve PLC data
# Get all plc sites
key = keys_by_id[key_id]
pubkeys.append(key)
except:
- self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
+ self.logger.warning("Could not spot key {} - probably non-ssh".format(key_id))
keys_by_person_id[person['person_id']] = pubkeys
# Get all plc nodes
nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
self.create_special_vini_record (interface_hrn)
# Get top authority record
- top_auth_record=self.locate_by_type_hrn ('authority', root_auth)
+ top_auth_record = self.locate_by_type_hrn ('authority', root_auth)
admins = []
# start importing
site_hrn = site['hrn']
# import if hrn is not in list of existing hrns or if the hrn exists
# but its not a site record
- site_record=self.locate_by_type_hrn ('authority', site_hrn)
+ site_record = self.locate_by_type_hrn ('authority', site_hrn)
if not site_record:
try:
urn = hrn_to_urn(site_hrn, 'authority')
auth_info = self.auth_hierarchy.get_auth_info(urn)
site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
pointer=site['site_id'],
- authority=get_authority(site_hrn))
+ authority=get_authority(site_hrn),
+ name=site['name'])
site_record.just_created()
global_dbsession.add(site_record)
global_dbsession.commit()
- self.logger.info("PlImporter: imported authority (site) : %s" % site_record)
- self.remember_record (site_record)
+ self.logger.info("PlImporter: imported authority (site) : {}".format(site_record))
+ self.remember_record(site_record)
except:
# if the site import fails then there is no point in trying to import the
# site's child records (node, slices, persons), so skip them.
- self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn)
+ self.logger.log_exc("PlImporter: failed to import site {}. Skipping child records"\
+ .format(site_hrn))
continue
else:
# xxx update the record ...
+ site_record.name = site['name']
pass
- site_record.stale=False
+ site_record.stale = False
# import node records
for node_id in site['node_ids']:
try:
node = nodes_by_id[node_id]
except:
- self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
+ self.logger.warning ("PlImporter: cannot find node_id {} - ignored"
+ .format(node_id))
continue
site_auth = get_authority(site_hrn)
site_name = site['login_base']
node_record.just_created()
global_dbsession.add(node_record)
global_dbsession.commit()
- self.logger.info("PlImporter: imported node: %s" % node_record)
+ self.logger.info("PlImporter: imported node: {}".format(node_record))
self.remember_record (node_record)
except:
- self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn)
+ self.logger.log_exc("PlImporter: failed to import node {}".format(node_hrn))
continue
else:
# xxx update the record ...
pass
- node_record.stale=False
+ node_record.stale = False
- site_pis=[]
+ site_pis = []
# import persons
for person_id in site['person_ids']:
- proceed=False
+ proceed = False
if person_id in persons_by_id:
- person=persons_by_id[person_id]
- proceed=True
+ person = persons_by_id[person_id]
+ proceed = True
elif person_id in disabled_person_ids:
pass
else:
- self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
+ self.logger.warning ("PlImporter: cannot locate person_id {} in site {} - ignored"\
+ .format(person_id, site_hrn))
# make sure to NOT run this if anything is wrong
if not proceed: continue
#person_hrn = email_to_hrn(site_hrn, person['email'])
person_hrn = person['hrn']
if person_hrn is None:
- self.logger.warn("Person %s has no hrn - skipped"%person['email'])
+ self.logger.warn("Person {} has no hrn - skipped".format(person['email']))
continue
# xxx suspicious again
- if len(person_hrn) > 64: person_hrn = person_hrn[:64]
+ if len(person_hrn) > 64:
+ person_hrn = person_hrn[:64]
person_urn = hrn_to_urn(person_hrn, 'user')
user_record = self.locate_by_type_hrn ( 'user', person_hrn)
# return a tuple pubkey (a plc key object) and pkey (a Keypair object)
def init_person_key (person, plc_keys):
- pubkey=None
+ pubkey = None
if person['key_ids']:
# randomly pick first key in set
pubkey = plc_keys[0]
try:
pkey = convert_public_key(pubkey['key'])
except:
- self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
+ self.logger.warn('PlImporter: unable to convert public key for {}'
+ .format(person_hrn))
pkey = Keypair(create=True)
else:
# the user has no keys. Creating a random keypair for the user's gid
- self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
+ self.logger.warn("PlImporter: person {} does not have a PL public key"
+ .format(person_hrn))
pkey = Keypair(create=True)
return (pubkey, pkey)
try:
plc_keys = keys_by_person_id.get(person['person_id'],[])
if not user_record:
- (pubkey,pkey) = init_person_key (person, plc_keys )
- person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
+ (pubkey, pkey) = init_person_key (person, plc_keys )
+ person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey,
+ email=person['email'])
user_record = RegUser (hrn=person_hrn, gid=person_gid,
pointer=person['person_id'],
authority=get_authority(person_hrn),
if pubkey:
user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
else:
- self.logger.warning("No key found for user %s"%user_record)
+ self.logger.warning("No key found for user {}".format(user_record))
user_record.just_created()
global_dbsession.add (user_record)
global_dbsession.commit()
- self.logger.info("PlImporter: imported person: %s" % user_record)
+ self.logger.info("PlImporter: imported person: {}".format(user_record))
self.remember_record ( user_record )
else:
# update the record ?
sfa_keys = user_record.reg_keys
def sfa_key_in_list (sfa_key,plc_keys):
for plc_key in plc_keys:
- if plc_key['key']==sfa_key.key:
+ if plc_key['key'] == sfa_key.key:
return True
return False
# are all the SFA keys known to PLC ?
- new_keys=False
+ new_keys = False
if not sfa_keys and plc_keys:
- new_keys=True
+ new_keys = True
else:
for sfa_key in sfa_keys:
if not sfa_key_in_list (sfa_key,plc_keys):
person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
person_gid.set_email(person['email'])
if not pubkey:
- user_record.reg_keys=[]
+ user_record.reg_keys = []
else:
- user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
+ user_record.reg_keys = [ RegKey (pubkey['key'], pubkey['key_id'])]
user_record.gid = person_gid
user_record.just_updated()
- self.logger.info("PlImporter: updated person: %s" % user_record)
+ self.logger.info("PlImporter: updated person: {}".format(user_record))
user_record.email = person['email']
global_dbsession.commit()
- user_record.stale=False
+ user_record.stale = False
# accumulate PIs - PLCAPI has a limitation that when someone has PI role
# this is valid for all sites she is in..
- # PI is coded with role_id==20
+ # PI is coded with role_id == 20
if 20 in person['role_ids']:
site_pis.append (user_record)
admins.append(user_record)
except:
- self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
+ self.logger.log_exc("PlImporter: failed to import person {} {}"
+ .format(person['person_id'], person['email']))
# maintain the list of PIs for a given site
# for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
try:
slice = slices_by_id[slice_id]
except:
- self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
+ self.logger.warning ("PlImporter: cannot locate slice_id {} - ignored"
+ .format(slice_id))
continue
#slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
slice_hrn = slice['hrn']
if slice_hrn is None:
- self.logger.warning("Slice %s has no hrn - skipped"%slice['name'])
+ self.logger.warning("Slice {} has no hrn - skipped"
+ .format(slice['name']))
continue
slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
if not slice_record:
slice_record.just_created()
global_dbsession.add(slice_record)
global_dbsession.commit()
- self.logger.info("PlImporter: imported slice: %s" % slice_record)
+ self.logger.info("PlImporter: imported slice: {}".format(slice_record))
self.remember_record ( slice_record )
except:
- self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
+ self.logger.log_exc("PlImporter: failed to import slice {} ({})"
+ .format(slice_hrn, slice['name']))
else:
# xxx update the record ...
# given that we record the current set of users anyways, there does not seem to be much left to do here
- # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
+ # self.logger.warning ("Slice update not yet implemented on slice {} ({})"
+ # .format(slice_hrn, slice['name']))
pass
# record current users affiliated with the slice
slice_record.reg_researchers = \
- [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
+ [ self.locate_by_type_pointer ('user', user_id) for user_id in slice['person_ids'] ]
+ # remove any weird value (looks like we can get 'None' here)
+ slice_record.reg_researchers = [ x for x in slice_record.reg_researchers if x ]
global_dbsession.commit()
- slice_record.stale=False
+ slice_record.stale = False
# Set PL Admins as PI's of the top authority
if admins:
top_auth_record.reg_pis = list(set(admins))
global_dbsession.commit()
- self.logger.info('PlImporter: set PL admins %s as PIs of %s'%(admins,top_auth_record.hrn))
+ self.logger.info('PlImporter: set PL admins {} as PIs of {}'
+ .format(admins, top_auth_record.hrn))
### remove stale records
# special records must be preserved
system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
for record in all_records:
if record.hrn in system_hrns:
- record.stale=False
+ record.stale = False
if record.peer_authority:
- record.stale=False
+ record.stale = False
if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
record.hrn.endswith("internet2"):
- record.stale=False
+ record.stale = False
for record in all_records:
- try: stale=record.stale
+ try: stale = record.stale
except:
- stale=True
- self.logger.warning("stale not found with %s"%record)
+ stale = True
+ self.logger.warning("stale not found with {}".format(record))
if stale:
- self.logger.info("PlImporter: deleting stale record: %s" % record)
+ self.logger.info("PlImporter: deleting stale record: {}".format(record))
global_dbsession.delete(record)
global_dbsession.commit()
# e.g. registry calls this 'reg-researchers'
# while some drivers call this 'researcher'
# we need to make sure that both keys appear and are the same
-def _normalize_input (record, reg_key, driver_key):
+def _normalize_input(record, reg_key, driver_key):
# this looks right, use this for both keys
if reg_key in record:
# and issue a warning if they were both set and different
# as we're overwriting some user data here
if driver_key in record:
- logger.warning ("normalize_input: incoming record has both values, using %s"%reg_key)
- record[driver_key]=record[reg_key]
+ logger.warning ("normalize_input: incoming record has both values, using {}"
+ .format(reg_key))
+ record[driver_key] = record[reg_key]
# we only have one key set, duplicate for the other one
elif driver_key in record:
- logger.warning ("normalize_input: you should use '%s' instead of '%s'"%(reg_key,driver_key))
- record[reg_key]=record[driver_key]
+ logger.warning ("normalize_input: you should use '{}' instead of '{}'"
+ .format(reg_key, driver_key))
+ record[reg_key] = record[driver_key]
def normalize_input_record (record):
_normalize_input (record, 'reg-researchers','researcher')
# xxx the keys thing could use a little bit more attention:
# some parts of the code are using 'keys' while they should use 'reg-keys'
# but I run out of time for now
- if 'reg-keys' in record: record['keys']=record['reg-keys']
+ if 'reg-keys' in record:
+ record['keys'] = record['reg-keys']
return record
class RegistryManager:
def __init__ (self, config):
- logger.info("Creating RegistryManager[%s]"%id(self))
+ logger.info("Creating RegistryManager[{}]".format(id(self)))
# The GENI GetVersion call
def GetVersion(self, api, options):
# get record info
dbsession = api.dbsession()
- record=dbsession.query(RegRecord).filter_by(type=type,hrn=hrn).first()
+ record = dbsession.query(RegRecord).filter_by(type=type, hrn=hrn).first()
if not record:
- raise RecordNotFound("hrn=%s, type=%s"%(hrn,type))
+ raise RecordNotFound("hrn={}, type={}".format(hrn, type))
# get the callers gid
# if caller_xrn is not specified assume the caller is the record
else:
caller_record = dbsession.query(RegRecord).filter_by(hrn=caller_hrn).first()
if not caller_record:
- raise RecordNotFound("Unable to associated caller (hrn=%s, type=%s) with credential for (hrn: %s, type: %s)"%(caller_hrn, caller_type, hrn, type))
+ raise RecordNotFound(
+ "Unable to associated caller (hrn={}, type={}) with credential for (hrn: {}, type: {})"
+ .format(caller_hrn, caller_type, hrn, type))
caller_gid = GID(string=caller_record.gid)
object_hrn = record.get_gid_object().get_hrn()
rights = api.auth.determine_user_rights(caller_hrn, record)
# make sure caller has rights to this object
if rights.is_empty():
- raise PermissionError("%s has no rights to %s (%s)" % \
- (caller_hrn, object_hrn, xrn))
+ raise PermissionError("{} has no rights to {} ({})"
+ .format(caller_hrn, object_hrn, xrn))
object_gid = GID(string=record.gid)
new_cred = Credential(subject = object_gid.get_subject())
new_cred.set_gid_caller(caller_gid)
local_records = dbsession.query(RegRecord).filter(RegRecord.hrn.in_(local_hrns))
if type:
local_records = local_records.filter_by(type=type)
- local_records=local_records.all()
+ local_records = local_records.all()
for local_record in local_records:
- augment_with_sfa_builtins (local_record)
+ augment_with_sfa_builtins(local_record)
- logger.info("Resolve, (details=%s,type=%s) local_records=%s "%(details,type,local_records))
+ logger.info("Resolve, (details={}, type={}) local_records={} "
+ .format(details, type, local_records))
local_dicts = [ record.__dict__ for record in local_records ]
if details:
# used to be in the driver code, sounds like a poorman thing though
def solve_neighbour_url (record):
if not record.type.startswith('authority'): return
- hrn=record.hrn
+ hrn = record.hrn
for neighbour_dict in [ api.aggregates, api.registries ]:
if hrn in neighbour_dict:
record.url=neighbour_dict[hrn].get_url()
return
- for record in local_records: solve_neighbour_url (record)
+ for record in local_records:
+ solve_neighbour_url (record)
# convert local record objects to dicts for xmlrpc
# xxx somehow here calling dict(record) issues a weird error
# however record.todict() seems to work fine
# records.extend( [ dict(record) for record in local_records ] )
- records.extend( [ record.todict(exclude_types=[InstrumentedList]) for record in local_records ] )
+ records.extend( [ record.record_to_dict(exclude_types=(InstrumentedList,)) for record in local_records ] )
if not records:
raise RecordNotFound(str(hrns))
def List (self, api, xrn, origin_hrn=None, options=None):
if options is None: options={}
- dbsession=api.dbsession()
+ dbsession = api.dbsession()
# load all know registry names into a prefix tree and attempt to find
# the longest matching prefix
hrn, type = urn_to_hrn(xrn)
record_dicts = record_list
# if we still have not found the record yet, try the local registry
-# logger.debug("before trying local records, %d foreign records"% len(record_dicts))
+# logger.debug("before trying local records, {} foreign records".format(len(record_dicts)))
if not record_dicts:
recursive = False
if ('recursive' in options and options['recursive']):
raise MissingAuthority(hrn)
if recursive:
records = dbsession.query(RegRecord).filter(RegRecord.hrn.startswith(hrn)).all()
-# logger.debug("recursive mode, found %d local records"%(len(records)))
+# logger.debug("recursive mode, found {} local records".format(len(records)))
else:
records = dbsession.query(RegRecord).filter_by(authority=hrn).all()
-# logger.debug("non recursive mode, found %d local records"%(len(records)))
+# logger.debug("non recursive mode, found {} local records".format(len(records)))
# so that sfi list can show more than plain names...
- for record in records: augment_with_sfa_builtins (record)
- record_dicts=[ record.todict(exclude_types=[InstrumentedList]) for record in records ]
+ for record in records:
+ # xxx mystery - see also the bottom of model.py
+ # resulting records have been observed to not always have
+ # their __dict__ actually in line with the object's contents;
+ # was first observed with authorities' 'name' column
+ # that would be missing from result as received by client
+ augment_with_sfa_builtins(record)
+ record_dicts = [ record.record_to_dict(exclude_types=(InstrumentedList,)) for record in records ]
return record_dicts
# Add the email of the user to SubjectAltName in the GID
email = None
hrn = Xrn(xrn).get_hrn()
- dbsession=api.dbsession()
- record=dbsession.query(RegUser).filter_by(hrn=hrn).first()
+ dbsession = api.dbsession()
+ record = dbsession.query(RegUser).filter_by(hrn=hrn).first()
if record:
- email=getattr(record,'email',None)
- gid = api.auth.hierarchy.create_gid(xrn, create_uuid(), pkey, email = email)
+ email = getattr(record,'email',None)
+ gid = api.auth.hierarchy.create_gid(xrn, create_uuid(), pkey, email=email)
return gid.save_to_string(save_parents=True)
####################
# hrns is the list of hrns that should be linked to the subject from now on
# target_type would be e.g. 'user' in the 'slice' x 'researcher' example
def update_driver_relation (self, api, record_obj, hrns, target_type, relation_name):
- dbsession=api.dbsession()
+ dbsession = api.dbsession()
# locate the linked objects in our db
subject_type=record_obj.type
subject_id=record_obj.pointer
def Register(self, api, record_dict):
- logger.debug("Register: entering with record_dict=%s"%printable(record_dict))
+ logger.debug("Register: entering with record_dict={}".format(printable(record_dict)))
normalize_input_record (record_dict)
- logger.debug("Register: normalized record_dict=%s"%printable(record_dict))
+ logger.debug("Register: normalized record_dict={}".format(printable(record_dict)))
- dbsession=api.dbsession()
+ dbsession = api.dbsession()
hrn, type = record_dict['hrn'], record_dict['type']
urn = hrn_to_urn(hrn,type)
# validate the type
raise UnknownSfaType(type)
# check if record_dict already exists
- existing_records = dbsession.query(RegRecord).filter_by(type=type,hrn=hrn).all()
+ existing_records = dbsession.query(RegRecord).filter_by(type=type, hrn=hrn).all()
if existing_records:
raise ExistingRecord(hrn)
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pkey = convert_public_key(pub_key)
- email=getattr(record,'email',None)
+ email = getattr(record,'email',None)
gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey, email = email)
gid = gid_object.save_to_string(save_parents=True)
record.gid = gid
keys=getattr(record,'reg-keys')
# some people send the key as a string instead of a list of strings
if isinstance(keys,types.StringTypes): keys=[keys]
- logger.debug ("creating %d keys for user %s"%(len(keys),record.hrn))
+ logger.debug ("creating {} keys for user {}".format(len(keys), record.hrn))
record.reg_keys = [ RegKey (key) for key in keys ]
# update testbed-specific data if needed
def Update(self, api, record_dict):
- logger.debug("Update: entering with record_dict=%s"%printable(record_dict))
+ logger.debug("Update: entering with record_dict={}".format(printable(record_dict)))
normalize_input_record (record_dict)
- logger.debug("Update: normalized record_dict=%s"%printable(record_dict))
+ logger.debug("Update: normalized record_dict={}".format(printable(record_dict)))
- dbsession=api.dbsession()
+ dbsession = api.dbsession()
assert ('type' in record_dict)
- new_record=make_record(dict=record_dict)
- (type,hrn) = (new_record.type, new_record.hrn)
+ new_record = make_record(dict=record_dict)
+ (type, hrn) = (new_record.type, new_record.hrn)
# make sure the record exists
- record = dbsession.query(RegRecord).filter_by(type=type,hrn=hrn).first()
+ record = dbsession.query(RegRecord).filter_by(type=type, hrn=hrn).first()
if not record:
- raise RecordNotFound("hrn=%s, type=%s"%(hrn,type))
+ raise RecordNotFound("hrn={}, type={}".format(hrn, type))
record.just_updated()
# Use the pointer from the existing record, not the one that the user
pointer = record.pointer
# is there a change in keys ?
- new_key=None
- if type=='user':
- if getattr(new_record,'keys',None):
- new_key=new_record.keys
- if isinstance (new_key,types.ListType):
- new_key=new_key[0]
+ new_key = None
+ if type == 'user':
+ if getattr(new_record, 'keys', None):
+ new_key = new_record.keys
+ if isinstance (new_key, types.ListType):
+ new_key = new_key[0]
# take new_key into account
if new_key:
uuid = create_uuid()
urn = hrn_to_urn(hrn,type)
- email=getattr(new_record,'email',None)
+ email = getattr(new_record, 'email', None)
if email is None:
- email=getattr(record,'email',None)
+ email = getattr(record, 'email', None)
gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey, email = email)
gid = gid_object.save_to_string(save_parents=True)
# not too big a deal with planetlab as the driver is authoritative, but...
# update native relations
- if isinstance (record, RegSlice):
- researcher_hrns = getattr(new_record,'reg-researchers',None)
- if researcher_hrns is not None: record.update_researchers (researcher_hrns, dbsession)
-
- elif isinstance (record, RegAuthority):
- pi_hrns = getattr(new_record,'reg-pis',None)
- if pi_hrns is not None: record.update_pis (pi_hrns, dbsession)
+ if isinstance(record, RegSlice):
+ researcher_hrns = getattr(new_record, 'reg-researchers', None)
+ if researcher_hrns is not None:
+ record.update_researchers (researcher_hrns, dbsession)
+
+ elif isinstance(record, RegAuthority):
+ pi_hrns = getattr(new_record, 'reg-pis', None)
+ if pi_hrns is not None:
+ record.update_pis(pi_hrns, dbsession)
+ name = getattr(new_record, 'name', None)
+ if name is not None:
+ record.name = name
+
+ elif isinstance(record, RegUser):
+ email = getattr(new_record, 'email', None)
+ if email is not None:
+ record.email = email
# update the PLC information that was specified with the record
- # xxx oddly enough, without this useless statement,
+ # xxx mystery -- see also the bottom of model.py,
+ # oddly enough, without this useless statement,
# record.__dict__ as received by the driver seems to be off
- # anyway the driver should receive an object
+ # anyway the driver should receive an object
# (and then extract __dict__ itself if needed)
- print "DO NOT REMOVE ME before driver.update, record=%s"%record
+ print "DO NOT REMOVE ME before driver.update, record={}".format(record)
+ # as of June 2015: I suspect we could remove that print line above and replace it with
+ # augment_with_sfa_builtins(record)
+ # instead, that checks for these fields, like it is done above in List()
+ # but that would need to be confirmed by more extensive tests
new_key_pointer = -1
try:
(pointer, new_key_pointer) = api.driver.update (record.__dict__, new_record.__dict__, hrn, new_key)
except:
pass
if new_key and new_key_pointer:
- record.reg_keys=[ RegKey (new_key, new_key_pointer)]
+ record.reg_keys = [ RegKey(new_key, new_key_pointer) ]
record.gid = gid
dbsession.commit()
# update membership for researchers, pis, owners, operators
- self.update_driver_relations (api, record, new_record)
+ self.update_driver_relations(api, record, new_record)
return 1
# expecting an Xrn instance
def Remove(self, api, xrn, origin_hrn=None):
- dbsession=api.dbsession()
- hrn=xrn.get_hrn()
- type=xrn.get_type()
- request=dbsession.query(RegRecord).filter_by(hrn=hrn)
+ dbsession = api.dbsession()
+ hrn = xrn.get_hrn()
+ type = xrn.get_type()
+ request = dbsession.query(RegRecord).filter_by(hrn=hrn)
if type and type not in ['all', '*']:
- request=request.filter_by(type=type)
+ request = request.filter_by(type=type)
record = request.first()
if not record:
- msg="Could not find hrn %s"%hrn
- if type: msg += " type=%s"%type
+ msg = "Could not find hrn {}".format(hrn)
+ if type: msg += " type={}".format(type)
raise RecordNotFound(msg)
type = record.type
# This is a PLC-specific thing, won't work with other platforms
def get_key_from_incoming_ip (self, api):
- dbsession=api.dbsession()
+ dbsession = api.dbsession()
# verify that the callers's ip address exist in the db and is an interface
# for a node in the db
(ip, port) = api.remote_addr
interfaces = api.driver.shell.GetInterfaces({'ip': ip}, ['node_id'])
if not interfaces:
- raise NonExistingRecord("no such ip %(ip)s" % locals())
+ raise NonExistingRecord("no such ip {}".format(ip))
nodes = api.driver.shell.GetNodes([interfaces[0]['node_id']], ['node_id', 'hostname'])
if not nodes:
- raise NonExistingRecord("no such node using ip %(ip)s" % locals())
+ raise NonExistingRecord("no such node using ip {}".format(ip))
node = nodes[0]
# look up the sfa record
- record=dbsession.query(RegRecord).filter_by(type='node',pointer=node['node_id']).first()
+ record = dbsession.query(RegRecord).filter_by(type='node', pointer=node['node_id']).first()
if not record:
- raise RecordNotFound("node with pointer %s"%node['node_id'])
+ raise RecordNotFound("node with pointer {}".format(node['node_id']))
# generate a new keypair and gid
uuid = create_uuid()
pkey = Keypair(create=True)
urn = hrn_to_urn(record.hrn, record.type)
- email=getattr(record,'email',None)
+ email = getattr(record, 'email', None)
gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey, email)
gid = gid_object.save_to_string(save_parents=True)
record.gid = gid
scp = "/usr/bin/scp"
#identity = "/etc/planetlab/root_ssh_key.rsa"
identity = "/etc/sfa/root_ssh_key"
- scp_options=" -i %(identity)s " % locals()
- scp_options+="-o StrictHostKeyChecking=no " % locals()
- scp_key_command="%(scp)s %(scp_options)s %(key_filename)s root@%(host)s:%(key_dest)s" %\
- locals()
- scp_gid_command="%(scp)s %(scp_options)s %(gid_filename)s root@%(host)s:%(gid_dest)s" %\
- locals()
+ scp_options = " -i {identity} ".format(**locals())
+ scp_options += "-o StrictHostKeyChecking=no "
+ scp_key_command = "{scp} {scp_options} {key_filename} root@{host}:{key_dest}"\
+ .format(**locals())
+ scp_gid_command = "{scp} {scp_options} {gid_filename} root@{host}:{gid_dest}"\
+ .format(**locals())
all_commands = [scp_key_command, scp_gid_command]
+++ /dev/null
-import types
-# for get_key_from_incoming_ip
-import tempfile
-import os
-import commands
-
-from sfa.util.faults import RecordNotFound, AccountNotEnabled, PermissionError, MissingAuthority, \
- UnknownSfaType, ExistingRecord, NonExistingRecord
-from sfa.util.sfatime import utcparse, datetime_to_epoch
-from sfa.util.prefixTree import prefixTree
-from sfa.util.xrn import Xrn, get_authority, hrn_to_urn, urn_to_hrn
-from sfa.util.version import version_core
-from sfa.util.sfalogging import logger
-
-from sfa.trust.gid import GID
-from sfa.trust.credential import Credential
-from sfa.trust.certificate import Certificate, Keypair, convert_public_key
-from sfa.trust.gid import create_uuid
-
-from sfa.storage.model import make_record,RegRecord
-from sfa.storage.alchemy import dbsession
-
-from sfa.managers.registry_manager import RegistryManager
-
-class RegistryManager(RegistryManager):
-
- def GetCredential(self, api, xrn, type, caller_xrn = None):
- # convert xrn to hrn
- if type:
- hrn = urn_to_hrn(xrn)[0]
- else:
- hrn, type = urn_to_hrn(xrn)
-
- # Is this a root or sub authority
- auth_hrn = api.auth.get_authority(hrn)
- if not auth_hrn or hrn == api.config.SFA_INTERFACE_HRN:
- auth_hrn = hrn
- auth_info = api.auth.get_auth_info(auth_hrn)
- # get record info
- filter = {'hrn': hrn}
- if type:
- filter['type'] = type
- record=dbsession.query(RegRecord).filter_by(**filter).first()
- if not record:
- raise RecordNotFound("hrn=%s, type=%s"%(hrn,type))
-
- # verify_cancreate_credential requires that the member lists
- # (researchers, pis, etc) be filled in
- logger.debug("get credential before augment dict, keys=%s"%record.__dict__.keys())
- self.driver.augment_records_with_testbed_info (record.__dict__)
- logger.debug("get credential after augment dict, keys=%s"%record.__dict__.keys())
- if not self.driver.is_enabled (record.__dict__):
- raise AccountNotEnabled(": PlanetLab account %s is not enabled. Please contact your site PI" %(record.email))
-
- # get the callers gid
- # if caller_xrn is not specified assume the caller is the record
- # object itself.
- if not caller_xrn:
- caller_hrn = hrn
- caller_gid = record.get_gid_object()
- else:
- caller_hrn, caller_type = urn_to_hrn(caller_xrn)
- caller_filter = {'hrn': caller_hrn}
- if caller_type:
- caller_filter['type'] = caller_type
- caller_record = dbsession.query(RegRecord).filter_by(**caller_filter).first()
- if not caller_record:
- raise RecordNotFound("Unable to associated caller (hrn=%s, type=%s) with credential for (hrn: %s, type: %s)"%(caller_hrn, caller_type, hrn, type))
- caller_gid = GID(string=caller_record.gid)
-
- object_hrn = record.get_gid_object().get_hrn()
- rights = api.auth.determine_user_rights(caller_hrn, record.todict())
- # make sure caller has rights to this object
- if rights.is_empty():
- raise PermissionError(caller_hrn + " has no rights to " + record.hrn)
-
- object_gid = GID(string=record.gid)
- new_cred = Credential(subject = object_gid.get_subject())
- new_cred.set_gid_caller(caller_gid)
- new_cred.set_gid_object(object_gid)
- new_cred.set_issuer_keys(auth_info.get_privkey_filename(), auth_info.get_gid_filename())
- #new_cred.set_pubkey(object_gid.get_pubkey())
- new_cred.set_privileges(rights)
- new_cred.get_privileges().delegate_all_privileges(True)
- if hasattr(record,'expires'):
- date = utcparse(record.expires)
- expires = datetime_to_epoch(date)
- new_cred.set_expiration(int(expires))
- auth_kind = "authority,ma,sa"
- # Parent not necessary, verify with certs
- #new_cred.set_parent(api.auth.hierarchy.get_auth_cred(auth_hrn, kind=auth_kind))
- new_cred.encode()
- new_cred.sign()
-
- return new_cred.save_to_string(save_parents=True)
-
-
- # subject_record describes the subject of the relationships
- # ref_record contains the target values for the various relationships we need to manage
- # (to begin with, this is just the slice x person relationship)
- def update_relations (self, subject_obj, ref_obj):
- type=subject_obj.type
- if type=='slice':
- self.update_relation(subject_obj, 'researcher', ref_obj.researcher, 'user')
-
- # field_key is the name of one field in the record, typically 'researcher' for a 'slice' record
- # hrns is the list of hrns that should be linked to the subject from now on
- # target_type would be e.g. 'user' in the 'slice' x 'researcher' example
- def update_relation (self, record_obj, field_key, hrns, target_type):
- # locate the linked objects in our db
- subject_type=record_obj.type
- subject_id=record_obj.pointer
- # get the 'pointer' field of all matching records
- link_id_tuples = dbsession.query(RegRecord.pointer).filter_by(type=target_type).filter(RegRecord.hrn.in_(hrns)).all()
- # sqlalchemy returns named tuples for columns
- link_ids = [ tuple.pointer for tuple in link_id_tuples ]
- self.driver.update_relation (subject_type, target_type, subject_id, link_ids)
-
@param slice_urn (string) URN of slice to allocate to
@param credentials (dict) of credentials
@param rspec (string) rspec to allocate
-
+ @param options (dict)
+
+ As of 3.1.16, the PL driver implements here an important option named
+ 'pltags' that affects the management of slice tags.
+
+ This option can take 3 values
+ (*) options['pltags'] == 'ignore' (default)
+       This is the recommended mode; in this mode all slice tags passed
+       here are ignored; these correspond to the <planetlab:attribute> XML tags in
+       the <sliver_type> areas of the incoming rspec to Allocate.
+       In other words you are guaranteed to leave slice tags alone.
+ (*) options['pltags'] == 'append'
+ All incoming slice tags are added to corresponding slivers,
+ unless an exact match can be found in the PLC db
+ (*) options['pltags'] == 'sync'
+ The historical mode, that attempts to leave the PLC db in a state
+ in sync with the ones specified in incoming rspec.
+
+ See also http://svn.planet-lab.org/wiki/SFASliceTags
+
"""
interfaces = ['aggregate', 'slicemgr']
accepts = [
options['geni_rspec_version'] = options['rspec_version']
else:
raise SfaInvalidArgument('Must specify an rspec version option. geni_rspec_version cannot be null')
- valid_creds = self.api.auth.checkCredentialsSpeaksFor(creds, 'listnodes', urns,
- check_sliver_callback = self.api.driver.check_sliver_credentials,
- options=options)
+ valid_creds = self.api.auth.checkCredentialsSpeaksFor(
+ creds, 'listnodes', urns,
+ check_sliver_callback = self.api.driver.check_sliver_credentials,
+ options=options)
# get hrn of the original caller
origin_hrn = options.get('origin_hrn', None)
def get_node_tags(self, filter=None):
if filter is None: filter={}
node_tags = {}
- for node_tag in self.driver.shell.GetNodeTags(filter):
+ for node_tag in self.driver.shell.GetNodeTags(filter, ['tagname', 'value', 'node_id', 'node_tag_id'] ):
node_tags[node_tag['node_tag_id']] = node_tag
return node_tags
if node_ids:
node_ids = [node_id for node_id in node_ids if node_id in slice['node_ids']]
slice['node_ids'] = node_ids
- tags_dict = self.get_slice_tags(slice)
+ pltags_dict = self.get_pltags_by_node_id(slice)
nodes_dict = self.get_slice_nodes(slice, options)
slivers = []
for node in nodes_dict.values():
node.update(slice)
- node['tags'] = tags_dict[node['node_id']]
+            # slice-global tags
+            # NOTE: take a copy here; assigning the shared 'slice-global'
+            # list directly would make the += below extend it in place,
+            # so every node would accumulate the sliver tags of all
+            # previously processed nodes
+            node['slice-tags'] = list(pltags_dict['slice-global'])
+            # xxx
+            # this is where we could maybe add the nodegroup slice tags,
+            # but it's tedious...
+            # xxx
+            # sliver tags, specific to this node
+            node['slice-tags'] += pltags_dict[node['node_id']]
sliver_hrn = '%s.%s-%s' % (self.driver.hrn, slice['slice_id'], node['node_id'])
node['sliver_id'] = Xrn(sliver_hrn, type='sliver').urn
node['urn'] = node['sliver_id']
interface['client_id'] = "%s:%s" % (node['node_id'], if_id)
rspec_node['interfaces'].append(interface)
if_count+=1
- tags = [PLTag(node_tags[tag_id]) for tag_id in node['node_tag_ids'] if tag_id in node_tags]
- rspec_node['tags'] = tags
+ # this is what describes a particular node
+ node_level_tags = [PLTag(node_tags[tag_id]) for tag_id in node['node_tag_ids'] if tag_id in node_tags]
+ rspec_node['tags'] = node_level_tags
return rspec_node
- def sliver_to_rspec_node(self, sliver, sites, interfaces, node_tags, \
+ def sliver_to_rspec_node(self, sliver, sites, interfaces, node_tags, sliver_pltags, \
pl_initscripts, sliver_allocations):
# get the granularity in second for the reservation system
grain = self.driver.shell.GetLeaseGranularity()
rspec_node = self.node_to_rspec_node(sliver, sites, interfaces, node_tags, pl_initscripts, grain)
+ for pltag in sliver_pltags:
+ logger.debug("Need to expose {}".format(pltag))
# xxx how to retrieve site['login_base']
rspec_node['expires'] = datetime_to_string(utcparse(sliver['expires']))
# remove interfaces from manifest
rspec_node['interfaces'] = []
# add sliver info
rspec_sliver = Sliver({'sliver_id': sliver['urn'],
- 'name': sliver['name'],
- 'type': 'plab-vserver',
- 'tags': []})
+ 'name': sliver['name'],
+ 'type': 'plab-vserver',
+ 'tags': sliver_pltags,
+ })
rspec_node['sliver_id'] = rspec_sliver['sliver_id']
if sliver['urn'] in sliver_allocations:
rspec_node['client_id'] = sliver_allocations[sliver['urn']].client_id
rspec_node['slivers'] = [rspec_sliver]
# slivers always provide the ssh service
- login = Login({'authentication': 'ssh-keys',
- 'hostname': sliver['hostname'],
+ login = Login({'authentication': 'ssh-keys',
+ 'hostname': sliver['hostname'],
'port':'22',
'username': sliver['name'],
'login': sliver['name']
})
service = ServicesElement({'login': login,
- 'services_user': sliver['services_user']})
- rspec_node['services'] = [service]
- return rspec_node
+ 'services_user': sliver['services_user']})
+ rspec_node['services'] = [service]
+ return rspec_node
- def get_slice_tags(self, slice):
+ def get_pltags_by_node_id(self, slice):
slice_tag_ids = []
slice_tag_ids.extend(slice['slice_tag_ids'])
- tags = self.driver.shell.GetSliceTags({'slice_tag_id': slice_tag_ids})
+ tags = self.driver.shell.GetSliceTags({'slice_tag_id': slice_tag_ids},
+ ['tagname', 'value', 'node_id', 'nodegroup_id'])
# sorted by node_id
- tags_dict = defaultdict(list)
+ pltags_dict = defaultdict(list)
for tag in tags:
- tags_dict[tag['node_id']] = tag
- return tags_dict
+ # specific to a node
+ if tag['node_id']:
+ tag['scope'] = 'sliver'
+ pltags_dict[tag['node_id']].append(PLTag(tag))
+ # restricted to a nodegroup
+ # for now such tags are not exposed to describe
+ # xxx we should also expose the nodegroup name in this case to be complete..
+ elif tag['nodegroup_id']:
+ tag['scope'] = 'nodegroup'
+ pltags_dict['nodegroup'].append(PLTag(tag))
+ # this tag is global to the slice
+ else:
+ tag['scope'] = 'slice'
+ pltags_dict['slice-global'].append(PLTag(tag))
+ return pltags_dict
def get_slice_nodes(self, slice, options=None):
if options is None: options={}
for sliver in slivers:
if sliver['slice_ids_whitelist'] and sliver['slice_id'] not in sliver['slice_ids_whitelist']:
continue
- rspec_node = self.sliver_to_rspec_node(sliver, sites, interfaces, node_tags,
+ sliver_pltags = sliver['slice-tags']
+ rspec_node = self.sliver_to_rspec_node(sliver, sites, interfaces, node_tags, sliver_pltags,
pl_initscripts, sliver_allocation_dict)
+ logger.debug('rspec of type {}'.format(rspec_node.__class__.__name__))
# manifest node element shouldn't contain available attribute
rspec_node.pop('available')
rspec_nodes.append(rspec_node)
convert a list of dictionaries into a dictionary keyed on the
specified dictionary key
"""
- return dict ( [ (rec[key],rec) for rec in recs ] )
+ return { rec[key] : rec for rec in recs }
#
# PlShell is just an xmlrpc serverproxy where methods
def __init__ (self, api):
Driver.__init__ (self, api)
- config=api.config
+ config = api.config
self.shell = PlShell (config)
- self.cache=None
+ self.cache = None
if config.SFA_AGGREGATE_CACHING:
if PlDriver.cache is None:
PlDriver.cache = Cache()
filter['slice_id'] = int(sliver_id_parts[0])
except ValueError:
filter['name'] = sliver_id_parts[0]
- slices = self.shell.GetSlices(filter,['hrn'])
+ slices = self.shell.GetSlices(filter, ['hrn'])
if not slices:
- raise Forbidden("Unable to locate slice record for sliver: %s" % xrn)
+ raise Forbidden("Unable to locate slice record for sliver: {}".format(xrn))
slice = slices[0]
slice_xrn = slice['hrn']
return slice_xrn
# make sure we have a credential for every specified sliver ierd
for sliver_name in sliver_names:
if sliver_name not in slice_cred_names:
- msg = "Valid credential not found for target: %s" % sliver_name
+ msg = "Valid credential not found for target: {}".format(sliver_name)
raise Forbidden(msg)
########################################
if not sites:
# xxx when a site gets registered through SFA we need to set its max_slices
if 'max_slices' not in pl_record:
- pl_record['max_slices']=2
+ pl_record['max_slices'] = 2
pointer = self.shell.AddSite(pl_record)
self.shell.SetSiteHrn(int(pointer), hrn)
else:
pointer = sites[0]['site_id']
elif type == 'slice':
- acceptable_fields=['url', 'instantiation', 'name', 'description']
+ acceptable_fields = ['url', 'instantiation', 'name', 'description']
for key in pl_record.keys():
if key not in acceptable_fields:
pl_record.pop(key)
persons = self.shell.GetPersons({'peer_id': None, 'email': sfa_record['email']})
if not persons:
for key in ['first_name','last_name']:
- if key not in sfa_record: sfa_record[key]='*from*sfa*'
+ if key not in sfa_record:
+ sfa_record[key] = '*from*sfa*'
# AddPerson does not allow everything to be set
can_add = ['first_name', 'last_name', 'title','email', 'password', 'phone', 'url', 'bio']
- add_person_dict=dict ( [ (k,sfa_record[k]) for k in sfa_record if k in can_add ] )
+ add_person_dict = { k : sfa_record[k] for k in sfa_record if k in can_add }
pointer = self.shell.AddPerson(add_person_dict)
self.shell.SetPersonHrn(int(pointer), hrn)
else:
self.shell.AddPersonToSite(pointer, login_base)
# What roles should this user have?
- roles=[]
+ roles = []
if 'roles' in sfa_record:
# if specified in xml, but only low-level roles
roles = [ role for role in sfa_record['roles'] if role in ['user','tech'] ]
# at least user if no other cluse could be found
if not roles:
- roles=['user']
+ roles = ['user']
for role in roles:
self.shell.AddRoleToPerson(role, pointer)
# Add the user's key
self.shell.AddPersonKey(pointer, {'key_type' : 'ssh', 'key' : pub_key})
elif type == 'node':
- login_base = PlXrn(xrn=sfa_record['authority'],type='authority').pl_login_base()
+ login_base = PlXrn(xrn=sfa_record['authority'], type='authority').pl_login_base()
nodes = self.shell.GetNodes({'peer_id': None, 'hostname': pl_record['hostname']})
if not nodes:
pointer = self.shell.AddNode(login_base, pl_record)
raise UnknownSfaType(type)
if (type == "authority"):
+ logger.debug("pldriver.update: calling UpdateSite with {}".format(new_sfa_record))
self.shell.UpdateSite(pointer, new_sfa_record)
self.shell.SetSiteHrn(pointer, hrn)
elif type == "slice":
- pl_record=self.sfa_fields_to_pl_fields(type, hrn, new_sfa_record)
+ pl_record = self.sfa_fields_to_pl_fields(type, hrn, new_sfa_record)
if 'name' in pl_record:
pl_record.pop('name')
self.shell.UpdateSlice(pointer, pl_record)
##########
def remove (self, sfa_record):
- type=sfa_record['type']
- pointer=sfa_record['pointer']
+ type = sfa_record['type']
+ pointer = sfa_record['pointer']
if type == 'user':
persons = self.shell.GetPersons({'peer_id': None, 'person_id': pointer})
# only delete this person if he has site ids. if he doesnt, it probably means
return True
-
-
-
##
# Convert SFA fields to PLC fields for use when registering or updating
# registry record in the PLC database
if type == "slice":
pl_record["name"] = hrn_to_pl_slicename(hrn)
if "instantiation" in sfa_record:
- pl_record['instantiation']=sfa_record['instantiation']
+ pl_record['instantiation'] = sfa_record['instantiation']
else:
pl_record["instantiation"] = "plc-instantiated"
if "url" in sfa_record:
elif type == "authority":
pl_record["login_base"] = PlXrn(xrn=hrn,type='authority').pl_login_base()
- if "name" not in sfa_record:
+ if "name" not in sfa_record or not sfa_record['name']:
pl_record["name"] = hrn
if "abbreviated_name" not in sfa_record:
pl_record["abbreviated_name"] = hrn
# continue
sfa_info = {}
type = record['type']
- logger.info("fill_record_sfa_info - incoming record typed %s"%type)
+ logger.info("fill_record_sfa_info - incoming record typed {}".format(type))
if (type == "slice"):
# all slice users are researchers
record['geni_urn'] = hrn_to_urn(record['hrn'], 'slice')
# plcapi works by changes, compute what needs to be added/deleted
def update_relation (self, subject_type, target_type, relation_name, subject_id, target_ids):
# hard-wire the code for slice/user for now, could be smarter if needed
- if subject_type =='slice' and target_type == 'user' and relation_name == 'researcher':
- subject=self.shell.GetSlices (subject_id)[0]
+ if subject_type == 'slice' and target_type == 'user' and relation_name == 'researcher':
+ subject = self.shell.GetSlices (subject_id)[0]
current_target_ids = subject['person_ids']
add_target_ids = list ( set (target_ids).difference(current_target_ids))
del_target_ids = list ( set (current_target_ids).difference(target_ids))
- logger.debug ("subject_id = %s (type=%s)"%(subject_id,type(subject_id)))
+ logger.debug ("subject_id = {} (type={})".format(subject_id, type(subject_id)))
for target_id in add_target_ids:
self.shell.AddPersonToSlice (target_id,subject_id)
- logger.debug ("add_target_id = %s (type=%s)"%(target_id,type(target_id)))
+ logger.debug ("add_target_id = {} (type={})".format(target_id, type(target_id)))
for target_id in del_target_ids:
- logger.debug ("del_target_id = %s (type=%s)"%(target_id,type(target_id)))
+ logger.debug ("del_target_id = {} (type={})".format(target_id, type(target_id)))
self.shell.DeletePersonFromSlice (target_id, subject_id)
elif subject_type == 'authority' and target_type == 'user' and relation_name == 'pi':
# due to the plcapi limitations this means essentially adding pi role to all people in the list
if 'pi' not in person['roles']:
self.shell.AddRoleToPerson('pi',person['person_id'])
else:
- logger.info('unexpected relation %s to maintain, %s -> %s'%(relation_name,subject_type,target_type))
+ logger.info('unexpected relation {} to maintain, {} -> {}'\
+ .format(relation_name, subject_type, target_type))
########################################
return status
def allocate (self, urn, rspec_string, expiration, options=None):
+ """
+ Allocate a PL slice
+
+        Supported options:
+        (*) geni_users
+        (*) pltags : one of 'ignore' (default), 'append' or 'sync';
+            governs how slice tags present in the incoming rspec are
+            handled; in 'sync' mode pre-existing tags are deleted if
+            not repeated in the incoming request
+ """
if options is None: options={}
xrn = Xrn(urn)
aggregate = PlAggregate(self)
slices = PlSlices(self)
sfa_peer = slices.get_sfa_peer(xrn.get_hrn())
- slice_record=None
+ slice_record = None
users = options.get('geni_users', [])
if users:
# ensure person records exists
persons = slices.verify_persons(xrn.hrn, slice, users, sfa_peer, options=options)
# ensure slice attributes exists
- slices.verify_slice_attributes(slice, requested_attributes, options=options)
+ slices.verify_slice_tags(slice, requested_attributes, options=options)
# add/remove slice from nodes
request_nodes = rspec.version.get_nodes_with_slivers()
filter['name'] = sliver_id_parts[0]
slices = self.shell.GetSlices(filter,['hrn'])
if not slices:
- raise Forbidden("Unable to locate slice record for sliver: %s" % xrn)
+ raise Forbidden("Unable to locate slice record for sliver: {}".format(xrn))
slice = slices[0]
slice_urn = hrn_to_urn(slice['hrn'], type='slice')
urns = [slice_urn]
persons = slices.verify_persons(slice['hrn'], slice, users, sfa_peer, options=options)
# update sliver allocation states and set them to geni_provisioned
sliver_ids = [sliver['sliver_id'] for sliver in slivers]
- dbsession=self.api.dbsession()
+ dbsession = self.api.dbsession()
SliverAllocation.set_allocations(sliver_ids, 'geni_provisioned',dbsession)
version_manager = VersionManager()
self.shell.DeleteLeases(leases_ids)
# delete sliver allocation states
- dbsession=self.api.dbsession()
- SliverAllocation.delete_allocations(sliver_ids,dbsession)
+ dbsession = self.api.dbsession()
+ SliverAllocation.delete_allocations(sliver_ids, dbsession)
finally:
pass
description = self.describe(urns, 'GENI 3', options)
for sliver in description['geni_slivers']:
if sliver['geni_operational_status'] == 'geni_pending_allocation':
- raise UnsupportedOperation(action, "Sliver must be fully allocated (operational status is not geni_pending_allocation)")
+ raise UnsupportedOperation\
+ (action, "Sliver must be fully allocated (operational status is not geni_pending_allocation)")
#
# Perform Operational Action Here
#
class PlSlices:
- rspec_to_slice_tag = {'max_rate':'net_max_rate'}
+ rspec_to_slice_tag = {'max_rate' : 'net_max_rate'}
def __init__(self, driver):
self.driver = driver
# XXX Sanity check; though technically this should be a system invariant
# checked with an assertion
- if slice['expires'] > MAXINT: slice['expires']= MAXINT
+ if slice['expires'] > MAXINT:
+ slice['expires'] = MAXINT
slivers.append({
'hrn': hrn,
for node in resulting_nodes:
client_id = slivers[node['hostname']]['client_id']
component_id = slivers[node['hostname']]['component_id']
- sliver_hrn = '%s.%s-%s' % (self.driver.hrn, slice['slice_id'], node['node_id'])
+ sliver_hrn = '{}.{}-{}'.format(self.driver.hrn, slice['slice_id'], node['node_id'])
sliver_id = Xrn(sliver_hrn, type='sliver').urn
record = SliverAllocation(sliver_id=sliver_id, client_id=client_id,
component_id=component_id,
slice_tags.append({'name': 'vini_topo', 'value': 'manual', 'node_id': node_id})
#self.driver.shell.AddSliceTag(slice['name'], 'topo_rspec', str([topo_rspec]), node_id)
- self.verify_slice_attributes(slice, slice_tags, {'append': True}, admin=True)
-
+ self.verify_slice_tags(slice, slice_tags, {'pltags':'append'}, admin=True)
def verify_site(self, slice_xrn, slice_record=None, sfa_peer=None, options=None):
site = sites[0]
else:
# create new site record
- site = {'name': 'sfa:%s' % site_hrn,
+ site = {'name': 'sfa:{}'.format(site_hrn),
'abbreviated_name': site_hrn,
'login_base': login_base,
'max_slices': 100,
( auth_hrn, _ , leaf ) = user_hrn.rpartition('.')
# somehow this has backslashes, get rid of them
auth_hrn = auth_hrn.replace('\\','')
- default_email = "%s@%s.stub"%(leaf,auth_hrn)
+ default_email = "{}@{}.stub".format(leaf, auth_hrn)
person_record = {
# required
'hrn': user_hrn,
}
- logger.debug ("about to attempt to AddPerson with %s"%person_record)
+ logger.debug ("about to attempt to AddPerson with {}".format(person_record))
try:
# the thing is, the PLE db has a limitation on re-using the same e-mail
# in the case where people have an account on ple.upmc and then then come
except:
logger.log_exc("caught during first attempt at AddPerson")
# and if that fails we start again with the email based on the hrn, which this time is unique..
- person_record['email']=default_email
- logger.debug ("second chance with email=%s"%person_record['email'])
+ person_record['email'] = default_email
+ logger.debug ("second chance with email={}".format(person_record['email']))
person_id = int (self.driver.shell.AddPerson(person_record))
self.driver.shell.AddRoleToPerson('user', person_id)
self.driver.shell.AddPersonToSite(person_id, site_id)
# this is for retrieving users from a hrn
users_by_hrn = { user['hrn'] : user for user in users }
- for user in users: logger.debug("incoming user %s"%user)
+ for user in users: logger.debug("incoming user {}".format(user))
# compute the hrn's for the authority and site
top_auth_hrn = top_auth(slice_hrn)
self.driver.shell.AddPersonKey(int(person_id), key)
- def verify_slice_attributes(self, slice, requested_slice_attributes, options=None, admin=False):
+ def verify_slice_tags(self, slice, requested_slice_attributes, options=None, admin=False):
+ """
+ This function deals with slice tags, and supports 3 modes described
+ in the 'pltags' option that can be either
+ (*) 'ignore' (default) - do nothing
+ (*) 'append' - only add incoming tags, that do not match an existing tag
+ (*) 'sync' - tries to do the plain wholesale thing,
+ i.e. to leave the db in sync with incoming tags
+ """
if options is None: options={}
- append = options.get('append', True)
- # get list of attributes users ar able to manage
+
+ # lookup 'pltags' in options to find out which mode is requested here
+ pltags = options.get('pltags', 'ignore')
+ # make sure the default is 'ignore'
+ if pltags not in ('ignore', 'append', 'sync'):
+ pltags = 'ignore'
+
+ if pltags == 'ignore':
+ logger.info('verify_slice_tags in ignore mode - leaving slice tags as-is')
+ return
+
+ # incoming data (attributes) have a (name, value) pair
+ # while PLC data (tags) have a (tagname, value) pair
+ # we must be careful not to mix these up
+
+ # get list of tags users are able to manage - based on category
filter = {'category': '*slice*'}
if not admin:
filter['|roles'] = ['user']
- slice_attributes = self.driver.shell.GetTagTypes(filter)
- valid_slice_attribute_names = [attribute['tagname'] for attribute in slice_attributes]
+        valid_tag_types = self.driver.shell.GetTagTypes(filter)
+        valid_tag_names = [ tag_type['tagname'] for tag_type in valid_tag_types ]
+        # keep the log prefix consistent with the method's new name
+        logger.debug("verify_slice_tags: valid names={}".format(valid_tag_names))
- # get sliver attributes
- added_slice_attributes = []
- removed_slice_attributes = []
+ # get slice tags
+ slice_attributes_to_add = []
+ slice_tags_to_remove = []
# we need to keep the slice hrn anyway
- ignored_slice_attribute_names = ['hrn']
- existing_slice_attributes = self.driver.shell.GetSliceTags({'slice_id': slice['slice_id']})
+ ignored_slice_tag_names = ['hrn']
+ existing_slice_tags = self.driver.shell.GetSliceTags({'slice_id': slice['slice_id']})
- # get attributes that should be removed
- for slice_tag in existing_slice_attributes:
- if slice_tag['tagname'] in ignored_slice_attribute_names:
+ # get tags that should be removed
+ for slice_tag in existing_slice_tags:
+ if slice_tag['tagname'] in ignored_slice_tag_names:
# If a slice already has a admin only role it was probably given to them by an
# admin, so we should ignore it.
- ignored_slice_attribute_names.append(slice_tag['tagname'])
- attribute_found=True
+ ignored_slice_tag_names.append(slice_tag['tagname'])
+ tag_found = True
else:
- # If an existing slice attribute was not found in the request it should
+ # If an existing slice tag was not found in the request it should
# be removed
- attribute_found=False
+ tag_found = False
for requested_attribute in requested_slice_attributes:
if requested_attribute['name'] == slice_tag['tagname'] and \
requested_attribute['value'] == slice_tag['value']:
- attribute_found=True
+ tag_found = True
break
+ # remove tags only if not in append mode
+ if not tag_found and pltags != 'append':
+ slice_tags_to_remove.append(slice_tag)
- if not attribute_found and not append:
- removed_slice_attributes.append(slice_tag)
-
- # get attributes that should be added:
+ # get tags that should be added:
for requested_attribute in requested_slice_attributes:
# if the requested attribute wasn't found we should add it
- if requested_attribute['name'] in valid_slice_attribute_names:
- attribute_found = False
- for existing_attribute in existing_slice_attributes:
+ if requested_attribute['name'] in valid_tag_names:
+ tag_found = False
+ for existing_attribute in existing_slice_tags:
if requested_attribute['name'] == existing_attribute['tagname'] and \
requested_attribute['value'] == existing_attribute['value']:
- attribute_found=True
+ tag_found = True
break
- if not attribute_found:
- added_slice_attributes.append(requested_attribute)
-
-
- # remove stale attributes
- for attribute in removed_slice_attributes:
+ if not tag_found:
+ slice_attributes_to_add.append(requested_attribute)
+
+        def friendly_message (tag_or_att):
+            # existing PLC tags carry 'tagname' while incoming attributes
+            # carry 'name'; use the computed 'name' in the message --
+            # tag_or_att['name'] would raise KeyError on PLC-db tags
+            name = tag_or_att['tagname'] if 'tagname' in tag_or_att else tag_or_att['name']
+            return "SliceTag slice={}, tagname={} value={}, node_id={}"\
+                .format(slice['name'], name, tag_or_att['value'], tag_or_att.get('node_id'))
+
+ # remove stale tags
+ for tag in slice_tags_to_remove:
try:
- self.driver.shell.DeleteSliceTag(attribute['slice_tag_id'])
- except Exception, e:
- logger.warn('Failed to remove sliver attribute. name: %s, value: %s, node_id: %s\nCause:%s'\
- % (slice['name'], attribute['value'], attribute.get('node_id'), str(e)))
-
- # add requested_attributes
- for attribute in added_slice_attributes:
+ logger.info("Removing Slice Tag {}".format(friendly_message(tag)))
+ self.driver.shell.DeleteSliceTag(tag['slice_tag_id'])
+ except Exception as e:
+ logger.warn("Failed to remove slice tag {}\nCause:{}"\
+ .format(friendly_message(tag), e))
+
+ # add requested_tags
+ for attribute in slice_attributes_to_add:
try:
+ logger.info("Adding Slice Tag {}".format(friendly_message(attribute)))
self.driver.shell.AddSliceTag(slice['name'], attribute['name'],
attribute['value'], attribute.get('node_id', None))
- except Exception, e:
- logger.warn('Failed to add sliver attribute. name: %s, value: %s, node_id: %s\nCause:%s'\
- % (slice['name'], attribute['value'], attribute.get('node_id'), str(e)))
-
+ except Exception as e:
+ logger.warn("Failed to add slice tag {}\nCause:{}"\
+ .format(friendly_message(attribute), e))
elif hasattr(self.element, name):
return getattr(self.element, name)
else:
- raise AttributeError, "class Element has no attribute %s" % name
+ raise AttributeError("class Element of type {} has no attribute {}"
+ .format(self.__class__.__name__, name))
fields = [
'tagname',
'value',
+ 'scope',
]
from sfa.rspecs.elements.versions.pgv2DiskImage import PGv2DiskImage
from sfa.rspecs.elements.versions.plosv1FWRule import PLOSv1FWRule
+from sfa.util.sfalogging import logger
+
class PGv2SliverType:
@staticmethod
PGv2SliverType.add_sliver_attributes(sliver_elem, sliver.get('tags', []))
@staticmethod
- def add_sliver_attributes(xml, attributes):
- if attributes:
- for attribute in attributes:
- if attribute['name'] == 'initscript':
- xml.add_element('{%s}initscript' % xml.namespaces['planetlab'], name=attribute['value'])
- elif attribute['tagname'] == 'flack_info':
- attrib_elem = xml.add_element('{%s}info' % self.namespaces['flack'])
+    def add_sliver_attributes(xml, tags):
+        if tags is None:
+            return
+        for tag in tags:
+            # PLC-db tags carry 'tagname', rspec attributes carry 'name'
+            tagname = tag['tagname'] if 'tagname' in tag else tag['name']
+            if tagname == 'flack_info':
+                # this is a staticmethod: there is no 'self' in scope, the
+                # namespaces dict must come from the xml element itself
+                attrib_elem = xml.add_element('{%s}info' % xml.namespaces['flack'])
+                try:
                    attrib_dict = eval(tag['value'])
                    for (key, value) in attrib_dict.items():
-                        attrib_elem.set(key, value)
+                        attrib_elem.set(key, value)
+                except Exception as e:
+                    logger.warning("Could not parse dictionary in flack tag -- {}".format(e))
+            elif tagname == 'initscript':
+                xml.add_element('{%s}initscript' % xml.namespaces['planetlab'],
+                                name=tag['value'])
+            else:
+                xml.add_element('{%s}attribute' % (xml.namespaces['planetlab']),
+                                name = tagname,
+                                value = tag['value'],
+                                scope = tag.get('scope', 'unknown'),
+                                )
+
@staticmethod
def get_slivers(xml, filter=None):
if filter is None: filter={}
if self.xml.schema:
self.version = self.version_manager.get_version_by_schema(self.xml.schema)
else:
- #raise InvalidRSpec('unknown rspec schema: %s' % schema)
+ #raise InvalidRSpec('unknown rspec schema: {}'.format(schema))
# TODO: Should start raising an exception once SFA defines a schema.
# for now we just default to sfa
self.version = self.version_manager.get_version({'type':'sfa','version': '1'})
def register_rspec_element(self, element_type, element_name, element_path):
if element_type not in RSpecElements:
- raise InvalidRSpecElement(element_type, extra="no such element type: %s. Must specify a valid RSpecElement" % element_type)
+ raise InvalidRSpecElement(element_type,
+ extra="no such element type: {}. Must specify a valid RSpecElement".format(element_type))
self.elements[element_type] = RSpecElement(element_type, element_name, element_path)
def get_rspec_element(self, element_type):
if element_type not in self.elements:
- msg = "ElementType %s not registerd for this rspec" % element_type
+ msg = "ElementType {} not registered for this rspec".format(element_type)
raise InvalidRSpecElement(element_type, extra=msg)
return self.elements[element_type]
"""
if filter is None: filter={}
if element_type not in self.elements:
- msg = "Unable to search for element %s in rspec, expath expression not found." % \
- element_type
+            msg = "Unable to search for element {} in rspec, xpath expression not found."\
+                .format(element_type)
raise InvalidRSpecElement(element_type, extra=msg)
rspec_element = self.get_rspec_element(element_type)
xpath = rspec_element.path + XpathFilter.xpath(filter)
return self.xml.save(filename)
if __name__ == '__main__':
- rspec = RSpec('/tmp/resources.rspec')
+ import sys
+ input = sys.argv[1]
+ with open(input) as f:
+ rspec = RSpec(f.read())
print rspec
- rspec.register_rspec_element(RSpecElements.NETWORK, 'network', '//network')
- rspec.register_rspec_element(RSpecElements.NODE, 'node', '//node')
- print rspec.get(RSpecElements.NODE)[0]
- print rspec.get(RSpecElements.NODE, depth=1)[0]
+# rspec.register_rspec_element(RSpecElements.NETWORK, 'network', '//network')
+# rspec.register_rspec_element(RSpecElements.NODE, 'node', '//node')
+# print rspec.get(RSpecElements.NODE)[0]
+# print rspec.get(RSpecElements.NODE, depth=1)[0]
self.versions = []
self.load_versions()
+ def __repr__(self):
+ return "<VersionManager with {} flavours: [{}]>"\
+ .format(len(self.versions),
+ ", ".join( [ str(x) for x in self.versions ]))
+
def load_versions(self):
path = os.path.dirname(os.path.abspath( __file__ ))
versions_path = path + os.sep + 'versions'
raise InvalidRSpec("Unkwnown RSpec schema: %s" % schema)
return retval
-def show_by_string(string):
- try:
- print v.get_version(string)
- except Exception,e:
- print e
-def show_by_schema(string):
- try:
- print v.get_version_by_schema(string)
- except Exception,e:
- print e
+ def show_by_string(self, string):
+ try:
+ print self.get_version(string)
+ except Exception as e:
+ print e
+
+ def show_by_schema(self, string):
+ try:
+ print self.get_version_by_schema(string)
+ except Exception as e:
+ print e
if __name__ == '__main__':
- v = VersionManager()
- print v.versions
- show_by_string('sfa 1')
- show_by_string('protogeni 2')
- show_by_string('protogeni 2 advertisement')
- show_by_schema('http://www.protogeni.net/resources/rspec/2/ad.xsd')
- show_by_schema('http://sorch.netmode.ntua.gr/ws/RSpec/ad.xsd')
+ manager = VersionManager()
+ print manager
+ manager.show_by_string('sfa 1')
+ manager.show_by_string('protogeni 2')
+ manager.show_by_string('protogeni 2 advertisement')
+ manager.show_by_schema('http://www.protogeni.net/resources/rspec/2/ad.xsd')
+ manager.show_by_schema('http://sorch.netmode.ntua.gr/ws/RSpec/ad.xsd')
from sfa.rspecs.elements.versions.pgv2Node import PGv2Node
from sfa.rspecs.elements.versions.pgv2SliverType import PGv2SliverType
from sfa.rspecs.elements.versions.pgv2Lease import PGv2Lease
-
+from sfa.util.sfalogging import logger
+
class PGv2(RSpecVersion):
type = 'ProtoGENI'
content_type = 'ad'
# Networks
def get_networks(self):
network_names = set()
- nodes = self.xml.xpath('//default:node[@component_manager_id] | //node[@component_manager_id]', namespaces=self.namespaces)
+ nodes = self.xml.xpath('//default:node[@component_manager_id] | //node[@component_manager_id]',
+ namespaces=self.namespaces)
for node in nodes:
if 'component_manager_id' in node.attrib:
network_urn = node.get('component_manager_id')
# Slivers
- def get_sliver_attributes(self, hostname, network=None):
- nodes = self.get_nodes({'component_id': '*%s*' %hostname})
- attribs = []
- if nodes is not None and isinstance(nodes, list) and len(nodes) > 0:
+ def get_sliver_attributes(self, component_id, network=None):
+ nodes = self.get_nodes({'component_id': '*%s*' %component_id})
+ try:
node = nodes[0]
sliver = node.xpath('./default:sliver_type', namespaces=self.namespaces)
if sliver is not None and isinstance(sliver, list) and len(sliver) > 0:
sliver = sliver[0]
- #attribs = self.attributes_list(sliver)
- return attribs
+ return self.attributes_list(sliver)
+ else:
+ return []
+        # best-effort: fall back to an empty list, but do not swallow
+        # SystemExit/KeyboardInterrupt as a bare 'except:' would
+        except Exception:
+            return []
def get_slice_attributes(self, network=None):
slice_attributes = []
# TODO: default sliver attributes in the PG rspec?
default_ns_prefix = self.namespaces['default']
for node in nodes_with_slivers:
- sliver_attributes = self.get_sliver_attributes(node, network)
+ sliver_attributes = self.get_sliver_attributes(node['component_id'], network)
for sliver_attribute in sliver_attributes:
name=str(sliver_attribute[0])
text =str(sliver_attribute[1])
from sqlalchemy import Table, MetaData, Column, ForeignKey
from sqlalchemy import Integer, String
-metadata=MetaData()
+metadata = MetaData()
-# this is needed my migrate so it can locate 'records.record_id'
+# this is needed by migrate so it can locate 'records.record_id'
records = \
Table ( 'records', metadata,
Column ('record_id', Integer, primary_key=True),
from sqlalchemy import Table, MetaData, Column, ForeignKey
from sqlalchemy import Integer, String
-metadata=MetaData()
+metadata = MetaData()
# this is needed by migrate so it can locate 'records.record_id'
records = \
from sqlalchemy import Table, MetaData, Column
from sqlalchemy import Integer, String
-metadata=MetaData()
+metadata = MetaData()
+
sliver_allocation_table = \
Table ( 'sliver_allocation', metadata,
Column('sliver_id', String, primary_key=True),
--- /dev/null
+# this move is about adding a 'name' column in the 'authority' table
+
+#from sfa.util.sfalogging import logger
+
+from sqlalchemy import MetaData, Table, Column, String
+from migrate.changeset.schema import create_column, drop_column
+
+def upgrade(migrate_engine):
+ metadata = MetaData(bind = migrate_engine)
+ authorities = Table('authorities', metadata, autoload=True)
+ name_column = Column('name', String)
+ name_column.create(authorities)
+
+def downgrade(migrate_engine):
+ metadata = MetaData(bind = migrate_engine)
+ authorities = Table('authorities', metadata, autoload=True)
+ authorities.c.name.drop()
from sfa.trust.gid import GID
##############################
-Base=declarative_base()
+Base = declarative_base()
####################
# dicts vs objects
# but we had to define another more internal column (classtype) so we
# accomodate variants in types like authority+am and the like
-class RegRecord (Base,AlchemyObj):
+class RegRecord(Base, AlchemyObj):
__tablename__ = 'records'
record_id = Column (Integer, primary_key=True)
# this is the discriminator that tells which class to use
if dict: self.load_from_dict (dict)
def __repr__(self):
- result="<Record id=%s, type=%s, hrn=%s, authority=%s, pointer=%s" % \
- (self.record_id, self.type, self.hrn, self.authority, self.pointer)
+ result="<Record id=%s, type=%s, hrn=%s, authority=%s" % \
+ (self.record_id, self.type, self.hrn, self.authority)
+# for extra in ('pointer', 'email', 'name'):
+# for extra in ('email', 'name'):
+# displaying names at this point it too dangerous, because of unicode
+ for extra in ('email'):
+ if hasattr(self, extra):
+ result += " {}={},".format(extra, getattr(self, extra))
# skip the uniform '--- BEGIN CERTIFICATE --' stuff
- if self.gid: result+=" gid=%s..."%self.gid[28:36]
- else: result+=" nogid"
+ if self.gid:
+ result+=" gid=%s..."%self.gid[28:36]
+ else:
+ result+=" nogid"
result += ">"
return result
else: return gid.save_to_string(save_parents=True)
def validate_datetime (self, key, incoming):
- if isinstance (incoming, datetime): return incoming
- elif isinstance (incoming, (int,float)):return datetime.fromtimestamp (incoming)
- else: logger.info("Cannot validate datetime for key %s with input %s"%\
- (key,incoming))
+ if isinstance (incoming, datetime):
+ return incoming
+ elif isinstance (incoming, (int, float)):
+ return datetime.fromtimestamp (incoming)
+ else:
+ logger.info("Cannot validate datetime for key %s with input %s"%\
+ (key,incoming))
@validates ('date_created')
- def validate_date_created (self, key, incoming): return self.validate_datetime (key, incoming)
+ def validate_date_created (self, key, incoming):
+ return self.validate_datetime (key, incoming)
@validates ('last_updated')
- def validate_last_updated (self, key, incoming): return self.validate_datetime (key, incoming)
+ def validate_last_updated (self, key, incoming):
+ return self.validate_datetime (key, incoming)
# xxx - there might be smarter ways to handle get/set'ing gid using validation hooks
def get_gid_object (self):
- if not self.gid: return None
- else: return GID(string=self.gid)
+ if not self.gid: return None
+ else: return GID(string=self.gid)
def just_created (self):
- now=datetime.utcnow()
- self.date_created=now
- self.last_updated=now
+ now = datetime.utcnow()
+ self.date_created = now
+ self.last_updated = now
def just_updated (self):
- now=datetime.utcnow()
- self.last_updated=now
+ now = datetime.utcnow()
+ self.last_updated = now
#################### cross-relations tables
# authority x user (pis) association
# all subclasses define a convenience constructor with a default value for type,
# and when applicable a way to define local fields in a kwd=value argument
####################
-class RegAuthority (RegRecord):
+class RegAuthority(RegRecord):
__tablename__ = 'authorities'
__mapper_args__ = { 'polymorphic_identity' : 'authority' }
record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
#### extensions come here
+ name = Column ('name', String)
+ #### extensions come here
reg_pis = relationship \
('RegUser',
- secondary=authority_pi_table,
- primaryjoin=RegRecord.record_id==authority_pi_table.c.authority_id,
- secondaryjoin=RegRecord.record_id==authority_pi_table.c.pi_id,
- backref='reg_authorities_as_pi')
+ secondary = authority_pi_table,
+ primaryjoin = RegRecord.record_id==authority_pi_table.c.authority_id,
+ secondaryjoin = RegRecord.record_id==authority_pi_table.c.pi_id,
+ backref = 'reg_authorities_as_pi',
+ )
def __init__ (self, **kwds):
+ # handle local settings
+ if 'name' in kwds:
+ self.name = kwds.pop('name')
# fill in type if not previously set
- if 'type' not in kwds: kwds['type']='authority'
+ if 'type' not in kwds:
+ kwds['type']='authority'
# base class constructor
RegRecord.__init__(self, **kwds)
# no proper data yet, just hack the typename
def __repr__ (self):
- return RegRecord.__repr__(self).replace("Record","Authority")
+ result = RegRecord.__repr__(self).replace("Record", "Authority")
+# here again trying to display names that can be utf8 is too dangerous
+# result.replace(">", " name={}>".format(self.name))
+ return result
def update_pis (self, pi_hrns, dbsession):
# strip that in case we have <researcher> words </researcher>
pi_hrns = [ x.strip() for x in pi_hrns ]
- request = dbsession.query (RegUser).filter(RegUser.hrn.in_(pi_hrns))
- logger.info ("RegAuthority.update_pis: %d incoming pis, %d matches found"%(len(pi_hrns),request.count()))
- pis = dbsession.query (RegUser).filter(RegUser.hrn.in_(pi_hrns)).all()
+ request = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns))
+ logger.info("RegAuthority.update_pis: %d incoming pis, %d matches found"\
+ % (len(pi_hrns), request.count()))
+ pis = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns)).all()
self.reg_pis = pis
####################
-class RegSlice (RegRecord):
+class RegSlice(RegRecord):
__tablename__ = 'slices'
__mapper_args__ = { 'polymorphic_identity' : 'slice' }
record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
secondary=slice_researcher_table,
primaryjoin=RegRecord.record_id==slice_researcher_table.c.slice_id,
secondaryjoin=RegRecord.record_id==slice_researcher_table.c.researcher_id,
- backref='reg_slices_as_researcher')
+ backref='reg_slices_as_researcher',
+ )
def __init__ (self, **kwds):
- if 'type' not in kwds: kwds['type']='slice'
+ if 'type' not in kwds:
+ kwds['type']='slice'
RegRecord.__init__(self, **kwds)
def __repr__ (self):
- return RegRecord.__repr__(self).replace("Record","Slice")
+ return RegRecord.__repr__(self).replace("Record", "Slice")
def update_researchers (self, researcher_hrns, dbsession):
# strip that in case we have <researcher> words </researcher>
researcher_hrns = [ x.strip() for x in researcher_hrns ]
request = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns))
- logger.info ("RegSlice.update_researchers: %d incoming researchers, %d matches found"%(len(researcher_hrns),request.count()))
+ logger.info ("RegSlice.update_researchers: %d incoming researchers, %d matches found"\
+ % (len(researcher_hrns), request.count()))
researchers = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns)).all()
self.reg_researchers = researchers
# helper function is called from the trust/ area that
def get_pis (self):
from sqlalchemy.orm import sessionmaker
- Session=sessionmaker()
- dbsession=Session.object_session(self)
+ Session = sessionmaker()
+ dbsession = Session.object_session(self)
from sfa.util.xrn import get_authority
authority_hrn = get_authority(self.hrn)
auth_record = dbsession.query(RegAuthority).filter_by(hrn=authority_hrn).first()
return auth_record.reg_pis
@validates ('expires')
- def validate_expires (self, key, incoming): return self.validate_datetime (key, incoming)
+ def validate_expires (self, key, incoming):
+ return self.validate_datetime (key, incoming)
####################
-class RegNode (RegRecord):
+class RegNode(RegRecord):
__tablename__ = 'nodes'
__mapper_args__ = { 'polymorphic_identity' : 'node' }
record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
- def __init__ (self, **kwds):
- if 'type' not in kwds: kwds['type']='node'
+ def __init__(self, **kwds):
+ if 'type' not in kwds:
+ kwds['type']='node'
RegRecord.__init__(self, **kwds)
def __repr__ (self):
- return RegRecord.__repr__(self).replace("Record","Node")
+ return RegRecord.__repr__(self).replace("Record", "Node")
####################
-class RegUser (RegRecord):
+class RegUser(RegRecord):
__tablename__ = 'users'
# these objects will have type='user' in the records table
__mapper_args__ = { 'polymorphic_identity' : 'user' }
# a 'keys' tag, and assigning a list of strings in a reference column like this crashes
reg_keys = relationship \
('RegKey', backref='reg_user',
- cascade="all, delete, delete-orphan")
+ cascade = "all, delete, delete-orphan",
+ )
# so we can use RegUser (email=.., hrn=..) and the like
def __init__ (self, **kwds):
# handle local settings
- if 'email' in kwds: self.email=kwds.pop('email')
- if 'type' not in kwds: kwds['type']='user'
+ if 'email' in kwds:
+ self.email = kwds.pop('email')
+ if 'type' not in kwds:
+ kwds['type'] = 'user'
RegRecord.__init__(self, **kwds)
# append stuff at the end of the record __repr__
def __repr__ (self):
- result = RegRecord.__repr__(self).replace("Record","User")
- result.replace (">"," email=%s"%self.email)
- result += ">"
+ result = RegRecord.__repr__(self).replace("Record", "User")
+ result.replace(">", " email={}>".format(self.email))
return result
@validates('email')
# meaning, when querying the whole records, we expect there should
# be a single query to fetch all the keys
# or, is it enough that we issue a single query to retrieve all the keys
-class RegKey (Base):
+class RegKey(Base):
__tablename__ = 'keys'
key_id = Column (Integer, primary_key=True)
- record_id = Column (Integer, ForeignKey ("records.record_id"))
+ record_id = Column (Integer, ForeignKey ("records.record_id"))
key = Column (String)
pointer = Column (Integer, default = -1)
def __init__ (self, key, pointer=None):
- self.key=key
- if pointer: self.pointer=pointer
+ self.key = key
+ if pointer:
+ self.pointer = pointer
def __repr__ (self):
- result="<key id=%s key=%s..."%(self.key_id,self.key[8:16],)
- try: result += " user=%s"%self.reg_user.record_id
+ result = "<key id=%s key=%s..." % (self.key_id, self.key[8:16],)
+ try: result += " user=%s" % self.reg_user.record_id
except: result += " no-user"
result += ">"
return result
self.allocation_state = kwds['allocation_state']
def __repr__(self):
- result = "<sliver_allocation sliver_id=%s allocation_state=%s" % \
- (self.sliver_id, self.allocation_state)
+ result = "<sliver_allocation sliver_id=%s allocation_state=%s"\
+ % (self.sliver_id, self.allocation_state)
return result
@validates('allocation_state')
dbsession.commit()
def sync(self, dbsession):
- constraints = [SliverAllocation.sliver_id==self.sliver_id]
+ constraints = [SliverAllocation.sliver_id == self.sliver_id]
results = dbsession.query(SliverAllocation).filter(and_(*constraints))
records = []
for result in results:
# convert an incoming record - typically from xmlrpc - into an object
def make_record_dict (record_dict):
assert ('type' in record_dict)
- type=record_dict['type'].split('+')[0]
- if type=='authority':
- result=RegAuthority (dict=record_dict)
- elif type=='user':
- result=RegUser (dict=record_dict)
- elif type=='slice':
- result=RegSlice (dict=record_dict)
- elif type=='node':
- result=RegNode (dict=record_dict)
+ type = record_dict['type'].split('+')[0]
+ if type == 'authority':
+ result = RegAuthority (dict=record_dict)
+ elif type == 'user':
+ result = RegUser (dict=record_dict)
+ elif type == 'slice':
+ result = RegSlice (dict=record_dict)
+ elif type == 'node':
+ result = RegNode (dict=record_dict)
else:
logger.debug("Untyped RegRecord instance")
- result=RegRecord (dict=record_dict)
- logger.info ("converting dict into Reg* with type=%s"%type)
- logger.info ("returning=%s"%result)
+ result = RegRecord (dict=record_dict)
+ logger.info("converting dict into Reg* with type=%s"%type)
+ logger.info("returning=%s"%result)
# xxx todo
# register non-db attributes in an extensions field
return result
-def make_record_xml (xml):
- xml_record = XML(xml)
- xml_dict = xml_record.todict()
+def make_record_xml (xml_str):
+ xml = XML(xml_str)
+ xml_dict = xml.todict()
logger.info("load from xml, keys=%s"%xml_dict.keys())
return make_record_dict (xml_dict)
# were the relationships data came from the testbed side
# for each type, a dict of the form {<field-name-exposed-in-record>:<alchemy_accessor_name>}
# so after that, an 'authority' record will e.g. have a 'reg-pis' field with the hrns of its pi-users
-augment_map={'authority': {'reg-pis':'reg_pis',},
- 'slice': {'reg-researchers':'reg_researchers',},
- 'user': {'reg-pi-authorities':'reg_authorities_as_pi',
- 'reg-slices':'reg_slices_as_researcher',},
- }
-
-def augment_with_sfa_builtins (local_record):
+augment_map = {'authority': {'reg-pis' : 'reg_pis',},
+ 'slice': {'reg-researchers' : 'reg_researchers',},
+ 'user': {'reg-pi-authorities' : 'reg_authorities_as_pi',
+ 'reg-slices' : 'reg_slices_as_researcher',},
+ }
+
+
+# xxx mystery
+# the way we use sqlalchemy might be a little wrong
+# in any case what has been observed is that (Reg)Records as returned by an sqlalchemy
+# query do not always have their __dict__ properly adjusted
+# typically a RegAuthority object would have its object.name set properly, but
+# object.__dict__ has no 'name' key
+# which is an issue because we rely on __dict__ for many things, in particular this
+# is what gets exposed to the drivers (this is historical and dates back before sqlalchemy)
+# so it is recommended to always run this function that will make sure
+# that such built-in fields are properly set in __dict__ too
+#
+def augment_with_sfa_builtins(local_record):
# don't ruin the import of that file in a client world
from sfa.util.xrn import Xrn
# add a 'urn' field
- setattr(local_record,'reg-urn',Xrn(xrn=local_record.hrn,type=local_record.type).urn)
+ setattr(local_record, 'reg-urn', Xrn(xrn=local_record.hrn, type=local_record.type).urn)
# users have keys and this is needed to synthesize 'users' sent over to CreateSliver
- if local_record.type=='user':
+ fields_to_check = []
+ if local_record.type == 'user':
user_keys = [ key.key for key in local_record.reg_keys ]
setattr(local_record, 'reg-keys', user_keys)
+ fields_to_check = ['email']
+ elif local_record.type == 'authority':
+ fields_to_check = ['name']
+ for field in fields_to_check:
+ if not field in local_record.__dict__:
+ logger.debug("augment_with_sfa_builtins: hotfixing missing '{}' in {}"
+ .format(field, local_record.hrn))
+ local_record.__dict__[field] = getattr(local_record, field)
# search in map according to record type
- type_map=augment_map.get(local_record.type,{})
+ type_map = augment_map.get(local_record.type, {})
# use type-dep. map to do the job
- for (field_name,attribute) in type_map.items():
+ for (field_name, attribute) in type_map.items():
# get related objects
- related_records = getattr(local_record,attribute,[])
+ related_records = getattr(local_record, attribute, [])
hrns = [ r.hrn for r in related_records ]
setattr (local_record, field_name, hrns)
from sfa.util.xml import XML
from sfa.trust.gid import GID
+from sfa.util.sfalogging import logger
+
class Record:
- def __init__(self, dict=None, xml=None):
+ def __init__(self, dict=None, xml_str=None):
if dict:
self.load_from_dict(dict)
- elif xml:
- xml_record = XML(xml)
- xml_dict = xml_record.todict()
+ elif xml_str:
+ xml = XML(xml_str)
+ xml_dict = xml.todict()
self.load_from_dict(xml_dict)
-
def get_field(self, field):
return self.__dict__.get(field, None)
# (and 'last_updated' does not make it at all)
# let's be flexible
def date_repr (self,fields):
- if not isinstance(fields,list): fields=[fields]
+ if not isinstance(fields,list):
+ fields = [fields]
for field in fields:
- value=getattr(self,field,None)
+ value = getattr(self,field,None)
if isinstance (value,datetime):
return datetime_to_string (value)
elif isinstance (value,(int,float)):
# fallback
return "** undef_datetime **"
- # it may be important to exclude relationships, which fortunately
+ #
+ # need to filter out results, esp. wrt relationships
+ # exclude_types must be a tuple so we can use isinstance
#
- def todict (self, exclude_types=None):
- if exclude_types is None: exclude_types=[]
- d=self.__dict__
- def exclude (k,v):
- if k.startswith('_'): return True
- if exclude_types:
- for exclude_type in exclude_types:
- if isinstance (v,exclude_type): return True
- return False
- keys=[k for (k,v) in d.items() if not exclude(k,v)]
- return dict ( [ (k,d[k]) for k in keys ] )
+ def record_to_dict (self, exclude_types=None):
+ if exclude_types is None:
+ exclude_types = ()
+ d = self.__dict__
+ def exclude (k, v):
+ return k.startswith('_') or isinstance (v, exclude_types)
+ keys = [ k for k, v in d.items() if not exclude(k, v) ]
+ return { k : d[k] for k in keys }
def toxml(self):
return self.save_as_xml()
def load_from_dict (self, d):
for (k,v) in d.iteritems():
# experimental
- if isinstance(v, StringTypes) and v.lower() in ['true']: v=True
- if isinstance(v, StringTypes) and v.lower() in ['false']: v=False
- setattr(self,k,v)
+ if isinstance(v, StringTypes) and v.lower() in ['true']:
+ v = True
+ if isinstance(v, StringTypes) and v.lower() in ['false']:
+ v = False
+ setattr(self, k, v)
# in addition we provide convenience for converting to and from xml records
# for this purpose only, we need the subclasses to define 'fields' as either
def save_as_xml (self):
# xxx not sure about the scope here
input_dict = dict( [ (key, getattr(self,key)) for key in self.fields() if getattr(self,key,None) ] )
- xml_record=XML("<record />")
- xml_record.parse_dict (input_dict)
+ xml_record = XML("<record />")
+ xml_record.parse_dict(input_dict)
return xml_record.toxml()
def dump(self, format=None, dump_parents=False, sort=False):
print 40*'='
print "RECORD"
# print remaining fields
- fields=self.fields()
+ fields = self.fields()
if sort: fields.sort()
for attrib_name in fields:
attrib = getattr(self, attrib_name)