from sfa.util.version import version_core
from sfa.util.cache import Cache
-from sfa.storage.model import RegRecord, RegAuthority, RegUser, RegSlice, RegNode
-from sfa.storage.model import make_record
+from sfa.storage.record import Record
from sfa.rspecs.rspec import RSpec
from sfa.rspecs.rspec_converter import RSpecConverter
f = open(filename, "w")
f.write("<recordlist>\n")
for record_dict in record_dicts:
- record_obj=make_record (dict=record_dict)
+ record_obj=Record(dict=record_dict)
f.write('<record hrn="' + record_obj.hrn + '" type="' + record_obj.type + '" />\n')
f.write("</recordlist>\n")
f.close()
elif format == "hrnlist":
f = open(filename, "w")
for record_dict in record_dicts:
- record_obj=make_record (dict=record_dict)
+ record_obj=Record(dict=record_dict)
f.write(record_obj.hrn + "\n")
f.close()
else:
print "unknown output format", format
def save_record_to_file(filename, record_dict):
    """Serialize a single record dict to *filename* as a UTF-8 string.

    Fixes two defects in the original: the record object was bound to
    'rec_record' but the next line called 'record.save_to_string()' on an
    undefined name (guaranteed NameError), and the builtin 'str' was
    shadowed while the file handle was never closed.
    """
    rec_record = Record(dict=record_dict)
    # NOTE(review): save_to_string() is expected to be provided by the
    # concrete Record subclass -- confirm against sfa.storage.record
    record_string = rec_record.save_to_string()
    f = codecs.open(filename, encoding='utf-8', mode="w")
    f.write(record_string)
    f.close()
f=codecs.open(filename, encoding="utf-8", mode="r")
xml_string = f.read()
f.close()
- return make_record (xml=xml_string)
+ return Record(xml=xml_string)
import uuid
record_dicts = filter_records(options.type, record_dicts)
if not record_dicts:
self.logger.error("No record of type %s"% options.type)
- records = [ make_record (dict=record_dict) for record_dict in record_dicts ]
+ records = [ Record(dict=record_dict) for record_dict in record_dicts ]
for record in records:
if (options.format == "text"): record.dump()
else: print record.save_as_xml()
from optparse import OptionParser
from pprint import PrettyPrinter
from sfa.util.xrn import Xrn
-from sfa.storage.record import SfaRecord
+from sfa.storage.record import Record
from sfa.client.sfi import save_records_to_file
pprinter = PrettyPrinter(indent=4)
def show(self, xrn, type=None, format=None, outfile=None):
records = self.api.manager.Resolve(self.api, xrn, type, True)
for record in records:
- sfa_record = SfaRecord(dict=record)
+ sfa_record = Record(dict=record)
sfa_record.dump(format)
if outfile:
save_records_to_file(outfile, records)
resources = self.api.manager.ListResources(self.api, [], options)
pprinter.pprint(resources)
+ @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
+ @args('-r', '--rspec', dest='rspec', metavar='<rspec>', help='rspec file')
def create(self, xrn, rspec):
pass
+ @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
def delete(self, xrn):
- pass
-
+ self.api.manager.DeleteSliver(self.api, xrn, [], {})
+
+ @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
def start(self, xrn):
- pass
+ self.api.manager.start_slice(self.api, xrn, [])
+ @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
def stop(self, xrn):
- pass
+ self.api.manager.stop_slice(self.api, xrn, [])
+ @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
def reset(self, xrn):
- pass
+ self.api.manager.reset_slice(self.api, xrn)
- def ticket(self):
+
+ @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
+ @args('-r', '--rspec', dest='rspec', metavar='<rspec>', help='request rspec', default=None)
+ def ticket(self, xrn, rspec):
pass
class SliceManagerCommands(AggregateCommands):
def __init__(self, *args, **kwds):
- self.api= Generic().make_api(interface='slicemgr')
+ self.api= Generic.the_flavour().make_api(interface='slicemgr')
CATEGORIES = {'registry': RegistryCommands,
existing_hrns.append(record.hrn)
# Get all users
- persons = shell.user_get_all()
+ persons = shell.auth_manager.get_users()
persons_dict = {}
keys_filename = config.config_path + os.sep + 'person_keys.py'
old_person_keys = load_keys(keys_filename)
hrn = config.SFA_INTERFACE_HRN + "." + person.id
persons_dict[hrn] = person
old_keys = old_person_keys.get(person.id, [])
- keys = [k.public_key for k in shell.key_pair_get_all_by_user(person.id)]
+ keys = [k.public_key for k in shell.db.key_pair_get_all_by_user(person.id)]
person_keys[person.id] = keys
update_record = False
if old_keys != keys:
try:
pkey = convert_public_key(keys[0])
except:
- logger.log_exc('unable to convert public key for %s' % hrn)
+ self.logger.log_exc('unable to convert public key for %s' % hrn)
pkey = Keypair(create=True)
else:
- logger.warn("OpenstackImporter: person %s does not have a PL public key"%hrn)
+ self.logger.warn("OpenstackImporter: person %s does not have a PL public key"%hrn)
pkey = Keypair(create=True)
- person_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
+ person_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
person_record = RegUser ()
person_record.type='user'
person_record.hrn=hrn
person_record.authority=get_authority(hrn)
dbsession.add(person_record)
dbsession.commit()
- logger.info("OpenstackImporter: imported person %s" % person_record)
+ self.logger.info("OpenstackImporter: imported person %s" % person_record)
# Get all projects
- projects = shell.project_get_all()
+ projects = shell.auth_manager.get_projects()
projects_dict = {}
for project in projects:
hrn = config.SFA_INTERFACE_HRN + '.' + project.id
(hrn, 'slice') not in existing_records:
pkey = Keypair(create=True)
urn = hrn_to_urn(hrn, 'slice')
- project_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
+ project_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
project_record = RegSlice ()
project_record.type='slice'
project_record.hrn=hrn
project_record.authority=get_authority(hrn)
dbsession.add(project_record)
dbsession.commit()
- logger.info("OpenstackImporter: imported slice: %s" % project_record)
+ self.logger.info("OpenstackImporter: imported slice: %s" % project_record)
# remove stale records
system_records = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
continue
record_object = existing_records[ (record_hrn, type) ]
- logger.info("OpenstackImporter: removing %s " % record)
+ self.logger.info("OpenstackImporter: removing %s " % record)
dbsession.delete(record_object)
dbsession.commit()
# save pub keys
- logger.info('OpenstackImporter: saving current pub keys')
+ self.logger.info('OpenstackImporter: saving current pub keys')
save_keys(keys_filename, person_keys)
instance_type=instance_type,
key_name=key_name,
user_data = user_data)
+ #security_groups=group_names,
+ #placement=zone,
+ #min_count=min_count,
+ #max_count=max_count,
+
except EC2ResponseError, ec2RespError:
logger.log_exc(ec2RespError)
ari_id = CloudController.image_ec2_id(disk_image['ari']['id'])
# start the instance
self.reserve_instance(ami_id, aki_id, ari_id, \
- instance_type['name'], keyname, pubkeys)
+ instance_type['name'], keyname, pubkeys)
+
from sqlalchemy.orm import sessionmaker
from sqlalchemy import Column, Integer, String
-from sqlalchemy.orm import relationship, backref
from sqlalchemy import ForeignKey
from sfa.util.sfalogging import logger
from sqlalchemy.orm import validates
from sqlalchemy.ext.declarative import declarative_base
+from sfa.storage.record import Record
from sfa.util.sfalogging import logger
from sfa.util.sfatime import utcparse, datetime_to_string
from sfa.util.xml import XML
# (*) finally for converting a dictionary into an sqlalchemy object, we provide
# obj.load_from_dict(dict)
-class AlchemyObj:
+class AlchemyObj(Record):
def __iter__(self):
self._i = iter(object_mapper(self).columns)
return self
def next(self):
n = self._i.next().name
return n, getattr(self, n)
- def todict (self):
- d=self.__dict__
- keys=[k for k in d.keys() if not k.startswith('_')]
- return dict ( [ (k,d[k]) for k in keys ] )
- def load_from_dict (self, d):
- for (k,v) in d.iteritems():
- # experimental
- if isinstance(v, StringTypes) and v.lower() in ['true']: v=True
- if isinstance(v, StringTypes) and v.lower() in ['false']: v=False
- setattr(self,k,v)
- def validate_datetime (self, key, incoming):
- if isinstance (incoming, datetime): return incoming
- elif isinstance (incoming, (int,float)):return datetime.fromtimestamp (incoming)
-
- # in addition we provide convenience for converting to and from xml records
- # for this purpose only, we need the subclasses to define 'fields' as either
- # a list or a dictionary
- def xml_fields (self):
- fields=self.fields
- if isinstance(fields,dict): fields=fields.keys()
- return fields
-
- def save_as_xml (self):
- # xxx not sure about the scope here
- input_dict = dict( [ (key, getattr(self.key), ) for key in self.xml_fields() if getattr(self,key,None) ] )
- xml_record=XML("<record />")
- xml_record.parse_dict (input_dict)
- return xml_record.toxml()
-
- def dump(self, format=None, dump_parents=False):
- if not format:
- format = 'text'
- else:
- format = format.lower()
- if format == 'text':
- self.dump_text(dump_parents)
- elif format == 'xml':
- print self.save_to_string()
- elif format == 'simple':
- print self.dump_simple()
- else:
- raise Exception, "Invalid format %s" % format
-
- # xxx fixme
- # turns out the date_created field is received by the client as a 'created' int
- # (and 'last_updated' does not make it at all)
- # let's be flexible
- def date_repr (self,fields):
- if not isinstance(fields,list): fields=[fields]
- for field in fields:
- value=getattr(self,field,None)
- if isinstance (value,datetime):
- return datetime_to_string (value)
- elif isinstance (value,(int,float)):
- return datetime_to_string(utcparse(value))
- # fallback
- return "** undef_datetime **"
-
- def dump_text(self, dump_parents=False):
- # print core fields in this order
- core_fields = [ 'hrn', 'type', 'authority', 'date_created', 'created', 'last_updated', 'gid', ]
- print "".join(['=' for i in range(40)])
- print "RECORD"
- print " hrn:", self.hrn
- print " type:", self.type
- print " authority:", self.authority
- print " date created:", self.date_repr( ['date_created','created'] )
- print " last updated:", self.date_repr('last_updated')
- print " gid:"
- print self.get_gid_object().dump_string(8, dump_parents)
-
- # print remaining fields
- for attrib_name in dir(self):
- attrib = getattr(self, attrib_name)
- # skip internals
- if attrib_name.startswith('_'): continue
- # skip core fields
- if attrib_name in core_fields: continue
- # skip callables
- if callable (attrib): continue
- print " %s: %s" % (attrib_name, attrib)
-
- def dump_simple(self):
- return "%s"%self
-
# # only intended for debugging
# def inspect (self, logger, message=""):
# logger.info("%s -- Inspecting AlchemyObj -- attrs"%message)
elif isinstance(gid, StringTypes): return gid
else: return gid.save_to_string(save_parents=True)
+ def validate_datetime (self, key, incoming):
+ if isinstance (incoming, datetime): return incoming
+ elif isinstance (incoming, (int,float)):return datetime.fromtimestamp (incoming)
+
@validates ('date_created')
def validate_date_created (self, key, incoming): return self.validate_datetime (key, incoming)
--- /dev/null
+from sfa.util.sfatime import utcparse, datetime_to_string
+from types import StringTypes
+from datetime import datetime
+from sfa.util.xml import XML
+from sfa.trust.gid import GID
+
+class Record:
+
+ def __init__(self, dict=None, xml=None):
+ if dict:
+ self.load_from_dict(dict)
+ elif xml:
+ xml_record = XML(xml)
+ xml_dict = xml_record.todict()
+ self.load_from_dict(xml_dict)
+
+ # xxx fixme
+ # turns out the date_created field is received by the client as a 'created' int
+ # (and 'last_updated' does not make it at all)
+ # let's be flexible
+ def date_repr (self,fields):
+ if not isinstance(fields,list): fields=[fields]
+ for field in fields:
+ value=getattr(self,field,None)
+ if isinstance (value,datetime):
+ return datetime_to_string (value)
+ elif isinstance (value,(int,float)):
+ return datetime_to_string(utcparse(value))
+ # fallback
+ return "** undef_datetime **"
+
+ def todict (self):
+ d=self.__dict__
+ keys=[k for k in d.keys() if not k.startswith('_')]
+ return dict ( [ (k,d[k]) for k in keys ] )
+
+ def load_from_dict (self, d):
+ for (k,v) in d.iteritems():
+ # experimental
+ if isinstance(v, StringTypes) and v.lower() in ['true']: v=True
+ if isinstance(v, StringTypes) and v.lower() in ['false']: v=False
+ setattr(self,k,v)
+
+ # in addition we provide convenience for converting to and from xml records
+ # for this purpose only, we need the subclasses to define 'fields' as either
+ # a list or a dictionary
+ def xml_fields (self):
+ fields=self.fields
+ if isinstance(fields,dict): fields=fields.keys()
+ return fields
+
+ def save_as_xml (self):
+ # xxx not sure about the scope here
+ input_dict = dict( [ (key, getattr(self.key), ) for key in self.xml_fields() if getattr(self,key,None) ] )
+ xml_record=XML("<record />")
+ xml_record.parse_dict (input_dict)
+ return xml_record.toxml()
+
+ def dump(self, format=None, dump_parents=False):
+ if not format:
+ format = 'text'
+ else:
+ format = format.lower()
+ if format == 'text':
+ self.dump_text(dump_parents)
+ elif format == 'xml':
+ print self.save_to_string()
+ elif format == 'simple':
+ print self.dump_simple()
+ else:
+ raise Exception, "Invalid format %s" % format
+
+ def dump_text(self, dump_parents=False):
+ # print core fields in this order
+ core_fields = [ 'hrn', 'type', 'authority', 'date_created', 'created', 'last_updated', 'gid', ]
+ print "".join(['=' for i in range(40)])
+ print "RECORD"
+ print " hrn:", self.hrn
+ print " type:", self.type
+ print " authority:", self.authority
+ print " date created:", self.date_repr( ['date_created','created'] )
+ print " last updated:", self.date_repr('last_updated')
+ print " gid:"
+ if self.gid:
+ print GID(self.gid).dump_string(8, dump_parents)
+
+ # print remaining fields
+ for attrib_name in dir(self):
+ attrib = getattr(self, attrib_name)
+ # skip internals
+ if attrib_name.startswith('_'): continue
+ # skip core fields
+ if attrib_name in core_fields: continue
+ # skip callables
+ if callable (attrib): continue
+ print " %s: %s" % (attrib_name, attrib)
+
+ def dump_simple(self):
+ return "%s"%self