Merge branch 'master' into senslab2
author    Sandrine Avakian <sandrine.avakian@inria.fr>
          Wed, 7 Mar 2012 09:13:54 +0000 (10:13 +0100)
committer Sandrine Avakian <sandrine.avakian@inria.fr>
          Wed, 7 Mar 2012 09:13:54 +0000 (10:13 +0100)
sfa.spec
sfa/client/sfi.py
sfa/clientbin/sfaadmin.py
sfa/importer/openstackimporter.py
sfa/openstack/osaggregate.py
sfa/plc/plaggregate.py
sfa/storage/alchemy.py
sfa/storage/model.py
sfa/storage/record.py [new file with mode: 0644]

index c97cdf2..001dda4 100644 (file)
--- a/sfa.spec
+++ b/sfa.spec
@@ -148,6 +148,7 @@ rm -rf $RPM_BUILD_ROOT
 %{python_sitelib}/sfa
 /etc/init.d/sfa
 %{_bindir}/sfa-start.py*
+%{_bindir}/sfaadmin.py*
 %{_bindir}/keyconvert.py*
 %{_bindir}/sfa-config-tty
 %config /etc/sfa/default_config.xml
index c2dc9ba..a3b3260 100644 (file)
--- a/sfa/client/sfi.py
+++ b/sfa/client/sfi.py
@@ -29,8 +29,7 @@ from sfa.util.config import Config
 from sfa.util.version import version_core
 from sfa.util.cache import Cache
 
-from sfa.storage.model import RegRecord, RegAuthority, RegUser, RegSlice, RegNode
-from sfa.storage.model import make_record
+from sfa.storage.record import Record
 
 from sfa.rspecs.rspec import RSpec
 from sfa.rspecs.rspec_converter import RSpecConverter
@@ -133,14 +132,14 @@ def save_records_to_file(filename, record_dicts, format="xml"):
         f = open(filename, "w")
         f.write("<recordlist>\n")
         for record_dict in record_dicts:
-            record_obj=make_record (dict=record_dict)
+            record_obj=Record(dict=record_dict)
             f.write('<record hrn="' + record_obj.hrn + '" type="' + record_obj.type + '" />\n')
         f.write("</recordlist>\n")
         f.close()
     elif format == "hrnlist":
         f = open(filename, "w")
         for record_dict in record_dicts:
-            record_obj=make_record (dict=record_dict)
+            record_obj=Record(dict=record_dict)
             f.write(record_obj.hrn + "\n")
         f.close()
     else:
@@ -148,7 +147,7 @@ def save_records_to_file(filename, record_dicts, format="xml"):
         print "unknown output format", format
 
 def save_record_to_file(filename, record_dict):
-    rec_record = make_record (dict=record_dict)
+    rec_record = Record(dict=record_dict)
     str = record.save_to_string()
     f=codecs.open(filename, encoding='utf-8',mode="w")
     f.write(str)
@@ -161,7 +160,7 @@ def load_record_from_file(filename):
     f=codecs.open(filename, encoding="utf-8", mode="r")
     xml_string = f.read()
     f.close()
-    return make_record (xml=xml_string)
+    return Record(xml=xml_string)
 
 
 import uuid
@@ -717,7 +716,7 @@ or version information about sfi itself
         record_dicts = filter_records(options.type, record_dicts)
         if not record_dicts:
             self.logger.error("No record of type %s"% options.type)
-        records = [ make_record (dict=record_dict) for record_dict in record_dicts ]
+        records = [ Record(dict=record_dict) for record_dict in record_dicts ]
         for record in records:
             if (options.format == "text"):      record.dump()  
             else:                               print record.save_as_xml() 
index 1531886..90c3f74 100755 (executable)
--- a/sfa/clientbin/sfaadmin.py
+++ b/sfa/clientbin/sfaadmin.py
@@ -6,7 +6,7 @@ from sfa.generic import Generic
 from optparse import OptionParser
 from pprint import PrettyPrinter
 from sfa.util.xrn import Xrn
-from sfa.storage.record import SfaRecord 
+from sfa.storage.record import Record 
 from sfa.client.sfi import save_records_to_file
 pprinter = PrettyPrinter(indent=4)
 
@@ -53,7 +53,7 @@ class RegistryCommands(Commands):
     def show(self, xrn, type=None, format=None, outfile=None):
         records = self.api.manager.Resolve(self.api, xrn, type, True)
         for record in records:
-            sfa_record = SfaRecord(dict=record)
+            sfa_record = Record(dict=record)
             sfa_record.dump(format) 
         if outfile:
             save_records_to_file(outfile, records)                
@@ -120,29 +120,38 @@ class AggregateCommands(Commands):
         resources = self.api.manager.ListResources(self.api, [], options)
         pprinter.pprint(resources)
         
+    @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
+    @args('-r', '--rspec', dest='rspec', metavar='<rspec>', help='rspec file')  
     def create(self, xrn, rspec):
         pass
 
+    @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
     def delete(self, xrn):
-        pass 
-    
+        self.api.manager.DeleteSliver(self.api, xrn, [], {})
+    @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
     def start(self, xrn):
-        pass
+        self.api.manager.start_slice(self.api, xrn, [])
 
+    @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
     def stop(self, xrn):
-        pass      
+        self.api.manager.stop_slice(self.api, xrn, [])      
 
+    @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
     def reset(self, xrn):
-        pass
+        self.api.manager.reset_slice(self.api, xrn)
 
-    def ticket(self):
+
+    @args('-x', '--xrn', dest='xrn', metavar='<xrn>', help='object hrn/urn', default=None)
+    @args('-r', '--rspec', dest='rspec', metavar='<rspec>', help='request rspec', default=None)
+    def ticket(self, xrn, rspec):
         pass
 
 
 class SliceManagerCommands(AggregateCommands):
     
     def __init__(self, *args, **kwds):
-        self.api= Generic().make_api(interface='slicemgr')
+        self.api= Generic.the_flavour().make_api(interface='slicemgr')
 
 
 CATEGORIES = {'registry': RegistryCommands,
index 2bf1da3..32def38 100644 (file)
--- a/sfa/importer/openstackimporter.py
+++ b/sfa/importer/openstackimporter.py
@@ -56,7 +56,7 @@ class OpenstackImporter:
             existing_hrns.append(record.hrn) 
             
         # Get all users
-        persons = shell.user_get_all()
+        persons = shell.auth_manager.get_users()
         persons_dict = {}
         keys_filename = config.config_path + os.sep + 'person_keys.py' 
         old_person_keys = load_keys(keys_filename)
@@ -65,7 +65,7 @@ class OpenstackImporter:
             hrn = config.SFA_INTERFACE_HRN + "." + person.id
             persons_dict[hrn] = person
             old_keys = old_person_keys.get(person.id, [])
-            keys = [k.public_key for k in shell.key_pair_get_all_by_user(person.id)]
+            keys = [k.public_key for k in shell.db.key_pair_get_all_by_user(person.id)]
             person_keys[person.id] = keys
             update_record = False
             if old_keys != keys:
@@ -78,12 +78,12 @@ class OpenstackImporter:
                     try:
                         pkey = convert_public_key(keys[0])
                     except:
-                        logger.log_exc('unable to convert public key for %s' % hrn)
+                        self.logger.log_exc('unable to convert public key for %s' % hrn)
                         pkey = Keypair(create=True)
                 else:
-                    logger.warn("OpenstackImporter: person %s does not have a PL public key"%hrn)
+                    self.logger.warn("OpenstackImporter: person %s does not have a PL public key"%hrn)
                     pkey = Keypair(create=True) 
-                person_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
+                person_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                 person_record = RegUser ()
                 person_record.type='user'
                 person_record.hrn=hrn
@@ -91,10 +91,10 @@ class OpenstackImporter:
                 person_record.authority=get_authority(hrn)
                 dbsession.add(person_record)
                 dbsession.commit()
-                logger.info("OpenstackImporter: imported person %s" % person_record)
+                self.logger.info("OpenstackImporter: imported person %s" % person_record)
 
         # Get all projects
-        projects = shell.project_get_all()
+        projects = shell.auth_manager.get_projects()
         projects_dict = {}
         for project in projects:
             hrn = config.SFA_INTERFACE_HRN + '.' + project.id
@@ -103,7 +103,7 @@ class OpenstackImporter:
             (hrn, 'slice') not in existing_records:
                 pkey = Keypair(create=True)
                 urn = hrn_to_urn(hrn, 'slice')
-                project_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
+                project_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                 project_record = RegSlice ()
                 project_record.type='slice'
                 project_record.hrn=hrn
@@ -111,7 +111,7 @@ class OpenstackImporter:
                 project_record.authority=get_authority(hrn)
                 dbsession.add(project_record)
                 dbsession.commit()
-                logger.info("OpenstackImporter: imported slice: %s" % project_record)  
+                self.logger.info("OpenstackImporter: imported slice: %s" % project_record)  
     
         # remove stale records    
         system_records = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
@@ -133,11 +133,11 @@ class OpenstackImporter:
                 continue 
         
             record_object = existing_records[ (record_hrn, type) ]
-            logger.info("OpenstackImporter: removing %s " % record)
+            self.logger.info("OpenstackImporter: removing %s " % record)
             dbsession.delete(record_object)
             dbsession.commit()
                                    
         # save pub keys
-        logger.info('OpenstackImporter: saving current pub keys')
+        self.logger.info('OpenstackImporter: saving current pub keys')
         save_keys(keys_filename, person_keys)                
         
index cf5a3b4..061ce13 100644 (file)
--- a/sfa/openstack/osaggregate.py
+++ b/sfa/openstack/osaggregate.py
@@ -232,6 +232,11 @@ class OSAggregate:
                                              instance_type=instance_type,
                                              key_name=key_name,
                                              user_data = user_data)
+                                             #security_groups=group_names,
+                                             #placement=zone,
+                                             #min_count=min_count,
+                                             #max_count=max_count,           
+                                              
         except EC2ResponseError, ec2RespError:
             logger.log_exc(ec2RespError)
                
@@ -270,4 +275,5 @@ class OSAggregate:
                             ari_id = CloudController.image_ec2_id(disk_image['ari']['id'])
                     # start the instance
                     self.reserve_instance(ami_id, aki_id, ari_id, \
-                                          instance_type['name'], keyname, pubkeys) 
+                                          instance_type['name'], keyname, pubkeys)
+
index 55e6291..b0c78d1 100644 (file)
--- a/sfa/plc/plaggregate.py
+++ b/sfa/plc/plaggregate.py
@@ -138,7 +138,7 @@ class PlAggregate:
     def get_nodes_and_links(self, slice=None,slivers=[], options={}):
         # if we are dealing with a slice that has no node just return 
         # and empty list    
-        if slice is not None and not slice['node_ids']:
+        if not slice or  not slice['node_ids']:
             return ([],[])
 
         filter = {}
index 7e00116..30d3e56 100644 (file)
--- a/sfa/storage/alchemy.py
+++ b/sfa/storage/alchemy.py
@@ -4,7 +4,6 @@ from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker
 
 from sqlalchemy import Column, Integer, String
-from sqlalchemy.orm import relationship, backref
 from sqlalchemy import ForeignKey
 
 from sfa.util.sfalogging import logger
index 702c182..780853c 100644 (file)
--- a/sfa/storage/model.py
+++ b/sfa/storage/model.py
@@ -9,6 +9,7 @@ from sqlalchemy.orm import object_mapper
 from sqlalchemy.orm import validates
 from sqlalchemy.ext.declarative import declarative_base
 
+from sfa.storage.record import Record
 from sfa.util.sfalogging import logger
 from sfa.util.sfatime import utcparse, datetime_to_string
 from sfa.util.xml import XML 
@@ -46,99 +47,14 @@ Base=declarative_base()
 # (*) finally for converting a dictionary into an sqlalchemy object, we provide
 # obj.load_from_dict(dict)
 
-class AlchemyObj:
+class AlchemyObj(Record):
     def __iter__(self): 
         self._i = iter(object_mapper(self).columns)
         return self 
     def next(self): 
         n = self._i.next().name
         return n, getattr(self, n)
-    def todict (self):
-        d=self.__dict__
-        keys=[k for k in d.keys() if not k.startswith('_')]
-        return dict ( [ (k,d[k]) for k in keys ] )
-    def load_from_dict (self, d):
-        for (k,v) in d.iteritems():
-            # experimental
-            if isinstance(v, StringTypes) and v.lower() in ['true']: v=True
-            if isinstance(v, StringTypes) and v.lower() in ['false']: v=False
-            setattr(self,k,v)
 
-    def validate_datetime (self, key, incoming):
-        if isinstance (incoming, datetime):     return incoming
-        elif isinstance (incoming, (int,float)):return datetime.fromtimestamp (incoming)
-
-    # in addition we provide convenience for converting to and from xml records
-    # for this purpose only, we need the subclasses to define 'fields' as either 
-    # a list or a dictionary
-    def xml_fields (self):
-        fields=self.fields
-        if isinstance(fields,dict): fields=fields.keys()
-        return fields
-
-    def save_as_xml (self):
-        # xxx not sure about the scope here
-        input_dict = dict( [ (key, getattr(self.key), ) for key in self.xml_fields() if getattr(self,key,None) ] )
-        xml_record=XML("<record />")
-        xml_record.parse_dict (input_dict)
-        return xml_record.toxml()
-
-    def dump(self, format=None, dump_parents=False):
-        if not format:
-            format = 'text'
-        else:
-            format = format.lower()
-        if format == 'text':
-            self.dump_text(dump_parents)
-        elif format == 'xml':
-            print self.save_to_string()
-        elif format == 'simple':
-            print self.dump_simple()
-        else:
-            raise Exception, "Invalid format %s" % format
-   
-    # xxx fixme 
-    # turns out the date_created field is received by the client as a 'created' int
-    # (and 'last_updated' does not make it at all)
-    # let's be flexible
-    def date_repr (self,fields):
-        if not isinstance(fields,list): fields=[fields]
-        for field in fields:
-            value=getattr(self,field,None)
-            if isinstance (value,datetime): 
-                return datetime_to_string (value)
-            elif isinstance (value,(int,float)):
-                return datetime_to_string(utcparse(value))
-        # fallback
-        return "** undef_datetime **"
-
-    def dump_text(self, dump_parents=False):
-        # print core fields in this order
-        core_fields = [ 'hrn', 'type', 'authority', 'date_created', 'created', 'last_updated', 'gid',  ]
-        print "".join(['=' for i in range(40)])
-        print "RECORD"
-        print "    hrn:", self.hrn
-        print "    type:", self.type
-        print "    authority:", self.authority
-        print "    date created:", self.date_repr( ['date_created','created'] )
-        print "    last updated:", self.date_repr('last_updated')
-        print "    gid:"
-        print self.get_gid_object().dump_string(8, dump_parents)  
-        
-        # print remaining fields
-        for attrib_name in dir(self):
-            attrib = getattr(self, attrib_name)
-            # skip internals
-            if attrib_name.startswith('_'):     continue
-            # skip core fields
-            if attrib_name in core_fields:      continue
-            # skip callables 
-            if callable (attrib):               continue
-            print "     %s: %s" % (attrib_name, attrib)
-    
-    def dump_simple(self):
-        return "%s"%self
-      
 #    # only intended for debugging 
 #    def inspect (self, logger, message=""):
 #        logger.info("%s -- Inspecting AlchemyObj -- attrs"%message)
@@ -204,6 +120,10 @@ class RegRecord (Base,AlchemyObj):
         elif isinstance(gid, StringTypes):  return gid
         else:                               return gid.save_to_string(save_parents=True)
 
+    def validate_datetime (self, key, incoming):
+        if isinstance (incoming, datetime):     return incoming
+        elif isinstance (incoming, (int,float)):return datetime.fromtimestamp (incoming)
+
     @validates ('date_created')
     def validate_date_created (self, key, incoming): return self.validate_datetime (key, incoming)
 
diff --git a/sfa/storage/record.py b/sfa/storage/record.py
new file mode 100644 (file)
index 0000000..b31ed26
--- /dev/null
+++ b/sfa/storage/record.py
@@ -0,0 +1,99 @@
+from sfa.util.sfatime import utcparse, datetime_to_string
+from types import StringTypes
+from datetime import datetime
+from sfa.util.xml import XML
+from sfa.trust.gid import GID
+
+class Record:
+
+    def __init__(self, dict=None, xml=None):
+        if dict:
+            self.load_from_dict(dict)
+        elif xml:
+            xml_record = XML(xml)
+            xml_dict = xml_record.todict()
+            self.load_from_dict(xml_dict)  
+
+    # xxx fixme
+    # turns out the date_created field is received by the client as a 'created' int
+    # (and 'last_updated' does not make it at all)
+    # let's be flexible
+    def date_repr (self,fields):
+        if not isinstance(fields,list): fields=[fields]
+        for field in fields:
+            value=getattr(self,field,None)
+            if isinstance (value,datetime):
+                return datetime_to_string (value)
+            elif isinstance (value,(int,float)):
+                return datetime_to_string(utcparse(value))
+        # fallback
+        return "** undef_datetime **"
+    
+    def todict (self):
+        d=self.__dict__
+        keys=[k for k in d.keys() if not k.startswith('_')]
+        return dict ( [ (k,d[k]) for k in keys ] )
+
+    def load_from_dict (self, d):
+        for (k,v) in d.iteritems():
+            # experimental
+            if isinstance(v, StringTypes) and v.lower() in ['true']: v=True
+            if isinstance(v, StringTypes) and v.lower() in ['false']: v=False
+            setattr(self,k,v)
+
+    # in addition we provide convenience for converting to and from xml records
+    # for this purpose only, we need the subclasses to define 'fields' as either
+    # a list or a dictionary
+    def xml_fields (self):
+        fields=self.fields
+        if isinstance(fields,dict): fields=fields.keys()
+        return fields
+
+    def save_as_xml (self):
+        # xxx not sure about the scope here
+        input_dict = dict( [ (key, getattr(self.key), ) for key in self.xml_fields() if getattr(self,key,None) ] )
+        xml_record=XML("<record />")
+        xml_record.parse_dict (input_dict)
+        return xml_record.toxml()
+
+    def dump(self, format=None, dump_parents=False):
+        if not format:
+            format = 'text'
+        else:
+            format = format.lower()
+        if format == 'text':
+            self.dump_text(dump_parents)
+        elif format == 'xml':
+            print self.save_to_string()
+        elif format == 'simple':
+            print self.dump_simple()
+        else:
+            raise Exception, "Invalid format %s" % format
+
+    def dump_text(self, dump_parents=False):
+        # print core fields in this order
+        core_fields = [ 'hrn', 'type', 'authority', 'date_created', 'created', 'last_updated', 'gid',  ]
+        print "".join(['=' for i in range(40)])
+        print "RECORD"
+        print "    hrn:", self.hrn
+        print "    type:", self.type
+        print "    authority:", self.authority
+        print "    date created:", self.date_repr( ['date_created','created'] )
+        print "    last updated:", self.date_repr('last_updated')
+        print "    gid:"
+        if self.gid:
+            print GID(self.gid).dump_string(8, dump_parents)    
+
+        # print remaining fields
+        for attrib_name in dir(self):
+            attrib = getattr(self, attrib_name)
+            # skip internals
+            if attrib_name.startswith('_'):     continue
+            # skip core fields
+            if attrib_name in core_fields:      continue
+            # skip callables
+            if callable (attrib):               continue
+            print "     %s: %s" % (attrib_name, attrib)
+
+    def dump_simple(self):
+        return "%s"%self
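The hunks above replace the old make_record()/SfaRecord helpers with the new sfa.storage.record.Record class. A minimal usage sketch follows (Python 2, matching the codebase; the hrn and authority values are made up for illustration, and the sfa package is assumed to be importable):

    from sfa.storage.record import Record

    # build a record from a plain dictionary, as sfi.py and sfaadmin.py now do
    record_dict = {'hrn': 'plc.inria.test_slice',     # hypothetical names
                   'type': 'slice',
                   'authority': 'plc.inria',
                   'gid': None}

    rec = Record(dict=record_dict)      # was: make_record(dict=record_dict)
    rec.dump(format='text')             # plain-text dump of the core fields
    print rec.todict()                  # back to a plain dictionary

A record can also be built from an XML string via Record(xml=xml_string), which is what load_record_from_file() in sfi.py relies on after this change.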