importer revisited - sfa-import.py sfa-nuke.py (no more -plc)
author    Thierry Parmentelat <thierry.parmentelat@sophia.inria.fr>
          Fri, 27 Jan 2012 14:03:20 +0000 (15:03 +0100)
committer Thierry Parmentelat <thierry.parmentelat@sophia.inria.fr>
          Fri, 27 Jan 2012 14:03:20 +0000 (15:03 +0100)
testbed-dependent importer located through the FLAVOUR mechanism

16 files changed:
Makefile
cron.d/sfa.cron
setup.py
sfa.spec
sfa/generic/__init__.py
sfa/generic/max.py
sfa/generic/openstack.py
sfa/generic/pl.py
sfa/importer/openstackimporter.py [new file with mode: 0644]
sfa/importer/plimporter.py [new file with mode: 0644]
sfa/importer/sfa-import-openstack.py [deleted file]
sfa/importer/sfa-import-plc.py [deleted file]
sfa/importer/sfa-import.py [new file with mode: 0755]
sfa/importer/sfa-nuke.py [moved from sfa/importer/sfa-nuke-plc.py with 100% similarity]
sfa/importer/sfaimporter.py [moved from sfa/importer/sfaImport.py with 84% similarity]
sfa/server/sfa-start.py
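
Before the per-file diffs, here is a sketch of how the FLAVOUR mechanism mentioned in the commit message locates the testbed-dependent importer. It is condensed from the new sfa-import.py and the sfa/generic diffs below, not additional code from the repository:

    # condensed from sfa/importer/sfa-import.py as added by this commit
    from sfa.generic import Generic
    from sfa.trust.hierarchy import Hierarchy
    from sfa.util.sfalogging import _SfaLogger

    logger = _SfaLogger(logfile='/var/log/sfa_import.log', loggername='importlog')
    auth_hierarchy = Hierarchy()

    generic = Generic.the_flavour()             # flavour configured for this deployment (pl, openstack, max, ...)
    importer_class = generic.importer_class()   # defaults to None in Generic, redefined by pl and openstack
    if importer_class:
        testbed_importer = importer_class(auth_hierarchy, logger)
        # sfa-import.py then calls record_options(parser) and run(options) on it

The testbed-neutral work (top-level records, trusted roots) stays in SfaImporter, while the PlanetLab- and OpenStack-specific logic moves into PlImporter and OpenstackImporter.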

index 79b71c5..63e0255 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -135,7 +135,8 @@ RSYNC                       := rsync -a -v $(RSYNC_COND_DRY_RUN) --no-owner $(RSYNC_EXCLUDES)
 CLIENTS = $(shell ls sfa/clientbin/*.py)
 
 BINS = ./config/sfa-config-tty ./config/gen-sfa-cm-config.py \
-       ./sfa/importer/sfa-import-plc.py ./sfa/importer/sfa-nuke-plc.py ./sfa/server/sfa-start.py \
+       ./sfa/server/sfa-start.py \
+       ./sfa/importer/sfa-import.py ./sfa/importer/sfa-nuke.py \
        $(CLIENTS)
 
 synccheck: 
index 00d27b6..c541334 100644 (file)
--- a/cron.d/sfa.cron
+++ b/cron.d/sfa.cron
@@ -9,6 +9,6 @@ HOME=/
 #
 # minute hour day-of-month month day-of-week user command
 # once or twice an hour makes sense
-0 * * * * root /usr/bin/sfa-import-plc.py         >> /var/log/sfa_import.log 2>&1
+0 * * * * root /usr/bin/sfa-import.py >> /var/log/sfa_import.log 2>&1
 # this is needed only if you run RefreshPeer
 #0 0 * * * root /usr/bin/sfa-clean-peer-records.py >> /var/log/sfa_import.log 2>&1
index c1e8453..e3bca57 100755 (executable)
--- a/setup.py
+++ b/setup.py
@@ -13,8 +13,8 @@ scripts = glob("sfa/clientbin/*.py") + \
     [ 
     'config/sfa-config-tty',
     'config/gen-sfa-cm-config.py',
-    'sfa/importer/sfa-import-plc.py', 
-    'sfa/importer/sfa-nuke-plc.py', 
+    'sfa/importer/sfa-import.py', 
+    'sfa/importer/sfa-nuke.py', 
     'sfa/server/sfa-ca.py', 
     'sfa/server/sfa-start.py', 
     'sfa/server/sfa-clean-peer-records.py', 
index 963abef..a835e11 100644 (file)
--- a/sfa.spec
+++ b/sfa.spec
@@ -166,8 +166,8 @@ rm -rf $RPM_BUILD_ROOT
 /etc/sfa/xml.xsd
 /etc/sfa/protogeni-rspec-common.xsd
 /etc/sfa/topology
-%{_bindir}/sfa-import-plc.py*
-%{_bindir}/sfa-nuke-plc.py*
+%{_bindir}/sfa-import.py*
+%{_bindir}/sfa-nuke.py*
 %{_bindir}/sfa-clean-peer-records.py*
 %{_bindir}/gen-sfa-cm-config.py*
 %{_bindir}/sfa-ca.py*
index de1a9e1..99d15bc 100644 (file)
--- a/sfa/generic/__init__.py
+++ b/sfa/generic/__init__.py
@@ -42,6 +42,10 @@ class Generic:
         except:
             logger.log_exc("Cannot locate generic instance with flavour=%s"%flavour)
 
+    # provide default for importer_class
+    def importer_class (self): 
+        return None
+
     # in the simplest case these can be redefined to the class/module objects to be used
     # see pl.py for an example
     # some descendant of SfaApi
@@ -111,4 +115,3 @@ class Generic:
         except:
             logger.log_exc_critical(message)
         
-        
index 8920ae7..d54afc5 100644 (file)
--- a/sfa/generic/max.py
+++ b/sfa/generic/max.py
@@ -3,13 +3,12 @@
 # 
 from sfa.generic.pl import pl
 
-import sfa.managers.aggregate_manager_max
-
 class max (pl):
 
 # the max flavour behaves like pl, except for 
 # the aggregate
     def aggregate_manager_class (self) :
+        import sfa.managers.aggregate_manager_max
         return sfa.managers.aggregate_manager_max.AggregateManagerMax
 
 # I believe the component stuff is not implemented
index 5c99d23..024f291 100644 (file)
--- a/sfa/generic/openstack.py
+++ b/sfa/generic/openstack.py
@@ -6,17 +6,19 @@ import sfa.managers.registry_manager_openstack
 import sfa.managers.slice_manager
 import sfa.managers.aggregate_manager_openstack
 
-class openstack (Generic):
-    
-    # use the standard api class
-    def api_class (self):
-        return sfa.server.sfaapi.SfaApi
+# use pl as a model so we only redefine what's different
+from sfa.generic.pl import pl
 
+class openstack (pl):
+    
+    # the importer class
+    def importer_class (self): 
+        import sfa.importer.openstackimporter
+        return sfa.importer.openstackimporter.OpenstackImporter
+        
     # the manager classes for the server-side services
     def registry_manager_class (self) : 
         return sfa.managers.registry_manager_openstack.RegistryManager
-    def slicemgr_manager_class (self) : 
-        return sfa.managers.slice_manager.SliceManager
     def aggregate_manager_class (self) :
         return sfa.managers.aggregate_manager_openstack.AggregateManager
 
@@ -24,12 +26,5 @@ class openstack (Generic):
     def driver_class (self):
         return sfa.openstack.openstack_driver.OpenstackDriver
 
-    # for the component mode, to be run on board planetlab nodes
-    # manager class
-    def component_manager_class (self):
-        return sfa.managers.component_manager_pl
-    # driver_class
-    def component_driver_class (self):
-        return sfa.plc.plcomponentdriver.PlComponentDriver
 
 
index 92b7266..c8b1bc6 100644 (file)
--- a/sfa/generic/pl.py
+++ b/sfa/generic/pl.py
@@ -1,8 +1,12 @@
 from sfa.generic import Generic
 
-
 class pl (Generic):
     
+    # the importer class
+    def importer_class (self): 
+        import sfa.importer.plimporter
+        return sfa.importer.plimporter.PlImporter
+        
     # use the standard api class
     def api_class (self):
         import sfa.server.sfaapi
@@ -27,9 +31,10 @@ class pl (Generic):
     # for the component mode, to be run on board planetlab nodes
     # manager class
     def component_manager_class (self):
+        import sfa.managers
         return sfa.managers.component_manager_pl
     # driver_class
     def component_driver_class (self):
+        import sfa.plc.plcomponentdriver
         return sfa.plc.plcomponentdriver.PlComponentDriver
 
-
diff --git a/sfa/importer/openstackimporter.py b/sfa/importer/openstackimporter.py
new file mode 100644 (file)
index 0000000..249e2c0
--- /dev/null
+++ b/sfa/importer/openstackimporter.py
@@ -0,0 +1 @@
+class OpenstackImporter: pass
diff --git a/sfa/importer/plimporter.py b/sfa/importer/plimporter.py
new file mode 100644 (file)
index 0000000..d5d3d3b
--- /dev/null
+++ b/sfa/importer/plimporter.py
@@ -0,0 +1,340 @@
+import os
+
+from sfa.util.config import Config
+from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
+from sfa.util.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename
+
+from sfa.trust.gid import create_uuid    
+from sfa.trust.certificate import convert_public_key, Keypair
+
+from sfa.storage.alchemy import dbsession
+from sfa.storage.model import RegRecord, RegAuthority, RegUser, RegSlice, RegNode
+
+from sfa.plc.plshell import PlShell    
+
+def load_keys(filename):
+    keys = {}
+    tmp_dict = {}
+    try:
+        execfile(filename, tmp_dict)
+        if 'keys' in tmp_dict:
+            keys = tmp_dict['keys']
+        return keys
+    except:
+        return keys
+
+def save_keys(filename, keys):
+    f = open(filename, 'w')
+    f.write("keys = %s" % str(keys))
+    f.close()
+
+def _get_site_hrn(interface_hrn, site):
+    # Hardcode 'internet2' into the hrn for sites hosting
+    # internet2 nodes. This is a special operation for some vini
+    # sites only
+    hrn = ".".join([interface_hrn, site['login_base']]) 
+    if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
+        if site['login_base'].startswith("i2") or site['login_base'].startswith("nlr"):
+            hrn = ".".join([interface_hrn, "internet2", site['login_base']])
+    return hrn
+
+
+class PlImporter:
+
+    def __init__ (self, auth_hierarchy, logger):
+        self.auth_hierarchy = auth_hierarchy
+        self.logger=logger
+
+    def record_options (self, parser):
+        self.logger.debug ("PlImporter no options yet")
+        pass
+
+    def run (self, options):
+        # we don't have any options for now
+        self.logger.info ("PlImporter.run : to do")
+
+        config = Config ()
+        interface_hrn = config.SFA_INTERFACE_HRN
+        root_auth = config.SFA_REGISTRY_ROOT_AUTH
+        shell = PlShell (config)
+
+        # create dict of all existing sfa records
+        existing_records = {}
+        existing_hrns = []
+        key_ids = []
+        for record in dbsession.query(RegRecord):
+            existing_records[ (record.hrn, record.type,) ] = record
+            existing_hrns.append(record.hrn) 
+            
+        # Get all plc sites
+        sites = shell.GetSites({'peer_id': None})
+        sites_dict = {}
+        for site in sites:
+            sites_dict[site['login_base']] = site 
+    
+        # Get all plc users
+        persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
+                                   ['person_id', 'email', 'key_ids', 'site_ids'])
+        persons_dict = {}
+        for person in persons:
+            persons_dict[person['person_id']] = person
+            key_ids.extend(person['key_ids'])
+
+        # Get all public keys
+        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids})
+        keys_dict = {}
+        for key in keys:
+            keys_dict[key['key_id']] = key['key']
+
+        # create a dict of person keys keyed on key_id 
+        keys_filename = config.config_path + os.sep + 'person_keys.py' 
+        old_person_keys = load_keys(keys_filename)
+        person_keys = {} 
+        for person in persons:
+            pubkeys = []
+            for key_id in person['key_ids']:
+                pubkeys.append(keys_dict[key_id])
+            person_keys[person['person_id']] = pubkeys
+
+        # Get all plc nodes  
+        nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
+        nodes_dict = {}
+        for node in nodes:
+            nodes_dict[node['node_id']] = node
+
+        # Get all plc slices
+        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name'])
+        slices_dict = {}
+        for slice in slices:
+            slices_dict[slice['slice_id']] = slice
+
+        # special case for vini
+        if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
+            # create a fake internet2 site first
+            i2site = {'name': 'Internet2', 'abbreviated_name': 'I2',
+                        'login_base': 'internet2', 'site_id': -1}
+            site_hrn = _get_site_hrn(interface_hrn, i2site)
+            # import if hrn is not in list of existing hrns or if the hrn exists
+            # but it's not a site record
+            if site_hrn not in existing_hrns or \
+               (site_hrn, 'authority') not in existing_records:
+                urn = hrn_to_urn(site_hrn, 'authority')
+                if not self.auth_hierarchy.auth_exists(urn):
+                    self.auth_hierarchy.create_auth(urn)
+                auth_info = self.auth_hierarchy.get_auth_info(urn)
+                auth_record = RegAuthority()
+                auth_record.type='authority'
+                auth_record.hrn=site_hrn
+                auth_record.gid=auth_info.get_gid_object()
+                auth_record.pointer=i2site['site_id']
+                auth_record.authority=get_authority(site_hrn)
+                dbsession.add(auth_record)
+                dbsession.commit()
+                self.logger.info("Import: Imported authority (vini site) %s"%auth_record)
+
+        # start importing 
+        for site in sites:
+            site_hrn = _get_site_hrn(interface_hrn, site)
+    
+            # import if hrn is not in list of existing hrns or if the hrn exists
+            # but it's not a site record
+            if site_hrn not in existing_hrns or \
+               (site_hrn, 'authority') not in existing_records:
+                try:
+                    urn = hrn_to_urn(site_hrn, 'authority')
+                    if not self.auth_hierarchy.auth_exists(urn):
+                        self.auth_hierarchy.create_auth(urn)
+                    auth_info = self.auth_hierarchy.get_auth_info(urn)
+                    auth_record = RegAuthority()
+                    auth_record.type='authority'
+                    auth_record.hrn=site_hrn
+                    auth_record.gid=auth_info.get_gid_object()
+                    auth_record.pointer=site['site_id']
+                    auth_record.authority=get_authority(site_hrn)
+                    dbsession.add(auth_record)
+                    dbsession.commit()
+                    self.logger.info("Import: imported authority (site) : %s" % auth_record)  
+                except:
+                    # if the site import fails then there is no point in trying to import the
+                    # site's child records (node, slices, persons), so skip them.
+                    self.logger.log_exc("Import: failed to import site. Skipping child records") 
+                    continue 
+             
+            # import node records
+            for node_id in site['node_ids']:
+                if node_id not in nodes_dict:
+                    continue 
+                node = nodes_dict[node_id]
+                site_auth = get_authority(site_hrn)
+                site_name = get_leaf(site_hrn)
+                hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
+                if len(hrn) > 64:
+                    hrn = hrn[:64]
+                if hrn not in existing_hrns or \
+                   (hrn, 'node') not in existing_records:
+                    try:
+                        pkey = Keypair(create=True)
+                        urn = hrn_to_urn(hrn, 'node')
+                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
+                        node_record = RegNode ()
+                        node_record.type='node'
+                        node_record.hrn=hrn
+                        node_record.gid=node_gid
+                        node_record.pointer =node['node_id']
+                        node_record.authority=get_authority(hrn)
+                        dbsession.add(node_record)
+                        dbsession.commit()
+                        self.logger.info("Import: imported node: %s" % node_record)  
+                    except:
+                        self.logger.log_exc("Import: failed to import node") 
+                    
+
+            # import slices
+            for slice_id in site['slice_ids']:
+                if slice_id not in slices_dict:
+                    continue 
+                slice = slices_dict[slice_id]
+                hrn = slicename_to_hrn(interface_hrn, slice['name'])
+                if hrn not in existing_hrns or \
+                   (hrn, 'slice') not in existing_records:
+                    try:
+                        pkey = Keypair(create=True)
+                        urn = hrn_to_urn(hrn, 'slice')
+                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
+                        slice_record = RegSlice ()
+                        slice_record.type='slice'
+                        slice_record.hrn=hrn
+                        slice_record.gid=slice_gid
+                        slice_record.pointer=slice['slice_id']
+                        slice_record.authority=get_authority(hrn)
+                        dbsession.add(slice_record)
+                        dbsession.commit()
+                        self.logger.info("Import: imported slice: %s" % slice_record)  
+                    except:
+                        self.logger.log_exc("Import: failed to import slice")
+
+            # import persons
+            for person_id in site['person_ids']:
+                if person_id not in persons_dict:
+                    continue 
+                person = persons_dict[person_id]
+                hrn = email_to_hrn(site_hrn, person['email'])
+                if len(hrn) > 64:
+                    hrn = hrn[:64]
+    
+                # if user's primary key has changed then we need to update the 
+                # user's gid by forcing an update here
+                old_keys = []
+                new_keys = []
+                if person_id in old_person_keys:
+                    old_keys = old_person_keys[person_id]
+                if person_id in person_keys:
+                    new_keys = person_keys[person_id]
+                update_record = False
+                for key in new_keys:
+                    if key not in old_keys:
+                        update_record = True 
+    
+                if hrn not in existing_hrns or \
+                   (hrn, 'user') not in existing_records or update_record:
+                    try:
+                        if 'key_ids' in person and person['key_ids']:
+                            key = new_keys[0]
+                            try:
+                                pkey = convert_public_key(key)
+                            except:
+                                self.logger.warn('unable to convert public key for %s' % hrn)
+                                pkey = Keypair(create=True)
+                        else:
+                            # the user has no keys. Creating a random keypair for the user's gid
+                            self.logger.warn("Import: person %s does not have a PL public key"%hrn)
+                            pkey = Keypair(create=True) 
+                        urn = hrn_to_urn(hrn, 'user')
+                        person_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
+                        person_record = RegUser ()
+                        person_record.type='user'
+                        person_record.hrn=hrn
+                        person_record.gid=person_gid
+                        person_record.pointer=person['person_id']
+                        person_record.authority=get_authority(hrn)
+                        dbsession.add (person_record)
+                        dbsession.commit()
+                        self.logger.info("Import: imported person: %s" % person_record)
+                    except:
+                        self.logger.log_exc("Import: failed to import person.") 
+    
+        # remove stale records    
+        system_records = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
+        for (record_hrn, type) in existing_records.keys():
+            if record_hrn in system_records:
+                continue
+            
+            record = existing_records[(record_hrn, type)]
+            if record.peer_authority:
+                continue
+    
+            # don't delete vini's internet2 placeholder record
+            # normally this would be deleted because it does not have a plc record
+            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
+               record_hrn.endswith("internet2"):     
+                continue
+    
+            found = False
+            
+            if type == 'authority':    
+                for site in sites:
+                    site_hrn = interface_hrn + "." + site['login_base']
+                    if site_hrn == record_hrn and site['site_id'] == record.pointer:
+                        found = True
+                        break
+
+            elif type == 'user':
+                login_base = get_leaf(get_authority(record_hrn))
+                username = get_leaf(record_hrn)
+                if login_base in sites_dict:
+                    site = sites_dict[login_base]
+                    for person in persons:
+                        tmp_username = person['email'].split("@")[0]
+                        alt_username = person['email'].split("@")[0].replace(".", "_").replace("+", "_")
+                        if username in [tmp_username, alt_username] and \
+                           site['site_id'] in person['site_ids'] and \
+                           person['person_id'] == record.pointer:
+                            found = True
+                            break
+        
+            elif type == 'slice':
+                slicename = hrn_to_pl_slicename(record_hrn)
+                for slice in slices:
+                    if slicename == slice['name'] and \
+                       slice['slice_id'] == record.pointer:
+                        found = True
+                        break    
+            elif type == 'node':
+                login_base = get_leaf(get_authority(record_hrn))
+                nodename = Xrn.unescape(get_leaf(record_hrn))
+                if login_base in sites_dict:
+                    site = sites_dict[login_base]
+                    for node in nodes:
+                        tmp_nodename = node['hostname']
+                        if tmp_nodename == nodename and \
+                           node['site_id'] == site['site_id'] and \
+                           node['node_id'] == record.pointer:
+                            found = True
+                            break  
+            else:
+                continue 
+        
+            if not found:
+                try:
+                    record_object = existing_records[(record_hrn, type)]
+                    self.logger.info("Import: deleting record: %s" % record)
+                    dbsession.delete(record_object)
+                    dbsession.commit()
+                except:
+                    self.logger.log_exc("Import: failed to delete record")
+
+        # save pub keys
+        self.logger.info('Import: saving current pub keys')
+        save_keys(keys_filename, person_keys)                
+        
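
A note on load_keys()/save_keys() above: the person_keys.py cache they manage (written under config.config_path) is a plain Python file defining a single 'keys' dict, keyed on the PLC person_id and holding each person's list of public key strings. A hypothetical example with placeholder ids and key material:

    # person_keys.py -- written by save_keys(), read back by load_keys() via execfile()
    # maps person_id to that person's list of public keys (all values below are placeholders)
    keys = {
        1234: ['ssh-rsa AAAAB3NzaC1yc2E... user@example.org'],
        5678: [],
    }
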
diff --git a/sfa/importer/sfa-import-openstack.py b/sfa/importer/sfa-import-openstack.py
deleted file mode 100755 (executable)
index 78032e2..0000000
--- a/sfa/importer/sfa-import-openstack.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/python
-#
-##
-# Import PLC records into the SFA database. It is indended that this tool be
-# run once to create SFA records that reflect the current state of the
-# planetlab database.
-#
-# The import tool assumes that the existing PLC hierarchy should all be part
-# of "planetlab.us" (see the root_auth and level1_auth variables below).
-#
-# Public keys are extracted from the users' SSH keys automatically and used to
-# create GIDs. This is relatively experimental as a custom tool had to be
-# written to perform conversion from SSH to OpenSSL format. It only supports
-# RSA keys at this time, not DSA keys.
-##
-
-import os
-import getopt
-import sys
-
-from sfa.util.config import Config
-from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
-from sfa.util.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename
-from sfa.util.sfalogging import logger
-
-from sfa.trust.gid import create_uuid    
-from sfa.trust.certificate import convert_public_key, Keypair
-
-from sfa.openstack.openstack_shell import OpenstackShell    
-
-from sfa.storage.alchemy import dbsession
-from sfa.storage.model import RegRecord, RegAuthority, RegUser, RegSlice, RegNode
-
-from sfa.importer.sfaImport import sfaImport, _cleanup_string
-
-def process_options():
-
-   (options, args) = getopt.getopt(sys.argv[1:], '', [])
-   for opt in options:
-       name = opt[0]
-       val = opt[1]
-
-
-def load_keys(filename):
-    keys = {}
-    tmp_dict = {}
-    try:
-        execfile(filename, tmp_dict)
-        if 'keys' in tmp_dict:
-            keys = tmp_dict['keys']
-        return keys
-    except:
-        return keys
-
-def save_keys(filename, keys):
-    f = open(filename, 'w')
-    f.write("keys = %s" % str(keys))
-    f.close()
-
-def main():
-
-    process_options()
-    config = Config()
-    sfaImporter = sfaImport()
-    logger=sfaImporter.logger
-    logger.setLevelFromOptVerbose(config.SFA_API_LOGLEVEL)
-    if not config.SFA_REGISTRY_ENABLED:
-        sys.exit(0)
-    root_auth = config.SFA_REGISTRY_ROOT_AUTH
-    interface_hrn = config.SFA_INTERFACE_HRN
-    shell = OpenstackShell(config)
-    sfaImporter.create_top_level_records()
-    
-    # create dict of all existing sfa records
-    existing_records = {}
-    existing_hrns = []
-    key_ids = []
-    for record in dbsession.query(RegRecord):
-        existing_records[ (record.hrn, record.type,) ] = record
-        existing_hrns.append(record.hrn) 
-            
-        
-    # Get all users
-    persons = shell.user_get_all()
-    persons_dict = {}
-    keys_filename = config.config_path + os.sep + 'person_keys.py' 
-    old_person_keys = load_keys(keys_filename)
-    person_keys = {} 
-    for person in persons:
-        hrn = config.SFA_INTERFACE_HRN + "." + person.id
-        persons_dict[hrn] = person
-        old_keys = old_person_keys.get(person.id, [])
-        keys = [k.public_key for k in shell.key_pair_get_all_by_user(person.id)]
-        person_keys[person.id] = keys
-        update_record = False
-        if old_keys != keys:
-            update_record = True
-        if hrn not in existing_hrns or \
-               (hrn, 'user') not in existing_records or update_record:    
-            urn = hrn_to_urn(hrn, 'user')
-            
-            if keys:
-                try:
-                    pkey = convert_public_key(keys[0])
-                except:
-                    logger.log_exc('unable to convert public key for %s' % hrn)
-                    pkey = Keypair(create=True)
-            else:
-                logger.warn("Import: person %s does not have a PL public key"%hrn)
-                pkey = Keypair(create=True) 
-            person_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
-            person_record = RegUser ()
-            person_record.type='user'
-            person_record.hrn=hrn
-            person_record.gid=person_gid
-            person_record.authority=get_authority(hrn)
-            dbsession.add(person_record)
-            dbsession.commit()
-            logger.info("Import: imported person %s" % person_record)
-
-    # Get all projects
-    projects = shell.project_get_all()
-    projects_dict = {}
-    for project in projects:
-        hrn = config.SFA_INTERFACE_HRN + '.' + project.id
-        projects_dict[hrn] = project
-        if hrn not in existing_hrns or \
-        (hrn, 'slice') not in existing_records:
-            pkey = Keypair(create=True)
-            urn = hrn_to_urn(hrn, 'slice')
-            project_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
-            project_record = RegSlice ()
-            project_record.type='slice'
-            project_record.hrn=hrn
-            project_record.gid=project_gid
-            project_record.authority=get_authority(hrn)
-            dbsession.add(project_record)
-            dbsession.commit()
-            logger.info("Import: imported slice: %s" % project_record)  
-    
-    # remove stale records    
-    system_records = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
-    for (record_hrn, type) in existing_records.keys():
-        if record_hrn in system_records:
-            continue
-        
-        record = existing_records[(record_hrn, type)]
-        if record.peer_authority:
-            continue
-
-        if type == 'user':
-            if record_hrn in persons_dict:
-                continue  
-        elif type == 'slice':
-            if record_hrn in projects_dict:
-                continue
-        else:
-            continue 
-        
-        record_object = existing_records[ (record_hrn, type) ]
-        logger.info("Import: removing %s " % record)
-        dbsession.delete(record_object)
-        dbsession.commit()
-                                   
-    # save pub keys
-    logger.info('Import: saving current pub keys')
-    save_keys(keys_filename, person_keys)                
-        
-if __name__ == "__main__":
-    main()
diff --git a/sfa/importer/sfa-import-plc.py b/sfa/importer/sfa-import-plc.py
deleted file mode 100755 (executable)
index 36617bb..0000000
--- a/sfa/importer/sfa-import-plc.py
+++ /dev/null
@@ -1,367 +0,0 @@
-#!/usr/bin/python
-#
-##
-# Import PLC records into the SFA database. It is indended that this tool be
-# run once to create SFA records that reflect the current state of the
-# planetlab database.
-#
-# The import tool assumes that the existing PLC hierarchy should all be part
-# of "planetlab.us" (see the root_auth and level1_auth variables below).
-#
-# Public keys are extracted from the users' SSH keys automatically and used to
-# create GIDs. This is relatively experimental as a custom tool had to be
-# written to perform conversion from SSH to OpenSSL format. It only supports
-# RSA keys at this time, not DSA keys.
-##
-
-import os
-import getopt
-import sys
-
-from sfa.util.config import Config
-from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
-from sfa.util.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename
-
-from sfa.trust.gid import create_uuid    
-from sfa.trust.certificate import convert_public_key, Keypair
-
-from sfa.plc.plshell import PlShell    
-
-from sfa.storage.alchemy import dbsession
-from sfa.storage.model import RegRecord, RegAuthority, RegUser, RegSlice, RegNode
-
-from sfa.importer.sfaImport import sfaImport, _cleanup_string
-
-def process_options():
-
-   (options, args) = getopt.getopt(sys.argv[1:], '', [])
-   for opt in options:
-       name = opt[0]
-       val = opt[1]
-
-
-def load_keys(filename):
-    keys = {}
-    tmp_dict = {}
-    try:
-        execfile(filename, tmp_dict)
-        if 'keys' in tmp_dict:
-            keys = tmp_dict['keys']
-        return keys
-    except:
-        return keys
-
-def save_keys(filename, keys):
-    f = open(filename, 'w')
-    f.write("keys = %s" % str(keys))
-    f.close()
-
-def _get_site_hrn(interface_hrn, site):
-    # Hardcode 'internet2' into the hrn for sites hosting
-    # internet2 nodes. This is a special operation for some vini
-    # sites only
-    hrn = ".".join([interface_hrn, site['login_base']]) 
-    if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
-        if site['login_base'].startswith("i2") or site['login_base'].startswith("nlr"):
-            hrn = ".".join([interface_hrn, "internet2", site['login_base']])
-    return hrn
-
-# one would think this code could use a call to DBSchema 
-# however now import s expected to be done after service creation
-def main():
-
-    process_options()
-    config = Config()
-    sfaImporter = sfaImport()
-    logger=sfaImporter.logger
-    logger.setLevelFromOptVerbose(config.SFA_API_LOGLEVEL)
-    if not config.SFA_REGISTRY_ENABLED:
-        sys.exit(0)
-    root_auth = config.SFA_REGISTRY_ROOT_AUTH
-    interface_hrn = config.SFA_INTERFACE_HRN
-    shell = PlShell (config)
-    sfaImporter.create_top_level_records()
-    
-    # create dict of all existing sfa records
-    existing_records = {}
-    existing_hrns = []
-    key_ids = []
-    for record in dbsession.query(RegRecord):
-        existing_records[ (record.hrn, record.type,) ] = record
-        existing_hrns.append(record.hrn) 
-            
-    # Get all plc sites
-    sites = shell.GetSites({'peer_id': None})
-    sites_dict = {}
-    for site in sites:
-        sites_dict[site['login_base']] = site 
-    
-    # Get all plc users
-    persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
-                               ['person_id', 'email', 'key_ids', 'site_ids'])
-    persons_dict = {}
-    for person in persons:
-        persons_dict[person['person_id']] = person
-        key_ids.extend(person['key_ids'])
-
-    # Get all public keys
-    keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids})
-    keys_dict = {}
-    for key in keys:
-        keys_dict[key['key_id']] = key['key']
-
-    # create a dict of person keys keyed on key_id 
-    keys_filename = config.config_path + os.sep + 'person_keys.py' 
-    old_person_keys = load_keys(keys_filename)
-    person_keys = {} 
-    for person in persons:
-        pubkeys = []
-        for key_id in person['key_ids']:
-            pubkeys.append(keys_dict[key_id])
-        person_keys[person['person_id']] = pubkeys
-
-    # Get all plc nodes  
-    nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
-    nodes_dict = {}
-    for node in nodes:
-        nodes_dict[node['node_id']] = node
-
-    # Get all plc slices
-    slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name'])
-    slices_dict = {}
-    for slice in slices:
-        slices_dict[slice['slice_id']] = slice
-
-    # special case for vini
-    if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
-        # create a fake internet2 site first
-        i2site = {'name': 'Internet2', 'abbreviated_name': 'I2',
-                    'login_base': 'internet2', 'site_id': -1}
-        site_hrn = _get_site_hrn(interface_hrn, i2site)
-        # import if hrn is not in list of existing hrns or if the hrn exists
-        # but its not a site record
-        if site_hrn not in existing_hrns or \
-           (site_hrn, 'authority') not in existing_records:
-            urn = hrn_to_urn(site_hrn, 'authority')
-            if not sfaImporter.AuthHierarchy.auth_exists(urn):
-                sfaImporter.AuthHierarchy.create_auth(urn)
-            auth_info = sfaImporter.AuthHierarchy.get_auth_info(urn)
-            auth_record = RegAuthority()
-            auth_record.type='authority'
-            auth_record.hrn=site_hrn
-            auth_record.gid=auth_info.get_gid_object()
-            auth_record.pointer=site['site_id']
-            auth_record.authority=get_authority(site_hrn)
-            dbsession.add(auth_record)
-            dbsession.commit()
-            logger.info("Import: Imported authority (vini site) %s"%auth_record)
-
-    # start importing 
-    for site in sites:
-        site_hrn = _get_site_hrn(interface_hrn, site)
-
-        # import if hrn is not in list of existing hrns or if the hrn exists
-        # but its not a site record
-        if site_hrn not in existing_hrns or \
-           (site_hrn, 'authority') not in existing_records:
-            try:
-                urn = hrn_to_urn(site_hrn, 'authority')
-                if not sfaImporter.AuthHierarchy.auth_exists(urn):
-                    sfaImporter.AuthHierarchy.create_auth(urn)
-                auth_info = sfaImporter.AuthHierarchy.get_auth_info(urn)
-                auth_record = RegAuthority()
-                auth_record.type='authority'
-                auth_record.hrn=site_hrn
-                auth_record.gid=auth_info.get_gid_object()
-                auth_record.pointer=site['site_id']
-                auth_record.authority=get_authority(site_hrn)
-                dbsession.add(auth_record)
-                dbsession.commit()
-                logger.info("Import: imported authority (site) : %s" % auth_record)  
-            except:
-                # if the site import fails then there is no point in trying to import the
-                # site's child records (node, slices, persons), so skip them.
-                logger.log_exc("Import: failed to import site. Skipping child records") 
-                continue 
-             
-        # import node records
-        for node_id in site['node_ids']:
-            if node_id not in nodes_dict:
-                continue 
-            node = nodes_dict[node_id]
-            site_auth = get_authority(site_hrn)
-            site_name = get_leaf(site_hrn)
-            hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
-            if len(hrn) > 64:
-                hrn = hrn[:64]
-            if hrn not in existing_hrns or \
-               (hrn, 'node') not in existing_records:
-                try:
-                    pkey = Keypair(create=True)
-                    urn = hrn_to_urn(hrn, 'node')
-                    node_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
-                    node_record = RegNode ()
-                    node_record.type='node'
-                    node_record.hrn=hrn
-                    node_record.gid=node_gid
-                    node_record.pointer =node['node_id']
-                    node_record.authority=get_authority(hrn)
-                    dbsession.add(node_record)
-                    dbsession.commit()
-                    logger.info("Import: imported node: %s" % node_record)  
-                except:
-                    logger.log_exc("Import: failed to import node") 
-                    
-
-        # import slices
-        for slice_id in site['slice_ids']:
-            if slice_id not in slices_dict:
-                continue 
-            slice = slices_dict[slice_id]
-            hrn = slicename_to_hrn(interface_hrn, slice['name'])
-            #slicename = slice['name'].split("_",1)[-1]
-            #slicename = _cleanup_string(slicename)
-            if hrn not in existing_hrns or \
-               (hrn, 'slice') not in existing_records:
-                try:
-                    pkey = Keypair(create=True)
-                    urn = hrn_to_urn(hrn, 'slice')
-                    slice_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
-                    slice_record = RegSlice ()
-                    slice_record.type='slice'
-                    slice_record.hrn=hrn
-                    slice_record.gid=slice_gid
-                    slice_record.pointer=slice['slice_id']
-                    slice_record.authority=get_authority(hrn)
-                    dbsession.add(slice_record)
-                    dbsession.commit()
-                    logger.info("Import: imported slice: %s" % slice_record)  
-                except:
-                    logger.log_exc("Import: failed to  import slice")
-
-        # import persons
-        for person_id in site['person_ids']:
-            if person_id not in persons_dict:
-                continue 
-            person = persons_dict[person_id]
-            hrn = email_to_hrn(site_hrn, person['email'])
-            if len(hrn) > 64:
-                hrn = hrn[:64]
-
-            # if user's primary key has changed then we need to update the 
-            # users gid by forcing an update here
-            old_keys = []
-            new_keys = []
-            if person_id in old_person_keys:
-                old_keys = old_person_keys[person_id]
-            if person_id in person_keys:
-                new_keys = person_keys[person_id]
-            update_record = False
-            for key in new_keys:
-                if key not in old_keys:
-                    update_record = True 
-
-            if hrn not in existing_hrns or \
-               (hrn, 'user') not in existing_records or update_record:
-                try:
-                    if 'key_ids' in person and person['key_ids']:
-                        key = new_keys[0]
-                        try:
-                            pkey = convert_public_key(key)
-                        except:
-                            logger.warn('unable to convert public key for %s' % hrn)
-                            pkey = Keypair(create=True)
-                    else:
-                        # the user has no keys. Creating a random keypair for the user's gid
-                        logger.warn("Import: person %s does not have a PL public key"%hrn)
-                        pkey = Keypair(create=True) 
-                    urn = hrn_to_urn(hrn, 'user')
-                    person_gid = sfaImporter.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
-                    person_record = RegUser ()
-                    person_record.type='user'
-                    person_record.hrn=hrn
-                    person_record.gid=person_gid
-                    person_record.pointer=person['person_id']
-                    person_record.authority=get_authority(hrn)
-                    dbsession.add (person_record)
-                    dbsession.commit()
-                    logger.info("Import: imported person: %s" % person_record)
-                except:
-                    logger.log_exc("Import: failed to import person.") 
-    
-    # remove stale records    
-    system_records = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
-    for (record_hrn, type) in existing_records.keys():
-        if record_hrn in system_records:
-            continue
-        
-        record = existing_records[(record_hrn, type)]
-        if record.peer_authority:
-            continue
-
-        # dont delete vini's internet2 placeholdder record
-        # normally this would be deleted becuase it does not have a plc record 
-        if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
-           record_hrn.endswith("internet2"):     
-            continue
-
-        found = False
-        
-        if type == 'authority':    
-            for site in sites:
-                site_hrn = interface_hrn + "." + site['login_base']
-                if site_hrn == record_hrn and site['site_id'] == record.pointer:
-                    found = True
-                    break
-
-        elif type == 'user':
-            login_base = get_leaf(get_authority(record_hrn))
-            username = get_leaf(record_hrn)
-            if login_base in sites_dict:
-                site = sites_dict[login_base]
-                for person in persons:
-                    tmp_username = person['email'].split("@")[0]
-                    alt_username = person['email'].split("@")[0].replace(".", "_").replace("+", "_")
-                    if username in [tmp_username, alt_username] and \
-                       site['site_id'] in person['site_ids'] and \
-                       person['person_id'] == record.pointer:
-                        found = True
-                        break
-        
-        elif type == 'slice':
-            slicename = hrn_to_pl_slicename(record_hrn)
-            for slice in slices:
-                if slicename == slice['name'] and \
-                   slice['slice_id'] == record.pointer:
-                    found = True
-                    break    
-        elif type == 'node':
-            login_base = get_leaf(get_authority(record_hrn))
-            nodename = Xrn.unescape(get_leaf(record_hrn))
-            if login_base in sites_dict:
-                site = sites_dict[login_base]
-                for node in nodes:
-                    tmp_nodename = node['hostname']
-                    if tmp_nodename == nodename and \
-                       node['site_id'] == site['site_id'] and \
-                       node['node_id'] == record.pointer:
-                        found = True
-                        break  
-        else:
-            continue 
-        
-        if not found:
-            try:
-                record_object = existing_records[(record_hrn, type)]
-                logger.info("Import: deleting record: %s" % record)
-                dbsession.delete(record_object)
-                dbsession.commit()
-            except:
-                logger.log_exc("Import: failded to delete record")                    
-    # save pub keys
-    logger.info('Import: saving current pub keys')
-    save_keys(keys_filename, person_keys)                
-        
-if __name__ == "__main__":
-    main()
diff --git a/sfa/importer/sfa-import.py b/sfa/importer/sfa-import.py
new file mode 100755 (executable)
index 0000000..aeddaeb
--- /dev/null
+++ b/sfa/importer/sfa-import.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+
+import sys
+
+from optparse import OptionParser
+
+from sfa.generic import Generic
+
+from sfa.util.config import Config
+from sfa.util.sfalogging import _SfaLogger
+
+from sfa.trust.hierarchy import Hierarchy
+
+from sfa.importer.sfaimporter import SfaImporter
+
+COMMAND=sys.argv[0]
+
+def main ():
+
+    config = Config()
+    logger = _SfaLogger(logfile='/var/log/sfa_import.log', loggername='importlog')
+    logger.setLevelFromOptVerbose(config.SFA_API_LOGLEVEL)
+    if not config.SFA_REGISTRY_ENABLED:
+        logger.critical("%s: need SFA_REGISTRY_ENABLED to run import" % COMMAND)
+
+    # testbed-neutral : create local certificates and the like
+    auth_hierarchy = Hierarchy ()
+    sfa_importer = SfaImporter(auth_hierarchy, logger)
+    # testbed-specific
+    testbed_importer = None
+    generic=Generic.the_flavour()
+    importer_class = generic.importer_class()
+    if importer_class:
+        logger.info ("Using flavour %s for importing (class %s)"%\
+                         (generic.flavour,importer_class.__name__))
+        testbed_importer = importer_class (auth_hierarchy, logger)
+
+    parser = OptionParser ()
+    sfa_importer.record_options (parser)
+    if testbed_importer:
+        testbed_importer.record_options (parser)
+
+    (options, args) = parser.parse_args ()
+    # no positional arguments are supported
+    if args:
+        parser.print_help()
+        sys.exit(1)
+
+    sfa_importer.run (options)
+    if testbed_importer:
+        testbed_importer.run (options)
+    
+
+if __name__ == '__main__':
+    main()
+
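
The contract a testbed-specific importer has to honour is exactly what sfa-import.py calls above: a constructor taking (auth_hierarchy, logger), record_options(parser), and run(options). A hypothetical skeleton for some other testbed (the class name is made up; the signatures mirror PlImporter):

    # hypothetical importer following the calling convention of sfa-import.py
    class MyTestbedImporter:

        def __init__(self, auth_hierarchy, logger):
            self.auth_hierarchy = auth_hierarchy
            self.logger = logger

        def record_options(self, parser):
            # register testbed-specific optparse options, if any
            pass

        def run(self, options):
            # populate the SFA registry (RegAuthority/RegUser/RegSlice/RegNode)
            # from the testbed's own database
            self.logger.info("MyTestbedImporter.run: nothing to do yet")

The matching flavour would then expose it by returning this class from its importer_class() hook, exactly as sfa/generic/pl.py does for PlImporter.
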
similarity index 84%
rename from sfa/importer/sfaImport.py
rename to sfa/importer/sfaimporter.py
index e2101fc..00e9908 100644 (file)
--- a/sfa/importer/sfaImport.py
+++ b/sfa/importer/sfaimporter.py
@@ -1,20 +1,15 @@
 #
-# The import tool assumes that the existing PLC hierarchy should all be part
-# of "planetlab.us" (see the root_auth and level1_auth variables below).
-#
 # Public keys are extracted from the users' SSH keys automatically and used to
 # create GIDs. This is relatively experimental as a custom tool had to be
 # written to perform conversion from SSH to OpenSSL format. It only supports
 # RSA keys at this time, not DSA keys.
 ##
 
-from sfa.util.sfalogging import _SfaLogger
 from sfa.util.xrn import get_authority, hrn_to_urn
 from sfa.util.plxrn import email_to_hrn
 from sfa.util.config import Config
 from sfa.trust.certificate import convert_public_key, Keypair
 from sfa.trust.trustedroots import TrustedRoots
-from sfa.trust.hierarchy import Hierarchy
 from sfa.trust.gid import create_uuid
 from sfa.storage.model import RegRecord, RegAuthority, RegUser
 from sfa.storage.alchemy import dbsession
@@ -43,15 +38,24 @@ def _cleanup_string(str):
     str = str.replace('"', "_")
     return str
 
-class sfaImport:
+class SfaImporter:
 
-    def __init__(self):
-       self.logger = _SfaLogger(logfile='/var/log/sfa_import.log', loggername='importlog')
-       self.AuthHierarchy = Hierarchy()
+    def __init__(self, auth_hierarchy, logger):
+       self.logger=logger
+       self.auth_hierarchy = auth_hierarchy
        self.config = Config()
        self.TrustedRoots = TrustedRoots(Config.get_trustedroots_dir(self.config))
        self.root_auth = self.config.SFA_REGISTRY_ROOT_AUTH
 
+    # record options into an OptionParser
+    def record_options (self, parser):
+       self.logger.info ("SfaImporter.record_options : to do")
+       pass
+
+    def run (self, options):
+       self.logger.info ("SfaImporter.run : no options used")
+       self.create_top_level_records()
+
     def create_top_level_records(self):
         """
         Create top level and interface records
@@ -72,7 +76,7 @@ class sfaImport:
 
         # add local root authority's cert  to trusted list
         self.logger.info("Import: adding " + interface_hrn + " to trusted list")
-        authority = self.AuthHierarchy.get_auth_info(interface_hrn)
+        authority = self.auth_hierarchy.get_auth_info(interface_hrn)
         self.TrustedRoots.add_gid(authority.get_gid_object())
 
     def create_top_level_auth_records(self, hrn):
@@ -87,9 +91,9 @@ class sfaImport:
             self.create_top_level_auth_records(parent_hrn)
 
         # ensure key and cert exists:
-        self.AuthHierarchy.create_top_level_auth(hrn)    
+        self.auth_hierarchy.create_top_level_auth(hrn)    
         # create the db record if it doesnt already exist    
-        auth_info = self.AuthHierarchy.get_auth_info(hrn)
+        auth_info = self.auth_hierarchy.get_auth_info(hrn)
         auth_record = RegAuthority()
         auth_record.type='authority'
         auth_record.hrn=hrn
@@ -106,11 +110,11 @@ class sfaImport:
         """
         hrn = self.config.SFA_INTERFACE_HRN + '.slicemanager'
         urn = hrn_to_urn(hrn, 'user')
-        if not self.AuthHierarchy.auth_exists(urn):
+        if not self.auth_hierarchy.auth_exists(urn):
             self.logger.info("Import: creating Slice Manager user")
-            self.AuthHierarchy.create_auth(urn)
+            self.auth_hierarchy.create_auth(urn)
 
-        auth_info = self.AuthHierarchy.get_auth_info(hrn)
+        auth_info = self.auth_hierarchy.get_auth_info(hrn)
         user_record = RegUser()
         user_record.type='user'
         user_record.hrn=hrn
@@ -128,11 +132,11 @@ class sfaImport:
         # just create certs for all sfa interfaces even if they
         # aren't enabled
         hrn = self.config.SFA_INTERFACE_HRN
-        auth_info = self.AuthHierarchy.get_auth_info(hrn)
+        auth_info = self.auth_hierarchy.get_auth_info(hrn)
         pkey = auth_info.get_pkey_object()
         for type in  [ 'authority+sa', 'authority+am', 'authority+sm', ]:
             urn = hrn_to_urn(hrn, type)
-            gid = self.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
+            gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
             # xxx this should probably use a RegAuthority, or a to-be-defined RegPeer object
             # but for now we have to preserve the authority+<> stuff
             interface_record = RegAuthority()
index e996f3b..1596cc3 100755 (executable)
--- a/sfa/server/sfa-start.py
+++ b/sfa/server/sfa-start.py
@@ -14,7 +14,7 @@
 # is up to date and accurate.
 #
 # 1) Import the existing planetlab database, creating the
-#    appropriate SFA records. This is done by running the "sfa-import-plc.py" tool.
+#    appropriate SFA records. This is done by running the "sfa-import.py" tool.
 #
 # 2) Create a "trusted_roots" directory and place the certificate of the root
 #    authority in that directory. Given the defaults in sfa-import-plc.py, this