Merge Master in geni-v3 conflict resolution
[sfa.git] / sfa / importer / plimporter.py
index a902d93..8d197b6 100644
@@ -1,32 +1,34 @@
+#
+# PlanetLab importer
+# 
+# requirements
+# 
+# read the planetlab database and update the local registry database accordingly
+# (in other words, with this testbed, the SFA registry is *not* authoritative)
+# so we update the following collections
+# . authorities                 (from pl sites)
+# . node                        (from pl nodes)
+# . users+keys                  (from pl persons and attached keys)
+#                       known limitation : *one* of the ssh keys is chosen at random here
+#                       xxx todo/check xxx at the very least, when a key is known to the registry 
+#                       and is still current in plc
+#                       then we should definitely make sure to keep that one in sfa...
+# . slice+researchers           (from pl slices and attached users)
+# 
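+#
+# import strategy, in a nutshell :
+# . mark every record currently in the registry as stale
+# . walk the PLC data, creating or refreshing records, and clear 'stale' on the ones in use
+# . finally delete whatever is still marked stale
+#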
+
 import os
 
 from sfa.util.config import Config
 from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
-from sfa.util.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename
 
 from sfa.trust.gid import create_uuid    
 from sfa.trust.certificate import convert_public_key, Keypair
 
 from sfa.storage.alchemy import dbsession
-from sfa.storage.model import RegRecord, RegAuthority, RegUser, RegSlice, RegNode
-
-from sfa.plc.plshell import PlShell    
-
-def load_keys(filename):
-    keys = {}
-    tmp_dict = {}
-    try:
-        execfile(filename, tmp_dict)
-        if 'keys' in tmp_dict:
-            keys = tmp_dict['keys']
-        return keys
-    except:
-        return keys
-
-def save_keys(filename, keys):
-    f = open(filename, 'w')
-    f.write("keys = %s" % str(keys))
-    f.close()
+from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
+
+from sfa.planetlab.plshell import PlShell    
+from sfa.planetlab.plxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_pl_slicename
 
 def _get_site_hrn(interface_hrn, site):
     # Hardcode 'internet2' into the hrn for sites hosting
@@ -45,294 +47,385 @@ class PlImporter:
         self.auth_hierarchy = auth_hierarchy
         self.logger=logger
 
-    def record_options (self, parser):
-        self.logger.debug ("PlImporter no options yet")
+    def add_options (self, parser):
+        # we don't have any options for now
         pass
 
-    def run (self, options):
-        # we don't have any options for now
-        config = Config ()
-        interface_hrn = config.SFA_INTERFACE_HRN
-        root_auth = config.SFA_REGISTRY_ROOT_AUTH
-        shell = PlShell (config)
+    # hrn hash is initialized from current db
+    # remember just-created records as we go
+    # xxx might make sense to add a UNIQUE constraint in the db itself
+    def remember_record_by_hrn (self, record):
+        tuple = (record.type, record.hrn)
+        if tuple in self.records_by_type_hrn:
+            self.logger.warning ("PlImporter.remember_record_by_hrn: duplicate (%s,%s)"%tuple)
+            return
+        self.records_by_type_hrn [ tuple ] = record
 
-        # create dict of all existing sfa records
-        existing_records = {}
-        existing_hrns = []
-        key_ids = []
-        for record in dbsession.query(RegRecord):
-            existing_records[ (record.hrn, record.type,) ] = record
-            existing_hrns.append(record.hrn) 
-            
-        # Get all plc sites
-        sites = shell.GetSites({'peer_id': None})
-        sites_dict = {}
-        for site in sites:
-            sites_dict[site['login_base']] = site 
-    
-        # Get all plc users
-        persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
-                                   ['person_id', 'email', 'key_ids', 'site_ids'])
-        persons_dict = {}
-        for person in persons:
-            persons_dict[person['person_id']] = person
-            key_ids.extend(person['key_ids'])
+    # ditto for pointer hash
+    def remember_record_by_pointer (self, record):
+        if record.pointer == -1:
+            self.logger.warning ("PlImporter.remember_record_by_pointer: pointer is void")
+            return
+        tuple = (record.type, record.pointer)
+        if tuple in self.records_by_type_pointer:
+            self.logger.warning ("PlImporter.remember_record_by_pointer: duplicate (%s,%s)"%tuple)
+            return
+        self.records_by_type_pointer [ ( record.type, record.pointer,) ] = record
 
-        # Get all public keys
-        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids})
-        keys_dict = {}
-        for key in keys:
-            keys_dict[key['key_id']] = key['key']
+    def remember_record (self, record):
+        self.remember_record_by_hrn (record)
+        self.remember_record_by_pointer (record)
 
-        # create a dict of person keys keyed on key_id 
-        keys_filename = config.config_path + os.sep + 'person_keys.py' 
-        old_person_keys = load_keys(keys_filename)
-        person_keys = {} 
-        for person in persons:
-            pubkeys = []
-            for key_id in person['key_ids']:
-                pubkeys.append(keys_dict[key_id])
-            person_keys[person['person_id']] = pubkeys
+    def locate_by_type_hrn (self, type, hrn):
+        return self.records_by_type_hrn.get ( (type, hrn), None)
 
-        # Get all plc nodes  
-        nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
-        nodes_dict = {}
-        for node in nodes:
-            nodes_dict[node['node_id']] = node
+    def locate_by_type_pointer (self, type, pointer):
+        return self.records_by_type_pointer.get ( (type, pointer), None)
 
-        # Get all plc slices
-        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name'])
-        slices_dict = {}
-        for slice in slices:
-            slices_dict[slice['slice_id']] = slice
+    # a convenience/helper function to see if a record is already known
+    # a former, broken, attempt (in 2.1-9) had been made
+    # to try and use 'pointer' as the first, most significant lookup key,
+    # the idea being to preserve existing records as much as possible, and thus
+    # to avoid creating a new gid in the case of a simple hrn rename
+    # however this of course doesn't work, as the gid depends on the hrn...
+    #def locate (self, type, hrn=None, pointer=-1):
+    #    if pointer!=-1:
+    #        attempt = self.locate_by_type_pointer (type, pointer)
+    #        if attempt : return attempt
+    #    if hrn is not None:
+    #        attempt = self.locate_by_type_hrn (type, hrn,)
+    #        if attempt : return attempt
+    #    return None
 
+    # keep this out of the way - it would otherwise make the run method a bit abstruse
+    def create_special_vini_record (self, interface_hrn):
         # special case for vini
         if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
             # create a fake internet2 site first
-            i2site = {'name': 'Internet2', 'abbreviated_name': 'I2',
-                        'login_base': 'internet2', 'site_id': -1}
+            i2site = {'name': 'Internet2', 'login_base': 'internet2', 'site_id': -1}
             site_hrn = _get_site_hrn(interface_hrn, i2site)
             # import if hrn is not in list of existing hrns or if the hrn exists
             # but its not a site record
-            if site_hrn not in existing_hrns or \
-               (site_hrn, 'authority') not in existing_records:
+            if ( 'authority', site_hrn, ) not in self.records_by_type_hrn:
                 urn = hrn_to_urn(site_hrn, 'authority')
                 if not self.auth_hierarchy.auth_exists(urn):
                     self.auth_hierarchy.create_auth(urn)
                 auth_info = self.auth_hierarchy.get_auth_info(urn)
-                auth_record = RegAuthority()
-                auth_record.type='authority'
-                auth_record.hrn=site_hrn
-                auth_record.gid=auth_info.get_gid_object()
-                auth_record.pointer=site['site_id']
-                auth_record.authority=get_authority(site_hrn)
+                auth_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
+                                           pointer=i2site['site_id'],
+                                           authority=get_authority(site_hrn))
+                auth_record.just_created()
                 dbsession.add(auth_record)
                 dbsession.commit()
                 self.logger.info("PlImporter: Imported authority (vini site) %s"%auth_record)
+                self.remember_record ( auth_record )
+
+    def run (self, options):
+        config = Config ()
+        interface_hrn = config.SFA_INTERFACE_HRN
+        root_auth = config.SFA_REGISTRY_ROOT_AUTH
+        shell = PlShell (config)
+
+        ######## retrieve all existing SFA objects
+        all_records = dbsession.query(RegRecord).all()
+
+        # create hash by (type,hrn) 
+        # we essentially use this to know if a given record is already known to SFA 
+        self.records_by_type_hrn = \
+            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
+        # create hash by (type,pointer) 
+        self.records_by_type_pointer = \
+            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
+                     if record.pointer != -1] )
+
+        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
+        for record in all_records: record.stale=True
+
+        ######## retrieve PLC data
+        # Get all plc sites
+        # retrieve only required stuff
+        sites = shell.GetSites({'peer_id': None, 'enabled' : True},
+                               ['site_id','login_base','node_ids','slice_ids','person_ids',])
+        # create a hash of sites by login_base
+#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
+        # Get all plc users
+        persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
+                                   ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
+        # create a hash of persons by person_id
+        persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
+        # also gather non-enabled user accounts so as to issue relevant warnings
+        disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
+        disabled_person_ids = [ person['person_id'] for person in disabled_persons ] 
+        # Get all plc public keys
+        # accumulate key ids for keys retrieval
+        key_ids = []
+        for person in persons:
+            key_ids.extend(person['key_ids'])
+        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
+                               'key_type': 'ssh'} )
+        # create a hash of keys by key_id
+        keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] ) 
+        # create a dict person_id -> [ (plc)keys ]
+        keys_by_person_id = {} 
+        for person in persons:
+            pubkeys = []
+            for key_id in person['key_ids']:
+                # by construction all the keys we fetched are ssh keys
+                # so gpg keys won't be in there
+                try:
+                    key = keys_by_id[key_id]
+                    pubkeys.append(key)
+                except:
+                    self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
+            keys_by_person_id[person['person_id']] = pubkeys
+        # Get all plc nodes  
+        nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
+        # create hash by node_id
+        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
+        # Get all plc slices
+        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids'])
+        # create hash by slice_id
+        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )
+
+        # isolate special vini case in separate method
+        self.create_special_vini_record (interface_hrn)
 
         # start importing 
         for site in sites:
             site_hrn = _get_site_hrn(interface_hrn, site)
-    
             # import if hrn is not in list of existing hrns or if the hrn exists
             # but its not a site record
-            if site_hrn not in existing_hrns or \
-               (site_hrn, 'authority') not in existing_records:
+            site_record=self.locate_by_type_hrn ('authority', site_hrn)
+            if not site_record:
                 try:
                     urn = hrn_to_urn(site_hrn, 'authority')
                     if not self.auth_hierarchy.auth_exists(urn):
                         self.auth_hierarchy.create_auth(urn)
                     auth_info = self.auth_hierarchy.get_auth_info(urn)
-                    auth_record = RegAuthority()
-                    auth_record.type='authority'
-                    auth_record.hrn=site_hrn
-                    auth_record.gid=auth_info.get_gid_object()
-                    auth_record.pointer=site['site_id']
-                    auth_record.authority=get_authority(site_hrn)
-                    dbsession.add(auth_record)
+                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
+                                               pointer=site['site_id'],
+                                               authority=get_authority(site_hrn))
+                    site_record.just_created()
+                    dbsession.add(site_record)
                     dbsession.commit()
-                    self.logger.info("PlImporter: imported authority (site) : %s" % auth_record)  
+                    self.logger.info("PlImporter: imported authority (site) : %s" % site_record) 
+                    self.remember_record (site_record)
                 except:
                     # if the site import fails then there is no point in trying to import the
                     # site's child records (node, slices, persons), so skip them.
-                    self.logger.log_exc("PlImporter: failed to import site. Skipping child records"
+                    self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn
                     continue 
+            else:
+                # xxx update the record ...
+                pass
+            site_record.stale=False
              
             # import node records
             for node_id in site['node_ids']:
-                if node_id not in nodes_dict:
+                try:
+                    node = nodes_by_id[node_id]
+                except:
+                    self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
                     continue 
-                node = nodes_dict[node_id]
                 site_auth = get_authority(site_hrn)
-                site_name = get_leaf(site_hrn)
-                hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
-                if len(hrn) > 64:
-                    hrn = hrn[:64]
-                if hrn not in existing_hrns or \
-                   (hrn, 'node') not in existing_records:
+                site_name = site['login_base']
+                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
+                # xxx this sounds suspicious
+                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
+                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
+                if not node_record:
                     try:
                         pkey = Keypair(create=True)
-                        urn = hrn_to_urn(hrn, 'node')
+                        urn = hrn_to_urn(node_hrn, 'node')
                         node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
-                        node_record = RegNode ()
-                        node_record.type='node'
-                        node_record.hrn=hrn
-                        node_record.gid=node_gid
-                        node_record.pointer =node['node_id']
-                        node_record.authority=get_authority(hrn)
+                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
+                                               pointer =node['node_id'],
+                                               authority=get_authority(node_hrn))
+                        node_record.just_created()
                         dbsession.add(node_record)
                         dbsession.commit()
                         self.logger.info("PlImporter: imported node: %s" % node_record)  
+                        self.remember_record (node_record)
                     except:
-                        self.logger.log_exc("PlImporter: failed to import node") 
-                    
+                        self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn) 
+                        continue
+                else:
+                    # xxx update the record ...
+                    pass
+                node_record.stale=False
+
+            site_pis=set()
+            # import persons
+            for person_id in site['person_ids']:
+                proceed=False
+                if person_id in persons_by_id:
+                    person=persons_by_id[person_id]
+                    proceed=True
+                elif person_id in disabled_person_ids:
+                    pass
+                else:
+                    self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
+                # make sure to NOT run this if anything is wrong
+                if not proceed: continue
+
+                person_hrn = email_to_hrn(site_hrn, person['email'])
+                # xxx suspicious again
+                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
+                person_urn = hrn_to_urn(person_hrn, 'user')
+
+                user_record = self.locate_by_type_hrn ( 'user', person_hrn)
+
+                # returns a tuple (pubkey, pkey) : pubkey is a plc key object, pkey a Keypair object
+                def init_person_key (person, plc_keys):
+                    pubkey=None
+                    if person['key_ids']:
+                        # arbitrarily pick the first key in the set (cf. the limitation noted in the header)
+                        pubkey = plc_keys[0]
+                        try:
+                            pkey = convert_public_key(pubkey['key'])
+                        except:
+                            self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
+                            pkey = Keypair(create=True)
+                    else:
+                        # the user has no keys. Creating a random keypair for the user's gid
+                        self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
+                        pkey = Keypair(create=True)
+                    return (pubkey, pkey)
+
+                # new person
+                try:
+                    plc_keys = keys_by_person_id.get(person['person_id'],[])
+                    if not user_record:
+                        (pubkey,pkey) = init_person_key (person, plc_keys )
+                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
+                        user_record = RegUser (hrn=person_hrn, gid=person_gid, 
+                                               pointer=person['person_id'], 
+                                               authority=get_authority(person_hrn),
+                                               email=person['email'])
+                        if pubkey: 
+                            user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
+                        else:
+                            self.logger.warning("No key found for user %s"%user_record)
+                        user_record.just_created()
+                        dbsession.add (user_record)
+                        dbsession.commit()
+                        self.logger.info("PlImporter: imported person: %s" % user_record)
+                        self.remember_record ( user_record )
+                    else:
+                        # update the record ?
+                        #
+                        # if a user key has changed then we need to update the
+                        # users gid by forcing an update here
+                        #
+                        # right now, SFA only has *one* key attached to a user, and this is
+                        # the key that the GID was made with
+                        # so the logic here is, we consider that things are OK (unchanged) if
+                        # all the SFA keys are present as PLC keys
+                        # otherwise we trigger the creation of a new gid from *some* plc key
+                        # and record this on the SFA side
+                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
+                        # key but this is not available on the myplc side for now
+                        # = or = it would be much better to support several keys in SFA but that
+                        # does not seem doable without a major overhaul in the data model as
+                        # a GID is attached to a hrn, but it's also linked to a key, so...
+                        # NOTE: with this logic, the first key entered in PLC remains the one
+                        # current in SFA until it is removed from PLC
+                        sfa_keys = user_record.reg_keys
+                        def sfa_key_in_list (sfa_key,plc_keys):
+                            for plc_key in plc_keys:
+                                if plc_key['key']==sfa_key.key:
+                                    return True
+                            return False
+                        # are all the SFA keys known to PLC ?
+                        new_keys=False
+                        if not sfa_keys and plc_keys:
+                            new_keys=True
+                        else: 
+                            for sfa_key in sfa_keys:
+                                if not sfa_key_in_list (sfa_key,plc_keys):
+                                    new_keys = True
+                        if new_keys:
+                            (pubkey,pkey) = init_person_key (person, plc_keys)
+                            person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
+                            person_gid.set_email(person['email'])
+                            if not pubkey:
+                                user_record.reg_keys=[]
+                            else:
+                                user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
+                            user_record.gid = person_gid
+                            user_record.just_updated()
+                            self.logger.info("PlImporter: updated person: %s" % user_record)
+                    user_record.email = person['email']
+                    dbsession.commit()
+                    user_record.stale=False
+                    # accumulate PIs - PLCAPI has a limitation : when someone has the PI role,
+                    # it applies to all the sites she is in
+                    # PI is coded with role_id==20
+                    if 20 in person['role_ids']:
+                        site_pis.add (user_record)
+                except:
+                    self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
+    
+            # maintain the list of PIs for a given site
+            site_record.reg_pis = list(site_pis)
+            dbsession.commit()
 
             # import slices
             for slice_id in site['slice_ids']:
-                if slice_id not in slices_dict:
-                    continue 
-                slice = slices_dict[slice_id]
-                hrn = slicename_to_hrn(interface_hrn, slice['name'])
-                if hrn not in existing_hrns or \
-                   (hrn, 'slice') not in existing_records:
+                try:
+                    slice = slices_by_id[slice_id]
+                except:
+                    self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
+                slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
+                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
+                if not slice_record:
                     try:
                         pkey = Keypair(create=True)
-                        urn = hrn_to_urn(hrn, 'slice')
+                        urn = hrn_to_urn(slice_hrn, 'slice')
                         slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
-                        slice_record = RegSlice ()
-                        slice_record.type='slice'
-                        slice_record.hrn=hrn
-                        slice_record.gid=slice_gid
-                        slice_record.pointer=slice['slice_id']
-                        slice_record.authority=get_authority(hrn)
+                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
+                                                 pointer=slice['slice_id'],
+                                                 authority=get_authority(slice_hrn))
+                        slice_record.just_created()
                         dbsession.add(slice_record)
                         dbsession.commit()
                         self.logger.info("PlImporter: imported slice: %s" % slice_record)  
+                        self.remember_record ( slice_record )
                     except:
-                        self.logger.log_exc("PlImporter: failed to  import slice")
+                        self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
+                else:
+                    # update the pointer if it has changed
+                    if slice_id != slice_record.pointer:
+                        self.logger.info("updating record (slice) pointer")
+                        slice_record.pointer = slice_id
+                        dbsession.commit()             
+                    # xxx update the record ...
+                    #self.logger.warning ("Slice update not yet implemented")
+                # record current users affiliated with the slice
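+                # note : users that could not be imported above (e.g. disabled accounts) resolve to None here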
+                slice_record.reg_researchers = \
+                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
+                dbsession.commit()
+                slice_record.stale=False
 
-            # import persons
-            for person_id in site['person_ids']:
-                if person_id not in persons_dict:
-                    continue 
-                person = persons_dict[person_id]
-                hrn = email_to_hrn(site_hrn, person['email'])
-                if len(hrn) > 64:
-                    hrn = hrn[:64]
-    
-                # if user's primary key has changed then we need to update the 
-                # users gid by forcing an update here
-                old_keys = []
-                new_keys = []
-                if person_id in old_person_keys:
-                    old_keys = old_person_keys[person_id]
-                if person_id in person_keys:
-                    new_keys = person_keys[person_id]
-                update_record = False
-                for key in new_keys:
-                    if key not in old_keys:
-                        update_record = True 
-    
-                if hrn not in existing_hrns or \
-                   (hrn, 'user') not in existing_records or update_record:
-                    try:
-                        if 'key_ids' in person and person['key_ids']:
-                            key = new_keys[0]
-                            try:
-                                pkey = convert_public_key(key)
-                            except:
-                                self.logger.warn('unable to convert public key for %s' % hrn)
-                                pkey = Keypair(create=True)
-                        else:
-                            # the user has no keys. Creating a random keypair for the user's gid
-                            self.logger.warn("PlImporter: person %s does not have a PL public key"%hrn)
-                            pkey = Keypair(create=True) 
-                        urn = hrn_to_urn(hrn, 'user')
-                        person_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
-                        person_record = RegUser ()
-                        person_record.type='user'
-                        person_record.hrn=hrn
-                        person_record.gid=person_gid
-                        person_record.pointer=person['person_id']
-                        person_record.authority=get_authority(hrn)
-                        dbsession.add (person_record)
-                        dbsession.commit()
-                        self.logger.info("PlImporter: imported person: %s" % person_record)
-                    except:
-                        self.logger.log_exc("PlImporter: failed to import person.") 
-    
-        # remove stale records    
-        system_records = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
-        for (record_hrn, type) in existing_records.keys():
-            if record_hrn in system_records:
-                continue
-            
-            record = existing_records[(record_hrn, type)]
+        ### remove stale records
+        # special records must be preserved
+        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
+        for record in all_records: 
+            if record.hrn in system_hrns: 
+                record.stale=False
             if record.peer_authority:
-                continue
-    
-            # dont delete vini's internet2 placeholdder record
-            # normally this would be deleted becuase it does not have a plc record 
+                record.stale=False
             if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
-               record_hrn.endswith("internet2"):     
-                continue
-    
-            found = False
-            
-            if type == 'authority':    
-                for site in sites:
-                    site_hrn = interface_hrn + "." + site['login_base']
-                    if site_hrn == record_hrn and site['site_id'] == record.pointer:
-                        found = True
-                        break
-
-            elif type == 'user':
-                login_base = get_leaf(get_authority(record_hrn))
-                username = get_leaf(record_hrn)
-                if login_base in sites_dict:
-                    site = sites_dict[login_base]
-                    for person in persons:
-                        tmp_username = person['email'].split("@")[0]
-                        alt_username = person['email'].split("@")[0].replace(".", "_").replace("+", "_")
-                        if username in [tmp_username, alt_username] and \
-                           site['site_id'] in person['site_ids'] and \
-                           person['person_id'] == record.pointer:
-                            found = True
-                            break
-        
-            elif type == 'slice':
-                slicename = hrn_to_pl_slicename(record_hrn)
-                for slice in slices:
-                    if slicename == slice['name'] and \
-                       slice['slice_id'] == record.pointer:
-                        found = True
-                        break    
-            elif type == 'node':
-                login_base = get_leaf(get_authority(record_hrn))
-                nodename = Xrn.unescape(get_leaf(record_hrn))
-                if login_base in sites_dict:
-                    site = sites_dict[login_base]
-                    for node in nodes:
-                        tmp_nodename = node['hostname']
-                        if tmp_nodename == nodename and \
-                           node['site_id'] == site['site_id'] and \
-                           node['node_id'] == record.pointer:
-                            found = True
-                            break  
-            else:
-                continue 
-        
-            if not found:
-                try:
-                    record_object = existing_records[(record_hrn, type)]
-                    self.logger.info("PlImporter: deleting record: %s" % record)
-                    dbsession.delete(record_object)
-                    dbsession.commit()
-                except:
-                    self.logger.log_exc("PlImporter: failded to delete record")                    
+                record.hrn.endswith("internet2"):
+                record.stale=False
 
-        # save pub keys
-        self.logger.info('Import: saving current pub keys')
-        save_keys(keys_filename, person_keys)                
-        
+        for record in all_records:
+            try:
+                stale = record.stale
+            except:
+                stale=True
+                self.logger.warning("stale not found with %s"%record)
+            if stale:
+                self.logger.info("PlImporter: deleting stale record: %s" % record)
+                dbsession.delete(record)
+                dbsession.commit()