from sfa.trust.gid import create_uuid
from sfa.trust.certificate import convert_public_key, Keypair
-from sfa.storage.alchemy import dbsession
+# Using the global alchemy session here is fine,
+# as the importer runs as a standalone one-shot process.
+from sfa.storage.alchemy import global_dbsession
from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
-from sfa.dummy.dummyshell import PlShell
+from sfa.dummy.dummyshell import DummyShell
from sfa.dummy.dummyxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_dummy_slicename
def _get_site_hrn(interface_hrn, site):
shell = DummyShell (config)
######## retrieve all existing SFA objects
- all_records = dbsession.query(RegRecord).all()
+ all_records = global_dbsession.query(RegRecord).all()
# create hash by (type,hrn)
# we essentially use this to know if a given record is already known to SFA
# Get all dummy TB public keys
keys = []
for user in users:
- keys.extend(user['keys'])
+ if 'keys' in user:
+ keys.extend(user['keys'])
# create a dict user_id -> [ keys ]
keys_by_person_id = {}
for user in users:
- keys_by_person_id[user['user_id']] = user['keys']
+ if 'keys' in user:
+ keys_by_person_id[user['user_id']] = user['keys']
# Get all dummy TB nodes
nodes = shell.GetNodes()
# create hash by node_id
pointer= -1,
authority=get_authority(site_hrn))
site_record.just_created()
- dbsession.add(site_record)
- dbsession.commit()
+ global_dbsession.add(site_record)
+ global_dbsession.commit()
self.logger.info("DummyImporter: imported authority (site) : %s" % site_record)
self.remember_record (site_record)
except:
pointer =node['node_id'],
authority=get_authority(node_hrn))
node_record.just_created()
- dbsession.add(node_record)
- dbsession.commit()
+ global_dbsession.add(node_record)
+ global_dbsession.commit()
self.logger.info("DummyImporter: imported node: %s" % node_record)
self.remember_record (node_record)
except:
for user in users:
user_hrn = email_to_hrn(site_hrn, user['email'])
# xxx suspicious again
- if len(person_hrn) > 64: person_hrn = person_hrn[:64]
+ if len(user_hrn) > 64: user_hrn = user_hrn[:64]
user_urn = hrn_to_urn(user_hrn, 'user')
user_record = self.locate_by_type_hrn ( 'user', user_hrn)
else:
self.logger.warning("No key found for user %s"%user_record)
user_record.just_created()
- dbsession.add (user_record)
- dbsession.commit()
+ global_dbsession.add (user_record)
+ global_dbsession.commit()
self.logger.info("DummyImporter: imported person: %s" % user_record)
self.remember_record ( user_record )
else:
user_record.reg_keys=[ RegKey (pubkey)]
self.logger.info("DummyImporter: updated person: %s" % user_record)
- user_record.email = person['email']
- dbsession.commit()
+ user_record.email = user['email']
+ global_dbsession.commit()
user_record.stale=False
except:
self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))
# import slices
for slice in slices:
- slice_hrn = slicename_to_hrn(interface_hrn, slice['slice_ame'])
+ slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name'])
slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
if not slice_record:
try:
pointer=slice['slice_id'],
authority=get_authority(slice_hrn))
slice_record.just_created()
- dbsession.add(slice_record)
- dbsession.commit()
+ global_dbsession.add(slice_record)
+ global_dbsession.commit()
self.logger.info("DummyImporter: imported slice: %s" % slice_record)
self.remember_record ( slice_record )
except:
# record current users affiliated with the slice
slice_record.reg_researchers = \
[ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
- dbsession.commit()
+ global_dbsession.commit()
slice_record.stale=False
### remove stale records
self.logger.warning("stale not found with %s"%record)
if stale:
self.logger.info("DummyImporter: deleting stale record: %s" % record)
- dbsession.delete(record)
- dbsession.commit()
+ global_dbsession.delete(record)
+ global_dbsession.commit()