from datetime import datetime

from sqlalchemy import or_, and_
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import Table, Column, MetaData, join, ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy.orm import column_property
from sqlalchemy.orm import object_mapper
from sqlalchemy.orm import validates
from sqlalchemy.ext.declarative import declarative_base

from sfa.storage.record import Record
from sfa.util.sfalogging import logger
from sfa.util.sfatime import utcparse, datetime_to_string
from sfa.util.xml import XML
from sfa.util.py23 import StringType

from sfa.trust.gid import GID

##############################
Base = declarative_base()

####################
# dicts vs objects
####################
# historically the front end to the db dealt with dicts, so the code was only dealing with dicts
# sqlalchemy however offers an object interface, meaning that you write obj.id instead of obj['id']
# which is admittedly much nicer
# however we still need to deal with dictionaries if only for the xmlrpc layer
#
# here are a few utilities for this
#
# (*) first off, when an old piece of code needs to be used as-is, if only temporarily, the simplest trick
# is to use obj.__dict__
# this behaves exactly like required, i.e. obj.__dict__['field']='new value' does change obj.field
# however this depends on sqlalchemy's implementation so it should be avoided
#
# (*) second, when an object needs to be exposed to the xmlrpc layer, we need to convert it into a dict
# remember though that writing the resulting dictionary won't change the object
# essentially obj.__dict__ would be fine too, except that we want to discard alchemy private keys starting with '_'
# 2 ways are provided for that:
# . dict(obj)
# . obj.todict()
# the former dict(obj) relies on __iter__() and next() below, and does not rely on the fields names
# although it seems to work fine, I've found cases where it issues a weird python error that I could not get right
# so the latter obj.todict() seems more reliable but more hacky as it relies on the form of fields,
# so this can probably be improved
#
# (*) finally for converting a dictionary into an sqlalchemy object, we provide
# obj.load_from_dict(dict)


class AlchemyObj(Record):
    """
    Mixin for sqlalchemy-mapped records: iterating an instance yields
    (column_name, value) pairs, so that dict(obj) produces a plain dict
    of the mapped columns.
    """

    def __iter__(self):
        # iterate over the mapped columns; __next__() below turns each
        # column into a (name, value) pair
        self._i = iter(object_mapper(self).columns)
        return self

    def __next__(self):
        # python3 iteration protocol; the builtin next() also works on
        # python2 iterators, so this is py2/py3 neutral
        n = next(self._i).name
        return n, getattr(self, n)

    # keep the python2 spelling as an alias so legacy callers that
    # invoke obj.next() directly keep working
    next = __next__


##############################
# various kinds of records are implemented as an inheritance hierarchy
# RegRecord is the base class for all actual variants
# a first draft was using 'type' as the discriminator for the inheritance
# but we had to define another more internal column (classtype) so we
# accomodate variants in types like authority+am and the like
class RegRecord(Base, AlchemyObj):
    """
    Base class for all registry records; subclasses are discriminated
    on the 'classtype' column (polymorphic_on below).
    """
    __tablename__ = 'records'
    record_id = Column(Integer, primary_key=True)
    # this is the discriminator that tells which class to use
    classtype = Column(String)
    # in a first version type was the discriminator
    # but that could not accomodate for 'authority+sa' and the like
    type = Column(String)
    hrn = Column(String)
    gid = Column(String)
    authority = Column(String)
    peer_authority = Column(String)
    pointer = Column(Integer, default=-1)
    date_created = Column(DateTime)
    last_updated = Column(DateTime)
    # use the 'classtype' column to decide which subclass the object is of
    __mapper_args__ = {'polymorphic_on': classtype}

    fields = ['type', 'hrn', 'gid', 'authority', 'peer_authority']

    def __init__(self, type=None, hrn=None, gid=None, authority=None,
                 peer_authority=None, pointer=None, dict=None):
        # gid may be passed either as an already-serialized string or as
        # a GID-like object exposing save_to_string()
        if type:
            self.type = type
        if hrn:
            self.hrn = hrn
        if gid:
            if isinstance(gid, StringType):
                self.gid = gid
            else:
                self.gid = gid.save_to_string(save_parents=True)
        if authority:
            self.authority = authority
        if peer_authority:
            self.peer_authority = peer_authority
        if pointer:
            self.pointer = pointer
        if dict:
            self.load_from_dict(dict)

    def __repr__(self):
        # NOTE(review): the original body of this method was garbled in this
        # copy of the file; reconstructed minimal version -- confirm against
        # the upstream sfa/storage/model.py
        return "<Record id={}, type={}, hrn={}, authority={}>".format(
            self.record_id, self.type, self.hrn, self.authority)


####################
# NOTE(review): the two association tables below and the RegAuthority class
# header were lost when this file got mangled; they are reconstructed here
# from the surviving references (slice_researcher_table, self.reg_pis,
# the 'reg_authorities_as_pi' backref, and " name={}>" in __repr__) --
# confirm against the upstream sfa/storage/model.py

# authorities <-> their PI users (many-to-many)
authority_pi_table = Table(
    'authority_pi', Base.metadata,
    Column('authority_id', Integer, ForeignKey('records.record_id')),
    Column('pi_id', Integer, ForeignKey('records.record_id')),
)

# slices <-> their researchers (many-to-many)
slice_researcher_table = Table(
    'slice_researcher', Base.metadata,
    Column('slice_id', Integer, ForeignKey('records.record_id')),
    Column('researcher_id', Integer, ForeignKey('records.record_id')),
)


####################
class RegAuthority(RegRecord):
    __tablename__ = 'authorities'
    __mapper_args__ = {'polymorphic_identity': 'authority'}
    record_id = Column(Integer, ForeignKey("records.record_id"),
                       primary_key=True)
    #### extensions come here
    name = Column('name', String)
    reg_pis = relationship(
        'RegUser',
        secondary=authority_pi_table,
        primaryjoin=RegRecord.record_id == authority_pi_table.c.authority_id,
        secondaryjoin=RegRecord.record_id == authority_pi_table.c.pi_id,
        backref='reg_authorities_as_pi',
    )

    def __init__(self, **kwds):
        if 'name' in kwds:
            self.name = kwds.pop('name')
        if 'type' not in kwds:
            kwds['type'] = 'authority'
        RegRecord.__init__(self, **kwds)

    def __repr__(self):
        result = RegRecord.__repr__(self).replace("Record", "Authority")
        # bug fix: str.replace returns a new string (strings are immutable);
        # the garbled original discarded the return value
        result = result.replace(">", " name={}>".format(self.name))
        return result

    def update_pis(self, pi_hrns, dbsession):
        """Replace this authority's PIs with the users matching pi_hrns."""
        # strip that in case we have words
        pi_hrns = [x.strip() for x in pi_hrns]
        request = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns))
        logger.info("RegAuthority.update_pis: %d incoming pis, %d matches found"
                    % (len(pi_hrns), request.count()))
        # reuse the query instead of issuing the identical one a second time
        self.reg_pis = request.all()


####################
class RegSlice(RegRecord):
    __tablename__ = 'slices'
    __mapper_args__ = {'polymorphic_identity': 'slice'}
    record_id = Column(Integer, ForeignKey("records.record_id"),
                       primary_key=True)
    #### extensions come here
    reg_researchers = relationship(
        'RegUser',
        secondary=slice_researcher_table,
        primaryjoin=RegRecord.record_id == slice_researcher_table.c.slice_id,
        secondaryjoin=RegRecord.record_id == slice_researcher_table.c.researcher_id,
        backref='reg_slices_as_researcher',
    )

    def __init__(self, **kwds):
        if 'type' not in kwds:
            kwds['type'] = 'slice'
        RegRecord.__init__(self, **kwds)

    def __repr__(self):
        return RegRecord.__repr__(self).replace("Record", "Slice")

    def update_researchers(self, researcher_hrns, dbsession):
        """Replace this slice's researchers with users matching researcher_hrns."""
        # strip that in case we have words
        researcher_hrns = [x.strip() for x in researcher_hrns]
        request = dbsession.query(RegUser).filter(
            RegUser.hrn.in_(researcher_hrns))
        logger.info("RegSlice.update_researchers: %d incoming researchers, %d matches found"
                    % (len(researcher_hrns), request.count()))
        # reuse the query instead of issuing the identical one a second time
        self.reg_researchers = request.all()

    def get_pis(self):
        """
        when dealing with credentials, we need to retrieve the PIs attached
        to a slice
        WARNING: with the move to passing dbsessions around, we face a glitch
        here because this helper function is called from the trust/ area
        without a dbsession at hand
        """
        from sqlalchemy.orm import sessionmaker
        Session = sessionmaker()
        # recover the session this instance is attached to
        dbsession = Session.object_session(self)
        from sfa.util.xrn import get_authority
        authority_hrn = get_authority(self.hrn)
        auth_record = dbsession.query(RegAuthority)\
                               .filter_by(hrn=authority_hrn).first()
        return auth_record.reg_pis

    @validates('expires')
    def validate_expires(self, key, incoming):
        # NOTE(review): relies on validate_datetime being provided by a part
        # of AlchemyObj/Record not visible in this copy -- confirm
        return self.validate_datetime(key, incoming)


####################
class RegNode(RegRecord):
    __tablename__ = 'nodes'
    __mapper_args__ = {'polymorphic_identity': 'node'}
    record_id = Column(Integer, ForeignKey("records.record_id"),
                       primary_key=True)

    def __init__(self, **kwds):
        if 'type' not in kwds:
            kwds['type'] = 'node'
        RegRecord.__init__(self, **kwds)

    def __repr__(self):
        return RegRecord.__repr__(self).replace("Record", "Node")


####################
class RegUser(RegRecord):
    __tablename__ = 'users'
    # these objects will have type='user' in the records table
    __mapper_args__ = {'polymorphic_identity': 'user'}
    record_id = Column(Integer, ForeignKey("records.record_id"),
                       primary_key=True)
    #### extensions come here
    email = Column('email', String)
    # can't use name 'keys' here because when loading from xml we're getting
    # a 'keys' tag, and assigning a list of strings in a reference column
    # like this crashes
    reg_keys = relationship(
        'RegKey',
        backref='reg_user',
        cascade="all, delete, delete-orphan",
    )

    # so we can use RegUser (email=.., hrn=..) and the like
    def __init__(self, **kwds):
        # handle local settings
        if 'email' in kwds:
            self.email = kwds.pop('email')
        if 'type' not in kwds:
            kwds['type'] = 'user'
        RegRecord.__init__(self, **kwds)

    # append stuff at the end of the record __repr__
    def __repr__(self):
        result = RegRecord.__repr__(self).replace("Record", "User")
        # bug fix: str.replace returns a new string (strings are immutable);
        # the original discarded the return value, so the email never showed
        result = result.replace(">", " email={}>".format(self.email))
        return result

    @validates('email')
    def validate_email(self, key, address):
        # NOTE(review): assert is stripped under python -O; kept as-is so the
        # exception type callers may catch (AssertionError) does not change
        assert '@' in address
        return address


####################
# xxx tocheck : not sure about eager loading of this one
# meaning, when querying the whole records, we expect there should
# be a single query to fetch all the keys
# or, is it enough that we issue a single query to retrieve all the keys
class RegKey(Base):
    __tablename__ = 'keys'
    key_id = Column(Integer, primary_key=True)
    record_id = Column(Integer, ForeignKey("records.record_id"))
    key = Column(String)
    pointer = Column(Integer, default=-1)

    def __init__(self, key, pointer=None):
        self.key = key
        if pointer:
            self.pointer = pointer

    def __repr__(self):
        # NOTE(review): the original body was garbled in this copy
        # ('result = ":}'); reconstructed minimal version -- confirm against
        # the upstream sfa/storage/model.py
        result = "<key id={} key={}".format(self.key_id, self.key)
        try:
            result += " user={}".format(self.reg_user.record_id)
        except Exception:
            result += " no-user"
        result += ">"
        return result


####################
# for each record type, the relationship attributes that get synthesized into
# flat 'reg-*' fields when records are exposed to the outside world
# so after that, an 'authority' record will e.g. have a 'reg-pis' field
# with the hrns of its pi-users
augment_map = {
    'authority': {'reg-pis': 'reg_pis', },
    'slice': {'reg-researchers': 'reg_researchers', },
    'user': {'reg-pi-authorities': 'reg_authorities_as_pi',
             'reg-slices': 'reg_slices_as_researcher', },
}


# xxx mystery
# the way we use sqlalchemy might be a little wrong
# in any case what has been observed is that (Reg)Records as returned by an
# sqlalchemy query not always have their __dict__ properly adjusted
# typically a RegAuthority object would have its object.name set properly, but
# object.__dict__ has no 'name' key
# which is an issue because we rely on __dict__ for many things, in particular
# this is what gets exposed to the drivers (this is historical and dates back
# before sqlalchemy)
# so it is recommended to always run this function that will make sure
# that such built-in fields are properly set in __dict__ too
#
def augment_with_sfa_builtins(local_record):
    """
    Mutate local_record in place: add 'reg-urn', 'reg-keys' (for users),
    the type-dependent 'reg-*' relationship fields from augment_map, and
    make sure built-in fields appear in __dict__.
    """
    # don't ruin the import of that file in a client world
    from sfa.util.xrn import Xrn
    # add a 'urn' field
    setattr(local_record, 'reg-urn',
            Xrn(xrn=local_record.hrn, type=local_record.type).urn)
    # users have keys and this is needed to synthesize 'users' sent over
    # to CreateSliver
    fields_to_check = []
    if local_record.type == 'user':
        user_keys = [key.key for key in local_record.reg_keys]
        setattr(local_record, 'reg-keys', user_keys)
        fields_to_check = ['email']
    elif local_record.type == 'authority':
        fields_to_check = ['name']
    for field in fields_to_check:
        if field not in local_record.__dict__:
            logger.debug("augment_with_sfa_builtins: hotfixing missing '{}' in {}"
                         .format(field, local_record.hrn))
            local_record.__dict__[field] = getattr(local_record, field)
    # search in map according to record type
    type_map = augment_map.get(local_record.type, {})
    # use type-dep. map to do the job
    for (field_name, attribute) in type_map.items():
        # get related objects
        related_records = getattr(local_record, attribute, [])
        hrns = [r.hrn for r in related_records]
        setattr(local_record, field_name, hrns)