X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=sfa%2Fstorage%2Fmodel.py;h=923576e9609d6779d3dffa71ffa5df44a99f60cb;hb=30d9951e075d93127c3909dcb41be09b420b3525;hp=0b7ddb745c063787788429c2fffdb2a8ec804018;hpb=2cf6d0e7089437ec8ad8e5fdfac26760e0c7a331;p=sfa.git

diff --git a/sfa/storage/model.py b/sfa/storage/model.py
index 0b7ddb74..923576e9 100644
--- a/sfa/storage/model.py
+++ b/sfa/storage/model.py
@@ -1,6 +1,6 @@
-from types import StringTypes
 from datetime import datetime
 
+from sqlalchemy import or_, and_
 from sqlalchemy import Column, Integer, String, DateTime
 from sqlalchemy import Table, Column, MetaData, join, ForeignKey
 from sqlalchemy.orm import relationship, backref
@@ -9,14 +9,16 @@ from sqlalchemy.orm import object_mapper
 from sqlalchemy.orm import validates
 from sqlalchemy.ext.declarative import declarative_base
 
+from sfa.storage.record import Record
 from sfa.util.sfalogging import logger
 from sfa.util.sfatime import utcparse, datetime_to_string
 from sfa.util.xml import XML
+from sfa.util.py23 import StringType
 
 from sfa.trust.gid import GID
 
 ##############################
-Base=declarative_base()
+Base = declarative_base()
 
 ####################
 # dicts vs objects
@@ -46,86 +48,14 @@ Base=declarative_base()
 # (*) finally for converting a dictionary into an sqlalchemy object, we provide
 # obj.load_from_dict(dict)
 
-class AlchemyObj:
+class AlchemyObj(Record):
     def __iter__(self):
         self._i = iter(object_mapper(self).columns)
         return self
     def next(self):
         n = self._i.next().name
         return n, getattr(self, n)
-    def todict (self):
-        d=self.__dict__
-        keys=[k for k in d.keys() if not k.startswith('_')]
-        return dict ( [ (k,d[k]) for k in keys ] )
-    def load_from_dict (self, d):
-        for (k,v) in d.iteritems():
-            # experimental
-            if isinstance(v, StringTypes) and v.lower() in ['true']: v=True
-            if isinstance(v, StringTypes) and v.lower() in ['false']: v=False
-            setattr(self,k,v)
-    def validate_datetime (self, key, incoming):
-        if isinstance (incoming, datetime): return incoming
-        elif isinstance (incoming, (int,float)): return datetime.fromtimestamp (incoming)
-
-    # in addition we provide convenience for converting to and from xml records
-    # for this purpose only, we need the subclasses to define 'fields' as either
-    # a list or a dictionary
-    def xml_fields (self):
-        fields=self.fields
-        if isinstance(fields,dict): fields=fields.keys()
-        return fields
-
-    def save_as_xml (self):
-        # xxx not sure about the scope here
-        input_dict = dict( [ (key, getattr(self.key), ) for key in self.xml_fields() if getattr(self,key,None) ] )
-        xml_record=XML("<record />")
-        xml_record.parse_dict (input_dict)
-        return xml_record.toxml()
-
-    def dump(self, format=None, dump_parents=False):
-        if not format:
-            format = 'text'
-        else:
-            format = format.lower()
-        if format == 'text':
-            self.dump_text(dump_parents)
-        elif format == 'xml':
-            print self.save_to_string()
-        elif format == 'simple':
-            print self.dump_simple()
-        else:
-            raise Exception, "Invalid format %s" % format
-
-    def dump_text(self, dump_parents=False):
-        # print core fields in this order
-        core_fields = [ 'hrn', 'type', 'authority', 'date_created', 'last_updated', 'gid', ]
-        print "".join(['=' for i in range(40)])
-        print "RECORD"
-        print "    hrn:", self.hrn
-        print "    type:", self.type
-        print "    authority:", self.authority
-        date_created = utcparse(datetime_to_string(self.date_created))
-        print "    date created:", date_created
-        last_updated = utcparse(datetime_to_string(self.last_updated))
-        print "    last updated:", last_updated
-        print "    gid:"
-        print self.get_gid_object().dump_string(8, dump_parents)
-
-        # print remaining fields
-        for attrib_name in dir(self):
-            attrib = getattr(self, attrib_name)
-            # skip internals
-            if attrib_name.startswith('_'): continue
-            # skip core fields
-            if attrib_name in core_fields: continue
-            # skip callables
-            if callable (attrib): continue
-            print "    %s: %s" % (attrib_name, attrib)
-
-    def dump_simple(self):
-        return "%s"%self
-
 #    # only intended for debugging
 #    def inspect (self, logger, message=""):
 #        logger.info("%s -- Inspecting AlchemyObj -- attrs"%message)
@@ -145,7 +75,7 @@ class AlchemyObj:
 # but we had to define another more internal column (classtype) so we
 # accomodate variants in types like authority+am and the like
 
-class RegRecord (Base,AlchemyObj):
+class RegRecord(Base, AlchemyObj):
     __tablename__ = 'records'
     record_id = Column (Integer, primary_key=True)
     # this is the discriminator that tells which class to use
@@ -169,7 +99,7 @@ class RegRecord (Base,AlchemyObj):
         if type:                         self.type=type
         if hrn:                          self.hrn=hrn
         if gid:
-            if isinstance(gid, StringTypes): self.gid=gid
+            if isinstance(gid, StringType): self.gid=gid
             else:                            self.gid=gid.save_to_string(save_parents=True)
         if authority:                    self.authority=authority
         if peer_authority:               self.peer_authority=peer_authority
@@ -177,69 +107,124 @@ class RegRecord (Base,AlchemyObj):
         if dict:                self.load_from_dict (dict)
 
     def __repr__(self):
-        result="<Record id=%s, type=%s, hrn=%s, authority=%s, pointer=%s" % \
-                (self.record_id, self.type, self.hrn, self.authority, self.pointer)
-        result += ">"
-        return result
+        result = "<Record id={}, type={}, hrn={}, authority={}, pointer={}"\
+            .format(self.record_id, self.type, self.hrn, self.authority, self.pointer)
+        result += ">"
+        return result
 
-####################
-# slice x user (researchers) association
-slice_researcher_table = \
-    Table ( 'slice_researcher', Base.metadata,
-            Column ('slice_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
-            Column ('researcher_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
-            )
+####################
+class RegAuthority(RegRecord):
+    __tablename__ = 'authorities'
+    __mapper_args__ = { 'polymorphic_identity' : 'authority' }
+    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
+
+    def __init__ (self, **kwds):
+        if 'type' not in kwds:
+            kwds['type'] = 'authority'
+        RegRecord.__init__(self, **kwds)
+
+    def __repr__ (self):
+        result = RegRecord.__repr__(self).replace("Record", "Authority")
+        result = result.replace(">", " name={}>".format(self.name))
+        return result
 
+    def update_pis (self, pi_hrns, dbsession):
+        # strip that in case we have words
+        pi_hrns = [ x.strip() for x in pi_hrns ]
+        request = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns))
+        logger.info("RegAuthority.update_pis: %d incoming pis, %d matches found"\
+                    % (len(pi_hrns), request.count()))
+        pis = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns)).all()
+        self.reg_pis = pis
 
 ####################
-class RegSlice (RegRecord):
+class RegSlice(RegRecord):
     __tablename__ = 'slices'
     __mapper_args__ = { 'polymorphic_identity' : 'slice' }
     record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
@@ -249,30 +234,58 @@ class RegSlice (RegRecord):
         secondary=slice_researcher_table,
         primaryjoin=RegRecord.record_id==slice_researcher_table.c.slice_id,
         secondaryjoin=RegRecord.record_id==slice_researcher_table.c.researcher_id,
-        backref="reg_slices_as_researcher")
+        backref='reg_slices_as_researcher',
+        )
 
     def __init__ (self, **kwds):
-        if 'type' not in kwds: kwds['type']='slice'
+        if 'type' not in kwds:
+            kwds['type']='slice'
         RegRecord.__init__(self, **kwds)
 
     def __repr__ (self):
-        return RegRecord.__repr__(self).replace("Record","Slice")
+        return RegRecord.__repr__(self).replace("Record", "Slice")
+
+    def update_researchers (self, researcher_hrns, dbsession):
+        # strip that in case we have words
+        researcher_hrns = [ x.strip() for x in researcher_hrns ]
+        request = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns))
+        logger.info ("RegSlice.update_researchers: %d incoming researchers, %d matches found"\
+                     % (len(researcher_hrns), request.count()))
+        researchers = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns)).all()
+        self.reg_researchers = researchers
+
+    # when dealing with credentials, we need to retrieve the PIs attached to a slice
+    # WARNING: with the move to passing dbsessions around, we face a glitch here because this
+    # helper function is called from the trust/ area that
+    def get_pis (self):
+        from sqlalchemy.orm import sessionmaker
+        Session = sessionmaker()
+        dbsession = Session.object_session(self)
+        from sfa.util.xrn import get_authority
+        authority_hrn = get_authority(self.hrn)
+        auth_record = dbsession.query(RegAuthority).filter_by(hrn=authority_hrn).first()
+        return auth_record.reg_pis
+
+    @validates ('expires')
+    def validate_expires (self, key, incoming):
+        return self.validate_datetime (key, incoming)
 
 ####################
-class RegNode (RegRecord):
+class RegNode(RegRecord):
     __tablename__ = 'nodes'
     __mapper_args__ = { 'polymorphic_identity' : 'node' }
     record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
 
-    def __init__ (self, **kwds):
-        if 'type' not in kwds: kwds['type']='node'
+    def __init__(self, **kwds):
+        if 'type' not in kwds:
+            kwds['type']='node'
         RegRecord.__init__(self, **kwds)
 
     def __repr__ (self):
-        return RegRecord.__repr__(self).replace("Record","Node")
+        return RegRecord.__repr__(self).replace("Record", "Node")
 
 ####################
-class RegUser (RegRecord):
+class RegUser(RegRecord):
     __tablename__ = 'users'
     # these objects will have type='user' in the records table
     __mapper_args__ = { 'polymorphic_identity' : 'user' }
@@ -283,20 +296,22 @@ class RegUser (RegRecord):
     # a 'keys' tag, and assigning a list of strings in a reference column like this crashes
     reg_keys = relationship \
         ('RegKey', backref='reg_user',
-        cascade="all, delete, delete-orphan")
+        cascade = "all, delete, delete-orphan",
+        )
 
     # so we can use RegUser (email=.., hrn=..) and the like
     def __init__ (self, **kwds):
         # handle local settings
-        if 'email' in kwds: self.email=kwds.pop('email')
-        if 'type' not in kwds: kwds['type']='user'
+        if 'email' in kwds:
+            self.email = kwds.pop('email')
+        if 'type' not in kwds:
+            kwds['type'] = 'user'
         RegRecord.__init__(self, **kwds)
 
     # append stuff at the end of the record __repr__
     def __repr__ (self):
-        result = RegRecord.__repr__(self).replace("Record","User")
-        result.replace (">"," email=%s"%self.email)
-        result += ">"
+        result = RegRecord.__repr__(self).replace("Record", "User")
+        result.replace(">", " email={}>".format(self.email))
         return result
 
     @validates('email')
@@ -309,24 +324,106 @@ class RegUser (RegRecord):
 # meaning, when querying the whole records, we expect there should
 # be a single query to fetch all the keys
 # or, is it enough that we issue a single query to retrieve all the keys
-class RegKey (Base):
+class RegKey(Base):
     __tablename__ = 'keys'
     key_id = Column (Integer, primary_key=True)
-    record_id = Column (Integer, ForeignKey ("records.record_id"))
+    record_id = Column (Integer, ForeignKey ("records.record_id"))
     key = Column (String)
     pointer = Column (Integer, default = -1)
     def __init__ (self, key, pointer=None):
-        self.key=key
-        if pointer: self.pointer=pointer
+        self.key = key
+        if pointer:
+            self.pointer = pointer
     def __repr__ (self):
-        result="<key id=%s key=%s..."%(self.key_id,self.key)
-        result += ">"
-        return result
+        result = "<key id={} key={}...".format(self.key_id, self.key)
+        result += ">"
+        return result
+
+####################
+# so after that, an 'authority' record will e.g. have a 'reg-pis' field with the hrns of its pi-users
+augment_map = {'authority': {'reg-pis' : 'reg_pis',},
+               'slice': {'reg-researchers' : 'reg_researchers',},
+               'user': {'reg-pi-authorities' : 'reg_authorities_as_pi',
+                        'reg-slices' : 'reg_slices_as_researcher',},
+               }
+
+
+# xxx mystery
+# the way we use sqlalchemy might be a little wrong
+# in any case what has been observed is that (Reg)Records as returned by an sqlalchemy
+# query not always have their __dict__ properly adjusted
+# typically a RegAuthority object would have its object.name set properly, but
+# object.__dict__ has no 'name' key
+# which is an issue because we rely on __dict__ for many things, in particular this
+# is what gets exposed to the drivers (this is historical and dates back before sqlalchemy)
+# so it is recommended to always run this function that will make sure
+# that such built-in fields are properly set in __dict__ too
+#
+def augment_with_sfa_builtins(local_record):
+    # don't ruin the import of that file in a client world
+    from sfa.util.xrn import Xrn
+    # add a 'urn' field
+    setattr(local_record, 'reg-urn', Xrn(xrn=local_record.hrn, type=local_record.type).urn)
+    # users have keys and this is needed to synthesize 'users' sent over to CreateSliver
+    fields_to_check = []
+    if local_record.type == 'user':
+        user_keys = [ key.key for key in local_record.reg_keys ]
+        setattr(local_record, 'reg-keys', user_keys)
+        fields_to_check = ['email']
+    elif local_record.type == 'authority':
+        fields_to_check = ['name']
+    for field in fields_to_check:
+        if not field in local_record.__dict__:
+            logger.debug("augment_with_sfa_builtins: hotfixing missing '{}' in {}"
+                         .format(field, local_record.hrn))
+            local_record.__dict__[field] = getattr(local_record, field)
+    # search in map according to record type
+    type_map = augment_map.get(local_record.type, {})
+    # use type-dep. map to do the job
+    for (field_name, attribute) in type_map.items():
+        # get related objects
+        related_records = getattr(local_record, attribute, [])
+        hrns = [ r.hrn for r in related_records ]
+        setattr (local_record, field_name, hrns)
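
Side note, not part of the patch: a minimal usage sketch of the augment_with_sfa_builtins helper added above. The engine URL, credentials and the authority hrn are illustrative placeholders; the sketch assumes a reachable registry database populated with records built from this model.

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    from sfa.storage.model import RegAuthority, augment_with_sfa_builtins

    # illustrative DSN - the real one comes from the SFA configuration
    engine = create_engine("postgresql+psycopg2://sfa:sfa@localhost/sfa")
    Session = sessionmaker(bind=engine)
    dbsession = Session()

    # fetch an authority record; 'onelab.upmc' is a made-up hrn
    auth_record = dbsession.query(RegAuthority).filter_by(hrn='onelab.upmc').first()
    if auth_record is not None:
        # adds 'reg-urn', hotfixes 'name' into __dict__, and, per augment_map,
        # exposes the PI hrns under the dashed 'reg-pis' name
        augment_with_sfa_builtins(auth_record)
        print(auth_record.__dict__['reg-urn'])
        print(getattr(auth_record, 'reg-pis', []))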