X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=sfa%2Fstorage%2Fmodel.py;h=a2e3bd49b3dfd89613cc019a36c26722e95b76f8;hb=4a9e6751f9f396f463932133b9d62fc925a99ef6;hp=051ba87e15bfe82923877a755ffe9a2f14e02bc8;hpb=0eb2fd645abe02f4fbb79d9ebc5a17b292b8dd2d;p=sfa.git

diff --git a/sfa/storage/model.py b/sfa/storage/model.py
index 051ba87e..a2e3bd49 100644
--- a/sfa/storage/model.py
+++ b/sfa/storage/model.py
@@ -1,7 +1,6 @@
-from types import StringTypes
 from datetime import datetime
 
-from sqlalchemy import or_, and_ 
+from sqlalchemy import or_, and_
 from sqlalchemy import Column, Integer, String, DateTime
 from sqlalchemy import Table, Column, MetaData, join, ForeignKey
 from sqlalchemy.orm import relationship, backref
@@ -13,7 +12,8 @@ from sqlalchemy.ext.declarative import declarative_base
 from sfa.storage.record import Record
 from sfa.util.sfalogging import logger
 from sfa.util.sfatime import utcparse, datetime_to_string
-from sfa.util.xml import XML 
+from sfa.util.xml import XML
+from sfa.util.py23 import StringType
 
 from sfa.trust.gid import GID
 
@@ -27,13 +27,13 @@ Base = declarative_base()
 # sqlalchemy however offers an object interface, meaning that you write obj.id instead of obj['id']
 # which is admittedly much nicer
 # however we still need to deal with dictionaries if only for the xmlrpc layer
-# 
-# here are a few utilities for this 
-# 
+#
+# here are a few utilities for this
+#
 # (*) first off, when an old pieve of code needs to be used as-is, if only temporarily, the simplest trick
 # is to use obj.__dict__
 # this behaves exactly like required, i.e. obj.__dict__['field']='new value' does change obj.field
-# however this depends on sqlalchemy's implementation so it should be avoided 
+# however this depends on sqlalchemy's implementation so it should be avoided
 #
 # (*) second, when an object needs to be exposed to the xmlrpc layer, we need to convert it into a dict
 # remember though that writing the resulting dictionary won't change the object
@@ -48,15 +48,18 @@ Base = declarative_base()
 # (*) finally for converting a dictionary into an sqlalchemy object, we provide
 # obj.load_from_dict(dict)
 
+
 class AlchemyObj(Record):
-    def __iter__(self): 
+
+    def __iter__(self):
         self._i = iter(object_mapper(self).columns)
-        return self
-    def next(self):
+        return self
+
+    def __next__(self):
         n = self._i.next().name
         return n, getattr(self, n)
 
-# # only intended for debugging 
+# # only intended for debugging
 # def inspect (self, logger, message=""):
 #     logger.info("%s -- Inspecting AlchemyObj -- attrs"%message)
 #     for k in dir(self):
@@ -72,43 +75,52 @@ class AlchemyObj(Record):
 # various kinds of records are implemented as an inheritance hierarchy
 # RegRecord is the base class for all actual variants
 # a first draft was using 'type' as the discriminator for the inheritance
-# but we had to define another more internal column (classtype) so we 
+# but we had to define another more internal column (classtype) so we
 # accomodate variants in types like authority+am and the like
 
 class RegRecord(Base, AlchemyObj):
 
-    __tablename__ = 'records'
-    record_id = Column (Integer, primary_key=True)
+    __tablename__ = 'records'
+    record_id = Column(Integer, primary_key=True)
     # this is the discriminator that tells which class to use
-    classtype = Column (String)
+    classtype = Column(String)
     # in a first version type was the discriminator
     # but that could not accomodate for 'authority+sa' and the like
-    type = Column (String)
-    hrn = Column (String)
-    gid = Column (String)
-    authority = Column (String)
-    peer_authority = Column (String)
-    pointer = Column (Integer, default=-1)
-    date_created = Column (DateTime)
-    last_updated = Column (DateTime)
+    type = Column(String)
+    hrn = Column(String)
+    gid = Column(String)
+    authority = Column(String)
+    peer_authority = Column(String)
+    pointer = Column(Integer, default=-1)
+    date_created = Column(DateTime)
+    last_updated = Column(DateTime)
 
     # use the 'type' column to decide which subclass the object is of
-    __mapper_args__ = { 'polymorphic_on' : classtype }
-
-    fields = [ 'type', 'hrn', 'gid', 'authority', 'peer_authority' ]
-    def __init__ (self, type=None, hrn=None, gid=None, authority=None, peer_authority=None,
-                  pointer=None, dict=None):
-        if type: self.type=type
-        if hrn: self.hrn=hrn
-        if gid:
-            if isinstance(gid, StringTypes): self.gid=gid
-            else: self.gid=gid.save_to_string(save_parents=True)
-        if authority: self.authority=authority
-        if peer_authority: self.peer_authority=peer_authority
-        if pointer: self.pointer=pointer
-        if dict: self.load_from_dict (dict)
+    __mapper_args__ = {'polymorphic_on': classtype}
+
+    fields = ['type', 'hrn', 'gid', 'authority', 'peer_authority']
+
+    def __init__(self, type=None, hrn=None, gid=None, authority=None, peer_authority=None,
+                 pointer=None, dict=None):
+        if type:
+            self.type = type
+        if hrn:
+            self.hrn = hrn
+        if gid:
+            if isinstance(gid, StringType):
+                self.gid = gid
+            else:
+                self.gid = gid.save_to_string(save_parents=True)
+        if authority:
+            self.authority = authority
+        if peer_authority:
+            self.peer_authority = peer_authority
+        if pointer:
+            self.pointer = pointer
+        if dict:
+            self.load_from_dict(dict)
 
     def __repr__(self):
-        result="", " name={}>".format(self.name))
         return result
 
-    def update_pis (self, pi_hrns, dbsession):
+    def update_pis(self, pi_hrns, dbsession):
         # strip that in case we have words
-        pi_hrns = [ x.strip() for x in pi_hrns ]
+        pi_hrns = [x.strip() for x in pi_hrns]
         request = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns))
-        logger.info("RegAuthority.update_pis: %d incoming pis, %d matches found"\
+        logger.info("RegAuthority.update_pis: %d incoming pis, %d matches found"
                     % (len(pi_hrns), request.count()))
         pis = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns)).all()
         self.reg_pis = pis
 
 ####################
+
+
 class RegSlice(RegRecord):
-    __tablename__ = 'slices'
-    __mapper_args__ = { 'polymorphic_identity' : 'slice' }
-    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
-    #### extensions come here
+    __tablename__ = 'slices'
+    __mapper_args__ = {'polymorphic_identity': 'slice'}
+    record_id = Column(Integer, ForeignKey(
+        "records.record_id"), primary_key=True)
+    # extensions come here
     reg_researchers = relationship \
-        ('RegUser',
+        ('RegUser',
          secondary=slice_researcher_table,
-         primaryjoin=RegRecord.record_id==slice_researcher_table.c.slice_id,
-         secondaryjoin=RegRecord.record_id==slice_researcher_table.c.researcher_id,
+         primaryjoin=RegRecord.record_id == slice_researcher_table.c.slice_id,
+         secondaryjoin=RegRecord.record_id == slice_researcher_table.c.researcher_id,
          backref='reg_slices_as_researcher',
-        )
+         )
 
-    def __init__ (self, **kwds):
+    def __init__(self, **kwds):
         if 'type' not in kwds:
-            kwds['type']='slice'
+            kwds['type'] = 'slice'
         RegRecord.__init__(self, **kwds)
 
-    def __repr__ (self):
+    def __repr__(self):
         return RegRecord.__repr__(self).replace("Record", "Slice")
 
-    def update_researchers (self, researcher_hrns, dbsession):
+    def update_researchers(self, researcher_hrns, dbsession):
         # strip that in case we have words
-        researcher_hrns = [ x.strip() for x in researcher_hrns ]
-        request = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns))
-        logger.info ("RegSlice.update_researchers: %d incoming researchers, %d matches found"\
-            % (len(researcher_hrns), request.count()))
-        researchers = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns)).all()
+        researcher_hrns = [x.strip() for x in researcher_hrns]
+        request = dbsession.query(RegUser).filter(
+            RegUser.hrn.in_(researcher_hrns))
+        logger.info("RegSlice.update_researchers: %d incoming researchers, %d matches found"
+                    % (len(researcher_hrns), request.count()))
+        researchers = dbsession.query(RegUser).filter(
+            RegUser.hrn.in_(researcher_hrns)).all()
         self.reg_researchers = researchers
 
     # when dealing with credentials, we need to retrieve the PIs attached to a slice
     # WARNING: with the move to passing dbsessions around, we face a glitch here because this
     # helper function is called from the trust/ area that
-    def get_pis (self):
+    def get_pis(self):
         from sqlalchemy.orm import sessionmaker
         Session = sessionmaker()
         dbsession = Session.object_session(self)
         from sfa.util.xrn import get_authority
         authority_hrn = get_authority(self.hrn)
-        auth_record = dbsession.query(RegAuthority).filter_by(hrn=authority_hrn).first()
+        auth_record = dbsession.query(
+            RegAuthority).filter_by(hrn=authority_hrn).first()
         return auth_record.reg_pis
-
-    @validates ('expires')
-    def validate_expires (self, key, incoming):
-        return self.validate_datetime (key, incoming)
+
+    @validates('expires')
+    def validate_expires(self, key, incoming):
+        return self.validate_datetime(key, incoming)
 
 ####################
+
+
 class RegNode(RegRecord):
-    __tablename__ = 'nodes'
-    __mapper_args__ = { 'polymorphic_identity' : 'node' }
-    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
-
+    __tablename__ = 'nodes'
+    __mapper_args__ = {'polymorphic_identity': 'node'}
+    record_id = Column(Integer, ForeignKey(
+        "records.record_id"), primary_key=True)
+
     def __init__(self, **kwds):
         if 'type' not in kwds:
-            kwds['type']='node'
+            kwds['type'] = 'node'
         RegRecord.__init__(self, **kwds)
 
-    def __repr__ (self):
+    def __repr__(self):
         return RegRecord.__repr__(self).replace("Record", "Node")
 
 ####################
+
+
 class RegUser(RegRecord):
-    __tablename__ = 'users'
+    __tablename__ = 'users'
     # these objects will have type='user' in the records table
-    __mapper_args__ = { 'polymorphic_identity' : 'user' }
-    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
-    #### extensions come here
-    email = Column ('email', String)
+    __mapper_args__ = {'polymorphic_identity': 'user'}
+    record_id = Column(Integer, ForeignKey(
+        "records.record_id"), primary_key=True)
+    # extensions come here
+    email = Column('email', String)
     # can't use name 'keys' here because when loading from xml we're getting
-    # a 'keys' tag, and assigning a list of strings in a reference column like this crashes
+    # a 'keys' tag, and assigning a list of strings in a reference column like
+    # this crashes
     reg_keys = relationship \
         ('RegKey', backref='reg_user',
-         cascade = "all, delete, delete-orphan",
-        )
-
+         cascade="all, delete, delete-orphan",
+         )
+
     # so we can use RegUser (email=.., hrn=..) and the like
-    def __init__ (self, **kwds):
+    def __init__(self, **kwds):
         # handle local settings
         if 'email' in kwds:
             self.email = kwds.pop('email')
@@ -309,12 +347,12 @@ class RegUser(RegRecord):
         RegRecord.__init__(self, **kwds)
 
     # append stuff at the end of the record __repr__
-    def __repr__ (self):
+    def __repr__(self):
         result = RegRecord.__repr__(self).replace("Record", "User")
         result.replace(">", " email={}>".format(self.email))
         return result
 
-    @validates('email') 
+    @validates('email')
     def validate_email(self, key, address):
         assert '@' in address
         return address
@@ -322,34 +360,39 @@ class RegUser(RegRecord):
 ####################
 # xxx tocheck : not sure about eager loading of this one
 # meaning, when querying the whole records, we expect there should
-# be a single query to fetch all the keys 
-# or, is it enough that we issue a single query to retrieve all the keys 
+# be a single query to fetch all the keys
+# or, is it enough that we issue a single query to retrieve all the keys
+
+
 class RegKey(Base):
-    __tablename__ = 'keys'
-    key_id = Column (Integer, primary_key=True)
-    record_id = Column (Integer, ForeignKey ("records.record_id"))
-    key = Column (String)
-    pointer = Column (Integer, default = -1)
-
-    def __init__ (self, key, pointer=None):
+    __tablename__ = 'keys'
+    key_id = Column(Integer, primary_key=True)
+    record_id = Column(Integer, ForeignKey("records.record_id"))
+    key = Column(String)
+    pointer = Column(Integer, default=-1)
+
+    def __init__(self, key, pointer=None):
         self.key = key
         if pointer:
             self.pointer = pointer
 
-    def __repr__ (self):
+    def __repr__(self):
         result = ":}
-# so after that, an 'authority' record will e.g. have a 'reg-pis' field with the hrns of its pi-users
-augment_map = {'authority': {'reg-pis' : 'reg_pis',},
-               'slice': {'reg-researchers' : 'reg_researchers',},
-               'user': {'reg-pi-authorities' : 'reg_authorities_as_pi',
-                        'reg-slices' : 'reg_slices_as_researcher',},
-               }
+# so after that, an 'authority' record will e.g. have a 'reg-pis' field
+# with the hrns of its pi-users
+augment_map = {'authority': {'reg-pis': 'reg_pis', },
+               'slice': {'reg-researchers': 'reg_researchers', },
+               'user': {'reg-pi-authorities': 'reg_authorities_as_pi',
+                        'reg-slices': 'reg_slices_as_researcher', },
+               }
 
 
 # xxx mystery
@@ -499,16 +557,18 @@ augment_map = {'authority': {'reg-pis' : 'reg_pis',},
 # is what gets exposed to the drivers (this is historical and dates back before sqlalchemy)
 # so it is recommended to always run this function that will make sure
 # that such built-in fields are properly set in __dict__ too
-# 
+#
 def augment_with_sfa_builtins(local_record):
     # don't ruin the import of that file in a client world
     from sfa.util.xrn import Xrn
     # add a 'urn' field
-    setattr(local_record, 'reg-urn', Xrn(xrn=local_record.hrn, type=local_record.type).urn)
-    # users have keys and this is needed to synthesize 'users' sent over to CreateSliver
+    setattr(local_record, 'reg-urn',
+            Xrn(xrn=local_record.hrn, type=local_record.type).urn)
+    # users have keys and this is needed to synthesize 'users' sent over to
+    # CreateSliver
     fields_to_check = []
     if local_record.type == 'user':
-        user_keys = [ key.key for key in local_record.reg_keys ]
+        user_keys = [key.key for key in local_record.reg_keys]
         setattr(local_record, 'reg-keys', user_keys)
         fields_to_check = ['email']
     elif local_record.type == 'authority':
@@ -521,10 +581,8 @@ def augment_with_sfa_builtins(local_record):
     # search in map according to record type
     type_map = augment_map.get(local_record.type, {})
     # use type-dep. map to do the job
-    for (field_name, attribute) in type_map.items():
+    for (field_name, attribute) in list(type_map.items()):
         # get related objects
         related_records = getattr(local_record, attribute, [])
-        hrns = [ r.hrn for r in related_records ]
-        setattr (local_record, field_name, hrns)
-
-
+        hrns = [r.hrn for r in related_records]
+        setattr(local_record, field_name, hrns)
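
Note on the AlchemyObj iteration hunk near the top of the patch: it renames next() to __next__() for Python 3, but the body still calls self._i.next(), which Python 3 iterators do not provide. A minimal sketch of how that pair would need to read, assuming the object_mapper import already used by the module:

    def __iter__(self):
        # iterate over the names/values of the mapped columns
        self._i = iter(object_mapper(self).columns)
        return self

    def __next__(self):
        # the next() builtin works on both Python 2 and Python 3 iterators
        n = next(self._i).name
        return n, getattr(self, n)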
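The Reg* classes use joined-table inheritance: the records table carries the shared columns, each subclass table adds its extensions via a ForeignKey on record_id, and classtype is the polymorphic discriminator, so a query on RegRecord hands back instances of the proper subclass. A rough illustration, assuming an already configured SQLAlchemy session named dbsession; the hrn and email are made up:

    from sfa.storage.model import RegRecord, RegUser

    user = RegUser(hrn='top.sub.jdoe', email='jdoe@example.org')  # hypothetical values
    dbsession.add(user)
    dbsession.commit()

    # the row comes back as a RegUser because its classtype discriminator is 'user'
    record = dbsession.query(RegRecord).filter_by(hrn='top.sub.jdoe').one()
    assert isinstance(record, RegUser)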
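Finally, augment_map drives augment_with_sfa_builtins(), which flattens relationship data into 'reg-*' attributes so the xmlrpc/driver side, which still works on __dict__, can see them. A usage sketch under the same assumptions (existing dbsession, hypothetical hrn):

    from sfa.storage.model import RegSlice, augment_with_sfa_builtins

    slice_record = dbsession.query(RegSlice).filter_by(hrn='top.sub.slice1').first()
    augment_with_sfa_builtins(slice_record)

    # builtin SFA fields now sit next to the mapped columns
    print(getattr(slice_record, 'reg-urn'))           # urn derived from hrn and type
    print(getattr(slice_record, 'reg-researchers'))   # hrns of the researchers on the slice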