from sqlalchemy.orm import validates
from sqlalchemy.ext.declarative import declarative_base
+from sfa.storage.record import Record
from sfa.util.sfalogging import logger
+from sfa.util.sfatime import utcparse, datetime_to_string
from sfa.util.xml import XML
from sfa.trust.gid import GID
# (*) finally for converting a dictionary into an sqlalchemy object, we provide
# obj.load_from_dict(dict)
-class AlchemyObj:
+class AlchemyObj(Record):
    def __iter__(self):
        # Iterator protocol entry point: prepare iteration over this object's
        # sqlalchemy-mapped columns (object_mapper comes from sqlalchemy.orm,
        # imported at module level), then yield (name, value) pairs via next().
        self._i = iter(object_mapper(self).columns)
        return self
    def next(self):
        # Python-2 iterator protocol: return (column_name, value) for the next
        # mapped column; StopIteration propagates from the underlying iterator.
        n = self._i.next().name
        return n, getattr(self, n)
- def todict (self):
- d=self.__dict__
- keys=[k for k in d.keys() if not k.startswith('_')]
- return dict ( [ (k,d[k]) for k in keys ] )
- def load_from_dict (self, d):
- for (k,v) in d.iteritems():
- # experimental
- if isinstance(v, StringTypes) and v.lower() in ['true']: v=True
- if isinstance(v, StringTypes) and v.lower() in ['false']: v=False
- setattr(self,k,v)
-
- # in addition we provide convenience for converting to and from xml records
- # for this purpose only, we need the subclasses to define 'fields' as either
- # a list or a dictionary
- def xml_fields (self):
- fields=self.fields
- if isinstance(fields,dict): fields=fields.keys()
- return fields
-
- def save_as_xml (self):
- # xxx not sure about the scope here
- input_dict = dict( [ (key, getattr(self.key), ) for key in self.xml_fields() if getattr(self,key,None) ] )
- xml_record=XML("<record />")
- xml_record.parse_dict (input_dict)
- return xml_record.toxml()
-
- def dump(self, dump_parents=False):
- for key in self.fields:
- if key == 'gid' and self.gid:
- gid = GID(string=self.gid)
- print " %s:" % key
- gid.dump(8, dump_parents)
- elif getattr(self,key,None):
- print " %s: %s" % (key, getattr(self,key))
-
+
# # only intended for debugging
# def inspect (self, logger, message=""):
# logger.info("%s -- Inspecting AlchemyObj -- attrs"%message)
result += ">"
return result
+ # shortcut - former implem. was record-based
+ def get (self, field, default):
+ return getattr(self,field,default)
+
    @validates ('gid')
    def validate_gid (self, key, gid):
        # sqlalchemy validation hook: normalize whatever is assigned to 'gid'
        # into a string before it hits the database.
        # - None is stored as-is
        # - strings pass through unchanged (StringTypes is the py2 str/unicode pair)
        # - GID-like objects are serialized together with their parent chain
        if gid is None: return
        elif isinstance(gid, StringTypes): return gid
        else: return gid.save_to_string(save_parents=True)
+ def validate_datetime (self, key, incoming):
+ if isinstance (incoming, datetime): return incoming
+ elif isinstance (incoming, (int,float)):return datetime.fromtimestamp (incoming)
+ else: logger.info("Cannot validate datetime for key %s with input %s"%\
+ (key,incoming))
+
+ @validates ('date_created')
+ def validate_date_created (self, key, incoming): return self.validate_datetime (key, incoming)
+
+ @validates ('last_updated')
+ def validate_last_updated (self, key, incoming): return self.validate_datetime (key, incoming)
+
# xxx - there might be smarter ways to handle get/set'ing gid using validation hooks
def get_gid_object (self):
if not self.gid: return None
now=datetime.now()
self.last_updated=now
+#################### cross-relations tables
+# authority x user (pis) association
# association table for the authority x user (PIs) many-to-many relation;
# both sides reference the base 'records' table and the pair is the primary key
authority_pi_table = Table(
    'authority_pi', Base.metadata,
    Column('authority_id', Integer, ForeignKey('records.record_id'), primary_key=True),
    Column('pi_id', Integer, ForeignKey('records.record_id'), primary_key=True),
)
+# slice x user (researchers) association
# association table for the slice x user (researchers) many-to-many relation;
# both sides reference the base 'records' table and the pair is the primary key
slice_researcher_table = Table(
    'slice_researcher', Base.metadata,
    Column('slice_id', Integer, ForeignKey('records.record_id'), primary_key=True),
    Column('researcher_id', Integer, ForeignKey('records.record_id'), primary_key=True),
)
+
##############################
# all subclasses define a convenience constructor with a default value for type,
# and when applicable a way to define local fields in a kwd=value argument
__tablename__ = 'authorities'
__mapper_args__ = { 'polymorphic_identity' : 'authority' }
record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
+ #### extensions come here
+ reg_pis = relationship \
+ ('RegUser',
+ secondary=authority_pi_table,
+ primaryjoin=RegRecord.record_id==authority_pi_table.c.authority_id,
+ secondaryjoin=RegRecord.record_id==authority_pi_table.c.pi_id,
+ backref='reg_authorities_as_pi')
def __init__ (self, **kwds):
# fill in type if not previously set
    def __repr__ (self):
        # reuse the base class representation, just relabel the record type
        return RegRecord.__repr__(self).replace("Record","Authority")
-####################
-# slice x user (researchers) association
-slice_researcher_table = \
- Table ( 'slice_researcher', Base.metadata,
- Column ('slice_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
- Column ('researcher_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
- )
+ def update_pis (self, pi_hrns):
+ # don't ruin the import of that file in a client world
+ from sfa.storage.alchemy import dbsession
+ # strip that in case we have <researcher> words </researcher>
+ pi_hrns = [ x.strip() for x in pi_hrns ]
+ request = dbsession.query (RegUser).filter(RegUser.hrn.in_(pi_hrns))
+ logger.info ("RegAuthority.update_pis: %d incoming pis, %d matches found"%(len(pi_hrns),request.count()))
+ pis = dbsession.query (RegUser).filter(RegUser.hrn.in_(pi_hrns)).all()
+ self.reg_pis = pis
####################
class RegSlice (RegRecord):
secondary=slice_researcher_table,
primaryjoin=RegRecord.record_id==slice_researcher_table.c.slice_id,
secondaryjoin=RegRecord.record_id==slice_researcher_table.c.researcher_id,
- backref="reg_slices_as_researcher")
+ backref='reg_slices_as_researcher')
def __init__ (self, **kwds):
if 'type' not in kwds: kwds['type']='slice'
    def __repr__ (self):
        # reuse the base class representation, just relabel the record type
        return RegRecord.__repr__(self).replace("Record","Slice")
+ def update_researchers (self, researcher_hrns):
+ # don't ruin the import of that file in a client world
+ from sfa.storage.alchemy import dbsession
+ # strip that in case we have <researcher> words </researcher>
+ researcher_hrns = [ x.strip() for x in researcher_hrns ]
+ request = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns))
+ logger.info ("RegSlice.update_researchers: %d incoming researchers, %d matches found"%(len(researcher_hrns),request.count()))
+ researchers = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns)).all()
+ self.reg_researchers = researchers
+
+ # when dealing with credentials, we need to retrieve the PIs attached to a slice
+ def get_pis (self):
+ # don't ruin the import of that file in a client world
+ from sfa.storage.alchemy import dbsession
+ from sfa.util.xrn import get_authority
+ authority_hrn = get_authority(self.hrn)
+ auth_record = dbsession.query(RegAuthority).filter_by(hrn=authority_hrn).first()
+ return auth_record.reg_pis
+
+ @validates ('expires')
+ def validate_expires (self, key, incoming): return self.validate_datetime (key, incoming)
+
####################
class RegNode (RegRecord):
__tablename__ = 'nodes'
logger.info("load from xml, keys=%s"%xml_dict.keys())
return make_record_dict (xml_dict)
+####################
+# augment local records with data from builtin relationships
+# expose related objects as a list of hrns
+# we pick names that clearly won't conflict with the ones used in the old approach,
# where the relationships data came from the testbed side
+# for each type, a dict of the form {<field-name-exposed-in-record>:<alchemy_accessor_name>}
+# so after that, an 'authority' record will e.g. have a 'reg-pis' field with the hrns of its pi-users
# per record type, maps the field name exposed in the record
# to the sqlalchemy relationship accessor that backs it
augment_map = {
    'authority': {'reg-pis': 'reg_pis'},
    'slice':     {'reg-researchers': 'reg_researchers'},
    'user':      {'reg-pi-authorities': 'reg_authorities_as_pi',
                  'reg-slices': 'reg_slices_as_researcher'},
}
+
def augment_with_sfa_builtins (local_record):
    """Decorate *local_record* in place with the 'reg-*' convenience fields:
    its urn, its keys when it is a user, and - per augment_map - the hrns of
    the records it is related to through the builtin relationships."""
    # don't ruin the import of that file in a client world
    from sfa.util.xrn import Xrn
    record_type = local_record.type
    # expose the urn under 'reg-urn'
    urn = Xrn(xrn=local_record.hrn, type=record_type).urn
    setattr(local_record, 'reg-urn', urn)
    # users carry keys; CreateSliver needs them to synthesize its 'users' argument
    if record_type == 'user':
        setattr(local_record, 'reg-keys',
                [ k.key for k in local_record.reg_keys ])
    # for each relationship declared for this record type, expose the
    # related records as a flat list of hrns
    for exposed_name, accessor in augment_map.get(record_type, {}).items():
        related = getattr(local_record, accessor, [])
        setattr(local_record, exposed_name, [ r.hrn for r in related ])
+
+