1 from types import StringTypes
2 from datetime import datetime
4 from sqlalchemy import Column, Integer, String, DateTime
5 from sqlalchemy import Table, Column, MetaData, join, ForeignKey
6 from sqlalchemy.orm import relationship, backref
7 from sqlalchemy.orm import column_property
8 from sqlalchemy.orm import object_mapper
9 from sqlalchemy.orm import validates
10 from sqlalchemy.ext.declarative import declarative_base
12 from sfa.storage.record import Record
13 from sfa.util.sfalogging import logger
14 from sfa.util.sfatime import utcparse, datetime_to_string
15 from sfa.util.xml import XML
17 from sfa.trust.gid import GID
##############################
# single declarative base shared by every mapped class in this module;
# Base.metadata is what init_tables/drop_tables operate on
Base = declarative_base()
25 # historically the front end to the db dealt with dicts, so the code was only dealing with dicts
26 # sqlalchemy however offers an object interface, meaning that you write obj.id instead of obj['id']
27 # which is admittedly much nicer
28 # however we still need to deal with dictionaries if only for the xmlrpc layer
30 # here are a few utilities for this
# (*) first off, when an old piece of code needs to be used as-is, if only temporarily, the simplest trick
33 # is to use obj.__dict__
34 # this behaves exactly like required, i.e. obj.__dict__['field']='new value' does change obj.field
35 # however this depends on sqlalchemy's implementation so it should be avoided
37 # (*) second, when an object needs to be exposed to the xmlrpc layer, we need to convert it into a dict
38 # remember though that writing the resulting dictionary won't change the object
39 # essentially obj.__dict__ would be fine too, except that we want to discard alchemy private keys starting with '_'
40 # 2 ways are provided for that:
43 # the former dict(obj) relies on __iter__() and next() below, and does not rely on the fields names
44 # although it seems to work fine, I've found cases where it issues a weird python error that I could not get right
# so the latter obj.todict() seems more reliable but more hacky as it relies on the form of fields, so this can probably be improved
47 # (*) finally for converting a dictionary into an sqlalchemy object, we provide
48 # obj.load_from_dict(dict)
class AlchemyObj(Record):
    # Mixin for mapped objects: exposes them as an iterator of
    # (column_name, value) pairs so that dict(obj) works (see the notes
    # at the top of this file).
    # NOTE(review): a 'def __iter__ (self):' header appears to be missing
    # from this copy before the next line (and a 'return self' after it)
    # -- verify against upstream.
    self._i = iter(object_mapper(self).columns)
    # NOTE(review): a 'def next (self):' header (python2 iterator protocol)
    # also appears to be missing before the next two lines.
    n = self._i.next().name
    return n, getattr(self, n)

    # # only intended for debugging
    # def inspect (self, logger, message=""):
    #     logger.info("%s -- Inspecting AlchemyObj -- attrs"%message)
    #     if not k.startswith('_'):
    #         logger.info (" %s: %s"%(k,getattr(self,k)))
    #     logger.info("%s -- Inspecting AlchemyObj -- __dict__"%message)
    #     for (k,v) in d.iteritems():
    #         logger.info("[%s]=%s"%(k,v))
70 ##############################
71 # various kinds of records are implemented as an inheritance hierarchy
72 # RegRecord is the base class for all actual variants
73 # a first draft was using 'type' as the discriminator for the inheritance
74 # but we had to define another more internal column (classtype) so we
# accommodate variants in types like authority+am and the like
class RegRecord (Base,AlchemyObj):
    """
    Base class, mapped on the 'records' table, for all registry record
    variants. Subclasses (RegAuthority, RegSlice, RegNode, RegUser) are
    attached through sqlalchemy joined-table inheritance, with the
    'classtype' column acting as the polymorphic discriminator.
    """
    __tablename__ = 'records'
    record_id = Column (Integer, primary_key=True)
    # this is the discriminator that tells which class to use
    classtype = Column (String)
    # in a first version type was the discriminator
    # but that could not accommodate 'authority+sa' and the like
    type = Column (String)
    # NOTE(review): column definitions for 'hrn' and 'gid' appear to be
    # missing from this copy -- both names are referenced in 'fields',
    # __init__ and the gid helpers below; verify against upstream.
    authority = Column (String)
    peer_authority = Column (String)
    # pointer into the testbed-side database; -1 is the 'no pointer' marker
    pointer = Column (Integer, default=-1)
    date_created = Column (DateTime)
    last_updated = Column (DateTime)
    # use the 'type' column to decide which subclass the object is of
    __mapper_args__ = { 'polymorphic_on' : classtype }

    # the record fields exposed through the dict interface
    fields = [ 'type', 'hrn', 'gid', 'authority', 'peer_authority' ]

    def __init__ (self, type=None, hrn=None, gid=None, authority=None, peer_authority=None,
                  pointer=None, dict=None):
        # only set the attributes that were actually provided
        if type: self.type=type
        # NOTE(review): 'if hrn: ...' and the enclosing 'if gid:' guard
        # appear to be missing from this copy -- as written the next
        # isinstance test dangles; verify against upstream.
        if isinstance(gid, StringTypes): self.gid=gid
        else: self.gid=gid.save_to_string(save_parents=True)
        if authority: self.authority=authority
        if peer_authority: self.peer_authority=peer_authority
        if pointer: self.pointer=pointer
        # a dict may be passed to set several fields at once
        if dict: self.load_from_dict (dict)

    # NOTE(review): a 'def __repr__ (self):' header appears to be missing
    # before the next line, together with a trailing 'return result'
    # -- verify against upstream.
    result="<Record id=%s, type=%s, hrn=%s, authority=%s, pointer=%s" % \
        (self.record_id, self.type, self.hrn, self.authority, self.pointer)
    # skip the uniform '--- BEGIN CERTIFICATE --' stuff
    if self.gid: result+=" gid=%s..."%self.gid[28:36]
    else: result+=" nogid"

    # shortcut - former implem. was record-based
    def get (self, field, default):
        # same semantics as getattr with a default value
        return getattr(self,field,default)

    # NOTE(review): a "@validates ('gid')" decorator appears to be missing
    # here, by symmetry with the datetime validators below.
    def validate_gid (self, key, gid):
        # accept either a GID object or its string representation
        if gid is None: return
        elif isinstance(gid, StringTypes): return gid
        else: return gid.save_to_string(save_parents=True)

    def validate_datetime (self, key, incoming):
        # normalize: accept datetime objects as-is, or unix timestamps
        if isinstance (incoming, datetime): return incoming
        elif isinstance (incoming, (int,float)):return datetime.fromtimestamp (incoming)
        # NOTE(review): no visible branch for other input types (the
        # validator then returns None) -- a logging 'else' clause may be
        # missing from this copy; verify against upstream.

    @validates ('date_created')
    def validate_date_created (self, key, incoming): return self.validate_datetime (key, incoming)

    @validates ('last_updated')
    def validate_last_updated (self, key, incoming): return self.validate_datetime (key, incoming)

    # xxx - there might be smarter ways to handle get/set'ing gid using validation hooks
    def get_gid_object (self):
        # wrap the stored gid string in a GID object; None when unset
        if not self.gid: return None
        else: return GID(string=self.gid)

    def just_created (self):
        # stamp both timestamps on creation
        # NOTE(review): the line computing 'now' (presumably
        # now=datetime.utcnow()) appears to be missing from this copy.
        self.date_created=now
        self.last_updated=now

    def just_updated (self):
        # refresh only the last_updated timestamp
        # NOTE(review): the line computing 'now' appears to be missing
        # from this copy, as in just_created above.
        self.last_updated=now
#################### cross-relations tables
# authority x user (pis) association
# plain many-to-many association tables (no mapped class of their own);
# both sides are foreign keys into records.record_id
# NOTE: the closing parenthesis of each Table(...) call had been lost in
# this copy of the file; restored here.
authority_pi_table = \
    Table ( 'authority_pi', Base.metadata,
            Column ('authority_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
            Column ('pi_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
            )

# slice x user (researchers) association
slice_researcher_table = \
    Table ( 'slice_researcher', Base.metadata,
            Column ('slice_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
            Column ('researcher_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
            )
165 ##############################
166 # all subclasses define a convenience constructor with a default value for type,
167 # and when applicable a way to define local fields in a kwd=value argument
class RegAuthority (RegRecord):
    """Authority record; joined-table subclass of RegRecord (type='authority')."""
    __tablename__ = 'authorities'
    __mapper_args__ = { 'polymorphic_identity' : 'authority' }
    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
    #### extensions come here
    # many-to-many: the PI users of this authority, via authority_pi_table
    # NOTE(review): the first line of the relationship() call -- its opening
    # parenthesis and target class name (presumably 'RegUser') -- appears to
    # be missing from this copy; verify against upstream.
    reg_pis = relationship \
        secondary=authority_pi_table,
        primaryjoin=RegRecord.record_id==authority_pi_table.c.authority_id,
        secondaryjoin=RegRecord.record_id==authority_pi_table.c.pi_id,
        backref='reg_authorities_as_pi')

    def __init__ (self, **kwds):
        # fill in type if not previously set
        if 'type' not in kwds: kwds['type']='authority'
        # base class constructor
        RegRecord.__init__(self, **kwds)

    # no proper data yet, just hack the typename
    # NOTE(review): a 'def __repr__ (self):' header appears to be missing
    # before the next line -- verify against upstream.
    return RegRecord.__repr__(self).replace("Record","Authority")
class RegSlice (RegRecord):
    """Slice record; joined-table subclass of RegRecord (type='slice')."""
    __tablename__ = 'slices'
    __mapper_args__ = { 'polymorphic_identity' : 'slice' }
    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
    #### extensions come here
    # many-to-many: the researcher users of this slice, via slice_researcher_table
    # NOTE(review): the first line of the relationship() call -- its opening
    # parenthesis and target class name (presumably 'RegUser') -- appears to
    # be missing from this copy; verify against upstream.
    reg_researchers = relationship \
        secondary=slice_researcher_table,
        primaryjoin=RegRecord.record_id==slice_researcher_table.c.slice_id,
        secondaryjoin=RegRecord.record_id==slice_researcher_table.c.researcher_id,
        backref='reg_slices_as_researcher')

    def __init__ (self, **kwds):
        # default the discriminating 'type' field
        if 'type' not in kwds: kwds['type']='slice'
        RegRecord.__init__(self, **kwds)

    # NOTE(review): a 'def __repr__ (self):' header appears to be missing
    # before the next line -- verify against upstream.
    return RegRecord.__repr__(self).replace("Record","Slice")
class RegNode (RegRecord):
    """Node record; joined-table subclass of RegRecord (type='node')."""
    __tablename__ = 'nodes'
    __mapper_args__ = { 'polymorphic_identity' : 'node' }
    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)

    def __init__ (self, **kwds):
        # default the discriminating 'type' field
        if 'type' not in kwds: kwds['type']='node'
        RegRecord.__init__(self, **kwds)

    # NOTE(review): a 'def __repr__ (self):' header appears to be missing
    # before the next line -- verify against upstream.
    return RegRecord.__repr__(self).replace("Record","Node")
class RegUser (RegRecord):
    """User record; joined-table subclass of RegRecord, with a local 'email' column."""
    __tablename__ = 'users'
    # these objects will have type='user' in the records table
    __mapper_args__ = { 'polymorphic_identity' : 'user' }
    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
    #### extensions come here
    email = Column ('email', String)
    # can't use name 'keys' here because when loading from xml we're getting
    # a 'keys' tag, and assigning a list of strings in a reference column like this crashes
    # one-to-many: the RegKey rows attached to this user; keys are deleted
    # together with the user (delete-orphan cascade)
    reg_keys = relationship \
        ('RegKey', backref='reg_user',
         cascade="all, delete, delete-orphan")

    # so we can use RegUser (email=.., hrn=..) and the like
    def __init__ (self, **kwds):
        # handle local settings
        if 'email' in kwds: self.email=kwds.pop('email')
        if 'type' not in kwds: kwds['type']='user'
        RegRecord.__init__(self, **kwds)

    # append stuff at the end of the record __repr__
    # NOTE(review): a 'def __repr__ (self):' header (and trailing
    # 'return result') appears to be missing around the next two lines.
    result = RegRecord.__repr__(self).replace("Record","User")
    # NOTE(review): str.replace returns a new string -- this call's result
    # is discarded, so the email is never actually appended; likely should
    # read 'result = result.replace(...)'.
    result.replace (">"," email=%s"%self.email)

    # NOTE(review): a "@validates ('email')" decorator and a trailing
    # 'return address' appear to be missing here -- as visible, the
    # validator would return None; verify against upstream.
    def validate_email(self, key, address):
        # sanity check only; assert is stripped under python -O
        assert '@' in address
# xxx tocheck : not sure about eager loading of this one
# meaning, when querying the whole records, we expect there should
# be a single query to fetch all the keys
# or, is it enough that we issue a single query to retrieve all the keys
# NOTE(review): the class header -- presumably
# 'class RegKey (Base, AlchemyObj):' (it is the 'RegKey' target of
# RegUser.reg_keys above) -- appears to be missing from this copy;
# the following lines are its body. Verify against upstream.
__tablename__ = 'keys'
key_id = Column (Integer, primary_key=True)
# foreign key into the records table: the owning user record
record_id = Column (Integer, ForeignKey ("records.record_id"))
key = Column (String)
# pointer into the testbed-side database; -1 means 'no pointer'
pointer = Column (Integer, default = -1)

def __init__ (self, key, pointer=None):
    # NOTE(review): a 'self.key=key' assignment appears to be missing here
    # (the 'key' argument is otherwise unused, while __repr__ reads self.key).
    if pointer: self.pointer=pointer

# NOTE(review): a 'def __repr__ (self):' header and a trailing
# 'return result' appear to be missing around the following lines.
result="<key id=%s key=%s..."%(self.key_id,self.key[8:16],)
try: result += " user=%s"%self.reg_user.record_id
except: result += " no-user"
280 ##############################
281 # although the db needs of course to be reachable for the following functions
282 # the schema management functions are here and not in alchemy
283 # because the actual details of the classes need to be known
284 # migrations: this code has no notion of the previous versions
285 # of the data model nor of migrations
286 # sfa.storage.migrations.db_init uses this when starting from
def init_tables(engine):
    """Create every table of the current/latest data model on *engine*.

    No migration logic here: this simply materializes the declarative
    metadata as-is (see the notes above about sfa.storage.migrations).
    """
    logger.info("Initializing db schema from current/latest model")
    metadata = Base.metadata
    metadata.create_all(engine)
def drop_tables(engine):
    """Drop every table of the current/latest data model from *engine*."""
    logger.info("Dropping tables from current/latest model")
    metadata = Base.metadata
    metadata.drop_all(engine)
296 ##############################
297 # create a record of the right type from either a dict or an xml string
# create a record of the right type from either a dict or an xml string
def make_record (dict=None, xml=""):
    """Create a Reg* record object from either a dict or an xml string.

    Exactly one of the two inputs should be provided; the dict takes
    precedence when both are non-empty.

    Raises:
        Exception: when neither input is provided (both empty).

    Note: the parameter name 'dict' (shadowing the builtin) is kept for
    backward compatibility with keyword callers; the former mutable
    default argument '{}' was replaced by the safe None sentinel.
    """
    if dict is None: dict = {}
    if dict: return make_record_dict (dict)
    elif xml: return make_record_xml (xml)
    else: raise Exception("make_record has no input")
# convert an incoming record - typically from xmlrpc - into an object
def make_record_dict (record_dict):
    """Instantiate the Reg* subclass matching record_dict['type'].

    The type string may carry '+'-separated extensions (e.g.
    'authority+sa'); only the part before the first '+' selects the class.
    """
    assert ('type' in record_dict)
    type=record_dict['type'].split('+')[0]
    if type=='authority':
        result=RegAuthority (dict=record_dict)
    # NOTE(review): the "elif type=='user':", "elif type=='slice':",
    # "elif type=='node':" and final "else:" headers appear to be missing
    # from this copy -- as visible, the assignments below would all run
    # unconditionally; verify against upstream.
    result=RegUser (dict=record_dict)
    result=RegSlice (dict=record_dict)
    result=RegNode (dict=record_dict)
    logger.debug("Untyped RegRecord instance")
    result=RegRecord (dict=record_dict)
    logger.info ("converting dict into Reg* with type=%s"%type)
    logger.info ("returning=%s"%result)
    # NOTE(review): a 'return result' appears to be missing here.
321 # register non-db attributes in an extensions field
def make_record_xml (xml):
    """Build a Reg* record from an xml string.

    The xml is first parsed into a dict, then handed over to
    make_record_dict which picks the right class from the 'type' field.
    """
    parsed = XML(xml)
    as_dict = parsed.todict()
    logger.info("load from xml, keys=%s"%as_dict.keys())
    return make_record_dict (as_dict)