1 from types import StringTypes
2 from datetime import datetime
4 from sqlalchemy import or_, and_
5 from sqlalchemy import Column, Integer, String, DateTime
6 from sqlalchemy import Table, Column, MetaData, join, ForeignKey
7 from sqlalchemy.orm import relationship, backref
8 from sqlalchemy.orm import column_property
9 from sqlalchemy.orm import object_mapper
10 from sqlalchemy.orm import validates
11 from sqlalchemy.ext.declarative import declarative_base
13 from sfa.storage.record import Record
14 from sfa.util.sfalogging import logger
15 from sfa.util.sfatime import utcparse, datetime_to_string
16 from sfa.util.xml import XML
18 from sfa.trust.gid import GID
20 ##############################
21 Base = declarative_base()
26 # historically the front end to the db dealt with dicts, so the code was only dealing with dicts
27 # sqlalchemy however offers an object interface, meaning that you write obj.id instead of obj['id']
28 # which is admittedly much nicer
29 # however we still need to deal with dictionaries if only for the xmlrpc layer
31 # here are a few utilities for this
# (*) first off, when an old piece of code needs to be used as-is, if only temporarily, the simplest trick
34 # is to use obj.__dict__
35 # this behaves exactly like required, i.e. obj.__dict__['field']='new value' does change obj.field
36 # however this depends on sqlalchemy's implementation so it should be avoided
38 # (*) second, when an object needs to be exposed to the xmlrpc layer, we need to convert it into a dict
39 # remember though that writing the resulting dictionary won't change the object
40 # essentially obj.__dict__ would be fine too, except that we want to discard alchemy private keys starting with '_'
41 # 2 ways are provided for that:
44 # the former dict(obj) relies on __iter__() and next() below, and does not rely on the fields names
45 # although it seems to work fine, I've found cases where it issues a weird python error that I could not get right
# so the latter obj.todict() seems more reliable but more hacky as it relies on the form of fields, so this can probably be improved
48 # (*) finally for converting a dictionary into an sqlalchemy object, we provide
49 # obj.load_from_dict(dict)
class AlchemyObj(Record):
    # Mixin shared by the mapped classes below: it makes an sqlalchemy
    # object iterable as (column-name, value) pairs so that dict(obj)
    # works, as described in the module comments above.
    # NOTE(review): the def lines of the iteration protocol
    # (__iter__ / next) are not visible in this excerpt; the statements
    # below are their bodies — confirm against the full file.
        # remember an iterator over this object's mapped columns
        self._i = iter(object_mapper(self).columns)
        # produce the next (column-name, value) pair
        n = self._i.next().name
        return n, getattr(self, n)

#    # only intended for debugging
#    def inspect (self, logger, message=""):
#        logger.info("%s -- Inspecting AlchemyObj -- attrs"%message)
#            if not k.startswith('_'):
#                logger.info (" %s: %s"%(k,getattr(self,k)))
#        logger.info("%s -- Inspecting AlchemyObj -- __dict__"%message)
#        for (k,v) in d.iteritems():
#            logger.info("[%s]=%s"%(k,v))
71 ##############################
72 # various kinds of records are implemented as an inheritance hierarchy
73 # RegRecord is the base class for all actual variants
74 # a first draft was using 'type' as the discriminator for the inheritance
75 # but we had to define another more internal column (classtype) so we
# accommodate variants in types like authority+am and the like
class RegRecord (Base, AlchemyObj):
    # Base class of the registry record hierarchy: one row per object in
    # the 'records' table, with subclasses discriminated on 'classtype'.
    __tablename__ = 'records'
    record_id = Column (Integer, primary_key=True)
    # this is the discriminator that tells which class to use
    classtype = Column (String)
    # in a first version type was the discriminator
    # but that could not accommodate for 'authority+sa' and the like
    type = Column (String)
    authority = Column (String)
    peer_authority = Column (String)
    # foreign id in the testbed-side database; -1 means unset
    pointer = Column (Integer, default=-1)
    date_created = Column (DateTime)
    last_updated = Column (DateTime)
    # use the 'type' column to decide which subclass the object is of
    __mapper_args__ = { 'polymorphic_on' : classtype }

    # the record fields exposed by default when converting to a dict
    fields = [ 'type', 'hrn', 'gid', 'authority', 'peer_authority' ]

    def __init__ (self, type=None, hrn=None, gid=None, authority=None, peer_authority=None,
                  pointer=None, dict=None):
        # every argument is optional; 'dict' allows bulk-loading from a
        # dictionary, e.g. as received from the xmlrpc layer
        if type: self.type=type
        # accept a gid either as a string or as a GID object
        if isinstance(gid, StringTypes): self.gid=gid
        else: self.gid=gid.save_to_string(save_parents=True)
        if authority: self.authority=authority
        if peer_authority: self.peer_authority=peer_authority
        if pointer: self.pointer=pointer
        if dict: self.load_from_dict (dict)

        # body of __repr__: summarize id/type/hrn/authority plus a few extras
        result="<Record id=%s, type=%s, hrn=%s, authority=%s" % \
                (self.record_id, self.type, self.hrn, self.authority)
#        for extra in ('pointer', 'email', 'name'):
        for extra in ('email', 'name'):
            if hasattr(self, extra):
                result += " {}={},".format(extra, getattr(self, extra))
        # skip the uniform '--- BEGIN CERTIFICATE --' stuff
        result+=" gid=%s..."%self.gid[28:36]

    # shortcut - former implem. was record-based
    def get (self, field, default):
        # dict-like accessor kept for backward compatibility
        return getattr(self,field,default)

    def validate_gid (self, key, gid):
        # normalize an incoming gid: accept None, a string, or a GID object
        if gid is None: return
        elif isinstance(gid, StringTypes): return gid
        else: return gid.save_to_string(save_parents=True)

    def validate_datetime (self, key, incoming):
        # normalize datetime-ish inputs; ints/floats are taken as timestamps
        if isinstance (incoming, datetime):
        elif isinstance (incoming, (int, float)):
            return datetime.fromtimestamp (incoming)
        logger.info("Cannot validate datetime for key %s with input %s"%\

    @validates ('date_created')
    def validate_date_created (self, key, incoming):
        return self.validate_datetime (key, incoming)

    @validates ('last_updated')
    def validate_last_updated (self, key, incoming):
        return self.validate_datetime (key, incoming)

    # xxx - there might be smarter ways to handle get/set'ing gid using validation hooks
    def get_gid_object (self):
        # wrap the stored gid string in a GID object (or None if unset)
        if not self.gid: return None
        else: return GID(string=self.gid)

    def just_created (self):
        # stamp both creation and update times with the current UTC time
        now = datetime.utcnow()
        self.date_created = now
        self.last_updated = now

    def just_updated (self):
        # refresh only the update timestamp
        now = datetime.utcnow()
        self.last_updated = now
#################### cross-relations tables
# authority x user (pis) association
# many-to-many link between authorities and their PI users; both sides
# reference rows of the 'records' table
authority_pi_table = \
    Table ( 'authority_pi', Base.metadata,
            Column ('authority_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
            Column ('pi_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
# slice x user (researchers) association
# many-to-many link between slices and their researcher users
slice_researcher_table = \
    Table ( 'slice_researcher', Base.metadata,
            Column ('slice_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
            Column ('researcher_id', Integer, ForeignKey ('records.record_id'), primary_key=True),
179 ##############################
180 # all subclasses define a convenience constructor with a default value for type,
181 # and when applicable a way to define local fields in a kwd=value argument
class RegAuthority (RegRecord):
    # Registry record for an authority; adds a 'name' column and the set
    # of its PI users through the authority_pi association table.
    __tablename__ = 'authorities'
    __mapper_args__ = { 'polymorphic_identity' : 'authority' }
    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
    #### extensions come here
    name = Column ('name', String)
    #### extensions come here
    # the PI users of this authority; the reverse side is exposed on
    # RegUser as 'reg_authorities_as_pi'
    reg_pis = relationship \
        secondary = authority_pi_table,
        primaryjoin = RegRecord.record_id==authority_pi_table.c.authority_id,
        secondaryjoin = RegRecord.record_id==authority_pi_table.c.pi_id,
        backref = 'reg_authorities_as_pi',

    def __init__ (self, **kwds):
        # handle local settings
        self.name = kwds.pop('name')
        # fill in type if not previously set
        if 'type' not in kwds:
            kwds['type']='authority'
        # base class constructor
        RegRecord.__init__(self, **kwds)

    # no proper data yet, just hack the typename
    # NOTE(review): str.replace returns a new string, so the second call
    # below discards its result and the name never makes it into the
    # repr — it should read: result = result.replace(...)
        result = RegRecord.__repr__(self).replace("Record", "Authority")
        result.replace(">", " name={}>".format(self.name))

    def update_pis (self, pi_hrns, dbsession):
        # replace this authority's PI set with the RegUsers matching pi_hrns
        # strip that in case we have <researcher> words </researcher>
        pi_hrns = [ x.strip() for x in pi_hrns ]
        request = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns))
        logger.info("RegAuthority.update_pis: %d incoming pis, %d matches found"\
                    % (len(pi_hrns), request.count()))
        pis = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns)).all()
class RegSlice (RegRecord):
    # Registry record for a slice; tied to its researcher users through
    # the slice_researcher association table.
    __tablename__ = 'slices'
    __mapper_args__ = { 'polymorphic_identity' : 'slice' }
    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
    #### extensions come here
    # the researchers of this slice; the reverse side is exposed on
    # RegUser as 'reg_slices_as_researcher'
    reg_researchers = relationship \
        secondary=slice_researcher_table,
        primaryjoin=RegRecord.record_id==slice_researcher_table.c.slice_id,
        secondaryjoin=RegRecord.record_id==slice_researcher_table.c.researcher_id,
        backref='reg_slices_as_researcher',

    def __init__ (self, **kwds):
        # default the type discriminator when the caller did not set one
        if 'type' not in kwds:
        RegRecord.__init__(self, **kwds)

        return RegRecord.__repr__(self).replace("Record", "Slice")

    def update_researchers (self, researcher_hrns, dbsession):
        # replace this slice's researcher set with the matching RegUsers
        # strip that in case we have <researcher> words </researcher>
        researcher_hrns = [ x.strip() for x in researcher_hrns ]
        request = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns))
        logger.info ("RegSlice.update_researchers: %d incoming researchers, %d matches found"\
                     % (len(researcher_hrns), request.count()))
        researchers = dbsession.query (RegUser).filter(RegUser.hrn.in_(researcher_hrns)).all()
        self.reg_researchers = researchers

    # when dealing with credentials, we need to retrieve the PIs attached to a slice
    # WARNING: with the move to passing dbsessions around, we face a glitch here because this
    # helper function is called from the trust/ area that
    # (body of the PI-lookup helper: recover the session this object is
    # attached to, then return the PIs of the slice's parent authority)
        from sqlalchemy.orm import sessionmaker
        Session = sessionmaker()
        dbsession = Session.object_session(self)
        from sfa.util.xrn import get_authority
        authority_hrn = get_authority(self.hrn)
        auth_record = dbsession.query(RegAuthority).filter_by(hrn=authority_hrn).first()
        return auth_record.reg_pis

    @validates ('expires')
    def validate_expires (self, key, incoming):
        # reuse the shared datetime normalization from RegRecord
        return self.validate_datetime (key, incoming)
class RegNode (RegRecord):
    # Registry record for a node; no extra column beyond the base record.
    __tablename__ = 'nodes'
    __mapper_args__ = { 'polymorphic_identity' : 'node' }
    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)

    def __init__(self, **kwds):
        # default the type discriminator when the caller did not set one
        if 'type' not in kwds:
        RegRecord.__init__(self, **kwds)

        return RegRecord.__repr__(self).replace("Record", "Node")
class RegUser (RegRecord):
    # Registry record for a user; adds an 'email' column and the list of
    # the user's ssh keys (RegKey rows).
    __tablename__ = 'users'
    # these objects will have type='user' in the records table
    __mapper_args__ = { 'polymorphic_identity' : 'user' }
    record_id = Column (Integer, ForeignKey ("records.record_id"), primary_key=True)
    #### extensions come here
    email = Column ('email', String)
    # can't use name 'keys' here because when loading from xml we're getting
    # a 'keys' tag, and assigning a list of strings in a reference column like this crashes
    reg_keys = relationship \
        ('RegKey', backref='reg_user',
         cascade = "all, delete, delete-orphan",

    # so we can use RegUser (email=.., hrn=..) and the like
    def __init__ (self, **kwds):
        # handle local settings
        self.email = kwds.pop('email')
        if 'type' not in kwds:
            kwds['type'] = 'user'
        RegRecord.__init__(self, **kwds)

    # append stuff at the end of the record __repr__
    # NOTE(review): str.replace returns a new string, so the second call
    # below discards its result and the email never shows up in the
    # repr — it should read: result = result.replace(...)
        result = RegRecord.__repr__(self).replace("Record", "User")
        result.replace(">", " email={}>".format(self.email))

    def validate_email(self, key, address):
        # sanity-check incoming email addresses before storing them
        assert '@' in address
# xxx tocheck : not sure about eager loading of this one
# meaning, when querying the whole records, we expect there should
# be a single query to fetch all the keys
# or, is it enough that we issue a single query to retrieve all the keys
# (body of the RegKey class: one ssh key per row, attached to its owning
# RegUser through record_id; 'pointer' is the testbed-side id, -1 if unset)
    __tablename__ = 'keys'
    key_id = Column (Integer, primary_key=True)
    record_id = Column (Integer, ForeignKey ("records.record_id"))
    key = Column (String)
    pointer = Column (Integer, default = -1)

    def __init__ (self, key, pointer=None):
        self.pointer = pointer

    # body of __repr__: show a slice of the key material, plus the owner if any
        result = "<key id=%s key=%s..." % (self.key_id, self.key[8:16],)
        try: result += " user=%s" % self.reg_user.record_id
        except: result += " no-user"
class SliverAllocation(Base,AlchemyObj):
    # Tracks the GENI allocation state of each sliver, keyed on sliver_id.
    __tablename__ = 'sliver_allocation'
    sliver_id = Column(String, primary_key=True)
    client_id = Column(String)
    component_id = Column(String)
    slice_urn = Column(String)
    # one of the geni_* states checked in validate_allocation_state below
    allocation_state = Column(String)

    def __init__(self, **kwds):
        # keyword-only constructor: copy each recognized column if provided
        if 'sliver_id' in kwds:
            self.sliver_id = kwds['sliver_id']
        if 'client_id' in kwds:
            self.client_id = kwds['client_id']
        if 'component_id' in kwds:
            self.component_id = kwds['component_id']
        if 'slice_urn' in kwds:
            self.slice_urn = kwds['slice_urn']
        if 'allocation_state' in kwds:
            self.allocation_state = kwds['allocation_state']

    # body of __repr__: show the primary key and the allocation state
        result = "<sliver_allocation sliver_id=%s allocation_state=%s"\
                 % (self.sliver_id, self.allocation_state)

    @validates('allocation_state')
    def validate_allocation_state(self, key, state):
        # only the three GENI AM API allocation states are legal
        allocation_states = ['geni_unallocated', 'geni_allocated', 'geni_provisioned']
        assert state in allocation_states

    def set_allocations(sliver_ids, state, dbsession):
        # Bulk-set 'state' on the rows matching sliver_ids, creating rows
        # for slivers that have no allocation record yet.
        # NOTE(review): no self/cls parameter — presumably decorated as a
        # @staticmethod in the full file; confirm.
        if not isinstance(sliver_ids, list):
            sliver_ids = [sliver_ids]
        sliver_state_updated = {}
        constraint = SliverAllocation.sliver_id.in_(sliver_ids)
        sliver_allocations = dbsession.query (SliverAllocation).filter(constraint)
        sliver_ids_found = []
        for sliver_allocation in sliver_allocations:
            sliver_allocation.allocation_state = state
            sliver_ids_found.append(sliver_allocation.sliver_id)

        # Some states may not have been updated because no sliver allocation state record
        # exists for the sliver. Insert new allocation records for these slivers and set
        # it to geni_allocated.
        sliver_ids_not_found = set(sliver_ids).difference(sliver_ids_found)
        for sliver_id in sliver_ids_not_found:
            record = SliverAllocation(sliver_id=sliver_id, allocation_state=state)
            dbsession.add(record)

    def delete_allocations(sliver_ids, dbsession):
        # Bulk-delete the allocation rows matching sliver_ids.
        # NOTE(review): no self/cls parameter — presumably a @staticmethod
        # in the full file; confirm.
        if not isinstance(sliver_ids, list):
            sliver_ids = [sliver_ids]
        constraint = SliverAllocation.sliver_id.in_(sliver_ids)
        sliver_allocations = dbsession.query(SliverAllocation).filter(constraint)
        for sliver_allocation in sliver_allocations:
            dbsession.delete(sliver_allocation)

    def sync(self, dbsession):
        # Push this in-memory object's fields onto the matching db row(s).
        constraints = [SliverAllocation.sliver_id == self.sliver_id]
        results = dbsession.query(SliverAllocation).filter(and_(*constraints))
        for result in results:
            records.append(result)

            # copy every column onto the db-side record
            record.sliver_id = self.sliver_id
            record.client_id = self.client_id
            record.component_id = self.component_id
            record.slice_urn = self.slice_urn
            record.allocation_state = self.allocation_state
424 ##############################
425 # although the db needs of course to be reachable for the following functions
426 # the schema management functions are here and not in alchemy
427 # because the actual details of the classes need to be known
428 # migrations: this code has no notion of the previous versions
429 # of the data model nor of migrations
430 # sfa.storage.migrations.db_init uses this when starting from
def init_tables(engine):
    """Create the full schema of the current/latest model on `engine`."""
    logger.info("Initializing db schema from current/latest model")
    metadata = Base.metadata
    metadata.create_all(engine)
def drop_tables(engine):
    """Drop every table known to the current/latest model from `engine`."""
    logger.info("Dropping tables from current/latest model")
    metadata = Base.metadata
    metadata.drop_all(engine)
440 ##############################
441 # create a record of the right type from either a dict or an xml string
442 def make_record (dict=None, xml=""):
443 if dict is None: dict={}
444 if dict: return make_record_dict (dict)
445 elif xml: return make_record_xml (xml)
446 else: raise Exception("make_record has no input")
# convert an incoming record - typically from xmlrpc - into an object
def make_record_dict (record_dict):
    # dispatch on the first part of 'type' ('authority+sa' -> 'authority')
    assert ('type' in record_dict)
    type = record_dict['type'].split('+')[0]
    if type == 'authority':
        result = RegAuthority (dict=record_dict)
        result = RegUser (dict=record_dict)
    elif type == 'slice':
        result = RegSlice (dict=record_dict)
        result = RegNode (dict=record_dict)
        logger.debug("Untyped RegRecord instance")
        result = RegRecord (dict=record_dict)
    logger.info("converting dict into Reg* with type=%s"%type)
    logger.info("returning=%s"%result)
    # register non-db attributes in an extensions field
def make_record_xml (xml):
    """Parse an xml string into a dict, then build the matching Reg* record."""
    parsed = XML(xml)
    as_dict = parsed.todict()
    logger.info("load from xml, keys=%s"%as_dict.keys())
    return make_record_dict(as_dict)
# augment local records with data from builtin relationships
# expose related objects as a list of hrns
# we pick names that clearly won't conflict with the ones used in the old approach,
# where the relationships data came from the testbed side
# for each type, a dict of the form {<field-name-exposed-in-record>:<alchemy_accessor_name>}
# so after that, an 'authority' record will e.g. have a 'reg-pis' field with the hrns of its pi-users
augment_map = {'authority': {'reg-pis' : 'reg_pis',},
               'slice': {'reg-researchers' : 'reg_researchers',},
               'user': {'reg-pi-authorities' : 'reg_authorities_as_pi',
                        'reg-slices' : 'reg_slices_as_researcher',},
def augment_with_sfa_builtins(local_record):
    """Attach synthesized 'reg-*' attributes to a local Reg* record.

    Sets 'reg-urn' from the record's hrn/type, 'reg-keys' for users, and,
    per augment_map, one list-of-hrns attribute per builtin relationship.
    """
    # imported lazily so importing this module stays possible client-side
    from sfa.util.xrn import Xrn
    urn = Xrn(xrn=local_record.hrn, type=local_record.type).urn
    setattr(local_record, 'reg-urn', urn)
    # users carry keys; CreateSliver needs them to synthesize 'users'
    if local_record.type == 'user':
        setattr(local_record, 'reg-keys',
                [k.key for k in local_record.reg_keys])
    # per-type relationships exposed as plain lists of hrns
    for exposed_name, accessor in augment_map.get(local_record.type, {}).items():
        related = getattr(local_record, accessor, [])
        setattr(local_record, exposed_name, [rec.hrn for rec in related])