1 from datetime import datetime
3 from sqlalchemy import or_, and_
4 from sqlalchemy import Column, Integer, String, DateTime
5 from sqlalchemy import Table, Column, MetaData, join, ForeignKey
6 from sqlalchemy.orm import relationship, backref
7 from sqlalchemy.orm import column_property
8 from sqlalchemy.orm import object_mapper
9 from sqlalchemy.orm import validates
10 from sqlalchemy.ext.declarative import declarative_base
12 from sfa.storage.record import Record
13 from sfa.util.sfalogging import logger
14 from sfa.util.sfatime import utcparse, datetime_to_string
15 from sfa.util.xml import XML
16 from sfa.util.py23 import StringType
18 from sfa.trust.gid import GID
20 ##############################
21 Base = declarative_base()
26 # historically the front end to the db dealt with dicts, so the code was only dealing with dicts
27 # sqlalchemy however offers an object interface, meaning that you write obj.id instead of obj['id']
28 # which is admittedly much nicer
29 # however we still need to deal with dictionaries if only for the xmlrpc layer
31 # here are a few utilities for this
# (*) first off, when an old piece of code needs to be used as-is, if only temporarily, the simplest trick
34 # is to use obj.__dict__
35 # this behaves exactly like required, i.e. obj.__dict__['field']='new value' does change obj.field
36 # however this depends on sqlalchemy's implementation so it should be avoided
38 # (*) second, when an object needs to be exposed to the xmlrpc layer, we need to convert it into a dict
39 # remember though that writing the resulting dictionary won't change the object
40 # essentially obj.__dict__ would be fine too, except that we want to discard alchemy private keys starting with '_'
41 # 2 ways are provided for that:
44 # the former dict(obj) relies on __iter__() and next() below, and does not rely on the fields names
45 # although it seems to work fine, I've found cases where it issues a weird python error that I could not get right
46 # so the latter obj.todict() seems more reliable but more hacky as is relies on the form of fields, so this can probably be improved
48 # (*) finally for converting a dictionary into an sqlalchemy object, we provide
49 # obj.load_from_dict(dict)
class AlchemyObj(Record):
    """
    Mix-in shared by the sqlalchemy-mapped classes in this module.

    Bridges the historical dict-based record interface with sqlalchemy's
    object interface, so that dict(obj) works by iterating over the
    mapped columns.

    NOTE(review): the 'def __iter__(self):' and 'def next(self):' headers
    appear to be missing from this view of the source; the statements
    below look like their respective bodies — confirm against upstream.
    """
        # __iter__ body: set up an iterator over this instance's mapped columns
        self._i = iter(object_mapper(self).columns)
        # next body (python2 iterator protocol): yield (column_name, value)
        n = self._i.next().name
        return n, getattr(self, n)

    # # only intended for debugging
    # def inspect (self, logger, message=""):
    #     logger.info("%s -- Inspecting AlchemyObj -- attrs"%message)
    #         if not k.startswith('_'):
    #             logger.info (" %s: %s"%(k,getattr(self,k)))
    #     logger.info("%s -- Inspecting AlchemyObj -- __dict__"%message)
    #     for (k,v) in d.iteritems():
    #         logger.info("[%s]=%s"%(k,v))
74 ##############################
75 # various kinds of records are implemented as an inheritance hierarchy
76 # RegRecord is the base class for all actual variants
77 # a first draft was using 'type' as the discriminator for the inheritance
78 # but we had to define another more internal column (classtype) so we
# accommodate variants in types like authority+am and the like
class RegRecord(Base, AlchemyObj):
    """
    Base class for all registry record variants (authority, slice, user,
    node). Maps to the 'records' table; subclasses are selected through
    the 'classtype' discriminator column (see __mapper_args__).

    NOTE(review): several lines appear to be missing from this view of
    the source (e.g. the 'type'/'hrn'/'gid' Column definitions, parts of
    __init__, the 'def __repr__' header, and validator branches) —
    comments below flag the visible gaps.
    """
    __tablename__ = 'records'
    record_id = Column(Integer, primary_key=True)
    # this is the discriminator that tells which class to use
    classtype = Column(String)
    # in a first version type was the discriminator
    # but that could not accommodate for 'authority+sa' and the like
    authority = Column(String)
    peer_authority = Column(String)
    pointer = Column(Integer, default=-1)
    date_created = Column(DateTime)
    last_updated = Column(DateTime)
    # use the 'type' column to decide which subclass the object is of
    __mapper_args__ = {'polymorphic_on': classtype}
    fields = ['type', 'hrn', 'gid', 'authority', 'peer_authority']

    def __init__(self, type=None, hrn=None, gid=None, authority=None, peer_authority=None,
                 pointer=None, dict=None):
        # NOTE(review): the guards presumably preceding each assignment
        # (e.g. "if type: ...", "if hrn: ...") are missing from this view
        if isinstance(gid, StringType):
            # NOTE(review): calling save_to_string on a plain string looks
            # inverted — presumably the str branch stores gid as-is and a
            # GID-object branch serializes it; confirm against upstream
            self.gid = gid.save_to_string(save_parents=True)
        self.authority = authority
        self.peer_authority = peer_authority
        self.pointer = pointer
        # allow RegRecord(dict=...) to populate fields from a dictionary
        self.load_from_dict(dict)

    # NOTE(review): 'def __repr__(self):' header missing from this view
        result = "<Record id=%s, type=%s, hrn=%s, authority=%s" % \
            (self.record_id, self.type, self.hrn, self.authority)
        # for extra in ('pointer', 'email', 'name'):
        # for extra in ('email', 'name'):
        # displaying names at this point is too dangerous, because of unicode
        # NOTE(review): ('email') is a plain string, not a 1-tuple, so this
        # iterates over its characters; likely meant ('email',)
        for extra in ('email'):
            if hasattr(self, extra):
                result += " {}={},".format(extra, getattr(self, extra))
        # skip the uniform '--- BEGIN CERTIFICATE --' stuff
        result += " gid=%s..." % self.gid[28:36]

    # shortcut - former implem. was record-based
    def get(self, field, default):
        # dict-like accessor kept for backward compatibility
        return getattr(self, field, default)

    # NOTE(review): the @validates('gid') decorator and the None / GID-object
    # branches appear to be missing from this view
    def validate_gid(self, key, gid):
        elif isinstance(gid, StringType):
        return gid.save_to_string(save_parents=True)

    def validate_datetime(self, key, incoming):
        # normalize datetime / unix-timestamp inputs into a datetime
        # NOTE(review): the datetime branch's return and the tail of the
        # logger call appear to be missing from this view
        if isinstance(incoming, datetime):
        elif isinstance(incoming, (int, float)):
            return datetime.fromtimestamp(incoming)
        logger.info("Cannot validate datetime for key %s with input %s" %

    @validates('date_created')
    def validate_date_created(self, key, incoming):
        # run date_created through the shared datetime normalizer
        return self.validate_datetime(key, incoming)

    @validates('last_updated')
    def validate_last_updated(self, key, incoming):
        # run last_updated through the shared datetime normalizer
        return self.validate_datetime(key, incoming)

    # xxx - there might be smarter ways to handle get/set'ing gid using
    def get_gid_object(self):
        # rebuild a GID object from the string stored in the gid column
        return GID(string=self.gid)

    def just_created(self):
        # stamp both creation and update times (naive UTC)
        now = datetime.utcnow()
        self.date_created = now
        self.last_updated = now

    def just_updated(self):
        # refresh only the update timestamp (naive UTC)
        now = datetime.utcnow()
        self.last_updated = now
# cross-relations tables
# authority x user (pis) association
# NOTE(review): the closing parenthesis of each Table(...) call appears to
# be missing from this view of the source
authority_pi_table = \
    Table('authority_pi', Base.metadata,
          Column('authority_id', Integer, ForeignKey(
              'records.record_id'), primary_key=True),
          Column('pi_id', Integer, ForeignKey(
              'records.record_id'), primary_key=True),

# slice x user (researchers) association
slice_researcher_table = \
    Table('slice_researcher', Base.metadata,
          Column('slice_id', Integer, ForeignKey(
              'records.record_id'), primary_key=True),
          Column('researcher_id', Integer, ForeignKey(
              'records.record_id'), primary_key=True),
203 ##############################
204 # all subclasses define a convenience constructor with a default value for type,
205 # and when applicable a way to define local fields in a kwd=value argument
class RegAuthority(RegRecord):
    """
    Registry record for an authority; maps to the 'authorities' table
    joined to 'records' through record_id.

    NOTE(review): several lines appear to be missing from this view (the
    first argument and closing of the relationship() call, the
    'def __repr__' header, and the tail of update_pis).
    """
    __tablename__ = 'authorities'
    __mapper_args__ = {'polymorphic_identity': 'authority'}
    record_id = Column(Integer, ForeignKey(
        "records.record_id"), primary_key=True)
    # extensions come here
    name = Column('name', String)
    # extensions come here
    # many-to-many: this authority's PI users, through authority_pi_table
    reg_pis = relationship \
        secondary=authority_pi_table,
        primaryjoin=RegRecord.record_id == authority_pi_table.c.authority_id,
        secondaryjoin=RegRecord.record_id == authority_pi_table.c.pi_id,
        backref='reg_authorities_as_pi',

    def __init__(self, **kwds):
        # handle local settings
        # NOTE(review): an "if 'name' in kwds:" guard presumably preceding
        # this pop appears to be missing from this view
        self.name = kwds.pop('name')
        # fill in type if not previously set
        if 'type' not in kwds:
            kwds['type'] = 'authority'
        # base class constructor
        RegRecord.__init__(self, **kwds)

    # no proper data yet, just hack the typename
    # NOTE(review): 'def __repr__(self):' header missing from this view
        result = RegRecord.__repr__(self).replace("Record", "Authority")
        # here again trying to display names that can be utf8 is too dangerous
        # result.replace(">", " name={}>".format(self.name))

    def update_pis(self, pi_hrns, dbsession):
        """Replace this authority's PI list with the users matching pi_hrns."""
        # strip that in case we have <researcher> words </researcher>
        pi_hrns = [x.strip() for x in pi_hrns]
        request = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns))
        logger.info("RegAuthority.update_pis: %d incoming pis, %d matches found"
                    % (len(pi_hrns), request.count()))
        # NOTE(review): this re-issues the same query instead of reusing
        # 'request'; also the trailing 'self.reg_pis = pis' assignment
        # appears to be missing from this view
        pis = dbsession.query(RegUser).filter(RegUser.hrn.in_(pi_hrns)).all()
class RegSlice(RegRecord):
    """
    Registry record for a slice; maps to the 'slices' table joined to
    'records' through record_id.

    NOTE(review): lines appear missing from this view (first argument and
    closing of relationship(), the 'def __repr__' and presumably a
    'def get_pi_hrns' header).
    """
    __tablename__ = 'slices'
    __mapper_args__ = {'polymorphic_identity': 'slice'}
    record_id = Column(Integer, ForeignKey(
        "records.record_id"), primary_key=True)
    # extensions come here
    # many-to-many: this slice's researcher users, through slice_researcher_table
    reg_researchers = relationship \
        secondary=slice_researcher_table,
        primaryjoin=RegRecord.record_id == slice_researcher_table.c.slice_id,
        secondaryjoin=RegRecord.record_id == slice_researcher_table.c.researcher_id,
        backref='reg_slices_as_researcher',

    def __init__(self, **kwds):
        # fill in type if not previously set
        if 'type' not in kwds:
            kwds['type'] = 'slice'
        RegRecord.__init__(self, **kwds)

    # NOTE(review): 'def __repr__(self):' header missing from this view
        return RegRecord.__repr__(self).replace("Record", "Slice")

    def update_researchers(self, researcher_hrns, dbsession):
        """Replace this slice's researcher list with users matching researcher_hrns."""
        # strip that in case we have <researcher> words </researcher>
        researcher_hrns = [x.strip() for x in researcher_hrns]
        request = dbsession.query(RegUser).filter(
            RegUser.hrn.in_(researcher_hrns))
        logger.info("RegSlice.update_researchers: %d incoming researchers, %d matches found"
                    % (len(researcher_hrns), request.count()))
        # NOTE(review): same query issued twice; could reuse 'request'
        researchers = dbsession.query(RegUser).filter(
            RegUser.hrn.in_(researcher_hrns)).all()
        self.reg_researchers = researchers

    # when dealing with credentials, we need to retrieve the PIs attached to a slice
    # WARNING: with the move to passing dbsessions around, we face a glitch here because this
    # helper function is called from the trust/ area that
    # NOTE(review): the method header (presumably 'def get_pi_hrns(self):')
    # is missing from this view; the body below looks up the slice's parent
    # authority record and returns its reg_pis
        from sqlalchemy.orm import sessionmaker
        Session = sessionmaker()
        # retrieve the session this object is already attached to
        dbsession = Session.object_session(self)
        from sfa.util.xrn import get_authority
        authority_hrn = get_authority(self.hrn)
        auth_record = dbsession.query(
            RegAuthority).filter_by(hrn=authority_hrn).first()
        return auth_record.reg_pis

    @validates('expires')
    def validate_expires(self, key, incoming):
        # normalize 'expires' through the shared datetime validator
        return self.validate_datetime(key, incoming)
class RegNode(RegRecord):
    """
    Registry record for a node; maps to the 'nodes' table joined to
    'records' through record_id. No local extension columns.
    """
    __tablename__ = 'nodes'
    __mapper_args__ = {'polymorphic_identity': 'node'}
    record_id = Column(Integer, ForeignKey(
        "records.record_id"), primary_key=True)

    def __init__(self, **kwds):
        # fill in type if not previously set
        if 'type' not in kwds:
            kwds['type'] = 'node'
        RegRecord.__init__(self, **kwds)

    # NOTE(review): 'def __repr__(self):' header missing from this view
        return RegRecord.__repr__(self).replace("Record", "Node")
class RegUser(RegRecord):
    """
    Registry record for a user; maps to the 'users' table joined to
    'records' through record_id.

    NOTE(review): lines appear missing from this view (closing of the
    relationship() call, a guard in __init__, the 'def __repr__' header
    and its return, the @validates('email') decorator and its return).
    """
    __tablename__ = 'users'
    # these objects will have type='user' in the records table
    __mapper_args__ = {'polymorphic_identity': 'user'}
    record_id = Column(Integer, ForeignKey(
        "records.record_id"), primary_key=True)
    # extensions come here
    email = Column('email', String)
    # can't use name 'keys' here because when loading from xml we're getting
    # a 'keys' tag, and assigning a list of strings in a reference column like
    # one-to-many: this user's public keys (RegKey rows), deleted with the user
    reg_keys = relationship \
        ('RegKey', backref='reg_user',
         cascade="all, delete, delete-orphan",

    # so we can use RegUser (email=.., hrn=..) and the like
    def __init__(self, **kwds):
        # handle local settings
        # NOTE(review): an "if 'email' in kwds:" guard presumably preceding
        # this pop appears to be missing from this view
        self.email = kwds.pop('email')
        if 'type' not in kwds:
            kwds['type'] = 'user'
        RegRecord.__init__(self, **kwds)

    # append stuff at the end of the record __repr__
    # NOTE(review): 'def __repr__(self):' header missing from this view
        result = RegRecord.__repr__(self).replace("Record", "User")
        # NOTE(review): str.replace returns a new string; this result is
        # discarded, so the email is never actually appended
        result.replace(">", " email={}>".format(self.email))

    def validate_email(self, key, address):
        # bare '@' presence check, not full RFC validation;
        # NOTE(review): the @validates('email') decorator and the
        # 'return address' appear to be missing from this view
        assert '@' in address
361 # xxx tocheck : not sure about eager loading of this one
362 # meaning, when querying the whole records, we expect there should
363 # be a single query to fetch all the keys
364 # or, is it enough that we issue a single query to retrieve all the keys
    # NOTE(review): the class header (presumably
    # 'class RegKey(Base, AlchemyObj):') is missing from this view;
    # the lines below are its body.
    __tablename__ = 'keys'
    key_id = Column(Integer, primary_key=True)
    # back-reference into the records table (the owning RegUser)
    record_id = Column(Integer, ForeignKey("records.record_id"))
    pointer = Column(Integer, default=-1)

    def __init__(self, key, pointer=None):
        # NOTE(review): the 'self.key = key' assignment presumably preceding
        # this line appears to be missing from this view
        self.pointer = pointer

    # NOTE(review): 'def __repr__(self):' header missing from this view
        result = "<key id=%s key=%s..." % (self.key_id, self.key[8:16],)
        result += " user=%s" % self.reg_user.record_id
class SliverAllocation(Base, AlchemyObj):
    """
    Tracks the GENI allocation state of individual slivers, keyed by
    sliver_id; maps to the 'sliver_allocation' table.

    NOTE(review): lines appear missing from this view (the 'def __repr__'
    header and tail, presumably @staticmethod decorators on
    set_allocations / delete_allocations, and the initialization and tail
    of sync()).
    """
    __tablename__ = 'sliver_allocation'
    sliver_id = Column(String, primary_key=True)
    client_id = Column(String)
    component_id = Column(String)
    slice_urn = Column(String)
    allocation_state = Column(String)

    def __init__(self, **kwds):
        # copy only the recognized keywords; anything else is ignored
        if 'sliver_id' in kwds:
            self.sliver_id = kwds['sliver_id']
        if 'client_id' in kwds:
            self.client_id = kwds['client_id']
        if 'component_id' in kwds:
            self.component_id = kwds['component_id']
        if 'slice_urn' in kwds:
            self.slice_urn = kwds['slice_urn']
        if 'allocation_state' in kwds:
            self.allocation_state = kwds['allocation_state']

    # NOTE(review): 'def __repr__(self):' header missing from this view
        result = "<sliver_allocation sliver_id=%s allocation_state=%s"\
            % (self.sliver_id, self.allocation_state)

    @validates('allocation_state')
    def validate_allocation_state(self, key, state):
        # only the three GENI allocation states are legal
        # NOTE(review): a trailing 'return state' appears to be missing
        # from this view; also note assert is stripped under python -O
        allocation_states = ['geni_unallocated',
                             'geni_allocated', 'geni_provisioned']
        assert state in allocation_states

    # NOTE(review): no 'self' parameter — presumably a @staticmethod whose
    # decorator is missing from this view
    def set_allocations(sliver_ids, state, dbsession):
        """Set allocation_state for the given sliver ids, inserting rows as needed."""
        if not isinstance(sliver_ids, list):
            sliver_ids = [sliver_ids]
        # NOTE(review): unused in the visible code
        sliver_state_updated = {}
        # update existing allocation rows in one query
        constraint = SliverAllocation.sliver_id.in_(sliver_ids)
        sliver_allocations = dbsession.query(
            SliverAllocation).filter(constraint)
        sliver_ids_found = []
        for sliver_allocation in sliver_allocations:
            sliver_allocation.allocation_state = state
            sliver_ids_found.append(sliver_allocation.sliver_id)

        # Some states may not have been updated because no sliver allocation state record
        # exists for the sliver. Insert new allocation records for these slivers and set
        # it to geni_allocated.
        sliver_ids_not_found = set(sliver_ids).difference(sliver_ids_found)
        for sliver_id in sliver_ids_not_found:
            record = SliverAllocation(
                sliver_id=sliver_id, allocation_state=state)
            dbsession.add(record)
        # NOTE(review): a closing dbsession.commit() appears to be missing
        # from this view

    # NOTE(review): no 'self' parameter — presumably a @staticmethod whose
    # decorator is missing from this view
    def delete_allocations(sliver_ids, dbsession):
        """Delete the allocation rows for the given sliver ids."""
        if not isinstance(sliver_ids, list):
            sliver_ids = [sliver_ids]
        constraint = SliverAllocation.sliver_id.in_(sliver_ids)
        sliver_allocations = dbsession.query(
            SliverAllocation).filter(constraint)
        for sliver_allocation in sliver_allocations:
            dbsession.delete(sliver_allocation)
        # NOTE(review): a closing dbsession.commit() appears to be missing
        # from this view

    def sync(self, dbsession):
        """Upsert this object's fields into the matching db row."""
        constraints = [SliverAllocation.sliver_id == self.sliver_id]
        results = dbsession.query(SliverAllocation).filter(and_(*constraints))
        # NOTE(review): a 'records = []' initialization appears to be
        # missing from this view
        for result in results:
            records.append(result)
        # NOTE(review): the branch selecting/creating 'record' (insert if
        # absent, else first match) appears to be missing from this view
        record.sliver_id = self.sliver_id
        record.client_id = self.client_id
        record.component_id = self.component_id
        record.slice_urn = self.slice_urn
        record.allocation_state = self.allocation_state
474 ##############################
475 # although the db needs of course to be reachable for the following functions
476 # the schema management functions are here and not in alchemy
477 # because the actual details of the classes need to be known
478 # migrations: this code has no notion of the previous versions
479 # of the data model nor of migrations
480 # sfa.storage.migrations.db_init uses this when starting from
def init_tables(engine):
    """Create every table of the current/latest model on the given engine."""
    logger.info("Initializing db schema from current/latest model")
    Base.metadata.create_all(bind=engine)
def drop_tables(engine):
    """Drop every table of the current/latest model from the given engine."""
    logger.info("Dropping tables from current/latest model")
    Base.metadata.drop_all(bind=engine)
491 ##############################
492 # create a record of the right type from either a dict or an xml string
def make_record(dict=None, xml=""):
    """
    Build a Reg* record from either a dict or an xml string.

    NOTE(review): the 'if dict:' / 'elif xml:' guards that presumably
    dispatch between the two returns appear to be missing from this view.
    """
        return make_record_dict(dict)
        return make_record_xml(xml)
    # neither input was provided
    raise Exception("make_record has no input")
505 # convert an incoming record - typically from xmlrpc - into an object
def make_record_dict(record_dict):
    """
    Convert an incoming dict (typically from the xmlrpc layer) into the
    matching Reg* instance, dispatching on the leading part of 'type'.

    NOTE(review): the 'elif' headers for the 'user' and 'node' branches,
    the final 'else:', and a trailing 'return result' appear to be
    missing from this view.
    """
    assert ('type' in record_dict)
    # 'authority+sa' and the like all map onto their base type
    type = record_dict['type'].split('+')[0]
    if type == 'authority':
        result = RegAuthority(dict=record_dict)
        result = RegUser(dict=record_dict)
    elif type == 'slice':
        result = RegSlice(dict=record_dict)
        result = RegNode(dict=record_dict)
        logger.debug("Untyped RegRecord instance")
        result = RegRecord(dict=record_dict)
    logger.info("converting dict into Reg* with type=%s" % type)
    logger.info("returning=%s" % result)
525 # register non-db attributes in an extensions field
def make_record_xml(xml_str):
    """
    Convert an xml string into a Reg* record, by way of a dict.

    NOTE(review): the line that parses xml_str (presumably
    'xml = XML(xml_str)') appears to be missing from this view.
    """
    xml_dict = xml.todict()
    logger.info("load from xml, keys=%s" % xml_dict.keys())
    return make_record_dict(xml_dict)
536 # augment local records with data from builtin relationships
537 # expose related objects as a list of hrns
538 # we pick names that clearly won't conflict with the ones used in the old approach,
# where the relationships data came from the testbed side
540 # for each type, a dict of the form {<field-name-exposed-in-record>:<alchemy_accessor_name>}
541 # so after that, an 'authority' record will e.g. have a 'reg-pis' field
542 # with the hrns of its pi-users
# maps record type -> {exposed-field-name: alchemy-accessor-name}
# NOTE(review): the closing brace of this dict literal appears to be
# missing from this view of the source
augment_map = {'authority': {'reg-pis': 'reg_pis', },
               'slice': {'reg-researchers': 'reg_researchers', },
               'user': {'reg-pi-authorities': 'reg_authorities_as_pi',
                        'reg-slices': 'reg_slices_as_researcher', },
551 # the way we use sqlalchemy might be a little wrong
552 # in any case what has been observed is that (Reg)Records as returned by an sqlalchemy
553 # query not always have their __dict__ properly adjusted
554 # typically a RegAuthority object would have its object.name set properly, but
555 # object.__dict__ has no 'name' key
556 # which is an issue because we rely on __dict__ for many things, in particular this
557 # is what gets exposed to the drivers (this is historical and dates back before sqlalchemy)
558 # so it is recommended to always run this function that will make sure
559 # that such built-in fields are properly set in __dict__ too
def augment_with_sfa_builtins(local_record):
    """
    Make sure builtin SFA fields ('reg-urn', 'reg-keys', and the
    relationship lists declared in augment_map) are set on the record,
    and force selected column attributes into __dict__ so dict-based
    consumers see them.

    NOTE(review): a couple of lines appear to be missing from this view,
    notably a 'fields_to_check = []' default — without it the loop below
    would raise NameError for record types other than user/authority.
    """
    # don't ruin the import of that file in a client world
    from sfa.util.xrn import Xrn
    # expose the urn under the 'reg-urn' attribute
    setattr(local_record, 'reg-urn',
            Xrn(xrn=local_record.hrn, type=local_record.type).urn)
    # users have keys and this is needed to synthesize 'users' sent over to
    if local_record.type == 'user':
        # flatten the RegKey rows into a plain list of key strings
        user_keys = [key.key for key in local_record.reg_keys]
        setattr(local_record, 'reg-keys', user_keys)
        fields_to_check = ['email']
    elif local_record.type == 'authority':
        fields_to_check = ['name']
    for field in fields_to_check:
        if not field in local_record.__dict__:
            logger.debug("augment_with_sfa_builtins: hotfixing missing '{}' in {}"
                         .format(field, local_record.hrn))
            # force the attribute into __dict__ so it survives dict() exposure
            local_record.__dict__[field] = getattr(local_record, field)
    # search in map according to record type
    type_map = augment_map.get(local_record.type, {})
    # use type-dep. map to do the job
    for (field_name, attribute) in type_map.items():
        # get related objects
        related_records = getattr(local_record, attribute, [])
        hrns = [r.hrn for r in related_records]
        setattr(local_record, field_name, hrns)