2 pkg_resources.require("SQLAlchemy>=0.3.10")
3 pkg_resources.require("Elixir>=0.4.0")
4 # import the basic Elixir classes and functions for declaring the data model
5 # (see http://elixir.ematia.de/trac/wiki/TutorialDivingIn)
6 from elixir import EntityMeta, Entity, Field, OneToMany, ManyToOne, ManyToMany
7 from elixir import options_defaults, using_options, setup_all, entities
8 # import some datatypes for table columns from Elixir
9 # (see http://www.sqlalchemy.org/docs/04/types.html for more)
10 from elixir import String, Unicode, Integer, DateTime
11 from sqlalchemy import ColumnDefault
12 from sqlalchemy import Table
13 from sqlalchemy.orm import ColumnProperty, object_session
15 from xml.marshal.generic import Marshaller
16 from xml.dom.ext import PrettyPrint
17 from xml.dom.ext.reader.Sax import FromXml
18 from elementtree import ElementTree
# --- Module-level ORM setup (Elixir on SQLAlchemy) ---
# NOTE(review): this chunk is a partial numbered listing; several original
# lines (including the try/except guarding the defines.py import) are missing.
20 options_defaults['autosetup'] = False
22 from elixir.statements import Statement
23 from sqlalchemy import Sequence
# Printed when the optional site-specific defines.py cannot be imported
# (the guarding try/except is not visible in this listing -- TODO confirm).
28 print "WARNING: no defines.py available"
30 from monitor.database.dborm import zab_metadata, zab_session
# Bind every Elixir entity declared in this module to the shared Zabbix
# database metadata and contextual session.
32 __metadata__ = zab_metadata
33 __session__ = zab_session
36 # - declare association between Media and MediaType so that look ups can
37 # occur on 'description'
# Mixin providing XML serialization/deserialization for the Zabbix Elixir
# entities, built on elementtree.ElementTree.
# NOTE(review): this is an incomplete numbered listing -- decorators, loop
# headers and else-branches are missing from view, so comments are hedged.
39 class ZabbixSerialize(object):
# Deserialize an XML string into entity objects: parse into a dict (the
# missing line 43 presumably calls cls.xml2dict(xml)) then rebuild objects.
42 def xmlDeserialize(cls, xml):
44 return cls.dict2object(d)
# Serialize this entity to an XML string.  Starts from the column dict
# (to_dict), pulls in any 'deepcopy' child lists, and strips the auto
# primary key plus any entity-specific skip_keys before emitting XML.
46 def xmlSerialize(self, elem=None):
47 dict = self.convert_dict(self.to_dict())
# 'deepcopy' names child-relation attributes that are serialized along
# with the parent (e.g. media_list on User).
49 if hasattr(self, 'deepcopy'):
50 for val in self.deepcopy:
51 dict[val] = getattr(self, val)
# Never serialize the synthetic primary key; entities may extend the
# exclusion set with a class-level skip_keys list.
53 skip_keys = [self._descriptor.auto_primarykey]
54 if hasattr(self, 'skip_keys'):
55 skip_keys += self.skip_keys
57 return self.xmlMessage(dict, skip_keys, elem)
# Build the ElementTree message for a dict (or list) of values.  Each
# subelement gets a "type" attribute (int/long/list) so xml2dict can
# restore the original Python types on the way back in.
# NOTE(review): mutable default skip_keys=[] is shared across calls --
# safe only if never mutated; worth confirming upstream.
60 def xmlMessage(cls, dict=None, skip_keys=[], use_elem=None):
62 elem = ElementTree.Element(cls.classname())
64 if isinstance(dict, type({})):
65 for key, value in dict.items():
# ints and longs become element text with an explicit type marker.
69 if isinstance(value, type(0)):
70 ElementTree.SubElement(elem, key, type="int").text = str(value)
72 elif isinstance(value, type(0L)):
73 ElementTree.SubElement(elem, key, type="long").text = str(value)
# Lists hold nested entity objects; each is converted recursively
# (the loop over the list items is not visible in this listing).
75 elif isinstance(value, type([])):
77 e = ElementTree.SubElement(elem, key, type="list")
79 d = obj.convert_dict(obj.to_dict())
# Fallback: plain (presumably string) values go in as element text.
82 ElementTree.SubElement(elem, key).text = value
# A bare list of objects is wrapped in a "<classname>_list" element.
84 elif isinstance(dict, type([])):
87 key = "%s_list" % o.__class__.__name__.lower()
88 e = ElementTree.SubElement(elem, key, type="list")
90 d = obj.convert_dict(obj.to_dict())
# When use_elem is given the element is presumably attached to it
# instead of serialized standalone -- TODO confirm (lines missing).
93 if use_elem is not None:
96 return ElementTree.tostring(elem)
# Convert an XML message (or a pre-parsed element) back into a nested
# dict, honouring the "type" attributes written by xmlMessage.
99 def xml2dict(cls, message, elem=None):
100 em = get_zabbix_entitymap()
102 if message and elem is None:
103 elem = ElementTree.XML(message)
105 raise Exception("Cannot proceed with empty xml, and no elem")
107 #print "tag: %s : classname : %s" % (elem.tag, cls.classname())
# When called on a concrete entity class the root tag must match it.
108 if cls is not ZabbixSerialize:
109 assert elem.tag == cls.classname()
112 if elem.get("type") == "int":
113 dict[elem.tag] = int(elem.text)
114 elif elem.get("type") == "long":
115 dict[elem.tag] = long(elem.text)
# List elements recurse through the entity map keyed by child tag name.
116 elif elem.get("type") == "list":
117 if cls is not ZabbixSerialize:
118 assert elem.tag in cls.deepcopy, "List (%s) in XML is not a recognized type for this object (%s)" % (elem.tag, cls.classname())
121 dict[elem.tag].append( em[e.tag].xml2dict(None, e) )
122 elif elem.text is None:
125 dict[elem.tag] = elem.text
# Rebuild entity objects from a nested dict produced by xml2dict.
# Child lists named in cls.deepcopy are detached first, the primary
# object is found-or-created, then children are re-attached via the
# appropriate relation (the isinstance dispatch chain below).
129 def dict2object(cls, dict):
130 em = get_zabbix_entitymap()
# Called on the base class: each key names an entity list to rebuild.
131 if cls is ZabbixSerialize:
132 # note: assume that there's only one type of class
134 for key in dict.keys():
135 clsobj = get_zabbix_class_from_name(key)
136 retdict[key] = [ clsobj.dict2object(data) for data in dict[key] ]
139 # take deepcopy values out of dict.
141 if hasattr(cls, 'deepcopy'):
142 for val in cls.deepcopy:
144 backup[val] = dict[val]
148 # for each deepcopy object, convert all values in list
149 for k in backup.keys():
150 clsobj = get_zabbix_class_from_name(k)
151 l = [ clsobj.dict2object(data) for data in backup[k] ]
154 # find or create the primary object
155 obj = cls.find_or_create(**dict)
156 #if cls is DiscoveryCheck or \
157 # cls is ActionCondition or \
158 # cls is ActionOperation:
159 # # NOTE: Some objects should always be created. like DiscoveryCheck
162 # obj = cls.get_by(**dict)
165 # print "CREATING NEW %s" % cls.classname()
168 # print "FOUND EXISTING OBJECT: %s"% obj
170 # add deepcopy values to primary object
171 for k in backup.keys():
172 print type(backup[k][0])
# Dispatch on (parent, child) types to attach children through the
# correct relationship attribute or helper method.
174 if isinstance(obj, User) and isinstance(backup[k][0], UsrGrp):
175 print "adding groups to user"
179 elif isinstance(obj, User) and isinstance(backup[k][0], Media):
180 print "adding media to user"
182 obj.media_list.append(g)
184 elif isinstance(obj, UsrGrp) and isinstance(backup[k][0], HostGroup):
185 print "adding hostgroup to usergroup"
186 print "NOT IMPLEMENTED!!!"
188 obj.append_hostgroup(g)
191 elif isinstance(obj, Action) and isinstance(backup[k][0], ActionCondition):
192 print "adding actionconditon to action"
194 obj.actioncondition_list.append(g)
196 elif isinstance(obj, Action) and isinstance(backup[k][0], ActionOperation):
197 print "adding actionoperation to action"
199 obj.actionoperation_list.append(g)
201 elif isinstance(obj, ActionOperation) and \
202 isinstance(backup[k][0], OperationCondition):
203 print "adding operationcondition to actionoperation"
205 obj.operationcondition_list.append(g)
207 elif isinstance(obj, DiscoveryRule) and isinstance(backup[k][0], DiscoveryCheck):
208 print "adding discoverycheck to discoveryrule"
210 obj.discoverycheck_list.append(v)
# Normalize a to_dict() result: coerce keys to str and recurse into
# list values so the structure is plain dicts/lists for marshalling.
214 def convert_dict(self, d):
217 if type(d[key]) == type([]):
218 rd[str(key)] = [ self.convert_dict(v) for v in d[key] ]
220 rd[str(key)] = d[key]
# Pretty-print this object's XML serialization (the remainder of the
# method body is not visible in this listing).
227 def prettyserialize(self):
228 xml = self.xmlSerialize()
# Base class for all Zabbix entities: combines the XML serialization mixin
# with Elixir's EntityMeta and adds session/query convenience wrappers.
# NOTE(review): partial numbered listing -- lines are missing inside most
# methods, so annotations below are hedged where the code is not visible.
232 class ZabbixEntity(ZabbixSerialize):
233 __metaclass__ = EntityMeta
# Allocate the next primary-key value from the shared IDs counter table
# before handing the kwargs off to Elixir's normal initialization.
235 def __init__(self, **kwargs):
236 print "__INIT__ %s" % self.classname()
237 tablename = self._descriptor.tablename
238 fieldname = self._descriptor.auto_primarykey
239 index = IDs.get_by(table_name=tablename, field_name=fieldname)
# First use of this table: seed a fresh counter row (nextid=10).
241 print "NEW IDs index INSIDE INIT"
242 index = IDs(table_name=tablename, field_name=fieldname, nodeid=0, nextid=10)
244 index.nextid = index.nextid + 1
245 kwargs[fieldname] = index.nextid
# Presumably part of __repr__/__str__: include deepcopy child lists in
# the printable representation -- enclosing def not visible, TODO confirm.
250 if hasattr(self, 'deepcopy'):
251 for k in self.deepcopy:
252 rd[k] = [ str(v) for v in getattr(self, k) ]
254 rd.update(self.to_dict())
261 return self.classname() + "(" + val + ")"
# Bulk attribute assignment from keyword arguments.
267 def set(self, **kwargs):
268 for key, value in kwargs.iteritems():
269 setattr(self, key, value)
# Look up an entity by its non-list attributes, creating it when absent.
# DiscoveryCheck/ActionCondition/ActionOperation are always created anew.
# NOTE(review): mutable default set_if_new={} is shared across calls.
272 def find_or_create(cls, exec_if_new=None, set_if_new={}, **kwargs):
273 if cls is DiscoveryCheck or cls is ActionCondition or \
274 cls is ActionOperation:
275 # NOTE: Some objects should always be created. like DiscoveryCheck
278 # NOTE: ignore *_list items
281 if "_list" not in key:
282 query[key] = kwargs[key]
283 print "SEARCHING USING %s" % query
284 obj = cls.get_by(**query)
287 print "CREATING NEW %s" % cls.classname()
288 print "USING %s" % kwargs
290 obj.set(**set_if_new)
294 print "FOUND EXISTING OBJECT: %s"% obj
# Update an existing record located by primary key, or create one;
# 'surrogate' controls whether a caller-supplied pk is legal.
298 def update_or_create(cls, data, surrogate=True):
299 pk_props = cls._descriptor.primary_key_properties
301 # if all pk are present and not None
302 if not [1 for p in pk_props if data.get(p.key) is None]:
303 pk_tuple = tuple([data[prop.key] for prop in pk_props])
304 record = cls.query.get(pk_tuple)
307 raise Exception("cannot create surrogate with pk")
314 raise Exception("cannot create non surrogate without pk")
315 record.from_dict(data)
317 update_or_create = classmethod(update_or_create)
# Populate this object (and nested relations) from a JSON-style dict.
319 def from_dict(self, data):
321 Update a mapped class with data from a JSON-style nested dict/list
324 # surrogate can be guessed from autoincrement/sequence but I guess
325 # that's not 100% reliable, so we'll need an override
327 mapper = sqlalchemy.orm.object_mapper(self)
329 for key, value in data.iteritems():
# Nested dict -> scalar relation: update in place when no pk is given
# and a related record already exists, otherwise update_or_create.
330 if isinstance(value, dict):
331 dbvalue = getattr(self, key)
332 rel_class = mapper.get_property(key).mapper.class_
333 pk_props = rel_class._descriptor.primary_key_properties
335 # If the data doesn't contain any pk, and the relationship
336 # already has a value, update that record.
337 if not [1 for p in pk_props if p.key in data] and \
339 dbvalue.from_dict(value)
341 record = rel_class.update_or_create(value)
342 setattr(self, key, record)
# Nested list of dicts -> collection relation, rebuilt element-wise.
343 elif isinstance(value, list) and \
344 value and isinstance(value[0], dict):
346 rel_class = mapper.get_property(key).mapper.class_
349 if not isinstance(row, dict):
351 'Cannot send mixed (dict/non dict) data '
352 'to list relationships in from_dict data.')
353 record = rel_class.update_or_create(row)
354 new_attr_value.append(record)
355 setattr(self, key, new_attr_value)
357 setattr(self, key, value)
359 def to_dict(self, deep={}, exclude=[]):
360 """Generate a JSON-style nested dict/list structure from an object."""
361 col_prop_names = [p.key for p in self.mapper.iterate_properties \
362 if isinstance(p, ColumnProperty)]
363 data = dict([(name, getattr(self, name))
364 for name in col_prop_names if name not in exclude])
# Recurse into requested relations, excluding the foreign-key columns
# that merely point back at the parent row.
365 for rname, rdeep in deep.iteritems():
366 dbdata = getattr(self, rname)
367 #FIXME: use attribute names (ie coltoprop) instead of column names
368 fks = self.mapper.get_property(rname).remote_side
369 exclude = [c.name for c in fks]
370 if isinstance(dbdata, list):
371 data[rname] = [o.to_dict(rdeep, exclude) for o in dbdata]
373 data[rname] = dbdata.to_dict(rdeep, exclude)
# Thin wrappers delegating to the SQLAlchemy session owning this object.
377 def flush(self, *args, **kwargs):
378 return object_session(self).flush([self], *args, **kwargs)
380 def delete(self, *args, **kwargs):
381 return object_session(self).delete(self, *args, **kwargs)
383 def expire(self, *args, **kwargs):
384 return object_session(self).expire(self, *args, **kwargs)
386 def refresh(self, *args, **kwargs):
387 return object_session(self).refresh(self, *args, **kwargs)
389 def expunge(self, *args, **kwargs):
390 return object_session(self).expunge(self, *args, **kwargs)
392 # This bunch of session methods, along with all the query methods below
393 # only make sense when using a global/scoped/contextual session.
394 def _global_session(self):
395 return self._descriptor.session.registry()
396 _global_session = property(_global_session)
398 def merge(self, *args, **kwargs):
399 return self._global_session.merge(self, *args, **kwargs)
401 def save(self, *args, **kwargs):
402 return self._global_session.save(self, *args, **kwargs)
404 def update(self, *args, **kwargs):
405 return self._global_session.update(self, *args, **kwargs)
407 # only exist in SA < 0.5
408 # IMO, the replacement (session.add) doesn't sound good enough to be added
409 # here. For example: "o = Order(); o.add()" is not very telling. It's
410 # better to leave it as "session.add(o)"
411 def save_or_update(self, *args, **kwargs):
412 return self._global_session.save_or_update(self, *args, **kwargs)
# Class-level query helpers: first-match filter and pk lookup.
415 def get_by(cls, *args, **kwargs):
416 return cls.query.filter_by(*args, **kwargs).first()
417 get_by = classmethod(get_by)
419 def get(cls, *args, **kwargs):
420 return cls.query.get(*args, **kwargs)
421 get = classmethod(get)
# Simple Zabbix table entities: columns come from table reflection; each
# declares its table name and synthetic primary key (the using_options(...)
# wrapper lines are not visible in this listing).
429 class Escalation(ZabbixEntity):
431 tablename='escalations',
433 auto_primarykey='escalationid'
436 class Event(ZabbixEntity):
440 auto_primarykey='eventid'
443 class Item(ZabbixEntity):
447 auto_primarykey='itemid'
450 class Acknowledge(ZabbixEntity):
452 tablename='acknowledges',
454 auto_primarykey='acknowledgeid'
457 class Trigger(ZabbixEntity):
459 tablename='triggers',
461 auto_primarykey='triggerid'
465 class Right(ZabbixEntity):
466 # rights of a usergroup to interact with hosts of a hostgroup
470 auto_primarykey='rightid',
472 # column groupid is an index to usrgrp.usrgrpid
473 # column id is an index into the host-groups.groupid
474 # permission is 3=rw, 2=ro, 1=r_list, 0=deny
476 # TODO: NOTE: When serialization occurs, the 'permissions' field is lost,
477 # currently since the rights table is merely treated as an intermediate
478 # table for the m2m between usrgrp and groups.
# Reflected association tables used by the ManyToMany relations declared
# on Host/HostGroup/UsrGrp below.
480 rights = Table('rights', __metadata__, autoload=True)
481 hostsgroups = Table('hosts_groups', __metadata__, autoload=True)
482 hoststemplates = Table('hosts_templates', __metadata__, autoload=True)
485 # m2m table between hosts and groups below
# Row-level entities for the host<->group and host<->template association
# tables; used where rows must be created/deleted explicitly (see the
# delete()/append helpers on Host).
486 class HostsGroups(ZabbixEntity):
488 tablename='hosts_groups',
490 auto_primarykey='hostgroupid',
493 class HostsTemplates(ZabbixEntity):
495 tablename='hosts_templates',
497 auto_primarykey='hosttemplateid',
# A Zabbix host; linked to HostGroup via hosts_groups and to template hosts
# (templates are rows of this same table) via hosts_templates.
500 class Host(ZabbixEntity):
504 auto_primarykey='hostid',
506 hostgroup_list = ManyToMany(
509 foreign_keys=lambda: [hostsgroups.c.groupid, hostsgroups.c.hostid],
510 primaryjoin=lambda: Host.hostid==hostsgroups.c.hostid,
511 secondaryjoin=lambda: HostGroup.groupid==hostsgroups.c.groupid,
# Self-referential m2m: a "template" is just another Host row.
513 template_list = ManyToMany(
515 table=hoststemplates,
516 foreign_keys=lambda: [hoststemplates.c.hostid, hoststemplates.c.templateid],
517 primaryjoin=lambda: Host.hostid==hoststemplates.c.hostid,
518 secondaryjoin=lambda: Host.hostid==hoststemplates.c.templateid,
# Template links are managed through explicit association rows rather
# than the relation above.
521 def append_template(self, template):
522 row = HostsTemplates(hostid=self.hostid, templateid=template.hostid)
525 def remove_template(self, template):
526 row = HostsTemplates.get_by(hostid=self.hostid, templateid=template.hostid)
# delete(): remove dependent association rows before deleting the host
# itself (the method's def line is not visible in this listing).
531 # NOTE: media objects are automatically handled.
532 hosts_templates_match = HostsTemplates.query.filter_by(hostid=self.hostid).all()
533 for row in hosts_templates_match:
536 hosts_groups_match = HostsGroups.query.filter_by(hostid=self.hostid).all()
537 for row in hosts_groups_match:
539 super(Host, self).delete()
# A Zabbix host group; related to UsrGrp through the 'rights' table and to
# Host through 'hosts_groups'.
541 class HostGroup(ZabbixEntity):
545 auto_primarykey='groupid',
547 usrgrp_list = ManyToMany(
# NOTE: in 'rights', groupid references usrgrp.usrgrpid while id
# references groups.groupid (see the comments on Right).
550 foreign_keys=lambda: [rights.c.groupid, rights.c.id],
551 primaryjoin=lambda: HostGroup.groupid==rights.c.id,
552 secondaryjoin=lambda: UsrGrp.usrgrpid==rights.c.groupid,
554 host_list = ManyToMany(
557 foreign_keys=lambda: [hostsgroups.c.groupid, hostsgroups.c.hostid],
558 primaryjoin=lambda: HostGroup.groupid==hostsgroups.c.groupid,
559 secondaryjoin=lambda: Host.hostid==hostsgroups.c.hostid,
# delete(): clear hosts_groups membership rows before deleting the group
# (the method's def line is not visible in this listing).
562 # NOTE: media objects are automatically handled.
563 hosts_groups_match = HostsGroups.query.filter_by(groupid=self.groupid).all()
564 for row in hosts_groups_match:
566 super(HostGroup, self).delete()
# users_groups m2m rows (user <-> usrgrp), notification media types, and
# admin scripts -- all reflected tables with a declared synthetic pk.
568 class UsersGroups(ZabbixEntity):
570 tablename='users_groups',
572 auto_primarykey='id',
575 class MediaType(ZabbixEntity):
577 tablename='media_type',
579 auto_primarykey='mediatypeid',
582 class Script(ZabbixEntity):
586 auto_primarykey='scriptid',
590 # DISCOVERY ################################################3
# DiscoveryCheck belongs to a DiscoveryRule (druleid FK); the rule id is
# excluded from serialization because the parent supplies it on attach.
592 class DiscoveryCheck(ZabbixEntity):
596 auto_primarykey='dcheckid',
598 skip_keys = ['druleid']
599 discoveryrule = ManyToOne('DiscoveryRule',
600 primaryjoin=lambda: DiscoveryCheck.druleid == DiscoveryRule.druleid,
601 foreign_keys=lambda: [DiscoveryCheck.druleid],
# DiscoveryRule owns its checks and discovered hosts; the check list is
# serialized with the rule (deepcopy) and both are delete-orphan cascaded.
604 class DiscoveryRule(ZabbixEntity): # parent of dchecks
608 auto_primarykey='druleid',
610 deepcopy = ['discoverycheck_list']
611 discoverycheck_list = OneToMany('DiscoveryCheck', cascade='all, delete-orphan',
612 primaryjoin=lambda: DiscoveryCheck.druleid == DiscoveryRule.druleid,
613 foreign_keys=lambda: [DiscoveryCheck.druleid])
615 discoveredhost_list = OneToMany('DiscoveredHost', cascade='all, delete-orphan',
616 primaryjoin=lambda: DiscoveredHost.druleid == DiscoveryRule.druleid,
617 foreign_keys=lambda: [DiscoveredHost.druleid])
619 class DiscoveredHost(ZabbixEntity):
623 auto_primarykey='dhostid',
625 discoveryrule = ManyToOne('DiscoveryRule',
626 primaryjoin=lambda: DiscoveredHost.druleid == DiscoveryRule.druleid,
627 foreign_keys=lambda: [DiscoveredHost.druleid],
630 discoveryservice_list = OneToMany('DiscoveryService', cascade='all, delete-orphan',
631 primaryjoin=lambda: DiscoveryService.dhostid== DiscoveredHost.dhostid,
632 foreign_keys=lambda: [DiscoveryService.dhostid],)
634 class DiscoveryService(ZabbixEntity):
636 tablename='dservices',
638 auto_primarykey='dserviceid',
# NOTE(review): attribute is named 'discoveryrule' but the relation maps
# to the parent DiscoveredHost -- looks like a copy/paste misnomer.
640 discoveryrule = ManyToOne('DiscoveredHost',
641 primaryjoin=lambda: DiscoveryService.dhostid== DiscoveredHost.dhostid,
642 foreign_keys=lambda: [DiscoveryService.dhostid],
646 # ACTIONS ################################################3
# Operation of an Action; owns OperationConditions.  'actionid' is skipped
# in XML because the parent Action supplies it when children are attached.
648 class ActionOperation(ZabbixEntity):
650 tablename='operations', autoload=True, auto_primarykey='operationid',
652 deepcopy = ['operationcondition_list']
653 skip_keys = ['actionid']
654 action = ManyToOne('Action', ondelete='cascade',
655 primaryjoin=lambda: ActionOperation.actionid == Action.actionid,
656 foreign_keys=lambda: [ActionOperation.actionid])
658 operationcondition_list = OneToMany('OperationCondition', cascade='all, delete-orphan',
659 primaryjoin=lambda: OperationCondition.operationid == ActionOperation.operationid,
660 foreign_keys=lambda: [OperationCondition.operationid])
# Condition scoped to a single ActionOperation (operationid supplied by
# the parent, hence skip_keys).
662 class OperationCondition(ZabbixEntity):
664 tablename='opconditions', autoload=True, auto_primarykey='opconditionid',
666 skip_keys = ['operationid']
667 actionoperation = ManyToOne('ActionOperation', ondelete='cascade',
668 primaryjoin=lambda: OperationCondition.operationid == ActionOperation.operationid,
669 foreign_keys=lambda: [OperationCondition.operationid])
671 class ActionCondition(ZabbixEntity):
673 tablename='conditions', autoload=True, auto_primarykey='conditionid',
675 skip_keys = ['actionid']
676 action = ManyToOne('Action', ondelete='cascade',
677 primaryjoin=lambda: ActionCondition.actionid == Action.actionid,
678 foreign_keys=lambda: [ActionCondition.actionid])
# Action aggregates its operations and conditions; both lists serialize
# with the action (deepcopy) and are delete-orphan cascaded.
680 class Action(ZabbixEntity):
682 tablename='actions', autoload=True, auto_primarykey='actionid',
684 deepcopy = ['actionoperation_list', 'actioncondition_list']
685 actionoperation_list = OneToMany('ActionOperation', cascade='all, delete-orphan',
686 primaryjoin=lambda: ActionOperation.actionid == Action.actionid,
687 foreign_keys=lambda: [ActionOperation.actionid])
689 actioncondition_list = OneToMany('ActionCondition', cascade='all, delete-orphan',
690 primaryjoin=lambda: ActionCondition.actionid == Action.actionid,
691 foreign_keys=lambda: [ActionCondition.actionid])
693 # USERS & EMAIL MEDIA ################################################3
# A notification medium belonging to a User; userid comes from the parent
# at attach time, hence skip_keys.
695 class Media(ZabbixEntity):
699 auto_primarykey='mediaid',
701 skip_keys = ['userid']
702 user = ManyToOne('User',
703 primaryjoin=lambda: Media.userid == User.userid,
704 foreign_keys=lambda: [Media.userid],
# Reflected association table backing the User<->UsrGrp m2m below.
707 users_groups = Table('users_groups', __metadata__, autoload=True)
# A Zabbix user; owns Media rows (cascaded) and participates in UsrGrp
# membership through the users_groups table.
709 class User(ZabbixEntity): # parent of media
713 auto_primarykey='userid',
715 deepcopy = ['media_list', 'usrgrp_list']
716 media_list = OneToMany('Media',
717 primaryjoin=lambda: Media.userid == User.userid,
718 foreign_keys=lambda: [Media.userid],
719 cascade='all, delete-orphan')
721 # READ-ONLY: do not append or remove groups here.
722 usrgrp_list = ManyToMany('UsrGrp',
724 foreign_keys=lambda: [users_groups.c.userid, users_groups.c.usrgrpid],
725 primaryjoin=lambda: User.userid==users_groups.c.userid,
726 secondaryjoin=lambda: UsrGrp.usrgrpid==users_groups.c.usrgrpid)
# delete(): remove users_groups membership rows first (the method's def
# line is not visible in this listing); media rows cascade automatically.
729 # NOTE: media objects are automatically handled.
730 users_groups_match = UsersGroups.query.filter_by(userid=self.userid).all()
731 for row in users_groups_match:
733 super(User, self).delete()
# Group membership is managed via explicit UsersGroups rows, not via the
# read-only usrgrp_list relation above.
735 def append_group(self, group):
736 ug_row = UsersGroups(usrgrpid=group.usrgrpid, userid=self.userid)
739 def remove_group(self, group):
740 ug_row = UsersGroups.get_by(usrgrpid=group.usrgrpid, userid=self.userid)
741 if ug_row is not None:
# A Zabbix user group; members via users_groups, host-group permissions via
# the rights table (note the swapped-looking key mapping -- see Right).
745 class UsrGrp(ZabbixEntity):
749 auto_primarykey='usrgrpid',
751 deepcopy= ['hostgroup_list']
753 user_list = ManyToMany(
756 foreign_keys=lambda: [users_groups.c.userid, users_groups.c.usrgrpid],
757 secondaryjoin=lambda: User.userid==users_groups.c.userid,
758 primaryjoin=lambda: UsrGrp.usrgrpid==users_groups.c.usrgrpid,
761 hostgroup_list = ManyToMany(
764 foreign_keys=lambda: [rights.c.groupid, rights.c.id],
765 primaryjoin=lambda: UsrGrp.usrgrpid==rights.c.groupid,
766 secondaryjoin=lambda: HostGroup.groupid==rights.c.id,
# delete(): drop rights rows and membership rows before deleting the
# group itself (the method's def line is not visible in this listing).
770 rights_match = Right.query.filter_by(groupid=self.usrgrpid).all()
771 for row in rights_match:
774 users_groups_match = UsersGroups.query.filter_by(usrgrpid=self.usrgrpid).all()
775 for row in users_groups_match:
778 super(UsrGrp, self).delete()
780 def append_hostgroup(self, hg):
781 # NOTE: I know it looks wrong, but this is how the keys are mapped.
782 print "APPENDING HOSTGROUP %s!!!!!!!!!!" % hg.name
# permission=3 grants read-write (see the permission legend on Right).
783 ug_row = Right(groupid=self.usrgrpid, id=hg.groupid, permission=3)
787 def append_user(self, user):
788 ug_row = UsersGroups(userid=user.userid, usrgrpid=self.usrgrpid)
792 def remove_user(self, user):
793 ug_row = UsersGroups.get_by(userid=user.userid, usrgrpid=self.usrgrpid)
794 if ug_row is not None:
# Fragment of the per-table primary-key counter map (the dict's opening
# line and the enclosing function's def line are not visible here):
# maps table name -> auto-increment field tracked in the IDs table.
800 'scripts' : 'scriptid',
801 'usrgrp' : 'usrgrpid',
804 'users_groups' : 'id',
805 'groups' : 'groupid',
806 'rights' : 'rightid',
807 'drules' : 'druleid',
808 'dchecks' : 'dcheckid',
809 'actions' : 'actionid',
810 'conditions' : 'conditionid',
811 'operations' : 'operationid',
812 'opconditions' : 'opconditionid',
# Ensure an IDs counter row exists for every table above, seeding new
# counters at nextid=10 (same convention as ZabbixEntity.__init__).
814 need_to_flush = False
816 for tablename in fields.keys():
817 fieldname = fields[tablename]
819 index = IDs.get_by(table_name=tablename, field_name=fieldname)
821 print "NEW IDs index INSIDE confirm_ids"
822 index = IDs(table_name=tablename, field_name=fieldname, nodeid=0, nextid=10)
# Map a serialized tag name (optionally ending in "_list") back to its
# entity class via case-insensitive match against the entity map.
# NOTE(review): the return statements of this function are not visible in
# this listing.
833 def get_zabbix_class_from_name(name):
834 em = get_zabbix_entitymap()
837 name=name[:-5] # strip off the _list part.
840 if name == k.lower():
# Build a {class name: class} map over all declared Elixir entities (the
# dict construction and return line are not visible in this listing).
844 def get_zabbix_entitymap():
846 for n,c in zip([ u.__name__ for u in entities], entities):
850 # COMMON OBJECT TYPES
851 class OperationConditionNotAck(object):
853 o = OperationCondition(
854 conditiontype=defines.CONDITION_TYPE_EVENT_ACKNOWLEDGED,
855 operator=defines.CONDITION_OPERATOR_EQUAL,