2 pkg_resources.require("SQLAlchemy>=0.3.10")
3 pkg_resources.require("Elixir>=0.4.0")
4 # import the basic Elixir classes and functions for declaring the data model
5 # (see http://elixir.ematia.de/trac/wiki/TutorialDivingIn)
6 from elixir import EntityMeta, Entity, Field, OneToMany, ManyToOne, ManyToMany
7 from elixir import options_defaults, using_options, setup_all, metadata, entities
8 # import some datatypes for table columns from Elixir
9 # (see http://www.sqlalchemy.org/docs/04/types.html for more)
10 from elixir import String, Unicode, Integer, DateTime
11 from sqlalchemy import ColumnDefault
12 from sqlalchemy import Table
13 from sqlalchemy.orm import ColumnProperty, object_session
15 from xml.marshal.generic import Marshaller
16 from xml.dom.ext import PrettyPrint
17 from xml.dom.ext.reader.Sax import FromXml
18 from elementtree import ElementTree
20 options_defaults['autosetup'] = False
22 from elixir.statements import Statement
23 from sqlalchemy import Sequence
28 #from elixir import metadata
29 #from monitor.database.dborm import zabbix_db, zabbix_session
30 #__metadata__ = zabbix_db
31 #__session__ = zabbix_session
34 # - declare association between Media and MediaType so that lookups can
35 # occur on 'description'
# XML (de)serialization mixin shared by every Zabbix entity class below.
# NOTE(review): this excerpt is incomplete — the embedded source line numbers
# skip, so several statements inside these methods are missing here (e.g. the
# binding of `d` in xmlDeserialize). Comments describe only the visible code;
# confirm details against the full source.
37 class ZabbixSerialize(object):
# Parse an XML string and rebuild the matching entity object(s).
# `d` is bound on a line not shown here — presumably cls.xml2dict(xml).
40 def xmlDeserialize(cls, xml):
42 return cls.dict2object(d)
# Serialize this object (column values plus any `deepcopy` child lists)
# to an XML string, omitting the auto primary key and any `skip_keys`.
44 def xmlSerialize(self, elem=None):
45 dict = self.convert_dict(self.to_dict())
# `deepcopy` names child relations to embed in the XML message.
47 if hasattr(self, 'deepcopy'):
48 for val in self.deepcopy:
49 dict[val] = getattr(self, val)
# The surrogate primary key is never serialized; entities may extend
# the list with their own keys via a class-level `skip_keys`.
51 skip_keys = [self._descriptor.auto_primarykey]
52 if hasattr(self, 'skip_keys'):
53 skip_keys += self.skip_keys
55 return self.xmlMessage(dict, skip_keys, elem)
# Build an ElementTree message from a dict (or list of objects); each
# sub-element gets a `type` attribute ("int"/"long"/"list") so that
# xml2dict can restore the original Python types on the way back in.
58 def xmlMessage(cls, dict=None, skip_keys=[], use_elem=None):
60 elem = ElementTree.Element(cls.classname())
62 if isinstance(dict, type({})):
63 for key, value in dict.items():
67 if isinstance(value, type(0)):
68 ElementTree.SubElement(elem, key, type="int").text = str(value)
70 elif isinstance(value, type(0L)):
71 ElementTree.SubElement(elem, key, type="long").text = str(value)
73 elif isinstance(value, type([])):
75 e = ElementTree.SubElement(elem, key, type="list")
# `obj` is bound by a loop line missing from this excerpt.
77 d = obj.convert_dict(obj.to_dict())
# Fallback: value serialized as plain element text.
80 ElementTree.SubElement(elem, key).text = value
# A bare list of objects becomes a "<classname>_list" element.
82 elif isinstance(dict, type([])):
85 key = "%s_list" % o.__class__.__name__.lower()
86 e = ElementTree.SubElement(elem, key, type="list")
88 d = obj.convert_dict(obj.to_dict())
91 if use_elem is not None:
94 return ElementTree.tostring(elem)
# Inverse of xmlMessage: walk an Element (or parse `message`) back into
# a nested dict, using the `type` attributes to restore ints/longs/lists.
97 def xml2dict(cls, message, elem=None):
98 em = get_zabbix_entitymap()
100 if message and elem is None:
101 elem = ElementTree.XML(message)
103 raise Exception("Cannot proceed with empty xml, and no elem")
105 #print "tag: %s : classname : %s" % (elem.tag, cls.classname())
# When invoked on a concrete entity class, the root tag must match it.
106 if cls is not ZabbixSerialize:
107 assert elem.tag == cls.classname()
110 if elem.get("type") == "int":
111 dict[elem.tag] = int(elem.text)
112 elif elem.get("type") == "long":
113 dict[elem.tag] = long(elem.text)
114 elif elem.get("type") == "list":
# Nested lists must correspond to a declared `deepcopy` relation.
115 if cls is not ZabbixSerialize:
116 assert elem.tag in cls.deepcopy, "List (%s) in XML is not a recognized type for this object (%s)" % (elem.tag, cls.classname())
# Recurse into each child element via the name->class entity map.
119 dict[elem.tag].append( em[e.tag].xml2dict(None, e) )
120 elif elem.text is None:
123 dict[elem.tag] = elem.text
# Turn a dict produced by xml2dict back into persisted entity objects:
# find-or-create the primary object, then attach any `deepcopy`
# children through the relation-specific isinstance branches below.
127 def dict2object(cls, dict):
128 em = get_zabbix_entitymap()
129 if cls is ZabbixSerialize:
130 # note: assume that there's only one type of class
132 for key in dict.keys():
133 clsobj = get_zabbix_class_from_name(key)
134 retdict[key] = [ clsobj.dict2object(data) for data in dict[key] ]
137 # take deepcopy values out of dict.
# `backup` holds child-list data so find_or_create only sees columns.
139 if hasattr(cls, 'deepcopy'):
140 for val in cls.deepcopy:
142 backup[val] = dict[val]
146 # for each deepcopy object, convert all values in list
147 for k in backup.keys():
148 clsobj = get_zabbix_class_from_name(k)
149 l = [ clsobj.dict2object(data) for data in backup[k] ]
152 # find or create the primary object
153 obj = cls.find_or_create(**dict)
154 #if cls is DiscoveryCheck or \
155 # cls is ActionCondition or \
156 # cls is ActionOperation:
157 # # NOTE: Some objects should always be created. like DiscoveryCheck
160 # obj = cls.get_by(**dict)
163 # print "CREATING NEW %s" % cls.classname()
166 # print "FOUND EXISTING OBJECT: %s"% obj
168 # add deepcopy values to primary object
# Each branch wires one parent/child relation; `g` and `v` are bound
# by inner loop lines missing from this excerpt.
169 for k in backup.keys():
170 print type(backup[k][0])
172 if isinstance(obj, User) and isinstance(backup[k][0], UsrGrp):
173 print "adding groups to user"
177 elif isinstance(obj, User) and isinstance(backup[k][0], Media):
178 print "adding media to user"
180 obj.media_list.append(g)
182 elif isinstance(obj, UsrGrp) and isinstance(backup[k][0], HostGroup):
183 print "adding hostgroup to usergroup"
184 print "NOT IMPLEMENTED!!!"
186 obj.append_hostgroup(g)
189 elif isinstance(obj, Action) and isinstance(backup[k][0], ActionCondition):
190 print "adding actionconditon to action"
192 obj.actioncondition_list.append(g)
194 elif isinstance(obj, Action) and isinstance(backup[k][0], ActionOperation):
195 print "adding actionoperation to action"
197 obj.actionoperation_list.append(g)
199 elif isinstance(obj, ActionOperation) and \
200 isinstance(backup[k][0], OperationCondition):
201 print "adding operationcondition to actionoperation"
203 obj.operationcondition_list.append(g)
205 elif isinstance(obj, DiscoveryRule) and isinstance(backup[k][0], DiscoveryCheck):
206 print "adding discoverycheck to discoveryrule"
208 obj.discoverycheck_list.append(v)
# Recursively coerce dict keys to plain str (and lists element-wise) so
# the result is safe to hand to the XML layer; `rd` is bound on a
# missing line.
212 def convert_dict(self, d):
215 if type(d[key]) == type([]):
216 rd[str(key)] = [ self.convert_dict(v) for v in d[key] ]
218 rd[str(key)] = d[key]
# Serialize and pretty-print; the output statements are not visible here.
225 def prettyserialize(self):
226 xml = self.xmlSerialize()
# Base class for all mapped Zabbix tables: combines the XML serialization
# mixin with Elixir's EntityMeta and adds ActiveRecord-style session/query
# helpers. NOTE(review): interior lines are missing from this excerpt
# (skipping embedded line numbers); comments describe only visible code.
230 class ZabbixEntity(ZabbixSerialize):
231 __metaclass__ = EntityMeta
# Allocate the next primary-key value from the Zabbix `ids` table
# (IDs entity, defined elsewhere) instead of DB autoincrement, then
# presumably delegate to the parent constructor (line not visible).
233 def __init__(self, **kwargs):
234 print "__INIT__ %s" % self.classname()
235 tablename = self._descriptor.tablename
236 fieldname = self._descriptor.auto_primarykey
237 index = IDs.get_by(table_name=tablename, field_name=fieldname)
# First allocation for this table: seed the counter row.
239 index = IDs(table_name=tablename, field_name=fieldname, nodeid=0, nextid=10)
241 index.nextid = index.nextid + 1
242 kwargs[fieldname] = index.nextid
# Repr/str helper fragment: renders columns plus stringified deepcopy
# child lists; `rd` is bound on a missing line.
247 if hasattr(self, 'deepcopy'):
248 for k in self.deepcopy:
249 rd[k] = [ str(v) for v in getattr(self, k) ]
251 rd.update(self.to_dict())
258 return self.classname() + "(" + val + ")"
# Bulk attribute assignment from keyword arguments.
264 def set(self, **kwargs):
265 for key, value in kwargs.iteritems():
266 setattr(self, key, value)
# Look up an existing row matching kwargs (ignoring *_list relation
# keys); create it when absent, applying set_if_new / exec_if_new to
# fresh objects only. Some classes are forced to always create.
269 def find_or_create(cls, exec_if_new=None, set_if_new={}, **kwargs):
270 if cls is DiscoveryCheck or cls is ActionCondition or \
271 cls is ActionOperation:
272 # NOTE: Some objects should always be created. like DiscoveryCheck
275 # NOTE: ignore *_list items
278 if "_list" not in key:
279 query[key] = kwargs[key]
280 print "SEARCHING USING %s" % query
281 obj = cls.get_by(**query)
284 print "CREATING NEW %s" % cls.classname()
285 print "USING %s" % kwargs
287 obj.set(**set_if_new)
291 print "FOUND EXISTING OBJECT: %s"% obj
# Update an existing row by primary key, or create one; `surrogate`
# guards against supplying a pk for surrogate-keyed tables.
295 def update_or_create(cls, data, surrogate=True):
296 pk_props = cls._descriptor.primary_key_properties
298 # if all pk are present and not None
299 if not [1 for p in pk_props if data.get(p.key) is None]:
300 pk_tuple = tuple([data[prop.key] for prop in pk_props])
301 record = cls.query.get(pk_tuple)
304 raise Exception("cannot create surrogate with pk")
311 raise Exception("cannot create non surrogate without pk")
312 record.from_dict(data)
314 update_or_create = classmethod(update_or_create)
316 def from_dict(self, data):
# (docstring fragment follows; opening quotes are on a missing line)
318 Update a mapped class with data from a JSON-style nested dict/list
321 # surrogate can be guessed from autoincrement/sequence but I guess
322 # that's not 100% reliable, so we'll need an override
# NOTE(review): uses `sqlalchemy.orm.object_mapper`, but only
# `from sqlalchemy.orm import ...` names are visible in the imports —
# verify the bare `sqlalchemy` module is imported in the full source.
324 mapper = sqlalchemy.orm.object_mapper(self)
326 for key, value in data.iteritems():
# Nested dict: update or replace a scalar (many-to-one) relation.
327 if isinstance(value, dict):
328 dbvalue = getattr(self, key)
329 rel_class = mapper.get_property(key).mapper.class_
330 pk_props = rel_class._descriptor.primary_key_properties
332 # If the data doesn't contain any pk, and the relationship
333 # already has a value, update that record.
334 if not [1 for p in pk_props if p.key in data] and \
336 dbvalue.from_dict(value)
338 record = rel_class.update_or_create(value)
339 setattr(self, key, record)
# List of dicts: rebuild a collection relation element-wise.
340 elif isinstance(value, list) and \
341 value and isinstance(value[0], dict):
343 rel_class = mapper.get_property(key).mapper.class_
346 if not isinstance(row, dict):
348 'Cannot send mixed (dict/non dict) data '
349 'to list relationships in from_dict data.')
350 record = rel_class.update_or_create(row)
351 new_attr_value.append(record)
352 setattr(self, key, new_attr_value)
# Plain value: direct column assignment.
354 setattr(self, key, value)
356 def to_dict(self, deep={}, exclude=[]):
357 """Generate a JSON-style nested dict/list structure from an object."""
358 col_prop_names = [p.key for p in self.mapper.iterate_properties \
359 if isinstance(p, ColumnProperty)]
360 data = dict([(name, getattr(self, name))
361 for name in col_prop_names if name not in exclude])
# Recurse into requested relations, excluding the FK columns that
# merely point back at this object.
362 for rname, rdeep in deep.iteritems():
363 dbdata = getattr(self, rname)
364 #FIXME: use attribute names (ie coltoprop) instead of column names
365 fks = self.mapper.get_property(rname).remote_side
366 exclude = [c.name for c in fks]
367 if isinstance(dbdata, list):
368 data[rname] = [o.to_dict(rdeep, exclude) for o in dbdata]
370 data[rname] = dbdata.to_dict(rdeep, exclude)
# Thin delegations to this object's owning SQLAlchemy session.
374 def flush(self, *args, **kwargs):
375 return object_session(self).flush([self], *args, **kwargs)
377 def delete(self, *args, **kwargs):
378 return object_session(self).delete(self, *args, **kwargs)
380 def expire(self, *args, **kwargs):
381 return object_session(self).expire(self, *args, **kwargs)
383 def refresh(self, *args, **kwargs):
384 return object_session(self).refresh(self, *args, **kwargs)
386 def expunge(self, *args, **kwargs):
387 return object_session(self).expunge(self, *args, **kwargs)
389 # This bunch of session methods, along with all the query methods below
390 # only make sense when using a global/scoped/contextual session.
391 def _global_session(self):
392 return self._descriptor.session.registry()
393 _global_session = property(_global_session)
395 def merge(self, *args, **kwargs):
396 return self._global_session.merge(self, *args, **kwargs)
398 def save(self, *args, **kwargs):
399 return self._global_session.save(self, *args, **kwargs)
401 def update(self, *args, **kwargs):
402 return self._global_session.update(self, *args, **kwargs)
404 # only exist in SA < 0.5
405 # IMO, the replacement (session.add) doesn't sound good enough to be added
406 # here. For example: "o = Order(); o.add()" is not very telling. It's
407 # better to leave it as "session.add(o)"
408 def save_or_update(self, *args, **kwargs):
409 return self._global_session.save_or_update(self, *args, **kwargs)
# Convenience query shortcuts; get_by returns the first match or None.
412 def get_by(cls, *args, **kwargs):
413 return cls.query.filter_by(*args, **kwargs).first()
414 get_by = classmethod(get_by)
416 def get(cls, *args, **kwargs):
417 return cls.query.get(*args, **kwargs)
418 get = classmethod(get)
# Join-table entity between UsrGrp and HostGroup with a permission level.
# NOTE(review): the using_options(...) call around auto_primarykey is only
# partially visible in this excerpt.
426 class Right(ZabbixEntity):
427 # rights of a usergroup to interact with hosts of a hostgroup
431 auto_primarykey='rightid',
433 # column groupid is an index to usrgrp.usrgrpid
434 # column id is an index into the host-groups.groupid
435 # permission is 3=rw, 2=ro, 1=r_list, 0=deny
437 # TODO: NOTE: When serialization occurs, the 'permissions' field is lost,
438 # currently since the rights table is merely treated as an intermediate
439 # table for the m2m between usrgrp and groups.
# Reflected SQLAlchemy Tables; referenced by the lambda join conditions in
# the ManyToMany relations of Host/HostGroup/UsrGrp below.
441 rights = Table('rights', metadata, autoload=True)
442 hostsgroups = Table('hosts_groups', metadata, autoload=True)
445 # m2m table between hosts and groups below
# m2m join-row entity between Host and HostGroup (hosts_groups table).
446 class HostsGroups(ZabbixEntity):
448 tablename='hosts_groups',
450 auto_primarykey='hostgroupid',
# Monitored host; linked to HostGroup through the hosts_groups join table.
453 class Host(ZabbixEntity):
457 auto_primarykey='hostid',
459 hostgroup_list = ManyToMany(
462 foreign_keys=lambda: [hostsgroups.c.groupid, hostsgroups.c.hostid],
463 primaryjoin=lambda: Host.hostid==hostsgroups.c.hostid,
464 secondaryjoin=lambda: HostGroup.groupid==hostsgroups.c.groupid,
# delete(): remove join rows first, then the host itself.
467 # NOTE: media objects are automatically handled.
468 hosts_groups_match = HostsGroups.query.filter_by(hostid=self.hostid).all()
469 for row in hosts_groups_match:
471 super(Host, self).delete()
# Group of hosts; related to UsrGrp via `rights` and to Host via
# `hosts_groups`. NOTE(review): `rights.c.id` (not groupid) maps to
# HostGroup.groupid — the rights columns are intentionally crossed.
473 class HostGroup(ZabbixEntity):
477 auto_primarykey='groupid',
479 usrgrp_list = ManyToMany(
482 foreign_keys=lambda: [rights.c.groupid, rights.c.id],
483 primaryjoin=lambda: HostGroup.groupid==rights.c.id,
484 secondaryjoin=lambda: UsrGrp.usrgrpid==rights.c.groupid,
486 host_list = ManyToMany(
489 foreign_keys=lambda: [hostsgroups.c.groupid, hostsgroups.c.hostid],
490 primaryjoin=lambda: HostGroup.groupid==hostsgroups.c.groupid,
491 secondaryjoin=lambda: Host.hostid==hostsgroups.c.hostid,
# delete(): remove join rows first, then the group itself.
494 # NOTE: media objects are automatically handled.
495 hosts_groups_match = HostsGroups.query.filter_by(groupid=self.groupid).all()
496 for row in hosts_groups_match:
498 super(HostGroup, self).delete()
# m2m join-row entity between User and UsrGrp (users_groups table).
500 class UsersGroups(ZabbixEntity):
502 tablename='users_groups',
504 auto_primarykey='id',
# Notification media type (email, script, ...), mapped to media_type.
507 class MediaType(ZabbixEntity):
509 tablename='media_type',
511 auto_primarykey='mediatypeid',
# Zabbix script entity (tablename option not visible in this excerpt).
514 class Script(ZabbixEntity):
518 auto_primarykey='scriptid',
522 # DISCOVERY ################################################
# One check within a DiscoveryRule; druleid is the parent FK and is
# excluded from XML serialization via skip_keys.
524 class DiscoveryCheck(ZabbixEntity):
528 auto_primarykey='dcheckid',
530 skip_keys = ['druleid']
531 discoveryrule = ManyToOne('DiscoveryRule',
532 primaryjoin=lambda: DiscoveryCheck.druleid == DiscoveryRule.druleid,
533 foreign_keys=lambda: [DiscoveryCheck.druleid],
# Discovery rule; owns DiscoveryCheck and DiscoveredHost children, and
# embeds its check list in XML via `deepcopy`.
536 class DiscoveryRule(ZabbixEntity): # parent of dchecks
540 auto_primarykey='druleid',
542 deepcopy = ['discoverycheck_list']
543 discoverycheck_list = OneToMany('DiscoveryCheck', cascade='all, delete-orphan',
544 primaryjoin=lambda: DiscoveryCheck.druleid == DiscoveryRule.druleid,
545 foreign_keys=lambda: [DiscoveryCheck.druleid])
547 discoveredhost_list = OneToMany('DiscoveredHost', cascade='all, delete-orphan',
548 primaryjoin=lambda: DiscoveredHost.druleid == DiscoveryRule.druleid,
549 foreign_keys=lambda: [DiscoveredHost.druleid])
# Host found by a DiscoveryRule; parent of DiscoveryService rows.
551 class DiscoveredHost(ZabbixEntity):
555 auto_primarykey='dhostid',
557 discoveryrule = ManyToOne('DiscoveryRule',
558 primaryjoin=lambda: DiscoveredHost.druleid == DiscoveryRule.druleid,
559 foreign_keys=lambda: [DiscoveredHost.druleid],
562 discoveryservice_list = OneToMany('DiscoveryService', cascade='all, delete-orphan',
563 primaryjoin=lambda: DiscoveryService.dhostid== DiscoveredHost.dhostid,
564 foreign_keys=lambda: [DiscoveryService.dhostid],)
# Service found on a DiscoveredHost (dservices table).
# NOTE(review): the relation is named `discoveryrule` but actually targets
# DiscoveredHost — possibly a copy/paste misnomer; confirm before renaming.
566 class DiscoveryService(ZabbixEntity):
568 tablename='dservices',
570 auto_primarykey='dserviceid',
572 discoveryrule = ManyToOne('DiscoveredHost',
573 primaryjoin=lambda: DiscoveryService.dhostid== DiscoveredHost.dhostid,
574 foreign_keys=lambda: [DiscoveryService.dhostid],
578 # ACTIONS ################################################
# Operation performed by an Action; owns OperationCondition children
# (embedded in XML via deepcopy) and hides its parent FK via skip_keys.
580 class ActionOperation(ZabbixEntity):
582 tablename='operations', autoload=True, auto_primarykey='operationid',
584 deepcopy = ['operationcondition_list']
585 skip_keys = ['actionid']
586 action = ManyToOne('Action', ondelete='cascade',
587 primaryjoin=lambda: ActionOperation.actionid == Action.actionid,
588 foreign_keys=lambda: [ActionOperation.actionid])
590 operationcondition_list = OneToMany('OperationCondition', cascade='all, delete-orphan',
591 primaryjoin=lambda: OperationCondition.operationid == ActionOperation.operationid,
592 foreign_keys=lambda: [OperationCondition.operationid])
# Condition attached to an ActionOperation (opconditions table).
594 class OperationCondition(ZabbixEntity):
596 tablename='opconditions', autoload=True, auto_primarykey='opconditionid',
598 skip_keys = ['operationid']
599 actionoperation = ManyToOne('ActionOperation', ondelete='cascade',
600 primaryjoin=lambda: OperationCondition.operationid == ActionOperation.operationid,
601 foreign_keys=lambda: [OperationCondition.operationid])
# Condition attached to an Action (conditions table).
603 class ActionCondition(ZabbixEntity):
605 tablename='conditions', autoload=True, auto_primarykey='conditionid',
607 skip_keys = ['actionid']
608 action = ManyToOne('Action', ondelete='cascade',
609 primaryjoin=lambda: ActionCondition.actionid == Action.actionid,
610 foreign_keys=lambda: [ActionCondition.actionid])
# Zabbix action; owns operations and conditions, both embedded in its
# XML serialization via `deepcopy`.
612 class Action(ZabbixEntity):
614 tablename='actions', autoload=True, auto_primarykey='actionid',
616 deepcopy = ['actionoperation_list', 'actioncondition_list']
617 actionoperation_list = OneToMany('ActionOperation', cascade='all, delete-orphan',
618 primaryjoin=lambda: ActionOperation.actionid == Action.actionid,
619 foreign_keys=lambda: [ActionOperation.actionid])
621 actioncondition_list = OneToMany('ActionCondition', cascade='all, delete-orphan',
622 primaryjoin=lambda: ActionCondition.actionid == Action.actionid,
623 foreign_keys=lambda: [ActionCondition.actionid])
625 # USERS &amp; EMAIL MEDIA ################################################
# Notification medium belonging to a User; userid FK hidden from XML.
627 class Media(ZabbixEntity):
631 auto_primarykey='mediaid',
633 skip_keys = ['userid']
634 user = ManyToOne('User',
635 primaryjoin=lambda: Media.userid == User.userid,
636 foreign_keys=lambda: [Media.userid],
# Reflected join table used by the User/UsrGrp ManyToMany lambdas below.
639 users_groups = Table('users_groups', metadata, autoload=True)
# Zabbix user; owns Media rows and is linked to UsrGrp through the
# users_groups join table. Group membership must be changed through
# append_group/remove_group, not by mutating usrgrp_list.
641 class User(ZabbixEntity): # parent of media
645 auto_primarykey='userid',
647 deepcopy = ['media_list', 'usrgrp_list']
648 media_list = OneToMany('Media',
649 primaryjoin=lambda: Media.userid == User.userid,
650 foreign_keys=lambda: [Media.userid],
651 cascade='all, delete-orphan')
653 # READ-ONLY: do not append or remove groups here.
654 usrgrp_list = ManyToMany('UsrGrp',
656 foreign_keys=lambda: [users_groups.c.userid, users_groups.c.usrgrpid],
657 primaryjoin=lambda: User.userid==users_groups.c.userid,
658 secondaryjoin=lambda: UsrGrp.usrgrpid==users_groups.c.usrgrpid)
# delete(): remove users_groups join rows first, then the user.
661 # NOTE: media objects are automatically handled.
662 users_groups_match = UsersGroups.query.filter_by(userid=self.userid).all()
663 for row in users_groups_match:
665 super(User, self).delete()
# Create the join row linking this user to `group`.
667 def append_group(self, group):
668 ug_row = UsersGroups(usrgrpid=group.usrgrpid, userid=self.userid)
# Delete the join row if present (deletion line not visible here).
671 def remove_group(self, group):
672 ug_row = UsersGroups.get_by(usrgrpid=group.usrgrpid, userid=self.userid)
673 if ug_row is not None:
# User group; linked to User via users_groups and to HostGroup via the
# `rights` table (whose groupid/id columns map crosswise — see Right).
678 class UsrGrp(ZabbixEntity):
682 auto_primarykey='usrgrpid',
684 deepcopy= ['hostgroup_list']
686 user_list = ManyToMany(
689 foreign_keys=lambda: [users_groups.c.userid, users_groups.c.usrgrpid],
690 secondaryjoin=lambda: User.userid==users_groups.c.userid,
691 primaryjoin=lambda: UsrGrp.usrgrpid==users_groups.c.usrgrpid,
694 hostgroup_list = ManyToMany(
697 foreign_keys=lambda: [rights.c.groupid, rights.c.id],
698 primaryjoin=lambda: UsrGrp.usrgrpid==rights.c.groupid,
699 secondaryjoin=lambda: HostGroup.groupid==rights.c.id,
# delete(): remove rights and users_groups join rows, then the group.
703 rights_match = Right.query.filter_by(groupid=self.usrgrpid).all()
704 for row in rights_match:
707 users_groups_match = UsersGroups.query.filter_by(usrgrpid=self.usrgrpid).all()
708 for row in users_groups_match:
711 super(UsrGrp, self).delete()
# Grant this group rw (permission=3) access to hostgroup `hg` by
# inserting a Right row.
713 def append_hostgroup(self, hg):
714 # NOTE: I know it looks wrong, but this is how the keys are mapped.
715 print "APPENDING HOSTGROUP %s!!!!!!!!!!" % hg.name
716 ug_row = Right(groupid=self.usrgrpid, id=hg.groupid, permission=3)
# Create the join row adding `user` to this group.
720 def append_user(self, user):
721 ug_row = UsersGroups(userid=user.userid, usrgrpid=self.usrgrpid)
# Delete the join row if present (deletion line not visible here).
725 def remove_user(self, user):
726 ug_row = UsersGroups.get_by(userid=user.userid, usrgrpid=self.usrgrpid)
727 if ug_row is not None:
# Resolve an XML tag like "media_list" or "usrgrp" to its entity class by
# case-insensitive match against the entity map; the loop over `em` and
# the return statement are on lines missing from this excerpt.
734 def get_zabbix_class_from_name(name):
735 em = get_zabbix_entitymap()
738 name=name[:-5] # strip off the _list part.
741 if name == k.lower():
# Build a {class-name: class} map from Elixir's global `entities` registry;
# the dict construction and return are on lines missing from this excerpt.
745 def get_zabbix_entitymap():
747 for n,c in zip([ u.__name__ for u in entities], entities):
751 # COMMON OBJECT TYPES
# Convenience factory wrapping an OperationCondition preset to
# "event not acknowledged". NOTE(review): `defines` is not imported in
# the visible part of this file — confirm against the full source.
752 class OperationConditionNotAck(object):
754 o = OperationCondition(
755 conditiontype=defines.CONDITION_TYPE_EVENT_ACKNOWLEDGED,
756 operator=defines.CONDITION_OPERATOR_EQUAL,
761 #u = User(alias="stephen.soltesz@gmail.com", name="stephen.soltesz@gmail.com", surname="", passwd=md5.md5("test").hexdigest(), url="", autologin=0, autologout=900, lang="en_gb", refresh=30, type=1, theme="default.css")