X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=zabbix%2Fzabbixsync.py;h=ffef361bd2363f8c8390a78cdb867a6f96cc5116;hb=6ab1e756a6267bee392984e1ce26332b4ef66b79;hp=fd2126e7179cc06704d21c95159e2b9bf16e6674;hpb=170be1fae6fd5956641668443245c50fbf7fd566;p=monitor.git diff --git a/zabbix/zabbixsync.py b/zabbix/zabbixsync.py index fd2126e..ffef361 100755 --- a/zabbix/zabbixsync.py +++ b/zabbix/zabbixsync.py @@ -1,18 +1,19 @@ #!/usr/bin/python import sys +import os import site -from monitor.wrapper import plc +from monitor.wrapper import plc, plccache from monitor import database import zabbixsite -from monitor.database.dborm import session +from monitor.database.dborm import zab_session as session +from monitor.database.zabbixapi.model import confirm_ids, HostGroup -print "test" -plcdb = database.dbLoad("l_plcsites") -netid2ip = database.dbLoad("plcdb_netid2ip") -lb2hn = database.dbLoad("plcdb_lb2hn") +plcdb = plccache.l_sites # database.dbLoad("l_plcsites") +netid2ip = plccache.plcdb_netid2ip # database.dbLoad("plcdb_netid2ip") +lb2hn = plccache.plcdb_lb2hn # database.dbLoad("plcdb_lb2hn") def get_site_iplist(loginbase): node_list = lb2hn[loginbase] @@ -21,9 +22,10 @@ def get_site_iplist(loginbase): # TODO: if it is, then we need to break up the discovery rule. 
ip_list = "" for node in node_list: - ip = netid2ip[node['nodenetwork_ids'][0]] - if len(ip_list) > 0: ip_list += "," - ip_list += ip + if len(node['nodenetwork_ids']) > 0: + ip = netid2ip[node['nodenetwork_ids'][0]] + if len(ip_list) > 0: ip_list += "," + ip_list += ip return ip_list @@ -33,34 +35,60 @@ def add_loginbase(loginbase): pis = plc.getPIEmails(loginbase) iplist = get_site_iplist(loginbase) + os.system("""echo '%s' | tr ',' '\n' >> /usr/share/monitor/nodelist.txt""" % iplist ) + print "zabbixsite.setup_site('%s', %s, %s, '%s')" % (loginbase,techs, pis, iplist) zabbixsite.setup_site(loginbase, techs, pis, iplist) if __name__=="__main__": from monitor import parser as parsermodule - parser = parsermodule.getParser() - parser.set_defaults( setupglobal=False, syncsite=True, site=None) + parser = parsermodule.getParser(['cacheset']) + parser.set_defaults( setupglobal=False, syncsite=True, site=None, setupids=False) + parser.add_option("", "--setupids", action="store_true", dest="setupids", + help="Setup global IDs.") parser.add_option("", "--setupglobal", action="store_true", dest="setupglobal", help="Setup global settings.") - parser.add_option("", "--nosite", action="store_true", dest="syncsite", + parser.add_option("", "--nosite", action="store_false", dest="syncsite", help="Do not sync sites.") parser.add_option("", "--site", dest="site", help="Sync only given site name.") opts = parsermodule.parse_args(parser) + os.system("""echo '' > /usr/share/monitor/nodelist.txt""") + + if opts.setupids: + # Not sure why, but this doesn't work if we continue. so exit. + # This step only needs to be called once, but there is no harm in + # calling it multiple times. 
+            confirm_ids()
+            session.flush()
+            sys.exit(0)
+
     if opts.setupglobal:
         zabbixsite.setup_global()
+        session.flush()
 
     if opts.syncsite:
+        api = plc.getCachedAuthAPI()
         query = {'peer_id' : None}
         if opts.site:
             query.update({'login_base' : opts.site})
 
+        # ADD SITES
         sites = api.GetSites(query, ['login_base'])
-        for site in sites:
+        site_api_list = [ site['login_base'] for site in sites ]
+        for site in sites[:20]:
             add_loginbase(site['login_base'])
             session.flush()
 
-        # TODO: for any removed site that is in the db, call zabbixsite.delete_site()
+        if not opts.site:
+            # NOTE: for all sites in DB but not API, call zabbixsite.delete_site()
+            hg_list = filter(lambda x: '_hostgroup' in x.name, HostGroup.query.all() )
+            site_db_list = [ hg.name[:-len('_hostgroup')] for hg in hg_list ]
+            in_db_not_plc = set(site_db_list) - set(site_api_list)
+            for login_base in in_db_not_plc:
+                print "Deleting %s" % login_base
+                zabbixsite.delete_site(login_base)
+