X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=monitor%2Fwrapper%2Fplccache.py;h=d818e970de589f8d9e67f9bdd60881fc3fc80128;hb=HEAD;hp=7b1d258e8477450db96b75265fd314ec32f71468;hpb=0a896f10303078562b737c83fd08dc25f0973c58;p=monitor.git

diff --git a/monitor/wrapper/plccache.py b/monitor/wrapper/plccache.py
index 7b1d258..d818e97 100755
--- a/monitor/wrapper/plccache.py
+++ b/monitor/wrapper/plccache.py
@@ -2,57 +2,12 @@
 import sys
 from monitor.wrapper import plc
+from monitor.generic import *
 from monitor.database.info.model import *
+from monitor import database
+from monitor import config
 import profile
 
-def dsites_from_lsites(l_sites):
-	d_sites = {}
-	id2lb = {}
-	for site in l_sites:
-		if not site['login_base'] in d_sites:
-			d_sites[site['login_base']] = site
-			id2lb[site['site_id']] = site['login_base']
-		else:
-			#print "Two sites have the same login_base value %s!" % site['login_base']
-			#sys.exit(1)
-			continue
-	return (d_sites, id2lb)
-
-def dsn_from_dsln(d_sites, id2lb, l_nodes):
-	lb2hn = {}
-	dsn = {}
-	hn2lb = {}
-	for id in id2lb:
-		if id2lb[id] not in lb2hn:
-			lb2hn[id2lb[id]] = []
-
-	for node in l_nodes:
-		# this won't reach sites without nodes, which I guess isn't a problem.
-		if node['site_id'] in id2lb.keys():
-			login_base = id2lb[node['site_id']]
-		else:
-			print >>sys.stderr, "%s has a foreign site_id %s" % (node['hostname'],
-										node['site_id'])
-			continue
-			for i in id2lb:
-				print i, " ", id2lb[i]
-			raise Exception, "Node has missing site id!! %s %d" %(node['hostname'], node['site_id'])
-		if not login_base in dsn:
-			lb2hn[login_base] = []
-			dsn[login_base] = {}
-			dsn[login_base]['plc'] = d_sites[login_base]
-			dsn[login_base]['monitor'] = {} # event log, or something
-
-		hostname = node['hostname']
-		lb2hn[login_base].append(node)
-		dsn[login_base][hostname] = {}
-		dsn[login_base][hostname]['plc'] = node
-		dsn[login_base][hostname]['comon'] = {}
-		dsn[login_base][hostname]['monitor'] = {}
-
-		hn2lb[hostname] = login_base
-	return (dsn, hn2lb, lb2hn)
-
 l_sites = None
 l_nodes = None
 l_pcus = None
@@ -61,6 +16,54 @@ plcdb_hn2lb = None
 plcdb_lb2hn = None
 plcdb_id2lb = None
 
+class CachedPLC(plc.PLC):
+
+	def _param_to_str(self, name, *params):
+		fields = len(params)
+		retstr = ""
+		retstr += "%s-" % name
+		for x in params:
+			retstr += "%s-" % x
+		return retstr[:-1]
+
+	def __getattr__(self, name):
+		method = getattr(self.api, name)
+		if method is None:
+			raise AssertionError("method does not exist")
+
+		def run_or_returncached(*params):
+			cachename = self._param_to_str(name, *params)
+			#print "cachename is %s" % cachename
+			if hasattr(config, 'refresh'):
+				refresh = config.refresh
+			else:
+				refresh = False
+
+			if 'Get' in name:
+				if not database.cachedRecently(cachename):
+					load_old_cache = False
+					try:
+						values = method(self.auth, *params)
+					except:
+						print "Call %s FAILED: Using old cached data" % cachename
+						load_old_cache = True
+
+					if load_old_cache:
+						values = database.dbLoad(cachename)
+					else:
+						database.dbDump(cachename, values)
+
+					return values
+				else:
+					values = database.dbLoad(cachename)
+					return values
+			else:
+				return method(self.auth, *params)
+
+		return run_or_returncached
+
+cacheapi = CachedPLC(plc.auth.auth, plc.auth.server)
+
 def init():
 	import traceback
 	#print "IMPORTING PLCCACHE: ",
@@ -93,13 +96,15 @@ def init():
 		l_pcus.append(pcu)
 
 	print >>sys.stderr, "building id2lb"
-	(d_sites,id2lb) = dsites_from_lsites(l_sites)
+	(d_sites,id2lb) = dsites_from_lsites_id(l_sites)
 	print >>sys.stderr, "building lb2hn"
-	(plcdb, hn2lb, lb2hn) = dsn_from_dsln(d_sites, id2lb, l_nodes)
+	(plcdb, hn2lb, lb2hn, exclude) = dsn_from_dsln(d_sites, id2lb, l_nodes)
 	plcdb_hn2lb = hn2lb
 	plcdb_lb2hn = lb2hn
 	plcdb_id2lb = id2lb
+
+	l_nodes = filter(lambda x: x['hostname'] not in exclude, l_nodes)
 
 	return
 
@@ -115,6 +120,7 @@ def GetNodesBySite(loginbase):
 	return GetNodesByIds(site.plc_site_stats['node_ids'])
 
 def GetNodeByName(hostname):
+	print "GetNodeByName %s" % hostname
 	node = PlcNode.get_by(hostname=hostname)
 	return node.plc_node_stats
 
@@ -142,6 +148,13 @@ def deleteExtra(l_plc, objectClass=PlcSite, dbKey='loginbase', plcKey='login_bas
 		dbobj = objectClass.get_by(**{dbKey : obj})
 		dbobj.delete()
 
+def conv(s):
+	# strip non-ascii characters to prvent errors
+	r = s
+	if type(s) in (str,unicode):
+		r = "".join([x for x in s if ord(x) < 128])
+	return r
+
 def sync():
 	l_sites = plc.api.GetSites({'peer_id':None},
 							['login_base', 'site_id', 'abbreviated_name', 'latitude',
@@ -150,7 +163,8 @@ def sync():
 	l_nodes = plc.api.GetNodes({'peer_id':None},
 							['hostname', 'node_id', 'ports', 'site_id', 'boot_state',
 							'run_level', 'version', 'last_updated', 'date_created', 'key',
-							'last_contact', 'pcu_ids', 'interface_ids'])
+							'last_contact', 'pcu_ids', 'interface_ids',
+							'last_boot', 'last_download', 'last_pcu_reboot', 'last_pcu_confirmation'])
 	l_pcus = plc.api.GetPCUs()
 
 	print >>sys.stderr, "sync sites"
@@ -168,8 +182,8 @@ def sync():
 		dbpcu = PlcPCU2.findby_or_create(pcu_id=pcu['pcu_id'])
 		dbpcu.date_checked = datetime.now()
 		for key in pcu.keys():
-			print >>sys.stderr, "setting %s = %s" % (key, pcu[key])
-			setattr(dbpcu, key, pcu[key])
+			print >>sys.stderr, "setting %s = %s" % (key, conv(pcu[key]))
+			setattr(dbpcu, key, conv(pcu[key]))
 
 	deleteExtra(l_pcus, PlcPCU2, 'pcu_id', 'pcu_id')
 	deleteExtra(l_pcus, HistoryPCURecord, 'plc_pcuid', 'pcu_id')
@@ -182,7 +196,9 @@ def sync():
 		dbnode.hostname = node['hostname']
 		dbnode.date_checked = datetime.now()
 		dbnode.plc_node_stats = node
-	deleteExtra(l_nodes, PlcNode, 'hostname', 'hostname')
+	deleteExtra(l_nodes, PlcNode, 'node_id', 'node_id')
+	deleteExtra(l_nodes, HistoryNodeRecord, 'plc_nodeid', 'node_id')
+	deleteExtra(l_nodes, PlcNode, 'hostname', 'hostname')
 	deleteExtra(l_nodes, HistoryNodeRecord, 'hostname', 'hostname')
 	deleteExtra(l_nodes, FindbadNodeRecord, 'hostname', 'hostname')
 	session.flush()
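
A minimal usage sketch of the CachedPLC wrapper this patch introduces (not part of the diff itself; Python 2, matching the file). It assumes the monitor package is importable and that plc.auth is configured, since plccache builds the module-level cacheapi at import time:

    # Sketch only: exercise the cacheapi object defined above in plccache.py.
    from monitor.wrapper import plccache

    # Attribute names containing 'Get' are wrapped by run_or_returncached:
    # if the result was not cached recently, the live API is called and the
    # result written to the cache (database.dbDump); if that call fails, or
    # the cache is recent, the stored value is returned via database.dbLoad.
    nodes = plccache.cacheapi.GetNodes({'peer_id': None}, ['hostname', 'site_id'])
    print "GetNodes returned %d nodes (fresh or cached)" % len(nodes)

    # Methods without 'Get' in their name bypass the cache and call the API directly.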