X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=monitor%2Fwrapper%2Fplccache.py;h=d818e970de589f8d9e67f9bdd60881fc3fc80128;hb=HEAD;hp=f92fa854dfda4b552b3a5f3e12613b1ad0e4afb7;hpb=5772ce036b96297a23f834ea34ce4466ef4d522c;p=monitor.git

diff --git a/monitor/wrapper/plccache.py b/monitor/wrapper/plccache.py
index f92fa85..d818e97 100755
--- a/monitor/wrapper/plccache.py
+++ b/monitor/wrapper/plccache.py
@@ -2,57 +2,12 @@
 import sys
 from monitor.wrapper import plc
+from monitor.generic import *
 from monitor.database.info.model import *
+from monitor import database
+from monitor import config
 import profile
 
-def dsites_from_lsites(l_sites):
-    d_sites = {}
-    id2lb = {}
-    for site in l_sites:
-        if not site['login_base'] in d_sites:
-            d_sites[site['login_base']] = site
-            id2lb[site['site_id']] = site['login_base']
-        else:
-            #print "Two sites have the same login_base value %s!" % site['login_base']
-            #sys.exit(1)
-            continue
-    return (d_sites, id2lb)
-
-def dsn_from_dsln(d_sites, id2lb, l_nodes):
-    lb2hn = {}
-    dsn = {}
-    hn2lb = {}
-    for id in id2lb:
-        if id2lb[id] not in lb2hn:
-            lb2hn[id2lb[id]] = []
-
-    for node in l_nodes:
-        # this won't reach sites without nodes, which I guess isn't a problem.
-        if node['site_id'] in id2lb.keys():
-            login_base = id2lb[node['site_id']]
-        else:
-            print "%s has a foreign site_id %s" % (node['hostname'],
-                            node['site_id'])
-            continue
-            for i in id2lb:
-                print i, " ", id2lb[i]
-            raise Exception, "Node has missing site id!! %s %d" %(node['hostname'], node['site_id'])
-        if not login_base in dsn:
-            lb2hn[login_base] = []
-            dsn[login_base] = {}
-            dsn[login_base]['plc'] = d_sites[login_base]
-            dsn[login_base]['monitor'] = {}  # event log, or something
-
-        hostname = node['hostname']
-        lb2hn[login_base].append(node)
-        dsn[login_base][hostname] = {}
-        dsn[login_base][hostname]['plc'] = node
-        dsn[login_base][hostname]['comon'] = {}
-        dsn[login_base][hostname]['monitor'] = {}
-
-        hn2lb[hostname] = login_base
-    return (dsn, hn2lb, lb2hn)
-
 l_sites = None
 l_nodes = None
 l_pcus = None
@@ -61,23 +16,75 @@ plcdb_hn2lb = None
 plcdb_lb2hn = None
 plcdb_id2lb = None
 
+class CachedPLC(plc.PLC):
+
+    def _param_to_str(self, name, *params):
+        fields = len(params)
+        retstr = ""
+        retstr += "%s-" % name
+        for x in params:
+            retstr += "%s-" % x
+        return retstr[:-1]
+
+    def __getattr__(self, name):
+        method = getattr(self.api, name)
+        if method is None:
+            raise AssertionError("method does not exist")
+
+        def run_or_returncached(*params):
+            cachename = self._param_to_str(name, *params)
+            #print "cachename is %s" % cachename
+            if hasattr(config, 'refresh'):
+                refresh = config.refresh
+            else:
+                refresh = False
+
+            if 'Get' in name:
+                if not database.cachedRecently(cachename):
+                    load_old_cache = False
+                    try:
+                        values = method(self.auth, *params)
+                    except:
+                        print "Call %s FAILED: Using old cached data" % cachename
+                        load_old_cache = True
+
+                    if load_old_cache:
+                        values = database.dbLoad(cachename)
+                    else:
+                        database.dbDump(cachename, values)
+
+                    return values
+                else:
+                    values = database.dbLoad(cachename)
+                    return values
+            else:
+                return method(self.auth, *params)
+
+        return run_or_returncached
+
+cacheapi = CachedPLC(plc.auth.auth, plc.auth.server)
+
 def init():
+    import traceback
+    #print "IMPORTING PLCCACHE: ",
+    #traceback.print_stack()
     global l_sites
     global l_nodes
     global l_pcus
     global plcdb_hn2lb
     global plcdb_lb2hn
     global plcdb_id2lb
 
-    print "initing plccache"
+    print >>sys.stderr, "initing plccache"
 
+    print >>sys.stderr, "collecting plcsites"
     dbsites = PlcSite.query.all()
     l_sites = [ s.plc_site_stats for s in dbsites ]
 
-    print "plcnode"
+    print >>sys.stderr, "collecting plcnodes"
     dbnodes = PlcNode.query.all()
     l_nodes = [ s.plc_node_stats for s in dbnodes ]
 
-    print "plcpcu"
+    print >>sys.stderr, "collecting plcpcus"
    dbpcus = PlcPCU2.query.all()
    l_pcus = []
    for s in dbpcus:
@@ -87,16 +94,17 @@ def init():
             'model', 'password', 'ports']:
             pcu[k] = getattr(s, k)
         l_pcus.append(pcu)
-    #l_pcus = [ s.plc_pcu_stats for s in dbpcus ]
 
-    print "dsites_from_lsites"
-    (d_sites,id2lb) = dsites_from_lsites(l_sites)
-    print "dsn_from_dsln"
-    (plcdb, hn2lb, lb2hn) = dsn_from_dsln(d_sites, id2lb, l_nodes)
+    print >>sys.stderr, "building id2lb"
+    (d_sites,id2lb) = dsites_from_lsites_id(l_sites)
+    print >>sys.stderr, "building lb2hn"
+    (plcdb, hn2lb, lb2hn, exclude) = dsn_from_dsln(d_sites, id2lb, l_nodes)
 
     plcdb_hn2lb = hn2lb
     plcdb_lb2hn = lb2hn
     plcdb_id2lb = id2lb
 
+
+    l_nodes = filter(lambda x: x['hostname'] not in exclude, l_nodes)
+
     return
 
@@ -112,6 +120,7 @@ def GetNodesBySite(loginbase):
     return GetNodesByIds(site.plc_site_stats['node_ids'])
 
 def GetNodeByName(hostname):
+    print "GetNodeByName %s" % hostname
     node = PlcNode.get_by(hostname=hostname)
     return node.plc_node_stats
 
@@ -135,22 +144,30 @@ def deleteExtra(l_plc, objectClass=PlcSite, dbKey='loginbase', plcKey='login_bas
     plcobj_key = [ s[plcKey] for s in l_plc ]
     extra_key = set(dbobj_key) - set(plcobj_key)
     for obj in extra_key:
-        print "deleting %s" % obj
+        print >>sys.stderr, "deleting %s" % obj
         dbobj = objectClass.get_by(**{dbKey : obj})
         dbobj.delete()
 
+def conv(s):
+    # strip non-ascii characters to prvent errors
+    r = s
+    if type(s) in (str,unicode):
+        r = "".join([x for x in s if ord(x) < 128])
+    return r
+
 def sync():
     l_sites = plc.api.GetSites({'peer_id':None},
                 ['login_base', 'site_id', 'abbreviated_name', 'latitude',
                 'longitude', 'max_slices', 'slice_ids', 'node_ids',
                 'enabled', 'date_created' ])
     l_nodes = plc.api.GetNodes({'peer_id':None},
-                ['hostname', 'node_id', 'ports', 'site_id', 'boot_state',
+                ['hostname', 'node_id', 'ports', 'site_id', 'boot_state', 'run_level',
                 'version', 'last_updated', 'date_created', 'key',
-                'last_contact', 'pcu_ids', 'interface_ids'])
+                'last_contact', 'pcu_ids', 'interface_ids',
+                'last_boot', 'last_download', 'last_pcu_reboot', 'last_pcu_confirmation'])
     l_pcus = plc.api.GetPCUs()
 
-    print "sync sites"
+    print >>sys.stderr, "sync sites"
     for site in l_sites:
         dbsite = PlcSite.findby_or_create(site_id=site['site_id'])
         dbsite.loginbase = site['login_base']
@@ -160,26 +177,28 @@ def sync():
     deleteExtra(l_sites, HistorySiteRecord, 'loginbase', 'login_base')
     session.flush()
 
-    print "sync pcus"
+    print >>sys.stderr, "sync pcus"
     for pcu in l_pcus:
         dbpcu = PlcPCU2.findby_or_create(pcu_id=pcu['pcu_id'])
         dbpcu.date_checked = datetime.now()
         for key in pcu.keys():
-            print "setting %s = %s" % (key, pcu[key])
-            setattr(dbpcu, key, pcu[key])
+            print >>sys.stderr, "setting %s = %s" % (key, conv(pcu[key]))
+            setattr(dbpcu, key, conv(pcu[key]))
 
     deleteExtra(l_pcus, PlcPCU2, 'pcu_id', 'pcu_id')
     deleteExtra(l_pcus, HistoryPCURecord, 'plc_pcuid', 'pcu_id')
     deleteExtra(l_pcus, FindbadPCURecord, 'plc_pcuid', 'pcu_id')
     session.flush()
 
-    print "sync nodes"
+    print >>sys.stderr, "sync nodes"
     for node in l_nodes:
         dbnode = PlcNode.findby_or_create(node_id=node['node_id'])
         dbnode.hostname = node['hostname']
         dbnode.date_checked = datetime.now()
         dbnode.plc_node_stats = node
 
-    deleteExtra(l_nodes, PlcNode, 'hostname', 'hostname')
+    deleteExtra(l_nodes, PlcNode, 'node_id', 'node_id')
+    deleteExtra(l_nodes, HistoryNodeRecord, 'plc_nodeid', 'node_id')
+    deleteExtra(l_nodes, PlcNode, 'hostname', 'hostname')
     deleteExtra(l_nodes, HistoryNodeRecord, 'hostname', 'hostname')
     deleteExtra(l_nodes, FindbadNodeRecord, 'hostname', 'hostname')
     session.flush()
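
Note on the new caching wrapper (not part of the patch itself): the CachedPLC class added above turns attribute access into a closure that caches "Get*" PLCAPI results on disk via monitor.database (cachedRecently/dbLoad/dbDump) and falls back to the last cached copy when the live call fails. A minimal, self-contained sketch of that pattern follows; the cache directory, the 24-hour expiry, and all helper names below are assumptions made up for the example, not the monitor.database API.

    # Illustrative sketch of the caching pattern used by CachedPLC.
    # Paths, the 24h expiry and the helper names are hypothetical.
    import os
    import pickle
    import time

    CACHE_DIR = "/tmp/plccache-example"   # hypothetical cache location
    MAX_AGE = 24 * 60 * 60                # seconds a result counts as "recent"

    def cached_recently(name):
        # True if a cache file exists and is newer than MAX_AGE.
        path = os.path.join(CACHE_DIR, name)
        return os.path.exists(path) and (time.time() - os.path.getmtime(path)) < MAX_AGE

    def db_load(name):
        # Load a previously pickled result.
        with open(os.path.join(CACHE_DIR, name), "rb") as f:
            return pickle.load(f)

    def db_dump(name, values):
        # Pickle a fresh result to disk.
        if not os.path.isdir(CACHE_DIR):
            os.makedirs(CACHE_DIR)
        with open(os.path.join(CACHE_DIR, name), "wb") as f:
            pickle.dump(values, f)

    def run_or_return_cached(name, method, *params):
        # Mirrors run_or_returncached: reuse recent data, otherwise call the
        # API, cache the answer, and fall back to stale data on failure.
        cachename = "-".join([name] + [str(p) for p in params])
        if cached_recently(cachename):
            return db_load(cachename)
        try:
            values = method(*params)
        except Exception:
            return db_load(cachename)     # last known good copy
        db_dump(cachename, values)
        return values

    # Hypothetical use, with any callable standing in for an XML-RPC method:
    # nodes = run_or_return_cached("GetNodes", some_rpc_callable, {'peer_id': None})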