X-Git-Url: http://git.onelab.eu/?a=blobdiff_plain;f=findbadpcu.py;h=9eb3be70fccb57cb08131a636a066375a4e74170;hb=refs%2Fheads%2F2.0;hp=d00d7f7df55b50900fbec68febf31f94d5de4ec3;hpb=be11398f97d51c72f275df3d4f7e095e900f2add;p=monitor.git diff --git a/findbadpcu.py b/findbadpcu.py index d00d7f7..9eb3be7 100755 --- a/findbadpcu.py +++ b/findbadpcu.py @@ -14,7 +14,7 @@ import threading import monitor from monitor import config -from monitor.database.info.model import FindbadPCURecordSync, FindbadPCURecord, session +from monitor.database.info.model import FindbadPCURecord, session from monitor import database from monitor import util from monitor.wrapper import plc, plccache @@ -43,10 +43,11 @@ def checkPCUs(l_pcus, cohash): # CREATE all the work requests for pcuname in l_pcus: pcu_id = int(pcuname) - fbnodesync = FindbadPCURecordSync.findby_or_create(plc_pcuid=pcu_id, if_new_set={'round' : 0}) - fbnodesync.flush() + #fbnodesync = FindbadPCURecordSync.findby_or_create(plc_pcuid=pcu_id, if_new_set={'round' : 0}) + #fbnodesync.flush() - node_round = fbnodesync.round + #node_round = fbnodesync.round + node_round = global_round - 1 if node_round < global_round or config.force: # recreate node stats when refreshed #print "%s" % nodename @@ -75,7 +76,7 @@ def checkPCUs(l_pcus, cohash): print "All results collected." 
break - print FindbadPCURecordSync.query.count() + #print FindbadPCURecordSync.query.count() print FindbadPCURecord.query.count() session.flush() @@ -86,29 +87,37 @@ def main(): l_pcus = plccache.l_pcus cohash = {} - fbsync = FindbadPCURecordSync.findby_or_create(plc_pcuid=0, - if_new_set={'round' : global_round}) + #fbsync = FindbadPCURecordSync.findby_or_create(plc_pcuid=0, - global_round = fbsync.round + #if_new_set={'round' : global_round}) + #global_round = fbsync.round api = plc.getAuthAPI() if config.site is not None: - site = api.GetSites(config.site) - l_nodes = api.GetNodes(site[0]['node_ids'], ['pcu_ids']) + site = plccache.GetSitesByName([config.site]) + l_nodes = plccache.GetNodesByIds(site[0]['node_ids']) pcus = [] for node in l_nodes: pcus += node['pcu_ids'] # clear out dups. l_pcus = [pcu for pcu in sets.Set(pcus)] + + elif config.node is not None: + node = plccache.GetNodeByName(config.node) + print node + pcus = node['pcu_ids'] + # clear out dups. + l_pcus = [pcu for pcu in sets.Set(pcus)] + elif config.sitelist: site_list = config.sitelist.split(',') - sites = api.GetSites(site_list) + sites = plccache.GetSitesByName(site_list) node_ids = [] for s in sites: node_ids += s['node_ids'] - l_nodes = api.GetNodes(node_ids, ['pcu_ids']) + l_nodes = plccache.GetNodesByIds(node_ids) pcus = [] for node in l_nodes: pcus += node['pcu_ids'] @@ -139,8 +148,8 @@ def main(): if config.increment: # update global round number to force refreshes across all nodes - fbsync.round = global_round - fbsync.flush() + #fbsync.round = global_round + #fbsync.flush() session.flush() return 0 @@ -163,6 +172,7 @@ if __name__ == '__main__': pcuid=None, pcuselect=None, site=None, + node=None, sitelist=None, dbname="findbadpcus", cachenodes=False, @@ -171,6 +181,8 @@ if __name__ == '__main__': ) parser.add_option("-f", "--nodelist", dest="nodelist", metavar="FILE", help="Provide the input file for the node list") + parser.add_option("", "--node", dest="node", metavar="FILE", + 
help="Get all pcus associated with the given node") parser.add_option("", "--site", dest="site", metavar="FILE", help="Get all pcus associated with the given site's nodes") parser.add_option("", "--sitelist", dest="sitelist", metavar="FILE",