+ max_cpu = row.get("max_avg_cpu", row.get("max_cpu",0))
+ cpu=float(max_cpu)/100.0
+ row["hotness"] = max(0.0, ((cpu*RED_LOAD) - BLUE_LOAD)/(RED_LOAD-BLUE_LOAD))
+
def compose_latest_query(self, fieldNames=None, groupByFields=None):
    """ Compose a query that returns the 'most recent' row for each (hostname, event)
        pair.

        The table is self-joined against a subquery that computes max(time)
        per combination of groupByFields, so only the newest row in each
        group survives the join.

        Args:
            fieldNames: columns to select from the joined result; any falsy
                value selects the standard monitoring field set.
            groupByFields: columns identifying a group; defaults to
                ["%hostname", "event"].

        Returns:
            The query string.
    """

    # None sentinels instead of mutable list defaults: a list default is a
    # single object shared across every call to this method.
    if not fieldNames:
        fieldNames = ["%hostname", "%bytes_sent", "time", "event", "%site", "%elapsed", "%slice", "%cpu"]
    if groupByFields is None:
        groupByFields = ["%hostname", "event"]

    fields = ", ".join("table1.%s AS %s" % (x, x) for x in fieldNames)

    tableDesc = "%s.%s" % (self.projectName, self.tableName)

    # Join on the per-group max timestamp plus every grouping key.
    joinConditions = ["table1.time = latest.maxtime"]
    for field in groupByFields:
        joinConditions.append("table1.%s = latest.%s" % (field, field))

    groupByOn = " AND ".join(joinConditions)
    groupByClause = ", ".join(groupByFields)

    # @-3600000--1 is a table decorator restricting both scans to the last
    # hour of data.
    base_query = "SELECT %s FROM [%s@-3600000--1] AS table1 JOIN (SELECT %s, max(time) as maxtime from [%s@-3600000--1] GROUP BY %s) AS latest ON %s" % \
        (fields, tableDesc, groupByClause, tableDesc, groupByClause, groupByOn)

    return base_query
+
def get_cached_query_results(self, q, max_age=60):
    """ Run query 'q', serving results from a process-wide cache when fresh.

        Args:
            q: the query string; doubles as the cache key.
            max_age: maximum entry age in seconds before the cached rows
                are considered stale and refreshed (default 60, matching
                the previous hard-coded TTL).

        Returns:
            The rows produced by run_query(q), possibly taken from the
            cache.
    """
    global glo_cached_queries

    # Single lookup instead of 'in' followed by repeated indexing.
    entry = glo_cached_queries.get(q)
    if entry is not None and (time.time() - entry["time"]) <= max_age:
        # Single-argument print(...) behaves identically under Python 2
        # (parenthesized expression) and Python 3 (function call).
        print("using cached query")
        return entry["rows"]

    print("refreshing cached query")
    result = self.run_query(q)
    glo_cached_queries[q] = {"time": time.time(), "rows": result}

    return result