1 from bigquery_analytics import BigQueryAnalytics
11 if os.path.exists("/home/smbaker/projects/vicci/plstackapi/planetstack"):
12 sys.path.append("/home/smbaker/projects/vicci/plstackapi/planetstack")
14 sys.path.append("/opt/planetstack")
16 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "planetstack.settings")
17 from django.conf import settings
19 from django.db import connection
20 from core.models import Slice, Sliver, ServiceClass, Reservation, Tag, Network, User, Node, Image, Deployment, Site, NetworkTemplate, NetworkSlice, Service
# Module-level query-result cache: maps query string -> {"time": <epoch>,
# "rows": <result rows>}.  Consulted by get_cached_query_results() with a
# 60-second freshness window.
glo_cached_queries = {}
27 class PlanetStackAnalytics(BigQueryAnalytics):
28 def __init__(self, tableName=None):
30 tableName = settings.BIGQUERY_TABLE
32 BigQueryAnalytics.__init__(self, tableName)
34 def service_to_sliceNames(self, serviceName):
35 service=Service.objects.get(name=serviceName)
37 slices = service.slices.all()
39 # BUG in data model -- Slice.service has related name 'service' and
40 # it should be 'slices'
41 slices = service.service.all()
43 return [slice.name for slice in slices]
    def compose_query(self, slice=None, site=None, node=None, service=None, event="libvirt_heartbeat", timeBucket="60", avg=[], sum=[], count=[], computed=[], val=[], groupBy=["Time"], orderBy=["Time"], tableName=None, latest=False, maxAge=60*60):
        """Build a BigQuery SQL string over the vicci event table.

        Aggregates (avg/sum/count/computed) are bucketed into timeBucket-second
        windows; optional slice/site/node/service/event filters become WHERE
        clauses; latest=True restricts to the most recent row per
        (hostname, event).  Returns the query string.

        NOTE(review): this is a fragment -- many original lines are elided,
        so several statements below appear without their initializers or
        enclosing conditionals/loops (e.g. `fields`, `fieldNames`, `where`,
        `sumFields` lists and most `if`/`for` headers).
        """
        # (elided: presumably `if tableName is None:` guard)
        tableName = self.tableName

        # maxAge arrives in seconds; BigQuery table decorators want milliseconds
        maxAge = maxAge * 1000
        tablePart = "[%s.%s@-%d--1]" % ("vicci", tableName, maxAge)

        srcFieldNames = ["time"]

        # Bucket the timestamp into timeBucket-second windows, exposed as "Time"
        fields.append("SEC_TO_TIMESTAMP(INTEGER(TIMESTAMP_TO_SEC(time)/%s)*%s) as Time" % (str(timeBucket),str(timeBucket)))
        #fields.append("INTEGER(TIMESTAMP_TO_SEC(time)/%s)*%s as Time" % (str(timeBucket),str(timeBucket)))

        # (elided: `for fieldName in avg:` header)
        # '%'-prefixed source columns get the prefix stripped in output aliases
        fields.append("AVG(%s) as avg_%s" % (fieldName, fieldName.replace("%","")))
        fieldNames.append("avg_%s" % fieldName.replace("%",""))
        srcFieldNames.append(fieldName)

        # (elided: `for fieldName in sum:` header)
        fields.append("SUM(%s) as sum_%s" % (fieldName, fieldName.replace("%","")))
        fieldNames.append("sum_%s" % fieldName.replace("%",""))
        srcFieldNames.append(fieldName)

        for fieldName in count:
            fields.append("COUNT(distinct %s) as count_%s" % (fieldName, fieldName.replace("%","")))
            fieldNames.append("count_%s" % fieldName.replace("%",""))
            srcFieldNames.append(fieldName)

        # (elided: presumably `for fieldName in val:` header)
        fields.append(fieldName)
        fieldNames.append(fieldName)
        srcFieldNames.append(fieldName)

        for fieldName in computed:
            # (elided: `if "/" in fieldName:` -- sets operator to "/")
            parts = fieldName.split("/")
            computedFieldName = "computed_" + parts[0].replace("%","")+"_div_"+parts[1].replace("%","")
            # (elided: `else:`/`elif` branch -- sets operator to "*")
            # NOTE(review): splits `computed` (the whole list) instead of
            # `fieldName` -- looks like a bug; confirm against original.
            parts = computed.split("*")
            computedFieldName = "computed_" + parts[0].replace("%","")+"_mult_"+parts[1].replace("%","")
            # `operator` ("/" or "*") is set in the elided branch headers above
            fields.append("SUM(%s)%sSUM(%s) as %s" % (parts[0], operator, parts[1], computedFieldName))
            fieldNames.append(computedFieldName)
            srcFieldNames.append(parts[0])
            srcFieldNames.append(parts[1])

        for fieldName in groupBy:
            if (fieldName not in ["Time"]):
                fields.append(fieldName)
                fieldNames.append(fieldName)
                srcFieldNames.append(fieldName)

        fields = ", ".join(fields)

        # (elided: `where = []` and the `if slice:`/`if site:`/... guards
        # around each clause below)
        where.append("%%slice='%s'" % slice)
        where.append("%%site='%s'" % site)
        where.append("%%hostname='%s'" % node)
        where.append("event='%s'" % event)
        # A service filter expands to an OR over all of its slices
        sliceNames = self.service_to_sliceNames(service)
        where.append("(" + " OR ".join(["%%slice='%s'" % sliceName for sliceName in sliceNames]) +")")

        where = " WHERE " + " AND ".join(where)

        # (elided: `if groupBy:` / `else:` structure around these three lines)
        groupBySub = " GROUP BY " + ",".join(groupBy + ["%hostname"])
        groupBy = " GROUP BY " + ",".join(groupBy)
        groupBySub = " GROUP BY %hostname"

        # (elided: `if orderBy:` guard)
        orderBy = " ORDER BY " + ",".join(orderBy)

        # (elided: `if latest:` guard)
        # Self-join against per-(hostname,event) max(time) to keep only the
        # newest row for each pair; replaces tablePart with a subselect.
        latestFields = ["table1.%s as %s" % (x,x) for x in srcFieldNames]
        latestFields = ", ".join(latestFields)
        # NOTE(review): the JOIN and ON lines of this triple-quoted SQL
        # template are elided from this fragment.
        tablePart = """(SELECT %s FROM %s AS table1
            (SELECT %%hostname, event, max(time) as maxtime from %s GROUP BY %%hostname, event) AS latest
            table1.%%hostname = latest.%%hostname AND table1.event = latest.event AND table1.time = latest.maxtime)""" % (latestFields, tablePart, tablePart)

        # (elided: `if "Time" in groupBy:` -- two-level aggregation path)
        subQuery = "SELECT %%hostname, %s FROM %s" % (fields, tablePart)
        subQuery = subQuery + where
        subQuery = subQuery + groupBySub

        # (elided: `sumFields = []` initializer)
        for fieldName in fieldNames:
            if fieldName.startswith("avg"):
                sumFields.append("AVG(%s) as avg_%s"%(fieldName,fieldName))
                sumFields.append("MAX(%s) as max_%s"%(fieldName,fieldName))
            elif (fieldName.startswith("count")) or (fieldName.startswith("sum")) or (fieldName.startswith("computed")):
                sumFields.append("SUM(%s) as sum_%s"%(fieldName,fieldName))
            # (elided: `else:` -- pass plain fields through unchanged)
            sumFields.append(fieldName)

        sumFields = ",".join(sumFields)

        # Outer query re-aggregates the per-host subquery by Time
        query = "SELECT %s, %s FROM (%s)" % ("Time", sumFields, subQuery)
        query = query + groupBy
        query = query + orderBy
        # (elided: `else:` -- single-level query path when not grouping by Time)
        query = "SELECT %s FROM %s" % (fields, tablePart)
        query = query + " " + where
        query = query + groupBy
        query = query + orderBy
        # (elided: `return query`)
175 def get_list_from_req(self, req, name, default=[]):
176 value = req.GET.get(name, None)
179 value=value.replace("@","%")
180 return value.split(",")
    def format_result(self, format, result, query, dataSourceUrl):
        """Render query *result* rows in the requested wire format.

        Returns a (content_type, body) tuple.
        NOTE(review): fragment -- accumulator initializers and the
        `for row in result:` headers are elided below.
        """
        if (format == "json_dicts"):
            result = {"query": query, "rows": result, "dataSourceUrl": dataSourceUrl}
            return ("application/javascript", json.dumps(result))

        elif (format == "json_arrays"):
            # (elided: `new_result = []`, `for row in result:`, `new_row = []`)
            for key in sorted(row.keys()):
                new_row.append(row[key])
            new_result.append(new_row)
            new_result = {"query": query, "rows": new_result}
            return ("application/javascript", json.dumps(new_result))

        elif (format == "html_table"):
            # (elided: `new_rows = []`, `for row in result:`, `new_row = []`)
            for key in sorted(row.keys()):
                new_row.append("<TD>%s</TD>" % str(row[key]))
            new_rows.append("<TR>%s</TR>" % "".join(new_row))

            new_result = "<TABLE>%s</TABLE>" % "\n".join(new_rows)

            return ("text/html", new_result)
    def merge_datamodel_sites(self, rows, slice=None):
        """ For a query that included "site" in its groupby, merge in the
            opencloud site information.

            Mutates each row in place, adding lat/long/url/numNodes/
            allocated_slivers/hotness keys looked up from the data model.
            NOTE(review): fragment -- the `if slice:`, `for row in rows:`
            and try/except headers are elided below.
        """
        # (elided: `if slice:` guard) -- resolve slice name to a model object
        slice = Slice.objects.get(name=slice)

        # (elided: `for row in rows:` and a try: around the Site lookup)
        sitename = row["site"]
        model_site = Site.objects.get(name=sitename)
        # we didn't find it in the data model
        # (elided: except/continue handling for unknown sites)

        # Count this slice's slivers hosted at this site
        allocated_slivers = 0
        if model_site and slice:
            for sliver in slice.slivers.all():
                if sliver.node.site == model_site:
                    allocated_slivers = allocated_slivers + 1

        row["lat"] = float(model_site.location.latitude)
        row["long"] = float(model_site.location.longitude)
        row["url"] = model_site.site_url
        row["numNodes"] = model_site.nodes.count()
        row["allocated_slivers"] = allocated_slivers

        # Normalize CPU load into a 0..1 "hotness" between BLUE_LOAD and
        # RED_LOAD (module-level constants defined in elided lines).
        max_cpu = row.get("max_avg_cpu", row.get("max_cpu",0))
        cpu=float(max_cpu)/100.0
        row["hotness"] = max(0.0, ((cpu*RED_LOAD) - BLUE_LOAD)/(RED_LOAD-BLUE_LOAD))
244 def compose_latest_query(self, fieldNames=None, groupByFields=["%hostname", "event"]):
245 """ Compose a query that returns the 'most recent' row for each (hostname, event)
250 fieldNames = ["%hostname", "%bytes_sent", "time", "event", "%site", "%elapsed", "%slice", "%cpu"]
252 fields = ["table1.%s AS %s" % (x,x) for x in fieldNames]
253 fields = ", ".join(fields)
255 tableDesc = "%s.%s" % (self.projectName, self.tableName)
257 groupByOn = ["table1.time = latest.maxtime"]
258 for field in groupByFields:
259 groupByOn.append("table1.%s = latest.%s" % (field, field))
261 groupByOn = " AND ".join(groupByOn)
262 groupByFields = ", ".join(groupByFields)
264 base_query = "SELECT %s FROM [%s@-3600000--1] AS table1 JOIN (SELECT %s, max(time) as maxtime from [%s@-3600000--1] GROUP BY %s) AS latest ON %s" % \
265 (fields, tableDesc, groupByFields, tableDesc, groupByFields, groupByOn)
    def get_cached_query_results(self, q, wait=True):
        """Run query *q*, reusing a module-level cached result if it is
        less than 60 seconds old.

        wait: presumably controls whether a stale-cache miss blocks on a
        fresh query -- its handling is in elided lines; TODO confirm.
        """
        global glo_cached_queries

        if q in glo_cached_queries:
            if (time.time() - glo_cached_queries[q]["time"]) <= 60:
                print "using cached query"
                return glo_cached_queries[q]["rows"]
        # (elided: lines 276-279 -- likely the non-wait / stale-entry path)

        print "refreshing cached query"
        result = self.run_query(q)
        glo_cached_queries[q] = {"time": time.time(), "rows": result}
        # NOTE(review): no `return result` is visible in this fragment;
        # presumably it follows in an elided line.
    def process_request(self, req):
        """Handle an analytics HTTP request end-to-end: parse GET
        parameters, compose a BigQuery query, run it (optionally via the
        cache), and format the response.

        Returns a (content_type, body) tuple for the Django view layer.
        NOTE(review): fragment -- several branch headers and guards are
        elided below.
        """
        # tqx is the Google Visualization request token, echoed back as reqId
        tqx = req.GET.get("tqx", None)

        # Filter parameters
        slice = req.GET.get("slice", None)
        site = req.GET.get("site", None)
        node = req.GET.get("node", None)
        service = req.GET.get("service", None)
        event = req.GET.get("event", "libvirt_heartbeat")

        format = req.GET.get("format", "json_dicts")

        # Aggregation parameters ('@' stands in for '%' in these lists)
        timeBucket = int(req.GET.get("timeBucket", 60))
        avg = self.get_list_from_req(req, "avg")
        sum = self.get_list_from_req(req, "sum")
        count = self.get_list_from_req(req, "count")
        computed = self.get_list_from_req(req, "computed")
        groupBy = self.get_list_from_req(req, "groupBy", ["Time"])
        orderBy = self.get_list_from_req(req, "orderBy", ["Time"])

        maxRows = req.GET.get("maxRows", None)
        mergeDataModelSites = req.GET.get("mergeDataModelSites", None)

        maxAge = int(req.GET.get("maxAge", 60*60))

        cached = req.GET.get("cached", None)

        q = self.compose_query(slice, site, node, service, event, timeBucket, avg, sum, count, computed, [], groupBy, orderBy, maxAge=maxAge)

        # Rebuild this request's URL as a chart data source: rename the
        # format param, %-escape, and force format=charts.
        dataSourceUrl = "http://" + req.META["SERVER_NAME"] + ":" + req.META["SERVER_PORT"] + req.META["PATH_INFO"] + "?" + req.META["QUERY_STRING"].replace("format=","origFormat=").replace("%","%25") + "&format=charts";

        if (format=="dataSourceUrl"):
            result = {"dataSourceUrl": dataSourceUrl}
            return ("application/javascript", result)

        elif (format=="raw"):
            result = self.run_query_raw(q)
            result["dataSourceUrl"] = dataSourceUrl
            result = json.dumps(result);
            return ("application/javascript", result)

        elif (format=="nodata"):
            result = {"dataSourceUrl": dataSourceUrl, "query": q}
            result = json.dumps(result);
            # NOTE(review): {..., ...} builds a SET here, unlike the tuple
            # every other branch returns -- almost certainly should be
            # ("application/javascript", result). Flagged, not changed.
            return {"application/javascript", result}

        elif (format=="charts"):
            # Convert the raw BigQuery result into a Google Visualization
            # DataTable response.
            bq_result = self.run_query_raw(q)

            # cloudscrutiny code is probably better!
            # (elided: `table = {}` and rows/rowcols initializers)
            table["cols"] = self.schema_to_cols(bq_result["schema"])
            if "rows" in bq_result:
                for row in bq_result["rows"]:
                    for (colnum,col) in enumerate(row["f"]):
                        # (elided: per-column type dispatch -- timestamp /
                        # number / string branches)
                        dt = datetime.datetime.fromtimestamp(float(col["v"]))
                        rowcols.append({"v": 'new Date("%s")' % dt.isoformat()})
                        rowcols.append({"v": float(col["v"])})
                        rowcols.append({"v": col["v"]})
                    rows.append({"c": rowcols})

            # NOTE(review): .strip("reqId:") strips any of the characters
            # r,e,q,I,d,: from both ends, not the literal "reqId:" prefix --
            # works for numeric ids but is fragile; also raises if tqx is
            # None (guard presumably in elided lines).
            reqId = tqx.strip("reqId:")
            result = {"status": "okColumnChart", "reqId": reqId, "table": table, "version": "0.6"}
            result = "google.visualization.Query.setResponse(" + json.dumps(result) + ");"
            # Unquote the 'new Date("...")' strings json.dumps quoted, so the
            # client evaluates them as Date constructors.
            def unquote_it(x): return x.group()[1:-1].replace('\\"', '"')
            p = re.compile(r'"new Date\(\\"[^"]*\\"\)"')
            result=p.sub(unquote_it, result)
            return ("application/javascript", result)

        # (elided: presumably `elif cached:` branch header and `filter = {}`
        # plus the per-key `if` guards below)
        results = self.get_cached_query_results(self.compose_latest_query())
        filter["slice"] = slice
        filter["site"] = site
        filter["hostname"] = node
        filter["event"] = event
        result = self.postprocess_results(results, filter=filter, sum=sum, count=count, avg=avg, computed=computed, maxDeltaTime=120, groupBy=["doesnotexist"])
        # (elided: `else:` -- uncached path runs the composed query directly)
        result = self.run_query(q)
        # (elided: `if maxRows:` guard) -- keep only the trailing maxRows rows
        result = result[-int(maxRows):]

        if mergeDataModelSites:
            self.merge_datamodel_sites(result)

        return self.format_result(format, result, q, dataSourceUrl)
def DoPlanetStackAnalytics(request):
    """Module-level entry point: delegate the Django request to a fresh
    PlanetStackAnalytics instance (default table from settings)."""
    bq = PlanetStackAnalytics()
    result = bq.process_request(request)
    # NOTE(review): the return/response lines are elided in this fragment;
    # presumably `result` (a (content_type, body) tuple) is returned.
# NOTE(review): the statements below appear to be the body of an elided
# test/driver function (they precede `if __name__ == "__main__":`);
# indentation reconstructed.  They exercise the query builders against the
# "demoevents" table and dump results to stdout.
bq = PlanetStackAnalytics(tableName="demoevents")

q = bq.compose_latest_query(groupByFields=["%hostname", "event", "%slice"])
results = bq.run_query(q)

#results = bq.postprocess_results(results,
#                                 filter={"slice": "HyperCache"},
#                                 computed=["bytes_sent/elapsed"],
#                                 sum=["bytes_sent", "computed_bytes_sent_div_elapsed"], avg=["cpu"],

results = bq.postprocess_results(results, filter={"slice": "HyperCache"}, maxi=["cpu"], count=["hostname"], computed=["bytes_sent/elapsed"], groupBy=["Time", "site"], maxDeltaTime=80)

bq.dump_table(results)

q=bq.compose_query(sum=["%bytes_sent"], avg=["%cpu"], latest=True, groupBy=["Time", "%site"])
bq.dump_table(bq.run_query(q))

q=bq.compose_query(avg=["%cpu","%bandwidth"], count=["%hostname"], slice="HyperCache")
bq.dump_table(bq.run_query(q))

q=bq.compose_query(computed=["%bytes_sent/%elapsed"])
bq.dump_table(bq.run_query(q))

q=bq.compose_query(timeBucket=60*60, avg=["%cpu"], count=["%hostname"], computed=["%bytes_sent/%elapsed"])
bq.dump_table(bq.run_query(q))
444 if __name__ == "__main__":