2afacb3ac188a9799c12f5538cdbd0c76a614917
[plstackapi.git] / planetstack / hpc_wizard / planetstack_analytics.py
from bigquery_analytics import BigQueryAnalytics
import datetime
import re
import os
import sys
import time
import json
import traceback
import urllib2

if os.path.exists("/home/smbaker/projects/vicci/plstackapi/planetstack"):
    sys.path.append("/home/smbaker/projects/vicci/plstackapi/planetstack")
else:
    sys.path.append("/opt/planetstack")

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "planetstack.settings")
from django import db
from django.db import connection
from core.models import Slice, Sliver, ServiceClass, Reservation, Tag, Network, User, Node, Image, Deployment, Site, NetworkTemplate, NetworkSlice, Service

BLUE_LOAD=5000000
RED_LOAD=15000000

glo_cached_queries = {}

class PlanetStackAnalytics(BigQueryAnalytics):
    def __init__(self, tableName="demoevents"):
        BigQueryAnalytics.__init__(self, tableName)

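    # Typical use (sketch): one instance wraps a single BigQuery events table;
    # it is driven either from Django, where process_request(req) returns a
    # (content_type, body) tuple, or from the command line via main() below.
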
    def service_to_sliceNames(self, serviceName):
        service = Service.objects.get(name=serviceName)
        try:
            slices = service.slices.all()
        except AttributeError:
            # BUG in data model -- Slice.service has related name 'service' and
            #                      it should be 'slices'
            slices = service.service.all()

        return [slice.name for slice in slices]

    def compose_query(self, slice=None, site=None, node=None, service=None, timeBucket="60", avg=[], sum=[], count=[], computed=[], val=[], groupBy=["Time"], orderBy=["Time"], tableName="demoevents", latest=False, max_age=60*60):
        """ Build a BigQuery SQL string over the last max_age seconds of the
            events table, bucketing rows into timeBucket-second intervals and
            aggregating the requested avg/sum/count/computed fields.
        """
        # BigQuery table decorators take a millisecond range
        max_age = max_age * 1000
        tablePart = "[%s.%s@-%d--1]" % ("vicci", tableName, max_age)

        fields = []
        fieldNames = []
        srcFieldNames = ["time"]

        fields.append("SEC_TO_TIMESTAMP(INTEGER(TIMESTAMP_TO_SEC(time)/%s)*%s) as Time" % (str(timeBucket), str(timeBucket)))
        #fields.append("INTEGER(TIMESTAMP_TO_SEC(time)/%s)*%s as Time" % (str(timeBucket),str(timeBucket)))

        for fieldName in avg:
            fields.append("AVG(%s) as avg_%s" % (fieldName, fieldName.replace("%","")))
            fieldNames.append("avg_%s" % fieldName.replace("%",""))
            srcFieldNames.append(fieldName)

        for fieldName in sum:
            fields.append("SUM(%s) as sum_%s" % (fieldName, fieldName.replace("%","")))
            fieldNames.append("sum_%s" % fieldName.replace("%",""))
            srcFieldNames.append(fieldName)

        for fieldName in count:
            fields.append("COUNT(distinct %s) as count_%s" % (fieldName, fieldName.replace("%","")))
            fieldNames.append("count_%s" % fieldName.replace("%",""))
            srcFieldNames.append(fieldName)

        for fieldName in val:
            fields.append(fieldName)
            fieldNames.append(fieldName)
            srcFieldNames.append(fieldName)

        for fieldName in computed:
            # a computed field is either a ratio ("a/b") or a product ("a*b")
            # of two source fields
            if "/" in fieldName:
                operator = "/"
                parts = fieldName.split("/")
                computedFieldName = "computed_" + parts[0].replace("%","") + "_div_" + parts[1].replace("%","")
            else:
                operator = "*"
                parts = fieldName.split("*")
                computedFieldName = "computed_" + parts[0].replace("%","") + "_mult_" + parts[1].replace("%","")
            fields.append("SUM(%s)%sSUM(%s) as %s" % (parts[0], operator, parts[1], computedFieldName))
            fieldNames.append(computedFieldName)
            srcFieldNames.append(parts[0])
            srcFieldNames.append(parts[1])

        for fieldName in groupBy:
            if (fieldName not in ["Time"]):
                fields.append(fieldName)
                fieldNames.append(fieldName)
                srcFieldNames.append(fieldName)

        fields = ", ".join(fields)

        where = []

        if slice:
            where.append("%%slice='%s'" % slice)
        if site:
            where.append("%%site='%s'" % site)
        if node:
            where.append("%%hostname='%s'" % node)
        if service:
            sliceNames = self.service_to_sliceNames(service)
            if sliceNames:
                where.append("(" + " OR ".join(["%%slice='%s'" % sliceName for sliceName in sliceNames]) + ")")

        if where:
            where = " WHERE " + " AND ".join(where)
        else:
            where = ""

        if groupBy:
            groupBySub = " GROUP BY " + ",".join(groupBy + ["%hostname"])
            groupBy = " GROUP BY " + ",".join(groupBy)
        else:
            groupBySub = " GROUP BY %hostname"
            groupBy = ""

        if orderBy:
            orderBy = " ORDER BY " + ",".join(orderBy)
        else:
            orderBy = ""

        if latest:
            latestFields = ["table1.%s as %s" % (x,x) for x in srcFieldNames]
            latestFields = ", ".join(latestFields)
            tablePart = """(SELECT %s FROM %s AS table1
                            JOIN
                                (SELECT %%hostname, event, max(time) as maxtime from %s GROUP BY %%hostname, event) AS latest
                            ON
                                table1.%%hostname = latest.%%hostname AND table1.event = latest.event AND table1.time = latest.maxtime)""" % (latestFields, tablePart, tablePart)

        if computed:
            # aggregate per-host first, then aggregate the per-host results
            subQuery = "SELECT %%hostname, %s FROM %s" % (fields, tablePart)
            if where:
                subQuery = subQuery + where
            subQuery = subQuery + groupBySub

            sumFields = []
            for fieldName in fieldNames:
                if fieldName.startswith("avg"):
                    sumFields.append("AVG(%s) as avg_%s" % (fieldName, fieldName))
                    sumFields.append("MAX(%s) as max_%s" % (fieldName, fieldName))
                elif (fieldName.startswith("count")) or (fieldName.startswith("sum")) or (fieldName.startswith("computed")):
                    sumFields.append("SUM(%s) as sum_%s" % (fieldName, fieldName))
                else:
                    sumFields.append(fieldName)

            sumFields = ",".join(sumFields)

            query = "SELECT %s, %s FROM (%s)" % ("Time", sumFields, subQuery)
            if groupBy:
                query = query + groupBy
            if orderBy:
                query = query + orderBy
        else:
            query = "SELECT %s FROM %s" % (fields, tablePart)
            if where:
                query = query + " " + where
            if groupBy:
                query = query + groupBy
            if orderBy:
                query = query + orderBy

        return query

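    # Illustrative sketch of what compose_query (above) emits; the %-prefixed
    # names are placeholders resolved by BigQueryAnalytics, and the exact
    # whitespace is an assumption:
    #
    #   compose_query(avg=["%cpu"], count=["%hostname"]) ->
    #     SELECT SEC_TO_TIMESTAMP(INTEGER(TIMESTAMP_TO_SEC(time)/60)*60) as Time,
    #            AVG(%cpu) as avg_cpu, COUNT(distinct %hostname) as count_hostname
    #     FROM [vicci.demoevents@-3600000--1] GROUP BY Time ORDER BY Time
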
    def get_list_from_req(self, req, name, default=[]):
        value = req.GET.get(name, None)
        if not value:
            return default
        value = value.replace("@","%")
        return value.split(",")

    def format_result(self, format, result, query, dataSourceUrl):
        if (format == "json_dicts"):
            result = {"query": query, "rows": result, "dataSourceUrl": dataSourceUrl}
            return ("application/javascript", json.dumps(result))

        elif (format == "json_arrays"):
            new_result = []
            for row in result:
                new_row = []
                for key in sorted(row.keys()):
                    new_row.append(row[key])
                new_result.append(new_row)
            new_result = {"query": query, "rows": new_result}
            return ("application/javascript", json.dumps(new_result))

        elif (format == "html_table"):
            new_rows = []
            for row in result:
                new_row = []
                for key in sorted(row.keys()):
                    new_row.append("<TD>%s</TD>" % str(row[key]))
                new_rows.append("<TR>%s</TR>" % "".join(new_row))

            new_result = "<TABLE>%s</TABLE>" % "\n".join(new_rows)

            return ("text/html", new_result)

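    # For example (sketch), format_result("json_dicts", rows, q, url) returns
    #   ("application/javascript",
    #    '{"query": "<q>", "rows": [...], "dataSourceUrl": "<url>"}')
    # while "html_table" wraps each row in <TR>/<TD> markup instead.
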
    def merge_datamodel_sites(self, rows, slice=None):
        """ For a query that included "site" in its groupBy, merge in the
            opencloud site information.
        """

        if slice:
            try:
                slice = Slice.objects.get(name=slice)
            except Slice.DoesNotExist:
                slice = None

        for row in rows:
            sitename = row["site"]
            try:
                model_site = Site.objects.get(name=sitename)
            except Site.DoesNotExist:
                # we didn't find it in the data model
                continue

            allocated_slivers = 0
            if model_site and slice:
                for sliver in slice.slivers.all():
                    if sliver.node.site == model_site:
                        allocated_slivers = allocated_slivers + 1

            row["lat"] = float(model_site.location.latitude)
            row["long"] = float(model_site.location.longitude)
            row["url"] = model_site.site_url
            row["numNodes"] = model_site.nodes.count()
            row["allocated_slivers"] = allocated_slivers

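            # "hotness" maps the implied load (cpu fraction * RED_LOAD) from the
            # [BLUE_LOAD, RED_LOAD] range onto 0.0-1.0, clamped below at zero;
            # e.g. max_cpu=50 -> hotness = (0.5*15e6 - 5e6)/(15e6 - 5e6) = 0.25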
            max_cpu = row.get("max_avg_cpu", row.get("max_cpu", 0))
            cpu = float(max_cpu) / 100.0
            row["hotness"] = max(0.0, ((cpu*RED_LOAD) - BLUE_LOAD)/(RED_LOAD-BLUE_LOAD))

    def compose_latest_query(self, fieldNames=None, groupByFields=["%hostname", "event"]):
        """ Compose a query that returns the 'most recent' row for each (hostname, event)
            pair.
        """

        if not fieldNames:
            fieldNames = ["%hostname", "%bytes_sent", "time", "event", "%site", "%elapsed", "%slice", "%cpu"]

        fields = ["table1.%s AS %s" % (x,x) for x in fieldNames]
        fields = ", ".join(fields)

        tableDesc = "%s.%s" % (self.projectName, self.tableName)

        groupByOn = ["table1.time = latest.maxtime"]
        for field in groupByFields:
            groupByOn.append("table1.%s = latest.%s" % (field, field))

        groupByOn = " AND ".join(groupByOn)
        groupByFields = ", ".join(groupByFields)

        base_query = "SELECT %s FROM [%s@-3600000--1] AS table1 JOIN (SELECT %s, max(time) as maxtime from [%s@-3600000--1] GROUP BY %s) AS latest ON %s" % \
                      (fields, tableDesc, groupByFields, tableDesc, groupByFields, groupByOn)

        return base_query

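    # Sketch of the query shape compose_latest_query (above) produces, under the
    # default groupByFields (exact whitespace is an assumption):
    #
    #   SELECT table1.%hostname AS %hostname, ..., table1.%cpu AS %cpu
    #   FROM [<project>.<table>@-3600000--1] AS table1
    #   JOIN (SELECT %hostname, event, max(time) as maxtime
    #         FROM [<project>.<table>@-3600000--1]
    #         GROUP BY %hostname, event) AS latest
    #   ON table1.time = latest.maxtime
    #      AND table1.%hostname = latest.%hostname AND table1.event = latest.event
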
    def get_cached_query_results(self, q, wait=True):
        global glo_cached_queries

        if q in glo_cached_queries:
            if (time.time() - glo_cached_queries[q]["time"]) <= 60:
                print "using cached query"
                return glo_cached_queries[q]["rows"]

        if not wait:
            return None

        print "refreshing cached query"
        result = self.run_query(q)
        glo_cached_queries[q] = {"time": time.time(), "rows": result}

        return result

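    # Note: the cache above is per-process and keyed on the query string with a
    # 60-second TTL; get_cached_query_results(q, wait=False) returns None rather
    # than blocking on a BigQuery round trip when the cache is cold or stale.
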
    def process_request(self, req):
        print req.GET

        tqx = req.GET.get("tqx", None)

        slice = req.GET.get("slice", None)
        site = req.GET.get("site", None)
        node = req.GET.get("node", None)
        service = req.GET.get("service", None)

        format = req.GET.get("format", "json_dicts")

        timeBucket = req.GET.get("timeBucket", "60")
        avg = self.get_list_from_req(req, "avg")
        sum = self.get_list_from_req(req, "sum")
        count = self.get_list_from_req(req, "count")
        computed = self.get_list_from_req(req, "computed")
        groupBy = self.get_list_from_req(req, "groupBy", ["Time"])
        orderBy = self.get_list_from_req(req, "orderBy", ["Time"])

        maxRows = req.GET.get("maxRows", None)
        mergeDataModelSites = req.GET.get("mergeDataModelSites", None)

        cached = req.GET.get("cached", None)

        q = self.compose_query(slice, site, node, service, timeBucket, avg, sum, count, computed, [], groupBy, orderBy)

        print q

        dataSourceUrl = "http://" + req.META["SERVER_NAME"] + ":" + req.META["SERVER_PORT"] + req.META["PATH_INFO"] + "?" + req.META["QUERY_STRING"].replace("format=","origFormat=").replace("%","%25") + "&format=charts"

        if (format == "dataSourceUrl"):
            result = {"dataSourceUrl": dataSourceUrl}
            return ("application/javascript", result)

        elif (format == "raw"):
            result = self.run_query_raw(q)
            result["dataSourceUrl"] = dataSourceUrl

            result = json.dumps(result)

            return ("application/javascript", result)

        elif (format == "nodata"):
            result = {"dataSourceUrl": dataSourceUrl, "query": q}
            result = json.dumps(result)
            return ("application/javascript", result)

        elif (format == "charts"):
            bq_result = self.run_query_raw(q)

            # cloudscrutiny code is probably better!
            table = {}
            table["cols"] = self.schema_to_cols(bq_result["schema"])
            rows = []
            if "rows" in bq_result:
                for row in bq_result["rows"]:
                    rowcols = []
                    for (colnum, col) in enumerate(row["f"]):
                        if (colnum == 0):
                            dt = datetime.datetime.fromtimestamp(float(col["v"]))
                            rowcols.append({"v": 'new Date("%s")' % dt.isoformat()})
                        else:
                            try:
                                rowcols.append({"v": float(col["v"])})
                            except (ValueError, TypeError):
                                rowcols.append({"v": col["v"]})
                    rows.append({"c": rowcols})
            table["rows"] = rows

            if tqx and tqx.startswith("reqId:"):
                reqId = tqx[len("reqId:"):]
            else:
                reqId = "0"

            result = {"status": "okColumnChart", "reqId": reqId, "table": table, "version": "0.6"}

            result = "google.visualization.Query.setResponse(" + json.dumps(result) + ");"

            # json.dumps() quoted the 'new Date(...)' strings; unquote them so
            # the google visualization client evaluates them as Date constructors
            def unquote_it(x): return x.group()[1:-1].replace('\\"', '"')

            p = re.compile(r'"new Date\(\\"[^"]*\\"\)"')
            result = p.sub(unquote_it, result)

            return ("application/javascript", result)

        else:
            if cached:
                results = self.get_cached_query_results(self.compose_latest_query())

                filter = {}
                if slice:
                    filter["slice"] = slice
                if site:
                    filter["site"] = site
                if node:
                    filter["hostname"] = node

                result = self.postprocess_results(results, filter=filter, sum=sum, count=count, avg=avg, computed=computed, maxDeltaTime=120, groupBy=["doesnotexist"])
            else:
                result = self.run_query(q)

            if maxRows:
                result = result[-int(maxRows):]

            if mergeDataModelSites:
                self.merge_datamodel_sites(result)

            return self.format_result(format, result, q, dataSourceUrl)

def DoPlanetStackAnalytics(request):
    bq = PlanetStackAnalytics()
    result = bq.process_request(request)

    return result

def main():
    bq = PlanetStackAnalytics()

    q = bq.compose_latest_query(groupByFields=["%hostname", "event", "%slice"])
    results = bq.run_query(q)

    #results = bq.postprocess_results(results,
    #                                 filter={"slice": "HyperCache"},
    #                                 groupBy=["site"],
    #                                 computed=["bytes_sent/elapsed"],
    #                                 sum=["bytes_sent", "computed_bytes_sent_div_elapsed"], avg=["cpu"],
    #                                 maxDeltaTime=60)

    results = bq.postprocess_results(results, filter={"slice": "HyperCache"}, maxi=["cpu"], count=["hostname"], computed=["bytes_sent/elapsed"], groupBy=["Time", "site"], maxDeltaTime=80)

    bq.dump_table(results)

    sys.exit(0)

    # everything below sys.exit(0) is unreachable; kept as ad-hoc examples of
    # compose_query usage
    q = bq.compose_query(sum=["%bytes_sent"], avg=["%cpu"], latest=True, groupBy=["Time", "%site"])
    print q
    bq.dump_table(bq.run_query(q))

    q = bq.compose_query(avg=["%cpu","%bandwidth"], count=["%hostname"], slice="HyperCache")
    print q
    bq.dump_table(bq.run_query(q))

    q = bq.compose_query(computed=["%bytes_sent/%elapsed"])
    print
    print q
    bq.dump_table(bq.run_query(q))

    q = bq.compose_query(timeBucket=60*60, avg=["%cpu"], count=["%hostname"], computed=["%bytes_sent/%elapsed"])
    print
    print q
    bq.dump_table(bq.run_query(q))

if __name__ == "__main__":
    main()