1 from bigquery_analytics import BigQueryAnalytics
# Django bootstrap: make the PlanetStack project importable -- prefer the
# developer checkout when present, otherwise the installed /opt location
# (the elided original line 12 presumably holds the matching "else:") --
# then point Django at its settings module before any model imports below.
10 if os.path.exists("/home/smbaker/projects/vicci/plstackapi/planetstack"):
11 sys.path.append("/home/smbaker/projects/vicci/plstackapi/planetstack")
13 sys.path.append("/opt/planetstack")
15 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "planetstack.settings")
17 from django.db import connection
18 from core.models import Slice, Sliver, ServiceClass, Reservation, Tag, Network, User, Node, Image, Deployment, Site, NetworkTemplate, NetworkSlice, Service
23 class PlanetStackAnalytics(BigQueryAnalytics):
# BigQuery-backed analytics for PlanetStack: composes aggregate queries
# over the event table and formats the results for HTTP dashboard clients.
24 def __init__(self, tableName="demoevents"):
# tableName: BigQuery table holding the events; forwarded to the base class.
25 BigQueryAnalytics.__init__(self, tableName)
def service_to_sliceNames(self, serviceName):
    """Return the names of every slice attached to the named Service.

    serviceName -- value matched against Service.name in the data model.
    Raises Service.DoesNotExist if no such service exists.
    """
    service = Service.objects.get(name=serviceName)

    # BUG in data model -- Slice.service has related name 'service' and
    # it should be 'slices'; until that is fixed the reverse relation is
    # reached as service.service.  (The previous dead store of
    # service.slices.all(), immediately overwritten, has been removed.)
    slices = service.service.all()

    return [slice.name for slice in slices]
38 def compose_query(self, slice=None, site=None, node=None, service=None, timeBucket="60", avg=[], sum=[], count=[], computed=[], groupBy=["Time"], orderBy=["Time"], tableName="demoevents"):
# Build a legacy-SQL BigQuery query string from the requested filters and
# aggregates.  Field names beginning with "%" appear to be placeholders
# resolved elsewhere (presumably the BigQueryAnalytics base class -- confirm).
# NOTE(review): mutable default arguments (avg=[], sum=[], ...) are shared
# across calls and should be None-defaulted; "sum" and "slice" also shadow
# builtins.  Filter values are interpolated directly into the SQL text --
# see the injection note below.
# Table decorator @-3600000--1 restricts the scan to the last hour
# (snapshot range in milliseconds relative to now).
39 tablePart = "%s.%s@-3600000--1" % ("vicci", tableName)
# Bucket event timestamps into timeBucket-second intervals, aliased "Time".
44 fields.append("SEC_TO_TIMESTAMP(INTEGER(TIMESTAMP_TO_SEC(time)/%s)*%s) as Time" % (str(timeBucket),str(timeBucket)))
45 #fields.append("INTEGER(TIMESTAMP_TO_SEC(time)/%s)*%s as Time" % (str(timeBucket),str(timeBucket)))
# AVG aggregates: output column avg_<field> (any "%" stripped from names).
48 fields.append("AVG(%s) as avg_%s" % (fieldName, fieldName.replace("%","")))
49 fieldNames.append("avg_%s" % fieldName.replace("%",""))
# SUM aggregates: sum_<field>.
52 fields.append("SUM(%s) as sum_%s" % (fieldName, fieldName.replace("%","")))
53 fieldNames.append("sum_%s" % fieldName.replace("%",""))
# COUNT DISTINCT aggregates: count_<field>.
55 for fieldName in count:
56 fields.append("COUNT(distinct %s) as count_%s" % (fieldName, fieldName.replace("%","")))
57 fieldNames.append("count_%s" % fieldName.replace("%",""))
# Computed ratio/product columns, e.g. "%bytes_sent/%elapsed" becomes
# SUM(a)/SUM(b) as computed_a_div_b.
59 for fieldName in computed:
61 parts = fieldName.split("/")
62 computedFieldName = "computed_" + parts[0].replace("%","")+"_div_"+parts[1].replace("%","")
# NOTE(review): this "*" branch splits `computed` (the whole list) rather
# than `fieldName` -- almost certainly a bug; should be fieldName.split("*").
65 parts = computed.split("*")
66 computedFieldName = "computed_" + parts[0].replace("%","")+"_mult_"+parts[1].replace("%","")
67 fields.append("SUM(%s)%sSUM(%s) as %s" % (parts[0], operator, parts[1], computedFieldName))
68 fieldNames.append(computedFieldName)
# Non-Time groupBy columns are also selected so they appear in the output.
70 for fieldName in groupBy:
71 if (fieldName not in ["Time"]):
72 fields.append(fieldName)
73 fieldNames.append(fieldName)
75 fields = ", ".join(fields)
# WHERE clause: one equality per provided filter; a service filter expands
# to an OR over all of that service's slice names.
# NOTE(review): values come from the HTTP request and are spliced into the
# SQL unescaped -- injection risk; should be sanitized/quoted.
80 where.append("%%slice='%s'" % slice)
82 where.append("%%site='%s'" % site)
84 where.append("%%hostname='%s'" % node)
86 sliceNames = self.service_to_sliceNames(service)
88 where.append("(" + " OR ".join(["%%slice='%s'" % sliceName for sliceName in sliceNames]) +")")
91 where = " WHERE " + " AND ".join(where)
# The inner (sub)query additionally groups by %hostname so the outer query
# can re-aggregate across hosts.
96 groupBySub = " GROUP BY " + ",".join(groupBy + ["%hostname"])
97 groupBy = " GROUP BY " + ",".join(groupBy)
99 groupBySub = " GROUP BY %hostname"
103 orderBy = " ORDER BY " + ",".join(orderBy)
# Two-level form (presumably taken when aggregates were requested --
# the guarding "if" is elided): inner per-host aggregation, outer
# cross-host re-aggregation.
108 subQuery = "SELECT %%hostname, %s FROM [%s]" % (fields, tablePart)
110 subQuery = subQuery + where
111 subQuery = subQuery + groupBySub
112 #subQuery = subQuery + " GROUP BY %s,%%hostname" % timeField
# Outer select: avg_* columns become avg_avg_* and max_avg_*;
# count/sum/computed columns are summed into sum_*; others pass through.
115 for fieldName in fieldNames:
116 if fieldName.startswith("avg"):
117 sumFields.append("AVG(%s) as avg_%s"%(fieldName,fieldName))
118 sumFields.append("MAX(%s) as max_%s"%(fieldName,fieldName))
119 elif (fieldName.startswith("count")) or (fieldName.startswith("sum")) or (fieldName.startswith("computed")):
120 sumFields.append("SUM(%s) as sum_%s"%(fieldName,fieldName))
122 sumFields.append(fieldName)
124 sumFields = ",".join(sumFields)
126 query = "SELECT %s, %s FROM (%s)" % ("Time", sumFields, subQuery)
128 query = query + groupBy
130 query = query + orderBy
# Flat single-level form (presumably the no-aggregate branch -- confirm).
132 query = "SELECT %s FROM [%s]" % (fields, tablePart)
134 query = query + " " + where
136 query = query + groupBy
138 query = query + orderBy
142 def get_list_from_req(self, req, name, default=[]):
# Read a comma-separated query parameter as a list.  "@" is accepted as a
# URL-friendly stand-in for "%" so %-prefixed field names can be passed.
# NOTE(review): mutable default argument (default=[]); the elided original
# lines 144-145 presumably return `default` when the parameter is absent
# (otherwise the .replace below would fail on None) -- confirm.
143 value = req.GET.get(name, None)
146 value=value.replace("@","%")
147 return value.split(",")
149 def format_result(self, format, result, query, dataSourceUrl):
# Serialize query results per the requested format.
# Returns a (content-type, body) tuple for the HTTP layer.
150 if (format == "json_dicts"):
151 result = {"query": query, "rows": result, "dataSourceUrl": dataSourceUrl}
152 return ("application/javascript", json.dumps(result))
# json_arrays: rows as positional arrays, columns in sorted-key order.
154 elif (format == "json_arrays"):
158 for key in sorted(row.keys()):
159 new_row.append(row[key])
160 new_result.append(new_row)
161 new_result = {"query": query, "rows": new_result}
162 return ("application/javascript", json.dumps(new_result))
# html_table: bare <TABLE> markup, columns again in sorted-key order.
# NOTE(review): cell values are not HTML-escaped.
164 elif (format == "html_table"):
168 for key in sorted(row.keys()):
169 new_row.append("<TD>%s</TD>" % str(row[key]))
170 new_rows.append("<TR>%s</TR>" % "".join(new_row))
172 new_result = "<TABLE>%s</TABLE>" % "\n".join(new_rows)
174 return ("text/html", new_result)
# json_hpcdash: dict keyed by site name with the fields the HPC dashboard
# map expects; missing values default to 0 / "".
176 elif (format == "json_hpcdash"):
179 new_row = {"lat": float(row.get("lat", 0)),
180 "long": float(row.get("long", 0)),
182 "numNodes": int(row.get("numNodes",0)),
183 "numHPCSlivers": int(row.get("sum_count_hostname", 0)),
184 "siteUrl": row.get("url", ""),
185 "hot": float(row.get("hotness", 0.0)),
186 "load": int(float(row.get("max_avg_cpu", 0)))}
187 new_rows[row["site"]] = new_row
188 return ("application/javascript", json.dumps(new_rows))
190 def only_largest(self, rows, fieldName):
191 """ Given a fieldName, only return the set of rows that had the
192 maximum value of that fieldName.
194 maxVal = max( [int(row[fieldName]) for row in rows] )
195 new_rows = [row for row in rows if int(row[fieldName])==maxVal]
# NOTE(review): max() raises ValueError when rows is empty; the elided
# tail (original line ~196) presumably returns new_rows -- confirm.
198 def merge_datamodel_sites(self, rows):
199 """ For a query that included "site" in its groupby, merge in the
200 opencloud site information.
203 sitename = row["site"]
# Look the site up in the Django data model; the surrounding try/except
# (elided, see the comment at original line 207) presumably skips rows
# whose site is not found -- confirm.
205 model_site = Site.objects.get(name=sitename)
207 # we didn't find it in the data model
# Annotate the row in place with location/url/node-count from the model.
210 row["lat"] = float(model_site.location.latitude)
211 row["long"] = float(model_site.location.longitude)
212 row["url"] = model_site.site_url
213 row["numNodes"] = model_site.nodes.count()
# "hotness": cpu load normalized over the BLUE_LOAD..RED_LOAD band
# (module-level constants, elided here); clamped below at 0.0 but not
# above at 1.0 -- TODO confirm that is intentional.
215 if "max_avg_cpu" in row:
216 cpu=float(row["max_avg_cpu"])/100.0
217 row["hotness"] = max(0.0, ((cpu*RED_LOAD) - BLUE_LOAD)/(RED_LOAD-BLUE_LOAD))
219 def process_request(self, req):
# HTTP entry point: read query parameters, compose the BigQuery query,
# run it, and return a (content-type, body) tuple.
222 tqx = req.GET.get("tqx", None)
# Optional filters.
224 slice = req.GET.get("slice", None)
225 site = req.GET.get("site", None)
226 node = req.GET.get("node", None)
227 service = req.GET.get("service", None)
229 format = req.GET.get("format", "json_dicts")
# Aggregation controls; list-valued params are comma-separated with "@"
# standing in for "%" (see get_list_from_req).
231 timeField = req.GET.get("timeBucket", "60")
232 avg = self.get_list_from_req(req, "avg")
233 sum = self.get_list_from_req(req, "sum")
234 count = self.get_list_from_req(req, "count")
235 computed = self.get_list_from_req(req, "computed")
236 groupBy = self.get_list_from_req(req, "groupBy", ["Time"])
237 orderBy = self.get_list_from_req(req, "orderBy", ["Time"])
# Post-processing options applied after the query runs.
239 maxRows = req.GET.get("maxRows", None)
240 onlyLargest = req.GET.get("onlyLargest", None)
241 mergeDataModelSites = req.GET.get("mergeDataModelSites", None)
243 q = self.compose_query(slice, site, node, service, timeField, avg, sum, count, computed, groupBy, orderBy)
# URL a Google Charts client can poll for this same query: "%"s re-escaped
# for the URL and the format forced to "charts".
246 dataSourceUrl = "http://" + req.META["SERVER_NAME"] + ":" + req.META["SERVER_PORT"] + req.META["PATH_INFO"] + "?" + req.META["QUERY_STRING"].replace("format=","origFormat=").replace("%","%25") + "&format=charts";
248 if (format=="dataSourceUrl"):
249 result = {"dataSourceUrl": dataSourceUrl}
# NOTE(review): this branch returns a dict as the body, unlike the other
# branches which return serialized strings -- confirm callers handle it.
250 return ("application/javascript", result)
# raw: BigQuery's own response structure, untransformed.
252 elif (format=="raw"):
253 result = self.run_query_raw(q)
254 result["dataSourceUrl"] = dataSourceUrl
256 result = json.dumps(result);
258 return ("application/javascript", result)
# charts: convert raw BigQuery rows to a Google Visualization DataTable
# wrapped in google.visualization.Query.setResponse(...).
260 elif (format=="charts"):
261 bq_result = self.run_query_raw(q)
263 # cloudscrutiny code is probably better!
265 table["cols"] = self.schema_to_cols(bq_result["schema"])
267 for row in bq_result["rows"]:
269 for (colnum,col) in enumerate(row["f"]):
# Timestamp columns become JS Date constructor expressions; the quotes
# json.dumps adds around them are stripped by the regex below.
271 dt = datetime.datetime.fromtimestamp(float(col["v"]))
272 rowcols.append({"v": 'new Date("%s")' % dt.isoformat()})
275 rowcols.append({"v": float(col["v"])})
277 rowcols.append({"v": col["v"]})
278 rows.append({"c": rowcols})
# Echo the request id from tqx back, per the GViz wire protocol.
# NOTE(review): str.strip("reqId:") strips a character SET, not a prefix
# -- it can also eat leading/trailing r/e/q/I/d/: characters from the id.
282 reqId = tqx.strip("reqId:")
286 result = {"status": "okColumnChart", "reqId": reqId, "table": table, "version": "0.6"}
288 result = "google.visualization.Query.setResponse(" + json.dumps(result) + ");"
# Unwrap the JSON-quoted new Date("...") strings into bare JS expressions.
290 def unquote_it(x): return x.group()[1:-1].replace('\\"', '"')
292 p = re.compile(r'"new Date\(\\"[^"]*\\"\)"')
293 result=p.sub(unquote_it, result)
295 return ("application/javascript", result)
# Default path: run the query, apply optional post-filters, then format.
298 result = self.run_query(q)
301 result = self.only_largest(result, onlyLargest)
303 if mergeDataModelSites:
304 self.merge_datamodel_sites(result)
# Keep only the last maxRows rows (guarding "if" elided).
307 result = result[-int(maxRows):]
309 return self.format_result(format, result, q, dataSourceUrl)
312 def DoPlanetStackAnalytics(request):
# Django view wrapper: delegate to PlanetStackAnalytics.process_request.
# The elided original lines 315-317 presumably wrap the (content-type,
# body) result in an HttpResponse -- confirm.
313 bq = PlanetStackAnalytics()
314 result = bq.process_request(request)
# Ad-hoc smoke tests exercising compose_query/run_query with sample
# parameters.  The enclosing def (presumably main(), original line ~318)
# is elided from this view.
319 bq = PlanetStackAnalytics()
321 q=bq.compose_query(avg=["%cpu","%bandwidth"], count=["%hostname"], slice="HyperCache")
323 bq.dump_table(bq.run_query(q))
325 q=bq.compose_query(computed=["%bytes_sent/%elapsed"])
328 bq.dump_table(bq.run_query(q))
330 q=bq.compose_query(timeBucket=60*60, avg=["%cpu"], count=["%hostname"], computed=["%bytes_sent/%elapsed"])
333 bq.dump_table(bq.run_query(q))
335 q=bq.compose_query(avg=["%cpu"], count=["%hostname"], computed=["%bytes_sent/%elapsed"], service="HPC Service", groupBy=["Time","%site"])
338 result=bq.run_query(q)
339 result = bq.only_largest(result, "Time")
340 bq.merge_datamodel_sites(result)
341 #bq.dump_table(result)
# NOTE(review): format_result takes four arguments (including
# dataSourceUrl); this three-argument call would raise TypeError.
# Python 2 print statement -- this module is not Python 3 compatible.
342 print bq.format_result("json_hpcdash", result, q)
344 if __name__ == "__main__":