import json

from bigquery_analytics import BigQueryAnalytics
class PlanetStackAnalytics(BigQueryAnalytics):
    """Compose and serve BigQuery aggregate queries over PlanetStack events.

    Extends BigQueryAnalytics with a query builder tuned to the
    "demoevents" schema and a small HTTP GET front-end that renders the
    result as JSON or an HTML table.
    """

    def __init__(self, tableName="demoevents"):
        BigQueryAnalytics.__init__(self, tableName)

    def compose_query(self, slice=None, site=None, node=None, timeField="MinuteTime",
                      avg=None, sum=None, count=None, computed=None,
                      groupBy=None, orderBy=None, tableName="demoevents"):
        """Return a BigQuery SQL string aggregating the last hour of events.

        Parameters:
            slice, site, node: optional equality filters (WHERE clause).
            timeField: one of "MinuteTime", "HourTime", "DayTime" - the
                time-bucketing column emitted by the query.
            avg, sum, count: lists of event field names to aggregate.
            computed: list of "a/b" or "a*b" expressions; these force a
                two-level query (per-host inner aggregation, then an
                outer re-aggregation across hosts).
            groupBy, orderBy: column lists; default to [timeField's
                conventional "MinuteTime"] when omitted.
            tableName: BigQuery table within the "vicci" dataset.

        NOTE: `slice`, `sum` and `count` shadow builtins but are part of
        the public keyword interface, so the names are kept.

        SECURITY NOTE(review): slice/site/node are interpolated directly
        into the SQL string; callers pass request parameters here, so the
        values should be validated/escaped upstream.
        """
        # Normalize formerly-mutable default arguments.
        avg = [] if avg is None else avg
        sum = [] if sum is None else sum
        count = [] if count is None else count
        computed = [] if computed is None else computed
        groupBy = ["MinuteTime"] if groupBy is None else groupBy
        orderBy = ["MinuteTime"] if orderBy is None else orderBy

        # "@-3600000--1" is a BigQuery table decorator selecting the
        # trailing hour (last 3,600,000 ms) of the table.
        tablePart = "%s.%s@-3600000--1" % ("vicci", tableName)

        fields = []
        fieldNames = []

        # Bucket the raw `time` column into the requested granularity.
        if timeField == "MinuteTime":
            fields.append("INTEGER(TIMESTAMP_TO_SEC(time)/60)*60 as MinuteTime")
        elif timeField == "HourTime":
            fields.append("INTEGER(TIMESTAMP_TO_SEC(time)/60/60)*60*60 as HourTime")
        elif timeField == "DayTime":
            fields.append("INTEGER(TIMESTAMP_TO_SEC(time)/60/60/24)*60*60*24 as DayTime")

        # The "%" prefix on schema field names is stripped when forming
        # output column aliases.
        for fieldName in avg:
            fields.append("AVG(%s) as avg_%s" % (fieldName, fieldName.replace("%", "")))
            fieldNames.append("avg_%s" % fieldName.replace("%", ""))

        for fieldName in sum:
            fields.append("SUM(%s) as sum_%s" % (fieldName, fieldName.replace("%", "")))
            fieldNames.append("sum_%s" % fieldName.replace("%", ""))

        for fieldName in count:
            fields.append("COUNT(distinct %s) as count_%s" % (fieldName, fieldName.replace("%", "")))
            fieldNames.append("count_%s" % fieldName.replace("%", ""))

        for fieldName in computed:
            operator = "/"
            parts = fieldName.split("/")
            computedFieldName = "computed_" + parts[0].replace("%", "") + "_div_" + parts[1].replace("%", "")
            if len(parts) == 1:
                # Not a division - treat it as a product instead.
                operator = "*"
                # BUG FIX: split the current fieldName string, not the
                # `computed` list (lists have no .split()).
                parts = fieldName.split("*")
                computedFieldName = "computed_" + parts[0].replace("%", "") + "_mult_" + parts[1].replace("%", "")
            fields.append("SUM(%s)%sSUM(%s) as %s" % (parts[0], operator, parts[1], computedFieldName))
            fieldNames.append(computedFieldName)

        fields = ", ".join(fields)

        where = []
        if slice:
            where.append("%%slice='%s'" % slice)
        if site:
            where.append("%%site='%s'" % site)
        if node:
            where.append("%%hostname='%s'" % node)

        if where:
            where = " WHERE " + " AND ".join(where)
        else:
            where = ""

        if groupBy:
            groupBy = " GROUP BY " + ",".join(groupBy)
        else:
            groupBy = ""

        if orderBy:
            orderBy = " ORDER BY " + ",".join(orderBy)
        else:
            orderBy = ""

        if computed:
            # Computed fields need a two-level query: the inner query
            # aggregates per host, the outer re-aggregates across hosts.
            subQuery = "SELECT %%hostname, %s FROM [%s]" % (fields, tablePart)
            if where:
                subQuery = subQuery + where
            subQuery = subQuery + " GROUP BY %s,%%hostname" % timeField

            sumFields = []
            for fieldName in fieldNames:
                # avg_* columns are re-averaged; everything else is summed.
                if fieldName.startswith("avg"):
                    sumFields.append("AVG(%s) as avg_%s" % (fieldName, fieldName))
                else:
                    sumFields.append("SUM(%s) as sum_%s" % (fieldName, fieldName))
            sumFields = ",".join(sumFields)

            query = "SELECT %s, %s FROM (%s)" % (timeField, sumFields, subQuery)
            if groupBy:
                query = query + groupBy
            if orderBy:
                query = query + orderBy
        else:
            # Single-level query.
            query = "SELECT %s FROM [%s]" % (fields, tablePart)
            if where:
                query = query + " " + where
            if groupBy:
                query = query + groupBy
            if orderBy:
                query = query + orderBy

        return query

    def get_list_from_req(self, req, name, default=None):
        """Return the request parameter `name` split on commas.

        Falls back to `default` (an empty list when omitted) if the
        parameter is absent or empty.
        """
        if default is None:
            default = []
        value = req.GET.get(name, None)
        if not value:
            return default
        return value.split(",")

    def format_result(self, format, result, query):
        """Render query rows in the requested output format.

        Parameters:
            format: "json_dicts", "json_arrays", or "html_table".
            result: list of row dicts from run_query().
            query: the SQL string, echoed back in JSON responses.

        Returns:
            (content_type, body) tuple; None for an unknown format
            (preserved behavior - callers are expected to pass a valid
            format).
        """
        if format == "json_dicts":
            result = {"query": query, "rows": result}
            return ("application/javascript", json.dumps(result))

        elif format == "json_arrays":
            # Flatten each row dict into a list, keys in sorted order.
            new_result = []
            for row in result:
                new_row = []
                for key in sorted(row.keys()):
                    new_row.append(row[key])
                new_result.append(new_row)
            new_result = {"query": query, "rows": new_result}
            return ("application/javascript", json.dumps(new_result))

        elif format == "html_table":
            # NOTE(review): cell values are not HTML-escaped; fine for
            # trusted numeric data, unsafe for arbitrary strings.
            new_rows = []
            for row in result:
                new_row = []
                for key in sorted(row.keys()):
                    new_row.append("<TD>%s</TD>" % str(row[key]))
                new_rows.append("<TR>%s</TR>" % "".join(new_row))
            new_result = "<TABLE>%s</TABLE>" % "\n".join(new_rows)
            return ("text/html", new_result)

    def process_request(self, req):
        """Handle an HTTP GET request end to end.

        Builds a query from the request parameters, runs it, and returns
        a (content_type, body) tuple. If a Google-Charts-style `reqId`
        parameter is present, the raw BigQuery response is returned
        instead of a formatted one.
        """
        tqx = req.GET.get("reqId", None)

        slice = req.GET.get("slice", None)
        site = req.GET.get("site", None)
        node = req.GET.get("node", None)

        format = req.GET.get("format", "json_dicts")

        timeField = req.GET.get("timeField", "MinuteTime")
        avg = self.get_list_from_req(req, "avg")
        sum = self.get_list_from_req(req, "sum")
        count = self.get_list_from_req(req, "count")
        computed = self.get_list_from_req(req, "computed")
        groupBy = self.get_list_from_req(req, "groupBy", ["MinuteTime"])
        orderBy = self.get_list_from_req(req, "orderBy", ["MinuteTime"])

        maxRows = req.GET.get("maxRows", None)

        q = self.compose_query(slice, site, node, timeField, avg, sum, count,
                               computed, groupBy, orderBy)

        if tqx:
            # Charts data-source request: hand back the raw result.
            result = self.run_query_raw(q)
            result["reqId"] = 0  # XXX FIXME: should echo the real reqId
            return ("application/javascript", json.dumps(result))

        result = self.run_query(q)
        if maxRows:
            # Keep only the trailing maxRows rows.
            result = result[-int(maxRows):]

        return self.format_result(format, result, q)
def DoPlanetStackAnalytics(request):
    """View entry point: run the analytics request and return its result.

    `request` is an HTTP request object exposing GET parameters; the
    return value is the (content_type, body) tuple from process_request.
    """
    bq = PlanetStackAnalytics()
    result = bq.process_request(request)
    # BUG FIX: the computed result was dropped; return it to the caller.
    return result
def main():
    """Smoke test: compose and run a few representative sample queries."""
    bq = PlanetStackAnalytics()

    # Simple single-level aggregation with a slice filter.
    q = bq.compose_query(avg=["%cpu"], count=["%hostname"], slice="HyperCache")
    bq.dump_table(bq.run_query(q))

    # Computed (ratio) field forces the two-level query form.
    q = bq.compose_query(computed=["%bytes_sent/%elapsed"])
    bq.dump_table(bq.run_query(q))
    # print bq.run_query_raw(q)

    # Hourly buckets combining plain and computed aggregates.
    q = bq.compose_query(timeField="HourTime", avg=["%cpu"], count=["%hostname"],
                         computed=["%bytes_sent/%elapsed"],
                         groupBy=["HourTime"], orderBy=["HourTime"])
    bq.dump_table(bq.run_query(q))


if __name__ == "__main__":
    main()