- "numNodes": int(row.get("numNodes",0)),
- "activeHPCSlivers": int(row.get("count_hostname", 0)),
- "numHPCSlivers": int(row.get("allocated_slivers", 0)),
- "siteUrl": str(row.get("url", "")),
- "hot": float(row.get("hotness", 0.0)),
- "bandwidth": row.get("sum_computed_bytes_sent_div_elapsed",0),
- "load": int(float(row.get("max_avg_cpu", row.get("max_cpu",0))))}
- new_rows[str(row["site"])] = new_row
+ "numNodes": int(site.nodes.count()),
+ "activeHPCSlivers": int(stats_row.get("count_hostname", 0)), # measured number of slivers, from bigquery statistics
+ "numHPCSlivers": allocated_slivers, # allocated number of slivers, from data model
+ "siteUrl": str(site.site_url),
+ "bandwidth": stats_row.get("sum_computed_bytes_sent_div_elapsed",0),
+ "load": max_cpu,
+ "hot": float(hotness)}
+ new_rows[str(site.name)] = new_row
+
+    # Get rid of sites with 0 slivers that overlap other sites with >0 slivers.
+    # Iterate over a snapshot of the items so deleting from new_rows is safe:
+    # mutating a dict while iterating its live view raises RuntimeError in py3.
+    for (k, v) in list(new_rows.items()):
+        if v["numHPCSlivers"] != 0:
+            continue
+        for v2 in new_rows.values():
+            # Only a populated site (>0 slivers) should displace an empty one;
+            # the old ">=0" test also matched other empty sites, letting two
+            # nearby empty sites delete each other.
+            if (v != v2) and (v2["numHPCSlivers"] > 0):
+                if haversine(v["lat"], v["long"], v2["lat"], v2["long"]) < 100:
+                    del new_rows[k]
+                    break  # k is gone; no need to scan remaining neighbors