From: Kevin Webb
Date: Fri, 29 Jan 2010 15:18:15 +0000 (+0000)
Subject: Added my ugly FPS graphing script.
X-Git-Tag: DistributedRateLimiting-0.1-0~5
X-Git-Url: http://git.onelab.eu/?p=distributedratelimiting.git;a=commitdiff_plain;h=8c47edadda66e99456909f5be103f6e8631f9087

Added my ugly FPS graphing script.
---

diff --git a/DRL-graphResults-FPS.py b/DRL-graphResults-FPS.py
new file mode 100755
index 0000000..befbe84
--- /dev/null
+++ b/DRL-graphResults-FPS.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python
+
+import sys
+from pylab import figure, plot, show, subplots_adjust, title
+
+def get_closet_index(time, timelist, current_index):
+    #print current_index
+    best_difference = float('inf')
+    best_index = current_index
+    for i in xrange(best_index, len(timelist)):
+        difference = abs(time - timelist[i])
+        if difference < best_difference:
+            best_difference = difference
+            best_index = i
+        if difference > best_difference:
+            return best_index
+
+    return best_index
+
+def sum_best_time(current_indicies, time, timelists, datalists, type):
+    #For the given time, search each of the time lists and get the best index.
+    #By best, we mean the index with time closest to the specified time.
+    #Use that index in the corresponding datalist and add the value to the sum.
+
+    sum = type(0)
+
+    for i in xrange(len(timelists)):
+        current_indicies[i] = get_closet_index(time, timelists[i], current_indicies[i])
+        sum = sum + datalists[i][current_indicies[i]]
+
+    return sum
+
+def get_sum_with_times(timelists, datalists, type):
+    current_indicies = [0 for l in datalists]
+    target_length = min([len(l) for l in datalists])
+    result_timelist = []
+    result_datalist = []
+    for t in xrange(len(timelists)):
+        if len(timelists[t]) == target_length:
+            result_timelist = timelists[t]
+
+    for i in xrange(len(result_timelist)):
+        result_datalist.append(sum_best_time(current_indicies, result_timelist[i], timelists, datalists, type))
+
+    return (result_timelist, result_datalist)
+
+def get_smallest_list(lists):
+    minlength = min([len(l) for l in lists])
+    for l in lists:
+        if len(l) == minlength:
+            return l
+
+def add_plot(timelists, datalists, index, count, title, figure, type=float, add_sum=False, ymax=None, hline=None):
+    subplot = figure.add_subplot(count, 1, index)
+    subplot.set_title(title)
+
+    for i in xrange(len(datalists)):
+        subplot.plot(timelists[i], datalists[i])
+
+    if add_sum and len(datalists) > 1:
+        #subplot.plot(get_smallest_list(timelists), get_sum(datalists, type))
+        sum_tuple = get_sum_with_times(timelists, datalists, type)
+        subplot.plot(sum_tuple[0], sum_tuple[1])
+
+    if hline != None:
+        subplot.axhline(y = hline)
+    subplot.set_ylim(ymax=ymax)
+
+    return subplot
+
+# File Objects
+files = []
+
+# Data points
+times = []
+time_begin = float('inf')
+local_rates = []
+ideal_weights = []
+local_weights = []
+total_weights = []
+flows = []
+flows_5k = []
+flows_10k = []
+flows_20k = []
+flows_50k = []
+flows_avg = []
+max_flow_rates = []
+max_flow_hashs = []
+local_limits = []
+total_over_max_weights = []
+
+# On/off points
+on_times = []
+off_times = []
+
+# Open all the files.
+for i in xrange(1, len(sys.argv)):
+    files.append(open(sys.argv[i], "r"))
+
+for file in files:
+    time = []
+    local_rate = []
+    ideal_weight = []
+    local_weight = []
+    total_weight = []
+    flow = []
+    flow_5k = []
+    flow_10k = []
+    flow_20k = []
+    flow_50k = []
+    flow_avg = []
+    max_flow_rate = []
+    max_flow_hash = []
+    local_limit = []
+    total_over_max_weight = []
+
+    last_time = 0.0
+
+    for line in file:
+        if line == "--Switching enforcement on.--\n":
+            on_times.append(last_time)
+        if line == "--Switching enforcement off.--\n":
+            off_times.append(last_time)
+        splitline = line.split(" ")
+        if len(splitline) == 12:
+            # It's a data line.
+            time.append(float(splitline[0]))
+            local_rate.append(int(splitline[1]))
+            ideal_weight.append(float(splitline[2]))
+            local_weight.append(float(splitline[3]))
+            total_weight.append(float(splitline[4]))
+            flow.append(int(splitline[5]))
+            flow_5k.append(int(splitline[6]))
+            flow_10k.append(int(splitline[7]))
+            flow_20k.append(int(splitline[8]))
+            flow_50k.append(int(splitline[9]))
+            flow_avg.append(int(splitline[10]))
+            local_limit.append(int(splitline[11]))
+            last_time = float(splitline[0])
+        if len(splitline) == 14 or len(splitline) == 15:
+            # It's a data line.
+            time.append(float(splitline[0]))
+            local_rate.append(int(splitline[1]))
+            ideal_weight.append(float(splitline[2]))
+            local_weight.append(float(splitline[3]))
+            total_weight.append(float(splitline[4]))
+            flow.append(int(splitline[5]))
+            flow_5k.append(int(splitline[6]))
+            flow_10k.append(int(splitline[7]))
+            flow_20k.append(int(splitline[8]))
+            flow_50k.append(int(splitline[9]))
+            flow_avg.append(int(splitline[10]))
+            max_flow_rate.append(int(splitline[11]))
+            max_flow_hash.append(int(splitline[12]))
+            local_limit.append(int(splitline[13]))
+            last_time = float(splitline[0])
+        if len(splitline) == 16:
+            try:
+                # It's a data line.
+                time.append(float(splitline[0]))
+                local_rate.append(int(splitline[1]))
+                ideal_weight.append(float(splitline[2]))
+                local_weight.append(float(splitline[3]))
+                total_weight.append(float(splitline[4]))
+                flow.append(int(splitline[5]))
+                flow_5k.append(int(splitline[6]))
+                flow_10k.append(int(splitline[7]))
+                flow_20k.append(int(splitline[8]))
+                flow_50k.append(int(splitline[9]))
+                flow_avg.append(int(splitline[10]))
+                max_flow_rate.append(int(splitline[11]))
+                max_flow_hash.append(int(splitline[12]))
+                local_limit.append(int(splitline[13]))
+                total_over_max_weight.append(float(splitline[15]))
+                last_time = float(splitline[0])
+            except ValueError:
+                print "Warning: Caught ValueError on line: " + line
+
+    file.close()
+
+    times.append(time)
+    local_rates.append(local_rate)
+    ideal_weights.append(ideal_weight)
+    local_weights.append(local_weight)
+    total_weights.append(total_weight)
+    flows.append(flow)
+    flows_5k.append(flow_5k)
+    flows_10k.append(flow_10k)
+    flows_20k.append(flow_20k)
+    flows_50k.append(flow_50k)
+    flows_avg.append(flow_avg)
+    max_flow_rates.append(max_flow_rate)
+    max_flow_hashs.append(max_flow_hash)
+    local_limits.append(local_limit)
+    total_over_max_weights.append(total_over_max_weight)
+
+for t in xrange(len(times)):
+    mintime = min(times[t])
+    if mintime < time_begin:
+        time_begin = mintime
+
+for t in xrange(len(times)):
+    for i in xrange(len(times[t])):
+        times[t][i] -= time_begin
+
+print time_begin
+
+fig = figure()
+subplots = []
+subplots_adjust(left = 0.12, right = 0.94, bottom = 0.05, top = 0.94)
+
+graph_count = 5
+
+subplots.append(add_plot(times, local_rates, 1, graph_count, "Local Rate", fig, int, True))
+
+subplots.append(add_plot(times, local_limits, 2, graph_count, "Local Limit", fig, int, True))
+
+subplots.append(add_plot(times, local_weights, 3, graph_count, "Weight", fig, float, False))
+
+#subplots.append(add_plot(times, ideal_weights, 4, graph_count, "Ideal Weight", fig, float, False))
+
+#subplots.append(add_plot(times, total_over_max_weights, 5, graph_count, "Ideal Weight", fig, float, False))
+
+subplots.append(add_plot(times, flows, 4, graph_count, "# of flows", fig, int, False))
+
+#add_plot(times, flows_5k, 6, graph_count, "# of flows > 5KB/s", fig, int, False)
+#add_plot(times, flows_20k, 7, graph_count, "# of flows > 20KB/s", fig, int, False)
+#add_plot(times, flows_50k, 6, graph_count, "# of flows > 50KB/s", fig, int, False)
+
+subplots.append(add_plot(times, flows_avg, 5, graph_count, "Average flow rate", fig, int, False))
+
+#subplots.append(add_plot(times, max_flow_rates, 6, graph_count, "Max flow rate", fig, int, False, ymax=160000))
+#subplots.append(add_plot(times, max_flow_hashs, 7, graph_count, "Max flow hash", fig, int, False))
+
+xlimits = subplots[0].get_xlim()
+
+for sub in subplots:
+    for on in on_times:
+        if on < time_begin:
+            on = time_begin
+        sub.axvline(x = (on - time_begin), color = 'green')
+
+    for off in off_times:
+        if off < time_begin:
+            off = time_begin
+        sub.axvline(x = (off - time_begin), color = 'red')
+
+    sub.set_xlim(xmin = xlimits[0], xmax = xlimits[1])
+
+show()
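
A minimal invocation sketch, assuming each positional argument is one limiter's FPS log (the filenames below are hypothetical):

    ./DRL-graphResults-FPS.py node1-fps.log node2-fps.log node3-fps.log

The script treats lines with 12, 14/15, or 16 space-separated fields (starting with a timestamp) as data samples, and draws the "--Switching enforcement on.--" / "--Switching enforcement off.--" markers as green and red vertical lines on every subplot before showing a single pylab window.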