# Copyright (C) 2014 INRIA
#
# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
+from __future__ import print_function
+
import collections
import functools
import networkx
try:
size = int((cols[6]).replace('(',''))
except:
- print "interest_expiry without face id!", line
+ print("interest_expiry without face id!", line)
continue
# If no external IP address was identified for this face
# asume it is a local face
- hostip = "localhost"
+ peer = "localhost"
if face_id in faces:
- hostip, port = faces[face_id]
+ peer, port = faces[face_id]
- data.append((content_name, timestamp, message_type, hostip, face_id,
+ data.append((content_name, timestamp, message_type, peer, face_id,
size, nonce, line))
f.close()
return content_history
def annotate_cn_node(graph, nid, ips2nid, data, content_history):
- for (content_name, timestamp, message_type, hostip, face_id,
+ for (content_name, timestamp, message_type, peer, face_id,
size, nonce, line) in data:
- # Ignore local messages for the time being.
- # They could later be used to calculate the processing times
- # of messages.
- if peer == "localhost":
- return
-
# Ignore control messages for the time being
if is_control(content_name):
- return
+ continue
if message_type == "interest_from" and \
-                hostip == "localhost":
+                peer == "localhost":
graph.node[nid]["ccn_producer"] = True
+ # Ignore local messages for the time being.
+ # They could later be used to calculate the processing times
+ # of messages.
+ if peer == "localhost":
+ continue
+
# remove digest
if message_type in ["content_from", "content_to"]:
content_name = "/".join(content_name.split("/")[:-1])
content_history[content_name] = list()
peernid = ips2nid[peer]
- add_edge(graph, nid, peernid)
+ graph.add_edge(nid, peernid)
content_history[content_name].append((timestamp, message_type, nid,
peernid, nonce, size, line))
""" Adds CCN content history for each node in the topology graph.
"""
+
# Make a copy of the graph to ensure integrity
graph = graph.copy()
- ips2nids = dict()
+ ips2nid = dict()
for nid in graph.nodes():
ips = graph.node[nid]["ips"]
for ip in ips:
- ips2nids[ip] = nid
+ ips2nid[ip] = nid
+
+ found_files = False
# Now walk through the ccnd logs...
for dirpath, dnames, fnames in os.walk(logs_dir):
# Each dirpath correspond to a different node
nid = os.path.basename(dirpath)
+
+ # Cast to numeric nid if necessary
+ if int(nid) in graph.nodes():
+ nid = int(nid)
content_history = dict()
for fname in fnames:
if fname.endswith(".log"):
+ found_files = True
filename = os.path.join(dirpath, fname)
data = parse_file(filename)
- annotate_cn_node(graph, nid, ips2nids, data, content_history)
+ annotate_cn_node(graph, nid, ips2nid, data, content_history)
# Avoid storing everything in memory, instead dump to a file
# and reference the file
fname = dump_content_history(content_history)
graph.node[nid]["history"] = fname
+ if not found_files:
+ msg = "No CCND output files were found to parse at %s " % logs_dir
+        raise RuntimeError(msg)
+
if parse_ping_logs:
ping_parser.annotate_cn_graph(logs_dir, graph)
content_names[content_name]["rtt"] = rtt
content_names[content_name]["lapse"] = (interest_timestamp, content_timestamp)
- return (content_names,
+ return (graph,
+ content_names,
interest_expiry_count,
interest_dupnonce_count,
interest_count,
content_count)
-def process_content_history_logs(logs_dir, graph):
+def process_content_history_logs(logs_dir, graph, parse_ping_logs = False):
""" Parse CCN logs and aggregate content history information in graph.
Returns annotated graph and message countn and content names history.
## Process logs and analyse data
try:
graph = annotate_cn_graph(logs_dir, graph,
- parse_ping_logs = True)
+ parse_ping_logs = parse_ping_logs)
except:
- print "Skipping: Error parsing ccnd logs", logs_dir
+ print("Skipping: Error parsing ccnd logs", logs_dir)
raise
- source = consumers(graph)[0]
- target = producers(graph)[0]
+ source = ccn_consumers(graph)[0]
+ target = ccn_producers(graph)[0]
# Process the data from the ccnd logs, but do not re compute
# the link delay.
interest_count,
content_count) = process_content_history(graph)
except:
- print "Skipping: Error processing ccn data", logs_dir
+ print("Skipping: Error processing ccn data", logs_dir)
raise
return (graph,