from __future__ import print_function

import sys
import os.path
import pickle
import time
import socket
import traceback

# urlparse moved to urllib.parse in Python 3; support both interpreters
try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

try:
    import pygraphviz
except ImportError:
    print('Warning, could not import pygraphviz, test mode only')

from optparse import OptionParser

from sfa.client.return_value import ReturnValue
from sfa.client.sfi import Sfi
from sfa.util.sfalogging import logger, DEBUG
from sfa.client.sfaserverproxy import SfaServerProxy


def url_hostname_port(url):
    """Normalize *url* and split it into a (url, hostname, port) tuple.

    A missing scheme defaults to http://; the port defaults to '80'
    ('443' when the scheme is https) if the netloc carries no explicit
    port.  All three elements are returned as strings.
    """
    if url.find("://") < 0:
        url = "http://" + url
    parsed_url = urlparse(url)
    # element 0 (scheme) is the protocol
    default_port = '80'
    if parsed_url[0] == 'https':
        default_port = '443'
    # element 1 (netloc) is hostname[:port]
    parts = parsed_url[1].split(":")
    if len(parts) == 1:
        # just a hostname, no explicit port
        return (url, parts[0], default_port)
    else:
        return (url, parts[0], parts[1])


# a very simple cache mechanism so that successive runs (see make)
# will go *much* faster
# assuming everything is sequential, as simple as it gets
# { url -> (timestamp, version)}
class VersionCache:
    """Pickle-backed, time-expiring cache of GetVersion results keyed by url."""

    # default expiration period is 1h
    def __init__(self, filename=None, expires=60 * 60):
        # default is to store cache in the same dir as argv[0]
        if filename is None:
            filename = os.path.join(os.path.dirname(
                sys.argv[0]), "sfascan-version-cache.pickle")
        self.filename = filename
        self.expires = expires
        self.url2version = {}
        self.load()

    def load(self):
        """Load the cache from disk; start from scratch on any failure."""
        try:
            # pickle data is binary: open with 'rb', not 'r'
            with open(self.filename, 'rb') as infile:
                self.url2version = pickle.load(infile)
        except Exception:
            logger.debug("Cannot load version cache, restarting from scratch")
            self.url2version = {}
        logger.debug("loaded version cache with %d entries %s" %
                     (len(self.url2version), self.url2version.keys()))

    def save(self):
        """Persist the cache to disk; best effort, failures are only logged."""
        try:
            # pickle data is binary: open with 'wb', not 'w'
            with open(self.filename, 'wb') as outfile:
                pickle.dump(self.url2version, outfile)
        except Exception:
            logger.log_exc("Cannot save version cache into %s" % self.filename)

    def clean(self):
        """Remove the on-disk cache file altogether."""
        try:
            os.unlink(self.filename)
            # NOTE: os.unlink returns None, so the previous code's
            # "retcod=%d" formatting raised TypeError and wrongly logged
            # 'Could not unlink' even when the unlink had succeeded
            logger.info("Cleaned up version cache %s" % self.filename)
        except Exception:
            logger.info("Could not unlink version cache %s" % self.filename)

    def show(self):
        """Print every cache entry, oldest first, flagging expired ones."""
        entries = len(self.url2version)
        print("version cache from file %s has %d entries" %
              (self.filename, entries))
        # sort by timestamp, oldest first; a key function replaces the
        # Python-2-only cmp-style sort and works on both interpreters
        key_values = sorted(self.url2version.items(),
                            key=lambda kv: kv[1][0])
        for (url, (timestamp, version)) in key_values:
            how_old = time.time() - timestamp
            if how_old <= self.expires:
                print(url, "-- %d seconds ago" % how_old)
            else:
                print("OUTDATED", url, "(%d seconds ago, expires=%d)" %
                      (how_old, self.expires))

    # turns out we might have trailing slashes or not
    def normalize(self, url):
        return url.strip("/")

    def set(self, url, version):
        """Store *version* for *url*, timestamped now."""
        url = self.normalize(url)
        self.url2version[url] = (time.time(), version)

    def get(self, url):
        """Return the cached version for *url*, or None if absent or expired."""
        url = self.normalize(url)
        try:
            (timestamp, version) = self.url2version[url]
        except KeyError:
            return None
        how_old = time.time() - timestamp
        if how_old <= self.expires:
            return version
        return None


###
# non-existing hostnames happen...
# for better perfs we cache the result of gethostbyname too
class Interface:
    """One SFA service endpoint identified by its URL.

    The hostname is resolved once (and the result remembered through
    VersionCache under an "interface:<url>" key); the service itself is
    probed lazily with GetVersion.
    """

    def __init__(self, url, mentioned_in=None, verbose=False):
        self._url = url
        self.verbose = verbose
        cache = VersionCache()
        key = "interface:%s" % url
        try:
            (self._url, self.hostname, self.port) = url_hostname_port(url)
            # look for the resolved ip in the cache first
            cached = cache.get(key)
            if cached:
                (self.hostname, self.ip, self.port) = cached
            else:
                self.ip = socket.gethostbyname(self.hostname)
        except Exception:
            msg = "can't resolve hostname %s\n\tfound in url %s" % (
                self.hostname, self._url)
            if mentioned_in:
                msg += "\n\t(mentioned at %s)" % mentioned_in
            logger.warning(msg)
            self.hostname = "unknown"
            self.ip = '0.0.0.0'
            self.port = "???"
        cache.set(key, (self.hostname, self.ip, self.port,))
        cache.save()
        self.probed = False
        # mark unknown interfaces as probed to avoid unnecessary attempts
        if self.hostname == 'unknown':
            # don't really try it
            self.probed = True
        self._version = {}

    def url(self):
        return self._url

    # this is used as a key for creating graph nodes and to avoid duplicates
    def uid(self):
        return "%s:%s" % (self.ip, self.port)

    # connect to server and trigger GetVersion
    def get_version(self):
        """Return the server's GetVersion result, using caches when possible.

        Checks (in order) the in-process memo, then the on-disk
        VersionCache, and only then issues a real GetVersion call.
        Returns {} on failure; the (possibly empty) result is written
        back to the cache either way.
        """
        # if we already know the answer:
        if self.probed:
            return self._version
        # otherwise let's look in the cache file
        logger.debug("searching in version cache %s" % self.url())
        cached_version = VersionCache().get(self.url())
        if cached_version is not None:
            logger.info("Retrieved version info from cache %s" % self.url())
            return cached_version

        # otherwise let's do the hard work
        # dummy to meet Sfi's expectations for its 'options' field
        class DummyOptions:
            pass
        options = DummyOptions()
        options.verbose = self.verbose
        options.timeout = 10
        try:
            client = Sfi(options)
            client.read_config()
            client.bootstrap()
            key_file = client.private_key
            cert_file = client.my_gid
            logger.debug("using key %s & cert %s" % (key_file, cert_file))
            url = self.url()
            logger.info('issuing GetVersion at %s' % url)
            # setting timeout here seems to get the call to fail - even
            # though the response time is fast
            # server = SfaServerProxy(url, key_file, cert_file,
            #                         verbose=self.verbose,
            #                         timeout=options.timeout)
            server = SfaServerProxy(
                url, key_file, cert_file, verbose=self.verbose)
            self._version = ReturnValue.get_value(server.GetVersion())
        except Exception:
            logger.log_exc("failed to get version")
            self._version = {}
        # so that next run from this process will find out
        self.probed = True
        # store in version cache so next processes will remember for an hour
        cache = VersionCache()
        cache.set(self.url(), self._version)
        cache.save()
        logger.debug("Saved version for url=%s in version cache" % self.url())
        # that's our result
        return self._version
multi_lines_label(*lines): result = '<
' + \ ' |
'.join(lines) + \ ' |